/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "insn-attr.h"
#include "addresses.h"
#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "reload.h"
#include "tree-pass.h"
#include "function-abi.h"

#ifndef STACK_POP_CODE
#if STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx_insn *split_insn (rtx_insn *);

struct target_recog default_target_recog;
#if SWITCHABLE_TARGET
struct target_recog *this_target_recog = &default_target_recog;
#endif

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
				      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}


/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      rtx_insn *insn = make_insn_raw (x);
      extract_insn (insn);
      constrain_operands (1, get_enabled_alternatives (insn));
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (! asm_operand_ok (operands[i], c, constraints))
	return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

struct change_t
{
  rtx object;
  int old_code;
  bool unshare;
  rtx *loc;
  rtx old;
};

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group,
		   bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 that defaults UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 that defaults UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
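
/* A minimal usage sketch of the change-group protocol above (INSN, X and Y
   are hypothetical values supplied by a caller; this is illustration, not
   part of this file's interface):

       rtx pat = PATTERN (insn);
       validate_change (insn, &XEXP (SET_SRC (pat), 0), x, 1);
       validate_change (insn, &XEXP (SET_SRC (pat), 1), y, 1);
       if (!apply_change_group ())
	 ...

   Because IN_GROUP is nonzero in both calls, either both replacements
   survive re-recognition of INSN or both are rolled back, leaving INSN
   untouched.  */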


/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx_insn *insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
	 Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}


/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, any clobbers that have to be added in order to
   match the instruction will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed
		      && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
	validate_change (insn, &PATTERN (insn), newpat, 1);
      else
	PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_addr_space_p (GET_MODE (object),
					     XEXP (object, 0),
					     MEM_ADDR_SPACE (object)))
	    break;
	}
      else if (/* changes[i].old might be zero, e.g. when putting a
	       REG_FRAME_RELATED_EXPR into a previously empty list.  */
	       changes[i].old
	       && REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && REG_EXPR (changes[i].old) != NULL_TREE
	       && HAS_DECL_ASSEMBLER_NAME_P (REG_EXPR (changes[i].old))
	       && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
	       && DECL_REGISTER (REG_EXPR (changes[i].old)))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (DEBUG_INSN_P (object))
	continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		   || GET_CODE (pat) == VAR_LOCATION)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
	*changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
	{
	  if (object != last_object && last_object && INSN_P (last_object))
	    df_insn_rescan (as_a <rtx_insn *> (last_object));
	  last_object = object;
	}
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}


/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
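
/* An illustrative sketch of the watermark idiom that verify_changes,
   num_validated_changes and cancel_changes enable (INSN, LOC and NEW_RTX
   are hypothetical caller-supplied values):

       int num = num_validated_changes ();
       validate_change (insn, loc, new_rtx, 1);
       if (!verify_changes (num))
	 cancel_changes (num);

   Only the speculative changes made after the watermark are retracted;
   changes numbered below NUM stay queued either way.  */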

/* Reduce conditional compilation elsewhere.  */
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
			  machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;
  scalar_int_mode is_mode;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
					       : swap_condition (code),
					       GET_MODE (x), XEXP (x, 1),
					       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
	new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
					    op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
					     XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
						 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }
  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
					op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new_rtx)
	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new_rtx, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
				 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
	validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (x, 2))
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
					MEM_ADDR_SPACE (XEXP (x, 0)))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  int pos = INTVAL (XEXP (x, 2));
	  machine_mode new_mode = is_mode;
	  if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
	    new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
	  else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
	    new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
	  scalar_int_mode wanted_mode = (new_mode == VOIDmode
					 ? word_mode
					 : as_a <scalar_int_mode> (new_mode));

	  /* If we have a narrower mode, we can do something.  */
	  if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
	         must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      gcc_assert (GET_MODE_PRECISION (wanted_mode)
			  == GET_MODE_BITSIZE (wanted_mode));
	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
			bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object, simplify);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
				    simplify);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
				    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
				      rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
	|| REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}
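
/* Illustrative only: a typical client of the routines above propagates a
   known value into a single insn and keeps the result only if the insn
   still matches some pattern.  REG and CST are hypothetical:

       if (validate_replace_rtx (reg, cst, insn))
	 ...

   On failure every substitution is undone via cancel_changes, so INSN is
   never left in a half-replaced state.  */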

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx_insn *insn;		/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx_insn *insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
	validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
	validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx s = XVECEXP (pat, 0, i);

	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
	  {
	    newpat = simplify_rtx (SET_SRC (s));
	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
	      validate_change (insn, &SET_SRC (s), newpat, 1);
	    newpat = simplify_rtx (SET_DEST (s));
	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
	      validate_change (insn, &SET_DEST (s), newpat, 1);
	  }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && targetm.legitimate_constant_p (mode == VOIDmode
					      ? GET_MODE (op)
					      : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && paradoxical_subreg_p (op))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed
	  && maybe_ne (SUBREG_BYTE (op), 0)
	  && MEM_P (sub))
	return 0;

      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
	  /* LRA can generate some invalid SUBREGS just for matched
	     operand reload presentation.  LRA needs to treat them as
	     valid.  */
	  && ! LRA_SUBREG_P (op))
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  /* LRA can use subreg to store a floating point value in an
	     integer mode.  Although the floating point and the
	     integer modes need the same number of hard registers, the
	     size of floating point mode can be less than the integer
	     mode.  */
	  && ! lra_in_progress
	  && paradoxical_subreg_p (op))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
	 generate move insn with invalid addresses which is made valid
	 and efficiently calculated by LRA through further numerous
	 transformations.  */
      if (lra_in_progress
	  || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
	return 1;
    }

  return 0;
}
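
/* For illustration: this predicate is normally reached from a machine
   description rather than called directly.  A hypothetical move pattern
   might read

       (define_insn "*movsi_sketch"
	 [(set (match_operand:SI 0 "register_operand" "=r")
	       (match_operand:SI 1 "general_operand" "ri"))]
	 ""
	 "...")

   in which operand 1 is validated by calling general_operand with
   SImode.  */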

/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, machine_mode mode)
{
  /* Wrong mode for an address expr.  */
  if (GET_MODE (op) != VOIDmode
      && ! SCALAR_INT_MODE_P (GET_MODE (op)))
    return false;

  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, machine_mode mode)
{
  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
	return 0;
    }
  else if (!REG_P (op))
    return 0;
  return general_operand (op, mode);
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (REG_P (op)
	      && (lra_in_progress
		  || (REGNO (op) < FIRST_PSEUDO_REGISTER
		      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && targetm.legitimate_constant_p (mode == VOIDmode
					    ? GET_MODE (op)
					    : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */

int
const_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */
int
const_scalar_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return 0;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
      int prec = GET_MODE_PRECISION (int_mode);
      int bitsize = GET_MODE_BITSIZE (int_mode);

      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
	return 0;

      if (prec == bitsize)
	return 1;
      else
	{
	  /* Multiword partial int.  */
	  HOST_WIDE_INT x
	    = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
	  return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
	}
    }
  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  return (GET_CODE (op) == CONST_DOUBLE)
	  && (GET_MODE (op) == mode || mode == VOIDmode);
}
#else
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}
#endif

/* Return 1 if OP is a general operand that is not an immediate
   operand of mode MODE.  */

int
nonimmediate_operand (rtx op, machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);
  return register_operand (op, mode);
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  poly_int64 rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
#endif

  op = XEXP (op, 0);

  if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      poly_int64 offset;
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
	  || (STACK_GROWS_DOWNWARD
	      ? maybe_ne (offset, -rounded_size)
	      : maybe_ne (offset, rounded_size)))
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
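
/* As an illustrative example: on a target where the stack grows downward
   and STACK_PUSH_CODE is PRE_DEC, a valid SImode push operand with no
   PUSH_ROUNDING padding has the shape

       (mem:SI (pre_dec:P (reg sp)))

   while a padded push must take the PRE_MODIFY form checked above.  */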

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
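
/* Correspondingly, with the STACK_POP_CODE default chosen at the top of
   this file (POST_INC when STACK_GROWS_DOWNWARD), an SImode pop operand
   looks like

       (mem:SI (post_inc:P (reg sp))).  */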

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
			     rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */
      poly_int64 offset;
      rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
      return (known_eq (offset + SUBREG_BYTE (op), 0)
	      && general_operand (addr, Pmode));
    }

  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      if (GET_CODE (tmp) == SET)
	{
	  tmp = SET_SRC (tmp);
	  if (GET_CODE (tmp) == ASM_OPERANDS)
	    return tmp;
	}
      break;

    default:
      break;
    }
  return NULL;
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
   return 0.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int i, n_sets = 0;

  if (asm_op == NULL)
    {
      if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
	  && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
	{
	  /* body is [(asm_input ...) (clobber (reg ...))...].  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	  return 0;
	}
      return -1;
    }

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is
	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
	         then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (asm_op))
		return -1;
	    }
	}
      else
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	}
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
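
/* For example (illustrative): for a statement such as

       asm ("..." : "=r" (a), "=r" (b) : "r" (c));

   the body is a PARALLEL of two SETs plus any clobbers, so the value
   returned is 1 input + 0 labels + 2 SETs == 3.  */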
1531 
1532 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1533    copy its operands (both input and output) into the vector OPERANDS,
1534    the locations of the operands within the insn into the vector OPERAND_LOCS,
1535    and the constraints for the operands into CONSTRAINTS.
1536    Write the modes of the operands into MODES.
1537    Write the location info into LOC.
1538    Return the assembler-template.
1539    If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1540    return the basic assembly string.
1541 
1542    If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1543    we don't store that info.  */
1544 
1545 const char *
decode_asm_operands(rtx body,rtx * operands,rtx ** operand_locs,const char ** constraints,machine_mode * modes,location_t * loc)1546 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1547 		     const char **constraints, machine_mode *modes,
1548 		     location_t *loc)
1549 {
1550   int nbase = 0, n, i;
1551   rtx asmop;
1552 
1553   switch (GET_CODE (body))
1554     {
1555     case ASM_OPERANDS:
1556       /* Zero output asm: BODY is (asm_operands ...).  */
1557       asmop = body;
1558       break;
1559 
1560     case SET:
1561       /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
1562       asmop = SET_SRC (body);
1563 
1564       /* The output is in the SET.
1565 	 Its constraint is in the ASM_OPERANDS itself.  */
1566       if (operands)
1567 	operands[0] = SET_DEST (body);
1568       if (operand_locs)
1569 	operand_locs[0] = &SET_DEST (body);
1570       if (constraints)
1571 	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1572       if (modes)
1573 	modes[0] = GET_MODE (SET_DEST (body));
1574       nbase = 1;
1575       break;
1576 
1577     case PARALLEL:
1578       {
1579 	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
1580 
1581 	asmop = XVECEXP (body, 0, 0);
1582 	if (GET_CODE (asmop) == SET)
1583 	  {
1584 	    asmop = SET_SRC (asmop);
1585 
1586 	    /* At least one output, plus some CLOBBERs.  The outputs are in
1587 	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
1588 	    for (i = 0; i < nparallel; i++)
1589 	      {
1590 		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1591 		  break;		/* Past last SET */
1592 		gcc_assert (GET_CODE (XVECEXP (body, 0, i)) == SET);
1593 		if (operands)
1594 		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
1595 		if (operand_locs)
1596 		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1597 		if (constraints)
1598 		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1599 		if (modes)
1600 		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1601 	      }
1602 	    nbase = i;
1603 	  }
1604 	else if (GET_CODE (asmop) == ASM_INPUT)
1605 	  {
1606 	    if (loc)
1607 	      *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
1608 	    return XSTR (asmop, 0);
1609 	  }
1610 	break;
1611       }
1612 
1613     default:
1614       gcc_unreachable ();
1615     }
1616 
1617   n = ASM_OPERANDS_INPUT_LENGTH (asmop);
1618   for (i = 0; i < n; i++)
1619     {
1620       if (operand_locs)
1621 	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
1622       if (operands)
1623 	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
1624       if (constraints)
1625 	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1626       if (modes)
1627 	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1628     }
1629   nbase += n;
1630 
1631   n = ASM_OPERANDS_LABEL_LENGTH (asmop);
1632   for (i = 0; i < n; i++)
1633     {
1634       if (operand_locs)
1635 	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
1636       if (operands)
1637 	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
1638       if (constraints)
1639 	constraints[nbase + i] = "";
1640       if (modes)
1641 	modes[nbase + i] = Pmode;
1642     }
1643 
1644   if (loc)
1645     *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1646 
1647   return ASM_OPERANDS_TEMPLATE (asmop);
1648 }
1649 
1650 /* Parse inline assembly string STRING and determine which operands are
1651    referenced by % markers.  For the first NOPERANDS operands, set USED[I]
1652    to true if operand I is referenced.
1653 
1654    This is intended to distinguish barrier-like asms such as:
1655 
1656       asm ("" : "=m" (...));
1657 
1658    from real references such as:
1659 
1660       asm ("sw\t$0, %0" : "=m" (...));  */
1661 
1662 void
get_referenced_operands(const char * string,bool * used,unsigned int noperands)1663 get_referenced_operands (const char *string, bool *used,
1664 			 unsigned int noperands)
1665 {
1666   memset (used, 0, sizeof (bool) * noperands);
1667   const char *p = string;
1668   while (*p)
1669     switch (*p)
1670       {
1671       case '%':
1672 	p += 1;
1673 	/* A letter followed by a digit indicates an operand number.  */
1674 	if (ISALPHA (p[0]) && ISDIGIT (p[1]))
1675 	  p += 1;
1676 	if (ISDIGIT (*p))
1677 	  {
1678 	    char *endptr;
1679 	    unsigned long opnum = strtoul (p, &endptr, 10);
1680 	    if (endptr != p && opnum < noperands)
1681 	      used[opnum] = true;
1682 	    p = endptr;
1683 	  }
1684 	else
1685 	  p += 1;
1686 	break;
1687 
1688       default:
1689 	p++;
1690 	break;
1691       }
1692 }
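
/* A sketch of the distinction described above, reusing the templates
   from the comment before this function:

     bool used[1];
     get_referenced_operands ("", used, 1);
     // used[0] is false: the template never mentions %0.
     get_referenced_operands ("sw\t$0, %0", used, 1);
     // used[0] is true: %0 is a real reference, while $0 is not,
     // since only '%' introduces an operand number.  */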
1693 
1694 /* Check if an asm_operand matches its constraints.
1695    Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */
1696 
1697 int
1698 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
1699 {
1700   int result = 0;
1701   bool incdec_ok = false;
1702 
1703   /* Use constrain_operands after reload.  */
1704   gcc_assert (!reload_completed);
1705 
1706   /* Empty constraint string is the same as "X,...,X", i.e. X for as
1707      many alternatives as required to match the other operands.  */
1708   if (*constraint == '\0')
1709     result = 1;
1710 
1711   while (*constraint)
1712     {
1713       enum constraint_num cn;
1714       char c = *constraint;
1715       int len;
1716       switch (c)
1717 	{
1718 	case ',':
1719 	  constraint++;
1720 	  continue;
1721 
1722 	case '0': case '1': case '2': case '3': case '4':
1723 	case '5': case '6': case '7': case '8': case '9':
1724 	  /* If the caller provided a constraints pointer, look up
1725 	     the matching constraint.  Otherwise, our caller should have
1726 	     given us the proper matching constraint, but we can't
1727 	     actually fail the check if they didn't.  Indicate that
1728 	     results are inconclusive.  */
1729 	  if (constraints)
1730 	    {
1731 	      char *end;
1732 	      unsigned long match;
1733 
1734 	      match = strtoul (constraint, &end, 10);
1735 	      if (!result)
1736 		result = asm_operand_ok (op, constraints[match], NULL);
1737 	      constraint = (const char *) end;
1738 	    }
1739 	  else
1740 	    {
1741 	      do
1742 		constraint++;
1743 	      while (ISDIGIT (*constraint));
1744 	      if (! result)
1745 		result = -1;
1746 	    }
1747 	  continue;
1748 
1749 	  /* The rest of the compiler assumes that reloading the address
1750 	     of a MEM into a register will make it fit an 'o' constraint.
1751 	     That is, if it sees a MEM operand for an 'o' constraint,
1752 	     it assumes that (mem (base-reg)) will fit.
1753 
1754 	     That assumption fails on targets that don't have offsettable
1755 	     addresses at all.  We therefore need to treat 'o' asm
1756 	     constraints as a special case and only accept operands that
1757 	     are already offsettable, thus proving that at least one
1758 	     offsettable address exists.  */
1759 	case 'o': /* offsettable */
1760 	  if (offsettable_nonstrict_memref_p (op))
1761 	    result = 1;
1762 	  break;
1763 
1764 	case 'g':
1765 	  if (general_operand (op, VOIDmode))
1766 	    result = 1;
1767 	  break;
1768 
1769 	case '<':
1770 	case '>':
1771 	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
1772 	     to exist, excepting those that expand_call created.  Further,
1773 	     on some machines which do not have generalized auto inc/dec,
1774 	     an inc/dec is not a memory_operand.
1775 
1776 	     Match any memory and hope things are resolved after reload.  */
1777 	  incdec_ok = true;
1778 	  /* FALLTHRU */
1779 	default:
1780 	  cn = lookup_constraint (constraint);
1781 	  switch (get_constraint_type (cn))
1782 	    {
1783 	    case CT_REGISTER:
1784 	      if (!result
1785 		  && reg_class_for_constraint (cn) != NO_REGS
1786 		  && GET_MODE (op) != BLKmode
1787 		  && register_operand (op, VOIDmode))
1788 		result = 1;
1789 	      break;
1790 
1791 	    case CT_CONST_INT:
1792 	      if (!result
1793 		  && CONST_INT_P (op)
1794 		  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
1795 		result = 1;
1796 	      break;
1797 
1798 	    case CT_MEMORY:
1799 	    case CT_SPECIAL_MEMORY:
1800 	      /* Every memory operand can be reloaded to fit.  */
1801 	      result = result || memory_operand (op, VOIDmode);
1802 	      break;
1803 
1804 	    case CT_ADDRESS:
1805 	      /* Every address operand can be reloaded to fit.  */
1806 	      result = result || address_operand (op, VOIDmode);
1807 	      break;
1808 
1809 	    case CT_FIXED_FORM:
1810 	      result = result || constraint_satisfied_p (op, cn);
1811 	      break;
1812 	    }
1813 	  break;
1814 	}
1815       len = CONSTRAINT_LEN (c, constraint);
1816       do
1817 	constraint++;
1818       while (--len && *constraint && *constraint != ',');
1819       if (len)
1820 	return 0;
1821     }
1822 
1823   /* For operands without < or > constraints reject side-effects.  */
1824   if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
1825     switch (GET_CODE (XEXP (op, 0)))
1826       {
1827       case PRE_INC:
1828       case POST_INC:
1829       case PRE_DEC:
1830       case POST_DEC:
1831       case PRE_MODIFY:
1832       case POST_MODIFY:
1833 	return 0;
1834       default:
1835 	break;
1836       }
1837 
1838   return result;
1839 }
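
/* A sketch of the tri-state result, assuming a hypothetical target
   constraint 'I' that accepts small constant integers:

     int r1 = asm_operand_ok (GEN_INT (5), "I", NULL);
     // r1 == 1 if 5 satisfies 'I' on this target, else 0.
     int r2 = asm_operand_ok (GEN_INT (5), "0", NULL);
     // r2 == -1: a matching constraint with no CONSTRAINTS array
     // is inconclusive, as handled in the digit case above.  */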
1840 
1841 /* Given an rtx *P, if it is a sum containing an integer constant term,
1842    return the location (type rtx *) of the pointer to that constant term.
1843    Otherwise, return a null pointer.  */
1844 
1845 rtx *
1846 find_constant_term_loc (rtx *p)
1847 {
1848   rtx *tem;
1849   enum rtx_code code = GET_CODE (*p);
1850 
1851   /* If *P IS such a constant term, P is its location.  */
1852 
1853   if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1854       || code == CONST)
1855     return p;
1856 
1857   /* Otherwise, if not a sum, it has no constant term.  */
1858 
1859   if (GET_CODE (*p) != PLUS)
1860     return 0;
1861 
1862   /* If both summands are constant, return the location of the sum.  */
1863 
1864   if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1865       && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1866     return p;
1867 
1868   /* Otherwise, check each summand for containing a constant term.  */
1869 
1870   if (XEXP (*p, 0) != 0)
1871     {
1872       tem = find_constant_term_loc (&XEXP (*p, 0));
1873       if (tem != 0)
1874 	return tem;
1875     }
1876 
1877   if (XEXP (*p, 1) != 0)
1878     {
1879       tem = find_constant_term_loc (&XEXP (*p, 1));
1880       if (tem != 0)
1881 	return tem;
1882     }
1883 
1884   return 0;
1885 }
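
/* For example, given a pointer to (plus (reg R) (const_int 4)),
   the constant term is the second summand:

     rtx sum = gen_rtx_PLUS (Pmode, r, GEN_INT (4));   // R hypothetical
     rtx *loc = find_constant_term_loc (&sum);
     // *loc is the (const_int 4); writing through LOC edits the term
     // in place, which is how callers adjust such addresses.  */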
1886 
1887 /* Return 1 if OP is a memory reference
1888    whose address contains no side effects
1889    and remains valid after the addition
1890    of a positive integer less than the
1891    size of the object being referenced.
1892 
1893    We assume that the original address is valid and do not check it.
1894 
1895    This uses strict_memory_address_p as a subroutine, so
1896    don't use it before reload.  */
1897 
1898 int
1899 offsettable_memref_p (rtx op)
1900 {
1901   return ((MEM_P (op))
1902 	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1903 					       MEM_ADDR_SPACE (op)));
1904 }
1905 
1906 /* Similar, but don't require a strictly valid mem ref:
1907    consider pseudo-regs valid as index or base regs.  */
1908 
1909 int
1910 offsettable_nonstrict_memref_p (rtx op)
1911 {
1912   return ((MEM_P (op))
1913 	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1914 					       MEM_ADDR_SPACE (op)));
1915 }
1916 
1917 /* Return 1 if Y is a memory address which contains no side effects
1918    and would remain valid for address space AS after the addition of
1919    a positive integer less than the size of MODE.
1920 
1921    We assume that the original address is valid and do not check it.
1922    We do check that it is valid for narrower modes.
1923 
1924    If STRICTP is nonzero, we require a strictly valid address,
1925    for the sake of use in reload.c.  */
1926 
1927 int
1928 offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
1929 				  addr_space_t as)
1930 {
1931   enum rtx_code ycode = GET_CODE (y);
1932   rtx z;
1933   rtx y1 = y;
1934   rtx *y2;
1935   int (*addressp) (machine_mode, rtx, addr_space_t) =
1936     (strictp ? strict_memory_address_addr_space_p
1937 	     : memory_address_addr_space_p);
1938   poly_int64 mode_sz = GET_MODE_SIZE (mode);
1939 
1940   if (CONSTANT_ADDRESS_P (y))
1941     return 1;
1942 
1943   /* Adjusting an offsettable address involves changing to a narrower mode.
1944      Make sure that's OK.  */
1945 
1946   if (mode_dependent_address_p (y, as))
1947     return 0;
1948 
1949   machine_mode address_mode = GET_MODE (y);
1950   if (address_mode == VOIDmode)
1951     address_mode = targetm.addr_space.address_mode (as);
1952 #ifdef POINTERS_EXTEND_UNSIGNED
1953   machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
1954 #endif
1955 
1956   /* ??? How much offset does an offsettable BLKmode reference need?
1957      Clearly that depends on the situation in which it's being used.
1958      However, the current situation in which we test 0xffffffff is
1959      less than ideal.  Caveat user.  */
1960   if (known_eq (mode_sz, 0))
1961     mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1962 
1963   /* If the expression contains a constant term,
1964      see if it remains valid when max possible offset is added.  */
1965 
1966   if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1967     {
1968       int good;
1969 
1970       y1 = *y2;
1971       *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
1972       /* Use QImode because an odd displacement may be automatically invalid
1973 	 for any wider mode.  But it should be valid for a single byte.  */
1974       good = (*addressp) (QImode, y, as);
1975 
1976       /* In any case, restore old contents of memory.  */
1977       *y2 = y1;
1978       return good;
1979     }
1980 
1981   if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1982     return 0;
1983 
1984   /* The offset added here is chosen as the maximum offset that
1985      any instruction could need to add when operating on something
1986      of the specified mode.  We assume that if Y and Y+c are
1987      valid addresses then so is Y+d for all 0<d<c.  adjust_address will
1988      go inside a LO_SUM here, so we do so as well.  */
1989   if (GET_CODE (y) == LO_SUM
1990       && mode != BLKmode
1991       && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
1992     z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
1993 			plus_constant (address_mode, XEXP (y, 1),
1994 				       mode_sz - 1));
1995 #ifdef POINTERS_EXTEND_UNSIGNED
1996   /* Likewise for a ZERO_EXTEND from pointer_mode.  */
1997   else if (POINTERS_EXTEND_UNSIGNED > 0
1998 	   && GET_CODE (y) == ZERO_EXTEND
1999 	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
2000     z = gen_rtx_ZERO_EXTEND (address_mode,
2001 			     plus_constant (pointer_mode, XEXP (y, 0),
2002 					    mode_sz - 1));
2003 #endif
2004   else
2005     z = plus_constant (address_mode, y, mode_sz - 1);
2006 
2007   /* Use QImode because an odd displacement may be automatically invalid
2008      for any wider mode.  But it should be valid for a single byte.  */
2009   return (*addressp) (QImode, z, as);
2010 }
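
/* A worked example of the probe above: for MODE == SImode (size 4)
   and Y == (plus (reg R) (const_int 100)), mode_sz - 1 == 3 is added
   to the constant term and the resulting (plus (reg R) (const_int 103))
   is checked as a QImode address.  If both the original and the probed
   displacement are valid, every byte of the SImode reference can be
   reached by offsetting, given the assumption stated above that
   validity of Y and Y+c implies validity of Y+d for 0<d<c.  */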
2011 
2012 /* Return true if ADDR is an address-expression whose effect depends
2013    on the mode of the memory reference it is used in.
2014 
2015    ADDRSPACE is the address space associated with the address.
2016 
2017    Autoincrement addressing is a typical example of mode-dependence
2018    because the amount of the increment depends on the mode.  */
2019 
2020 bool
2021 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2022 {
2023   /* Auto-increment addressing with anything other than post_modify
2024      or pre_modify always introduces a mode dependency.  Catch such
2025      cases now instead of deferring to the target.  */
2026   if (GET_CODE (addr) == PRE_INC
2027       || GET_CODE (addr) == POST_INC
2028       || GET_CODE (addr) == PRE_DEC
2029       || GET_CODE (addr) == POST_DEC)
2030     return true;
2031 
2032   return targetm.mode_dependent_address_p (addr, addrspace);
2033 }
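
/* For example, (post_inc (reg R)) is caught by the checks above: the
   side effect it performs adds GET_MODE_SIZE of the enclosing memory
   reference, so the same address rtx would behave differently in a
   different mode:

     rtx addr = gen_rtx_POST_INC (Pmode, r);   // R hypothetical
     // mode_dependent_address_p (addr, ADDR_SPACE_GENERIC) is true.  */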
2034 
2035 /* Return true if boolean attribute ATTR is supported.  */
2036 
2037 static bool
2038 have_bool_attr (bool_attr attr)
2039 {
2040   switch (attr)
2041     {
2042     case BA_ENABLED:
2043       return HAVE_ATTR_enabled;
2044     case BA_PREFERRED_FOR_SIZE:
2045       return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2046     case BA_PREFERRED_FOR_SPEED:
2047       return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2048     }
2049   gcc_unreachable ();
2050 }
2051 
2052 /* Return the value of ATTR for instruction INSN.  */
2053 
2054 static bool
2055 get_bool_attr (rtx_insn *insn, bool_attr attr)
2056 {
2057   switch (attr)
2058     {
2059     case BA_ENABLED:
2060       return get_attr_enabled (insn);
2061     case BA_PREFERRED_FOR_SIZE:
2062       return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2063     case BA_PREFERRED_FOR_SPEED:
2064       return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2065     }
2066   gcc_unreachable ();
2067 }
2068 
2069 /* Like get_bool_attr_mask, but don't use the cache.  */
2070 
2071 static alternative_mask
2072 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2073 {
2074   /* Temporarily install enough information for get_attr_<foo> to assume
2075      that the insn operands are already cached.  The attribute mustn't
2076      depend on the values of the operands, so we don't provide their
2077      real values here.  */
2078   rtx_insn *old_insn = recog_data.insn;
2079   int old_alternative = which_alternative;
2080 
2081   recog_data.insn = insn;
2082   alternative_mask mask = ALL_ALTERNATIVES;
2083   int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2084   for (int i = 0; i < n_alternatives; i++)
2085     {
2086       which_alternative = i;
2087       if (!get_bool_attr (insn, attr))
2088 	mask &= ~ALTERNATIVE_BIT (i);
2089     }
2090 
2091   recog_data.insn = old_insn;
2092   which_alternative = old_alternative;
2093   return mask;
2094 }
2095 
2096 /* Return the mask of operand alternatives that are allowed for INSN
2097    by boolean attribute ATTR.  This mask depends only on INSN and on
2098    the current target; it does not depend on things like the values of
2099    operands.  */
2100 
2101 static alternative_mask
2102 get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2103 {
2104   /* Quick exit for asms and for targets that don't use these attributes.  */
2105   int code = INSN_CODE (insn);
2106   if (code < 0 || !have_bool_attr (attr))
2107     return ALL_ALTERNATIVES;
2108 
2109   /* Calling get_attr_<foo> can be expensive, so cache the mask
2110      for speed.  */
2111   if (!this_target_recog->x_bool_attr_masks[code][attr])
2112     this_target_recog->x_bool_attr_masks[code][attr]
2113       = get_bool_attr_mask_uncached (insn, attr);
2114   return this_target_recog->x_bool_attr_masks[code][attr];
2115 }
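
/* A sketch of the cache behavior above, for a hypothetical insn code
   with three alternatives whose "enabled" attribute rejects
   alternative 1: the first call runs get_bool_attr_mask_uncached,
   which clears ALTERNATIVE_BIT (1) from the mask and stores the
   result in this_target_recog->x_bool_attr_masks[code][BA_ENABLED];
   subsequent calls for the same code return that cached mask without
   re-evaluating the generated get_attr_enabled.  */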
2116 
2117 /* Return the set of alternatives of INSN that are allowed by the current
2118    target.  */
2119 
2120 alternative_mask
2121 get_enabled_alternatives (rtx_insn *insn)
2122 {
2123   return get_bool_attr_mask (insn, BA_ENABLED);
2124 }
2125 
2126 /* Return the set of alternatives of INSN that are allowed by the current
2127    target and are preferred for the current size/speed optimization
2128    choice.  */
2129 
2130 alternative_mask
2131 get_preferred_alternatives (rtx_insn *insn)
2132 {
2133   if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2134     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2135   else
2136     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2137 }
2138 
2139 /* Return the set of alternatives of INSN that are allowed by the current
2140    target and are preferred for the size/speed optimization choice
2141    associated with BB.  Passing a separate BB is useful if INSN has not
2142    been emitted yet or if we are considering moving it to a different
2143    block.  */
2144 
2145 alternative_mask
2146 get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2147 {
2148   if (optimize_bb_for_speed_p (bb))
2149     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2150   else
2151     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2152 }
2153 
2154 /* Assert that the cached boolean attributes for INSN are still accurate.
2155    The backend is required to define these attributes in a way that only
2156    depends on the current target (rather than operands, compiler phase,
2157    etc.).  */
2158 
2159 bool
2160 check_bool_attrs (rtx_insn *insn)
2161 {
2162   int code = INSN_CODE (insn);
2163   if (code >= 0)
2164     for (int i = 0; i <= BA_LAST; ++i)
2165       {
2166 	enum bool_attr attr = (enum bool_attr) i;
2167 	if (this_target_recog->x_bool_attr_masks[code][attr])
2168 	  gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2169 		      == get_bool_attr_mask_uncached (insn, attr));
2170       }
2171   return true;
2172 }
2173 
2174 /* Like extract_insn, but save the extracted insn and don't extract again
2175    when called again for the same insn, expecting that recog_data still
2176    contains valid information.  This is used primarily by the gen_attr
2177    infrastructure, which often extracts the same insn again and again.  */
2178 void
2179 extract_insn_cached (rtx_insn *insn)
2180 {
2181   if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2182     return;
2183   extract_insn (insn);
2184   recog_data.insn = insn;
2185 }
2186 
2187 /* Do uncached extract_insn, constrain_operands and complain about failures.
2188    This should be used when extracting a pre-existing constrained instruction
2189    if the caller wants to know which alternative was chosen.  */
2190 void
2191 extract_constrain_insn (rtx_insn *insn)
2192 {
2193   extract_insn (insn);
2194   if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2195     fatal_insn_not_found (insn);
2196 }
2197 
2198 /* Do cached extract_insn, constrain_operands and complain about failures.
2199    Used by insn_attrtab.  */
2200 void
2201 extract_constrain_insn_cached (rtx_insn *insn)
2202 {
2203   extract_insn_cached (insn);
2204   if (which_alternative == -1
2205       && !constrain_operands (reload_completed,
2206 			      get_enabled_alternatives (insn)))
2207     fatal_insn_not_found (insn);
2208 }
2209 
2210 /* Do cached constrain_operands on INSN and complain about failures.  */
2211 int
2212 constrain_operands_cached (rtx_insn *insn, int strict)
2213 {
2214   if (which_alternative == -1)
2215     return constrain_operands (strict, get_enabled_alternatives (insn));
2216   else
2217     return 1;
2218 }
2219 
2220 /* Analyze INSN and fill in recog_data.  */
2221 
2222 void
2223 extract_insn (rtx_insn *insn)
2224 {
2225   int i;
2226   int icode;
2227   int noperands;
2228   rtx body = PATTERN (insn);
2229 
2230   recog_data.n_operands = 0;
2231   recog_data.n_alternatives = 0;
2232   recog_data.n_dups = 0;
2233   recog_data.is_asm = false;
2234 
2235   switch (GET_CODE (body))
2236     {
2237     case USE:
2238     case CLOBBER:
2239     case ASM_INPUT:
2240     case ADDR_VEC:
2241     case ADDR_DIFF_VEC:
2242     case VAR_LOCATION:
2243     case DEBUG_MARKER:
2244       return;
2245 
2246     case SET:
2247       if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2248 	goto asm_insn;
2249       else
2250 	goto normal_insn;
2251     case PARALLEL:
2252       if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2253 	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2254 	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
2255 	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2256 	goto asm_insn;
2257       else
2258 	goto normal_insn;
2259     case ASM_OPERANDS:
2260     asm_insn:
2261       recog_data.n_operands = noperands = asm_noperands (body);
2262       if (noperands >= 0)
2263 	{
2264 	  /* This insn is an `asm' with operands.  */
2265 
2266 	  /* expand_asm_operands makes sure there aren't too many operands.  */
2267 	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2268 
2269 	  /* Now get the operand values and constraints out of the insn.  */
2270 	  decode_asm_operands (body, recog_data.operand,
2271 			       recog_data.operand_loc,
2272 			       recog_data.constraints,
2273 			       recog_data.operand_mode, NULL);
2274 	  memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2275 	  if (noperands > 0)
2276 	    {
2277 	      const char *p = recog_data.constraints[0];
2278 	      recog_data.n_alternatives = 1;
2279 	      while (*p)
2280 		recog_data.n_alternatives += (*p++ == ',');
2281 	    }
2282 	  recog_data.is_asm = true;
2283 	  break;
2284 	}
2285       fatal_insn_not_found (insn);
2286 
2287     default:
2288     normal_insn:
2289       /* Ordinary insn: recognize it, get the operands via insn_extract
2290 	 and get the constraints.  */
2291 
2292       icode = recog_memoized (insn);
2293       if (icode < 0)
2294 	fatal_insn_not_found (insn);
2295 
2296       recog_data.n_operands = noperands = insn_data[icode].n_operands;
2297       recog_data.n_alternatives = insn_data[icode].n_alternatives;
2298       recog_data.n_dups = insn_data[icode].n_dups;
2299 
2300       insn_extract (insn);
2301 
2302       for (i = 0; i < noperands; i++)
2303 	{
2304 	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2305 	  recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2306 	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2307 	  /* A VOIDmode match_operand gets its mode from the real operand.  */
2308 	  if (recog_data.operand_mode[i] == VOIDmode)
2309 	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2310 	}
2311     }
2312   for (i = 0; i < noperands; i++)
2313     recog_data.operand_type[i]
2314       = (recog_data.constraints[i][0] == '=' ? OP_OUT
2315 	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2316 	 : OP_IN);
2317 
2318   gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2319 
2320   recog_data.insn = NULL;
2321   which_alternative = -1;
2322 }
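
/* A sketch of the operand_type classification at the end of
   extract_insn, assuming a hypothetical two-operand pattern with
   constraints "=r" and "rm": operand 0 begins with '=' and is OP_OUT,
   operand 1 begins with neither '=' nor '+' and is OP_IN; an operand
   whose constraint began with '+' would be OP_INOUT.  */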
2323 
2324 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
2325    operands, N_ALTERNATIVES alternatives and constraint strings
2326    CONSTRAINTS.  OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
2327    and CONSTRAINTS has N_OPERANDS entries.  OPLOC should be passed in
2328    if the insn is an asm statement and preprocessing should take the
2329    asm operands into account, e.g. to determine whether they could be
2330    addresses in constraints that require addresses; it should then
2331    point to an array of pointers to each operand.  */
2332 
2333 void
2334 preprocess_constraints (int n_operands, int n_alternatives,
2335 			const char **constraints,
2336 			operand_alternative *op_alt_base,
2337 			rtx **oploc)
2338 {
2339   for (int i = 0; i < n_operands; i++)
2340     {
2341       int j;
2342       struct operand_alternative *op_alt;
2343       const char *p = constraints[i];
2344 
2345       op_alt = op_alt_base;
2346 
2347       for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2348 	{
2349 	  op_alt[i].cl = NO_REGS;
2350 	  op_alt[i].constraint = p;
2351 	  op_alt[i].matches = -1;
2352 	  op_alt[i].matched = -1;
2353 
2354 	  if (*p == '\0' || *p == ',')
2355 	    {
2356 	      op_alt[i].anything_ok = 1;
2357 	      continue;
2358 	    }
2359 
2360 	  for (;;)
2361 	    {
2362 	      char c = *p;
2363 	      if (c == '#')
2364 		do
2365 		  c = *++p;
2366 		while (c != ',' && c != '\0');
2367 	      if (c == ',' || c == '\0')
2368 		{
2369 		  p++;
2370 		  break;
2371 		}
2372 
2373 	      switch (c)
2374 		{
2375 		case '?':
2376 		  op_alt[i].reject += 6;
2377 		  break;
2378 		case '!':
2379 		  op_alt[i].reject += 600;
2380 		  break;
2381 		case '&':
2382 		  op_alt[i].earlyclobber = 1;
2383 		  break;
2384 
2385 		case '0': case '1': case '2': case '3': case '4':
2386 		case '5': case '6': case '7': case '8': case '9':
2387 		  {
2388 		    char *end;
2389 		    op_alt[i].matches = strtoul (p, &end, 10);
2390 		    op_alt[op_alt[i].matches].matched = i;
2391 		    p = end;
2392 		  }
2393 		  continue;
2394 
2395 		case 'X':
2396 		  op_alt[i].anything_ok = 1;
2397 		  break;
2398 
2399 		case 'g':
2400 		  op_alt[i].cl =
2401 		   reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2402 		  break;
2403 
2404 		default:
2405 		  enum constraint_num cn = lookup_constraint (p);
2406 		  enum reg_class cl;
2407 		  switch (get_constraint_type (cn))
2408 		    {
2409 		    case CT_REGISTER:
2410 		      cl = reg_class_for_constraint (cn);
2411 		      if (cl != NO_REGS)
2412 			op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2413 		      break;
2414 
2415 		    case CT_CONST_INT:
2416 		      break;
2417 
2418 		    case CT_MEMORY:
2419 		    case CT_SPECIAL_MEMORY:
2420 		      op_alt[i].memory_ok = 1;
2421 		      break;
2422 
2423 		    case CT_ADDRESS:
2424 		      if (oploc && !address_operand (*oploc[i], VOIDmode))
2425 			break;
2426 
2427 		      op_alt[i].is_address = 1;
2428 		      op_alt[i].cl
2429 			= (reg_class_subunion
2430 			   [(int) op_alt[i].cl]
2431 			   [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2432 						  ADDRESS, SCRATCH)]);
2433 		      break;
2434 
2435 		    case CT_FIXED_FORM:
2436 		      break;
2437 		    }
2438 		  break;
2439 		}
2440 	      p += CONSTRAINT_LEN (c, p);
2441 	    }
2442 	}
2443     }
2444 }
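
/* A sketch of the loop above for a hypothetical operand constraint
   "r,m" over two alternatives: in alternative 0 the CT_REGISTER case
   folds the 'r' register class into op_alt[i].cl, while in
   alternative 1 the CT_MEMORY case sets op_alt[i].memory_ok and
   leaves cl as NO_REGS.  A '?' seen along the way would add 6 to
   op_alt[i].reject and a '!' would add 600.  */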
2445 
2446 /* Return an array of operand_alternative structures for
2447    instruction ICODE.  */
2448 
2449 const operand_alternative *
2450 preprocess_insn_constraints (unsigned int icode)
2451 {
2452   gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
2453   if (this_target_recog->x_op_alt[icode])
2454     return this_target_recog->x_op_alt[icode];
2455 
2456   int n_operands = insn_data[icode].n_operands;
2457   if (n_operands == 0)
2458     return 0;
2459   /* Always provide at least one alternative so that which_op_alt ()
2460      works correctly.  If the instruction has 0 alternatives (i.e. all
2461      constraint strings are empty) then each operand in this alternative
2462      will have anything_ok set.  */
2463   int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2464   int n_entries = n_operands * n_alternatives;
2465 
2466   operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2467   const char **constraints = XALLOCAVEC (const char *, n_operands);
2468 
2469   for (int i = 0; i < n_operands; ++i)
2470     constraints[i] = insn_data[icode].operand[i].constraint;
2471   preprocess_constraints (n_operands, n_alternatives, constraints, op_alt,
2472 			  NULL);
2473 
2474   this_target_recog->x_op_alt[icode] = op_alt;
2475   return op_alt;
2476 }
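
/* A usage sketch, assuming ICODE is a valid insn code with at least
   one operand:

     const operand_alternative *op_alt = preprocess_insn_constraints (icode);
     // A second call with the same ICODE returns the same pointer,
     // served from this_target_recog->x_op_alt without recomputing.  */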
2477 
2478 /* After calling extract_insn, you can use this function to extract some
2479    information from the constraint strings into a more usable form.
2480    The collected data is stored in recog_op_alt.  */
2481 
2482 void
2483 preprocess_constraints (rtx_insn *insn)
2484 {
2485   int icode = INSN_CODE (insn);
2486   if (icode >= 0)
2487     recog_op_alt = preprocess_insn_constraints (icode);
2488   else
2489     {
2490       int n_operands = recog_data.n_operands;
2491       int n_alternatives = recog_data.n_alternatives;
2492       int n_entries = n_operands * n_alternatives;
2493       memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2494       preprocess_constraints (n_operands, n_alternatives,
2495 			      recog_data.constraints, asm_op_alt,
2496 			      NULL);
2497       recog_op_alt = asm_op_alt;
2498     }
2499 }
2500 
2501 /* Check the operands of an insn against the insn's operand constraints
2502    and return 1 if they match any of the alternatives in ALTERNATIVES.
2503 
2504    The information about the insn's operands, constraints, operand modes
2505    etc. is obtained from the global variables set up by extract_insn.
2506 
2507    WHICH_ALTERNATIVE is set to a number which indicates which
2508    alternative of constraints was matched: 0 for the first alternative,
2509    1 for the next, etc.
2510 
2511    In addition, when two operands are required to match
2512    and it happens that the output operand is (reg) while the
2513    input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2514    make the output operand look like the input.
2515    This is because the output operand is the one the template will print.
2516 
2517    This is used in final, just before printing the assembler code and by
2518    the routines that determine an insn's attribute.
2519 
2520    If STRICT is a positive value, it means that we have been
2521    called after reload has been completed.  In that case, we must
2522    do all checks strictly.  If it is zero, it means that we have been called
2523    before reload has completed.  In that case, we first try to see if we can
2524    find an alternative that matches strictly.  If not, we try again, this
2525    time assuming that reload will fix up the insn.  This provides a "best
2526    guess" for the alternative and is used to compute attributes of insns prior
2527    to reload.  A negative value of STRICT is used for this internal call.  */
2528 
2529 struct funny_match
2530 {
2531   int this_op, other;
2532 };
2533 
2534 int
2535 constrain_operands (int strict, alternative_mask alternatives)
2536 {
2537   const char *constraints[MAX_RECOG_OPERANDS];
2538   int matching_operands[MAX_RECOG_OPERANDS];
2539   int earlyclobber[MAX_RECOG_OPERANDS];
2540   int c;
2541 
2542   struct funny_match funny_match[MAX_RECOG_OPERANDS];
2543   int funny_match_index;
2544 
2545   which_alternative = 0;
2546   if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2547     return 1;
2548 
2549   for (c = 0; c < recog_data.n_operands; c++)
2550     constraints[c] = recog_data.constraints[c];
2551 
2552   do
2553     {
2554       int seen_earlyclobber_at = -1;
2555       int opno;
2556       int lose = 0;
2557       funny_match_index = 0;
2558 
2559       if (!TEST_BIT (alternatives, which_alternative))
2560 	{
2561 	  int i;
2562 
2563 	  for (i = 0; i < recog_data.n_operands; i++)
2564 	    constraints[i] = skip_alternative (constraints[i]);
2565 
2566 	  which_alternative++;
2567 	  continue;
2568 	}
2569 
2570       for (opno = 0; opno < recog_data.n_operands; opno++)
2571 	matching_operands[opno] = -1;
2572 
2573       for (opno = 0; opno < recog_data.n_operands; opno++)
2574 	{
2575 	  rtx op = recog_data.operand[opno];
2576 	  machine_mode mode = GET_MODE (op);
2577 	  const char *p = constraints[opno];
2578 	  int offset = 0;
2579 	  int win = 0;
2580 	  int val;
2581 	  int len;
2582 
2583 	  earlyclobber[opno] = 0;
2584 
2585 	  /* A unary operator may be accepted by the predicate, but it
2586 	     is irrelevant for matching constraints.  */
2587 	  if (UNARY_P (op))
2588 	    op = XEXP (op, 0);
2589 
2590 	  if (GET_CODE (op) == SUBREG)
2591 	    {
2592 	      if (REG_P (SUBREG_REG (op))
2593 		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2594 		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2595 					      GET_MODE (SUBREG_REG (op)),
2596 					      SUBREG_BYTE (op),
2597 					      GET_MODE (op));
2598 	      op = SUBREG_REG (op);
2599 	    }
2600 
2601 	  /* An empty constraint or empty alternative
2602 	     allows anything which matched the pattern.  */
2603 	  if (*p == 0 || *p == ',')
2604 	    win = 1;
2605 
2606 	  do
2607 	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2608 	      {
2609 	      case '\0':
2610 		len = 0;
2611 		break;
2612 	      case ',':
2613 		c = '\0';
2614 		break;
2615 
2616 	      case '#':
2617 		/* Ignore rest of this alternative as far as
2618 		   constraint checking is concerned.  */
2619 		do
2620 		  p++;
2621 		while (*p && *p != ',');
2622 		len = 0;
2623 		break;
2624 
2625 	      case '&':
2626 		earlyclobber[opno] = 1;
2627 		if (seen_earlyclobber_at < 0)
2628 		  seen_earlyclobber_at = opno;
2629 		break;
2630 
2631 	      case '0':  case '1':  case '2':  case '3':  case '4':
2632 	      case '5':  case '6':  case '7':  case '8':  case '9':
2633 		{
2634 		  /* This operand must be the same as a previous one.
2635 		     This kind of constraint is used for instructions such
2636 		     as add when they take only two operands.
2637 
2638 		     Note that the lower-numbered operand is passed first.
2639 
2640 		     If we are not testing strictly, assume that this
2641 		     constraint will be satisfied.  */
2642 
2643 		  char *end;
2644 		  int match;
2645 
2646 		  match = strtoul (p, &end, 10);
2647 		  p = end;
2648 
2649 		  if (strict < 0)
2650 		    val = 1;
2651 		  else
2652 		    {
2653 		      rtx op1 = recog_data.operand[match];
2654 		      rtx op2 = recog_data.operand[opno];
2655 
2656 		      /* A unary operator may be accepted by the predicate,
2657 			 but it is irrelevant for matching constraints.  */
2658 		      if (UNARY_P (op1))
2659 			op1 = XEXP (op1, 0);
2660 		      if (UNARY_P (op2))
2661 			op2 = XEXP (op2, 0);
2662 
2663 		      val = operands_match_p (op1, op2);
2664 		    }
2665 
2666 		  matching_operands[opno] = match;
2667 		  matching_operands[match] = opno;
2668 
2669 		  if (val != 0)
2670 		    win = 1;
2671 
2672 		  /* If output is *x and input is *--x, arrange later
2673 		     to change the output to *--x as well, since the
2674 		     output op is the one that will be printed.  */
2675 		  if (val == 2 && strict > 0)
2676 		    {
2677 		      funny_match[funny_match_index].this_op = opno;
2678 		      funny_match[funny_match_index++].other = match;
2679 		    }
2680 		}
2681 		len = 0;
2682 		break;
2683 
2684 	      case 'p':
2685 		/* p is used for address_operands.  When we are called by
2686 		   gen_reload, no one will have checked that the address is
2687 		   strictly valid, i.e., that all pseudos requiring hard regs
2688 		   have gotten them.  We also want to make sure we have a
2689 		   valid mode.  */
2690 		if ((GET_MODE (op) == VOIDmode
2691 		     || SCALAR_INT_MODE_P (GET_MODE (op)))
2692 		    && (strict <= 0
2693 			|| (strict_memory_address_p
2694 			     (recog_data.operand_mode[opno], op))))
2695 		  win = 1;
2696 		break;
2697 
2698 		/* No need to check general_operand again;
2699 		   it was done in insn-recog.c.  Well, except that reload
2700 		   doesn't check the validity of its replacements, but
2701 		   that should only matter when there's a bug.  */
2702 	      case 'g':
2703 		/* Anything goes unless it is a REG and really has a hard reg
2704 		   but the hard reg is not in the class GENERAL_REGS.  */
2705 		if (REG_P (op))
2706 		  {
2707 		    if (strict < 0
2708 			|| GENERAL_REGS == ALL_REGS
2709 			|| (reload_in_progress
2710 			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2711 			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2712 		      win = 1;
2713 		  }
2714 		else if (strict < 0 || general_operand (op, mode))
2715 		  win = 1;
2716 		break;
2717 
2718 	      default:
2719 		{
2720 		  enum constraint_num cn = lookup_constraint (p);
2721 		  enum reg_class cl = reg_class_for_constraint (cn);
2722 		  if (cl != NO_REGS)
2723 		    {
2724 		      if (strict < 0
2725 			  || (strict == 0
2726 			      && REG_P (op)
2727 			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2728 			  || (strict == 0 && GET_CODE (op) == SCRATCH)
2729 			  || (REG_P (op)
2730 			      && reg_fits_class_p (op, cl, offset, mode)))
2731 		        win = 1;
2732 		    }
2733 
2734 		  else if (constraint_satisfied_p (op, cn))
2735 		    win = 1;
2736 
2737 		  else if (insn_extra_memory_constraint (cn)
2738 			   /* Every memory operand can be reloaded to fit.  */
2739 			   && ((strict < 0 && MEM_P (op))
2740 			       /* Before reload, accept what reload can turn
2741 				  into a mem.  */
2742 			       || (strict < 0 && CONSTANT_P (op))
2743 			       /* Before reload, accept a pseudo or hard register,
2744 				  since LRA can turn it into a mem.  */
2745 			       || (strict < 0 && targetm.lra_p () && REG_P (op))
2746 			       /* During reload, accept a pseudo.  */
2747 			       || (reload_in_progress && REG_P (op)
2748 				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2749 		    win = 1;
2750 		  else if (insn_extra_address_constraint (cn)
2751 			   /* Every address operand can be reloaded to fit.  */
2752 			   && strict < 0)
2753 		    win = 1;
2754 		  /* Cater to architectures like IA-64 that define extra memory
2755 		     constraints without using define_memory_constraint.  */
2756 		  else if (reload_in_progress
2757 			   && REG_P (op)
2758 			   && REGNO (op) >= FIRST_PSEUDO_REGISTER
2759 			   && reg_renumber[REGNO (op)] < 0
2760 			   && reg_equiv_mem (REGNO (op)) != 0
2761 			   && constraint_satisfied_p
2762 			      (reg_equiv_mem (REGNO (op)), cn))
2763 		    win = 1;
2764 		  break;
2765 		}
2766 	      }
2767 	  while (p += len, c);
2768 
2769 	  constraints[opno] = p;
2770 	  /* If this operand did not win somehow,
2771 	     this alternative loses.  */
2772 	  if (! win)
2773 	    lose = 1;
2774 	}
2775       /* This alternative won; the operands are ok.
2776 	 Change whichever operands this alternative says to change.  */
2777       if (! lose)
2778 	{
2779 	  int opno, eopno;
2780 
2781 	  /* See if any earlyclobber operand conflicts with some other
2782 	     operand.  */
2783 
2784 	  if (strict > 0  && seen_earlyclobber_at >= 0)
2785 	    for (eopno = seen_earlyclobber_at;
2786 		 eopno < recog_data.n_operands;
2787 		 eopno++)
2788 	      /* Ignore earlyclobber operands now in memory,
2789 		 because we would often report failure when we have
2790 		 two memory operands, one of which was formerly a REG.  */
2791 	      if (earlyclobber[eopno]
2792 		  && REG_P (recog_data.operand[eopno]))
2793 		for (opno = 0; opno < recog_data.n_operands; opno++)
2794 		  if ((MEM_P (recog_data.operand[opno])
2795 		       || recog_data.operand_type[opno] != OP_OUT)
2796 		      && opno != eopno
2797 		      /* Ignore things like match_operator operands.  */
2798 		      && *recog_data.constraints[opno] != 0
2799 		      && ! (matching_operands[opno] == eopno
2800 			    && operands_match_p (recog_data.operand[opno],
2801 						 recog_data.operand[eopno]))
2802 		      && ! safe_from_earlyclobber (recog_data.operand[opno],
2803 						   recog_data.operand[eopno]))
2804 		    lose = 1;
2805 
2806 	  if (! lose)
2807 	    {
2808 	      while (--funny_match_index >= 0)
2809 		{
2810 		  recog_data.operand[funny_match[funny_match_index].other]
2811 		    = recog_data.operand[funny_match[funny_match_index].this_op];
2812 		}
2813 
2814 	      /* For operands without < or > constraints reject side-effects.  */
2815 	      if (AUTO_INC_DEC && recog_data.is_asm)
2816 		{
2817 		  for (opno = 0; opno < recog_data.n_operands; opno++)
2818 		    if (MEM_P (recog_data.operand[opno]))
2819 		      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2820 			{
2821 			case PRE_INC:
2822 			case POST_INC:
2823 			case PRE_DEC:
2824 			case POST_DEC:
2825 			case PRE_MODIFY:
2826 			case POST_MODIFY:
2827 			  if (strchr (recog_data.constraints[opno], '<') == NULL
2828 			      && strchr (recog_data.constraints[opno], '>')
2829 				 == NULL)
2830 			    return 0;
2831 			  break;
2832 			default:
2833 			  break;
2834 			}
2835 		}
2836 
2837 	      return 1;
2838 	    }
2839 	}
2840 
2841       which_alternative++;
2842     }
2843   while (which_alternative < recog_data.n_alternatives);
2844 
2845   which_alternative = -1;
2846   /* If we are about to reject this, but we are not to test strictly,
2847      try a very loose test.  Only return failure if it fails also.  */
2848   if (strict == 0)
2849     return constrain_operands (-1, alternatives);
2850   else
2851     return 0;
2852 }
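
/* A usage sketch of the STRICT convention described above, mirroring
   extract_constrain_insn:

     extract_insn (insn);
     if (!constrain_operands (reload_completed,
			      get_enabled_alternatives (insn)))
       fatal_insn_not_found (insn);

   Before reload this passes STRICT == 0, so a failing strict pass is
   retried internally with STRICT == -1; after reload STRICT == 1 and
   all checks are final.  */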
2853 
2854 /* Return true iff OPERAND (assumed to be a REG rtx)
2855    is a hard reg in class CL when its regno is offset by OFFSET
2856    and changed to mode MODE.
2857    If OPERAND occupies multiple hard regs, all of them must be in CL.  */
2858 
2859 bool
2860 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2861 		  machine_mode mode)
2862 {
2863   unsigned int regno = REGNO (operand);
2864 
2865   if (cl == NO_REGS)
2866     return false;
2867 
2868   /* Regno must not be a pseudo register.  Offset may be negative.  */
2869   return (HARD_REGISTER_NUM_P (regno)
2870 	  && HARD_REGISTER_NUM_P (regno + offset)
2871 	  && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2872 				regno + offset));
2873 }
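
/* For example, on a hypothetical target whose GENERAL_REGS are hard
   registers 0-7 and whose word size makes DImode span two registers,
   reg_fits_class_p on (reg:SI 6) with OFFSET 1 and MODE DImode asks
   whether hard registers 7 and 8 both lie in GENERAL_REGS, and fails
   because register 8 is outside the class.  */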
2874 
2875 /* Split single instruction.  Helper function for split_all_insns and
2876    split_all_insns_noflow.  Return last insn in the sequence if successful,
2877    or NULL if unsuccessful.  */
2878 
2879 static rtx_insn *
2880 split_insn (rtx_insn *insn)
2881 {
2882   /* Split insns here to get max fine-grain parallelism.  */
2883   rtx_insn *first = PREV_INSN (insn);
2884   rtx_insn *last = try_split (PATTERN (insn), insn, 1);
2885   rtx insn_set, last_set, note;
2886 
2887   if (last == insn)
2888     return NULL;
2889 
2890   /* If the original instruction was a single set that was known to be
2891      equivalent to a constant, see if we can say the same about the last
2892      instruction in the split sequence.  The two instructions must set
2893      the same destination.  */
2894   insn_set = single_set (insn);
2895   if (insn_set)
2896     {
2897       last_set = single_set (last);
2898       if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2899 	{
2900 	  note = find_reg_equal_equiv_note (insn);
2901 	  if (note && CONSTANT_P (XEXP (note, 0)))
2902 	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2903 	  else if (CONSTANT_P (SET_SRC (insn_set)))
2904 	    set_unique_reg_note (last, REG_EQUAL,
2905 				 copy_rtx (SET_SRC (insn_set)));
2906 	}
2907     }
2908 
2909   /* try_split returns the NOTE that INSN became.  */
2910   SET_INSN_DELETED (insn);
2911 
2912   /* ??? Coddle to md files that generate subregs in post-reload
2913      splitters instead of computing the proper hard register.  */
2914   if (reload_completed && first != last)
2915     {
2916       first = NEXT_INSN (first);
2917       for (;;)
2918 	{
2919 	  if (INSN_P (first))
2920 	    cleanup_subreg_operands (first);
2921 	  if (first == last)
2922 	    break;
2923 	  first = NEXT_INSN (first);
2924 	}
2925     }
2926 
2927   return last;
2928 }
2929 
2930 /* Split all insns in the function.  */
2931 
2932 void
2933 split_all_insns (void)
2934 {
2935   bool changed;
2936   bool need_cfg_cleanup = false;
2937   basic_block bb;
2938 
2939   auto_sbitmap blocks (last_basic_block_for_fn (cfun));
2940   bitmap_clear (blocks);
2941   changed = false;
2942 
2943   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2944     {
2945       rtx_insn *insn, *next;
2946       bool finish = false;
2947 
2948       rtl_profile_for_bb (bb);
2949       for (insn = BB_HEAD (bb); !finish ; insn = next)
2950 	{
2951 	  /* Can't use `next_real_insn' because that might go across
2952 	     CODE_LABELS and short-out basic blocks.  */
2953 	  next = NEXT_INSN (insn);
2954 	  finish = (insn == BB_END (bb));
2955 
2956 	  /* If INSN has a REG_EH_REGION note and we split INSN, the
2957 	     resulting split may not have/need REG_EH_REGION notes.
2958 
2959 	     If that happens and INSN was the last reference to the
2960 	     given EH region, then the EH region will become unreachable.
2961 	     We cannot leave the unreachable blocks in the CFG as that
2962 	     will trigger a checking failure.
2963 
2964 	     So track if INSN has a REG_EH_REGION note.  If so and we
2965 	     split INSN, then trigger a CFG cleanup.  */
2966 	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2967 	  if (INSN_P (insn))
2968 	    {
2969 	      rtx set = single_set (insn);
2970 
2971 	      /* Don't split no-op move insns.  These should silently
2972 		 disappear later in final.  Splitting such insns would
2973 		 break the code that handles LIBCALL blocks.  */
2974 	      if (set && set_noop_p (set))
2975 		{
2976 		  /* Nops get in the way while scheduling, so delete them
2977 		     now if register allocation has already been done.  It
2978 		     is too risky to try to do this before register
2979 		     allocation, and there are unlikely to be very many
2980 		     nops then anyways.  */
2981 		  if (reload_completed)
2982 		      delete_insn_and_edges (insn);
2983 		  if (note)
2984 		    need_cfg_cleanup = true;
2985 		}
2986 	      else
2987 		{
2988 		  if (split_insn (insn))
2989 		    {
2990 		      bitmap_set_bit (blocks, bb->index);
2991 		      changed = true;
2992 		      if (note)
2993 			need_cfg_cleanup = true;
2994 		    }
2995 		}
2996 	    }
2997 	}
2998     }
2999 
3000   default_rtl_profile ();
3001   if (changed)
3002     {
3003       find_many_sub_basic_blocks (blocks);
3004 
3005       /* Splitting could drop a REG_EH_REGION if it potentially
3006 	 trapped in its original form, but does not in its split
3007 	 form.  Consider a FLOAT_TRUNCATE which splits into a memory
3008 	 store/load pair and -fnon-call-exceptions.  */
3009       if (need_cfg_cleanup)
3010 	cleanup_cfg (0);
3011     }
3012 
3013   checking_verify_flow_info ();
3014 }
3015 
3016 /* Same as split_all_insns, but do not expect CFG to be available.
3017    Used by machine dependent reorg passes.  */
3018 
3019 unsigned int
3020 split_all_insns_noflow (void)
3021 {
3022   rtx_insn *next, *insn;
3023 
3024   for (insn = get_insns (); insn; insn = next)
3025     {
3026       next = NEXT_INSN (insn);
3027       if (INSN_P (insn))
3028 	{
3029 	  /* Don't split no-op move insns.  These should silently
3030 	     disappear later in final.  Splitting such insns would
3031 	     break the code that handles LIBCALL blocks.  */
3032 	  rtx set = single_set (insn);
3033 	  if (set && set_noop_p (set))
3034 	    {
3035 	      /* Nops get in the way while scheduling, so delete them
3036 		 now if register allocation has already been done.  It
3037 		 is too risky to try to do this before register
3038 		 allocation, and there are unlikely to be very many
3039 		 nops then anyways.
3040 
3041 		 ??? Should we use delete_insn when the CFG isn't valid?  */
3042 	      if (reload_completed)
3043 		delete_insn_and_edges (insn);
3044 	    }
3045 	  else
3046 	    split_insn (insn);
3047 	}
3048     }
3049   return 0;
3050 }
3051 
3052 struct peep2_insn_data
3053 {
3054   rtx_insn *insn;
3055   regset live_before;
3056 };
3057 
3058 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3059 static int peep2_current;
3060 
3061 static bool peep2_do_rebuild_jump_labels;
3062 static bool peep2_do_cleanup_cfg;
3063 
3064 /* The number of instructions available to match a peep2.  */
3065 int peep2_current_count;
3066 
3067 /* A marker indicating the last insn of the block.  The live_before regset
3068    for this element is correct, indicating DF_LIVE_OUT for the block.  */
3069 #define PEEP2_EOB invalid_insn_rtx
3070 
3071 /* Wrap N to fit into the peep2_insn_data buffer.  */
3072 
3073 static int
3074 peep2_buf_position (int n)
3075 {
3076   if (n >= MAX_INSNS_PER_PEEP2 + 1)
3077     n -= MAX_INSNS_PER_PEEP2 + 1;
3078   return n;
3079 }
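
/* For example, with MAX_INSNS_PER_PEEP2 == 5 the buffer has 6 slots
   and peep2_buf_position (5 + 3) wraps to 2.  The single subtraction
   relies on N being below 2 * (MAX_INSNS_PER_PEEP2 + 1), which holds
   for the callers here since both addends are at most
   MAX_INSNS_PER_PEEP2.  */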
3080 
3081 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3082    does not exist.  Used by the recognizer to find the next insn to match
3083    in a multi-insn pattern.  */
3084 
3085 rtx_insn *
3086 peep2_next_insn (int n)
3087 {
3088   gcc_assert (n <= peep2_current_count);
3089 
3090   n = peep2_buf_position (peep2_current + n);
3091 
3092   return peep2_insn_data[n].insn;
3093 }
3094 
3095 /* Return true if REGNO is dead before the Nth non-note insn
3096    after `current'.  */
3097 
3098 int
3099 peep2_regno_dead_p (int ofs, int regno)
3100 {
3101   gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3102 
3103   ofs = peep2_buf_position (peep2_current + ofs);
3104 
3105   gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3106 
3107   return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3108 }
3109 
3110 /* Similarly for a REG.  */
3111 
3112 int
3113 peep2_reg_dead_p (int ofs, rtx reg)
3114 {
3115   gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3116 
3117   ofs = peep2_buf_position (peep2_current + ofs);
3118 
3119   gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3120 
3121   unsigned int end_regno = END_REGNO (reg);
3122   for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3123     if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3124       return 0;
3125   return 1;
3126 }
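
/* A sketch of typical use from a machine description (hypothetical
   pattern): a define_peephole2 condition such as

     "peep2_reg_dead_p (2, operands[0])"

   accepts the match only if operands[0] is dead before the insn at
   offset 2 in the buffer, so the replacement sequence is free to
   clobber it.  */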
3127 
3128 /* Regno offset to be used in the register search.  */
3129 static int search_ofs;
3130 
3131 /* Try to find a hard register of mode MODE, matching the register class in
3132    CLASS_STR, which is available at the beginning of the peep2 insn at
3133    buffer offset FROM and is not set or clobbered by any insn from that
3134    point up to (but not including) the insn at buffer offset TO.
3135 
3136    Registers that already have bits set in REG_SET will not be considered.
3137 
3138    If an appropriate register is available, it will be returned and the
3139    corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3140    returned.  */
3141 
3142 rtx
3143 peep2_find_free_register (int from, int to, const char *class_str,
3144 			  machine_mode mode, HARD_REG_SET *reg_set)
3145 {
3146   enum reg_class cl;
3147   HARD_REG_SET live;
3148   df_ref def;
3149   int i;
3150 
3151   gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3152   gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3153 
3154   from = peep2_buf_position (peep2_current + from);
3155   to = peep2_buf_position (peep2_current + to);
3156 
3157   gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3158   REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3159 
3160   while (from != to)
3161     {
3162       gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3163 
3164       /* Don't use registers set or clobbered by the insn.  */
3165       FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3166 	SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3167 
3168       from = peep2_buf_position (from + 1);
3169     }
3170 
3171   cl = reg_class_for_constraint (lookup_constraint (class_str));
3172 
3173   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3174     {
3175       int raw_regno, regno, success, j;
3176 
3177       /* Distribute the free registers as much as possible.  */
3178       raw_regno = search_ofs + i;
3179       if (raw_regno >= FIRST_PSEUDO_REGISTER)
3180 	raw_regno -= FIRST_PSEUDO_REGISTER;
3181 #ifdef REG_ALLOC_ORDER
3182       regno = reg_alloc_order[raw_regno];
3183 #else
3184       regno = raw_regno;
3185 #endif
3186 
3187       /* Can it support the mode we need?  */
3188       if (!targetm.hard_regno_mode_ok (regno, mode))
3189 	continue;
3190 
3191       success = 1;
3192       for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
3193 	{
3194 	  /* Don't allocate fixed registers.  */
3195 	  if (fixed_regs[regno + j])
3196 	    {
3197 	      success = 0;
3198 	      break;
3199 	    }
3200 	  /* Don't allocate global registers.  */
3201 	  if (global_regs[regno + j])
3202 	    {
3203 	      success = 0;
3204 	      break;
3205 	    }
3206 	  /* Make sure the register is of the right class.  */
3207 	  if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3208 	    {
3209 	      success = 0;
3210 	      break;
3211 	    }
3212 	  /* And that we don't create an extra save/restore.  */
3213 	  if (! crtl->abi->clobbers_full_reg_p (regno + j)
3214 	      && ! df_regs_ever_live_p (regno + j))
3215 	    {
3216 	      success = 0;
3217 	      break;
3218 	    }
3219 
3220 	  if (! targetm.hard_regno_scratch_ok (regno + j))
3221 	    {
3222 	      success = 0;
3223 	      break;
3224 	    }
3225 
3226 	  /* And we don't clobber traceback for noreturn functions.  */
3227 	  if ((regno + j == FRAME_POINTER_REGNUM
3228 	       || regno + j == HARD_FRAME_POINTER_REGNUM)
3229 	      && (! reload_completed || frame_pointer_needed))
3230 	    {
3231 	      success = 0;
3232 	      break;
3233 	    }
3234 
3235 	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3236 	      || TEST_HARD_REG_BIT (live, regno + j))
3237 	    {
3238 	      success = 0;
3239 	      break;
3240 	    }
3241 	}
3242 
3243       if (success)
3244 	{
3245 	  add_to_hard_reg_set (reg_set, mode, regno);
3246 
3247 	  /* Start the next search with the next register.  */
3248 	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3249 	    raw_regno = 0;
3250 	  search_ofs = raw_regno;
3251 
3252 	  return gen_rtx_REG (mode, regno);
3253 	}
3254     }
3255 
3256   search_ofs = 0;
3257   return NULL_RTX;
3258 }
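
/* A usage sketch (hypothetical md context): obtain an SImode scratch
   in class "r" that is free across the first two matched insns:

     HARD_REG_SET used;
     CLEAR_HARD_REG_SET (used);
     rtx scratch = peep2_find_free_register (0, 2, "r", SImode, &used);
     if (scratch == NULL_RTX)
       FAIL;   // md-only macro; no suitable register was found.

   On success the chosen register's bits are added to USED, so repeated
   calls hand out distinct registers.  */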
3259 
3260 /* Forget all currently tracked instructions; remember only the current
3261    LIVE regset.  */
3262 
3263 static void
3264 peep2_reinit_state (regset live)
3265 {
3266   int i;
3267 
3268   /* Indicate that all slots except the last hold invalid data.  */
3269   for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3270     peep2_insn_data[i].insn = NULL;
3271   peep2_current_count = 0;
3272 
3273   /* Indicate that the last slot contains live_after data.  */
3274   peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3275   peep2_current = MAX_INSNS_PER_PEEP2;
3276 
3277   COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3278 }
3279 
3280 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3281    starting at INSN.  Perform the replacement, removing the old insns and
3282    replacing them with ATTEMPT.  Returns the last insn emitted, or NULL
3283    if the replacement is rejected.  */
3284 
3285 static rtx_insn *
3286 peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3287 {
3288   int i;
3289   rtx_insn *last, *before_try, *x;
3290   rtx eh_note, as_note;
3291   rtx_insn *old_insn;
3292   rtx_insn *new_insn;
3293   bool was_call = false;
3294 
3295   /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3296      match more than one insn, or to be split into more than one insn.  */
3297   old_insn = peep2_insn_data[peep2_current].insn;
3298   if (RTX_FRAME_RELATED_P (old_insn))
3299     {
3300       bool any_note = false;
3301       rtx note;
3302 
3303       if (match_len != 0)
3304 	return NULL;
3305 
3306       /* Look for one "active" insn.  I.e. ignore any "clobber" insns that
3307 	 may be in the stream for the purpose of register allocation.  */
3308       if (active_insn_p (attempt))
3309 	new_insn = attempt;
3310       else
3311 	new_insn = next_active_insn (attempt);
3312       if (next_active_insn (new_insn))
3313 	return NULL;
3314 
3315       /* We have a 1-1 replacement.  Copy over any frame-related info.  */
3316       RTX_FRAME_RELATED_P (new_insn) = 1;
3317 
3318       /* Allow the backend to fill in a note during the split.  */
3319       for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3320 	switch (REG_NOTE_KIND (note))
3321 	  {
3322 	  case REG_FRAME_RELATED_EXPR:
3323 	  case REG_CFA_DEF_CFA:
3324 	  case REG_CFA_ADJUST_CFA:
3325 	  case REG_CFA_OFFSET:
3326 	  case REG_CFA_REGISTER:
3327 	  case REG_CFA_EXPRESSION:
3328 	  case REG_CFA_RESTORE:
3329 	  case REG_CFA_SET_VDRAP:
3330 	    any_note = true;
3331 	    break;
3332 	  default:
3333 	    break;
3334 	  }
3335 
3336       /* If the backend didn't supply a note, copy one over.  */
3337       if (!any_note)
3338         for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3339 	  switch (REG_NOTE_KIND (note))
3340 	    {
3341 	    case REG_FRAME_RELATED_EXPR:
3342 	    case REG_CFA_DEF_CFA:
3343 	    case REG_CFA_ADJUST_CFA:
3344 	    case REG_CFA_OFFSET:
3345 	    case REG_CFA_REGISTER:
3346 	    case REG_CFA_EXPRESSION:
3347 	    case REG_CFA_RESTORE:
3348 	    case REG_CFA_SET_VDRAP:
3349 	      add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3350 	      any_note = true;
3351 	      break;
3352 	    default:
3353 	      break;
3354 	    }
3355 
3356       /* If there still isn't a note, make sure the unwind info sees the
3357 	 same expression as before the split.  */
3358       if (!any_note)
3359 	{
3360 	  rtx old_set, new_set;
3361 
3362 	  /* The old insn had better have been simple, or annotated.  */
3363 	  old_set = single_set (old_insn);
3364 	  gcc_assert (old_set != NULL);
3365 
3366 	  new_set = single_set (new_insn);
3367 	  if (!new_set || !rtx_equal_p (new_set, old_set))
3368 	    add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3369 	}
3370 
3371       /* Copy prologue/epilogue status.  This is required in order to keep
3372 	 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state.  */
3373       maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3374     }
3375 
3376   /* If we are splitting a CALL_INSN, look for the CALL_INSN
3377      in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3378      cfg-related call notes.  */
3379   for (i = 0; i <= match_len; ++i)
3380     {
3381       int j;
3382       rtx note;
3383 
3384       j = peep2_buf_position (peep2_current + i);
3385       old_insn = peep2_insn_data[j].insn;
3386       if (!CALL_P (old_insn))
3387 	continue;
3388       was_call = true;
3389 
3390       new_insn = attempt;
3391       while (new_insn != NULL_RTX)
3392 	{
3393 	  if (CALL_P (new_insn))
3394 	    break;
3395 	  new_insn = NEXT_INSN (new_insn);
3396 	}
3397 
3398       gcc_assert (new_insn != NULL_RTX);
3399 
3400       CALL_INSN_FUNCTION_USAGE (new_insn)
3401 	= CALL_INSN_FUNCTION_USAGE (old_insn);
3402       SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3403 
3404       for (note = REG_NOTES (old_insn);
3405 	   note;
3406 	   note = XEXP (note, 1))
3407 	switch (REG_NOTE_KIND (note))
3408 	  {
3409 	  case REG_NORETURN:
3410 	  case REG_SETJMP:
3411 	  case REG_TM:
3412 	  case REG_CALL_NOCF_CHECK:
3413 	    add_reg_note (new_insn, REG_NOTE_KIND (note),
3414 			  XEXP (note, 0));
3415 	    break;
3416 	  default:
3417 	    /* Discard all other reg notes.  */
3418 	    break;
3419 	  }
3420 
3421       /* Croak if there is another call in the sequence.  */
3422       while (++i <= match_len)
3423 	{
3424 	  j = peep2_buf_position (peep2_current + i);
3425 	  old_insn = peep2_insn_data[j].insn;
3426 	  gcc_assert (!CALL_P (old_insn));
3427 	}
3428       break;
3429     }
3430 
3431   /* If we matched any instruction that had a REG_ARGS_SIZE, then
3432      move those notes over to the new sequence.  */
3433   as_note = NULL;
3434   for (i = match_len; i >= 0; --i)
3435     {
3436       int j = peep2_buf_position (peep2_current + i);
3437       old_insn = peep2_insn_data[j].insn;
3438 
3439       as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3440       if (as_note)
3441 	break;
3442     }
3443 
3444   i = peep2_buf_position (peep2_current + match_len);
3445   eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3446 
3447   /* Replace the old sequence with the new.  */
3448   rtx_insn *peepinsn = peep2_insn_data[i].insn;
3449   last = emit_insn_after_setloc (attempt,
3450 				 peep2_insn_data[i].insn,
3451 				 INSN_LOCATION (peepinsn));
3452   if (JUMP_P (peepinsn) && JUMP_P (last))
3453     CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn);
3454   before_try = PREV_INSN (insn);
3455   delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3456 
3457   /* Re-insert the EH_REGION notes.  */
3458   if (eh_note || (was_call && nonlocal_goto_handler_labels))
3459     {
3460       edge eh_edge;
3461       edge_iterator ei;
3462 
3463       FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3464 	if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3465 	  break;
3466 
3467       if (eh_note)
3468 	copy_reg_eh_region_note_backward (eh_note, last, before_try);
3469 
3470       if (eh_edge)
3471 	for (x = last; x != before_try; x = PREV_INSN (x))
3472 	  if (x != BB_END (bb)
3473 	      && (can_throw_internal (x)
3474 		  || can_nonlocal_goto (x)))
3475 	    {
3476 	      edge nfte, nehe;
3477 	      int flags;
3478 
3479 	      nfte = split_block (bb, x);
3480 	      flags = (eh_edge->flags
3481 		       & (EDGE_EH | EDGE_ABNORMAL));
3482 	      if (CALL_P (x))
3483 		flags |= EDGE_ABNORMAL_CALL;
3484 	      nehe = make_edge (nfte->src, eh_edge->dest,
3485 				flags);
3486 
3487 	      nehe->probability = eh_edge->probability;
3488 	      nfte->probability = nehe->probability.invert ();
3489 
3490 	      peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3491 	      bb = nfte->src;
3492 	      eh_edge = nehe;
3493 	    }
3494 
      /* The replacement may have turned a possibly trapping insn into
	 a non-trapping one.  Zap any dummy outgoing edges.  */
      peep2_do_cleanup_cfg |= purge_dead_edges (bb);
    }

  /* Re-insert the ARGS_SIZE notes.  */
  if (as_note)
    fixup_args_size_notes (before_try, last, get_args_size (as_note));

  /* If we generated a jump instruction, it won't have
     JUMP_LABEL set.  Recompute after we're done.  */
  for (x = last; x != before_try; x = PREV_INSN (x))
    if (JUMP_P (x))
      {
	peep2_do_rebuild_jump_labels = true;
	break;
      }

  return last;
}

/* After performing a replacement in basic block BB, fix up the life
   information in our buffer.  LAST is the last of the insns that we
   emitted as a replacement.  PREV is the insn before the start of
   the replacement.  MATCH_LEN is the number of instructions that were
   matched, and which now need to be replaced in the buffer.  */

static void
peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
		   rtx_insn *prev)
{
  int i = peep2_buf_position (peep2_current + match_len + 1);
  rtx_insn *x;
  regset_head live;

  INIT_REG_SET (&live);
  COPY_REG_SET (&live, peep2_insn_data[i].live_before);

  gcc_assert (peep2_current_count >= match_len + 1);
  peep2_current_count -= match_len + 1;

  x = last;
  do
    {
      if (INSN_P (x))
	{
	  df_insn_rescan (x);
	  if (peep2_current_count < MAX_INSNS_PER_PEEP2)
	    {
	      peep2_current_count++;
	      if (--i < 0)
		i = MAX_INSNS_PER_PEEP2;
	      peep2_insn_data[i].insn = x;
	      df_simulate_one_insn_backwards (bb, x, &live);
	      COPY_REG_SET (peep2_insn_data[i].live_before, &live);
	    }
	}
      x = PREV_INSN (x);
    }
  while (x != prev);
  CLEAR_REG_SET (&live);

  peep2_current = i;
}

/* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
   Return true if we added it, false otherwise.  The caller will try to match
   peepholes against the buffer if we return false; otherwise it will try to
   add more instructions to the buffer.  */

static bool
peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
{
  int pos;

  /* Once we have filled the maximum number of insns the buffer can hold,
     allow the caller to match the insns against peepholes.  We wait until
     the buffer is full in case the target has similar peepholes of different
     length; we always want to match the longest if possible.  */
  if (peep2_current_count == MAX_INSNS_PER_PEEP2)
    return false;

  /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
     any other pattern, lest it change the semantics of the frame info.  */
  if (RTX_FRAME_RELATED_P (insn))
    {
      /* Let the buffer drain first.  */
      if (peep2_current_count > 0)
	return false;
      /* Now the insn will be the only thing in the buffer.  */
    }

  pos = peep2_buf_position (peep2_current + peep2_current_count);
  peep2_insn_data[pos].insn = insn;
  COPY_REG_SET (peep2_insn_data[pos].live_before, live);
  peep2_current_count++;

  df_simulate_one_insn_forwards (bb, insn, live);
  return true;
}
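
/* As an illustration of the buffering rule above: if a target defined
   one peephole of length 2 and another of length 3 starting with the
   same insn (hypothetical lengths), a buffer holding only two insns
   could never match the length-3 pattern, so the length-2 pattern
   would always win; waiting until the buffer is full lets the longer
   pattern be considered first.  */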

/* Perform the peephole2 optimization pass.  */

static void
peephole2_optimize (void)
{
  rtx_insn *insn;
  bitmap live;
  int i;
  basic_block bb;

  peep2_do_cleanup_cfg = false;
  peep2_do_rebuild_jump_labels = false;

  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
  search_ofs = 0;
  live = BITMAP_ALLOC (&reg_obstack);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      bool past_end = false;
      int pos;

      rtl_profile_for_bb (bb);

      /* Start up propagation.  */
      bitmap_copy (live, DF_LR_IN (bb));
      df_simulate_initialize_forwards (bb, live);
      peep2_reinit_state (live);

      insn = BB_HEAD (bb);
      for (;;)
	{
	  rtx_insn *attempt, *head;
	  int match_len;

	  if (!past_end && !NONDEBUG_INSN_P (insn))
	    {
	    next_insn:
	      insn = NEXT_INSN (insn);
	      if (insn == NEXT_INSN (BB_END (bb)))
		past_end = true;
	      continue;
	    }
	  if (!past_end && peep2_fill_buffer (bb, insn, live))
	    goto next_insn;

	  /* If we did not fill an empty buffer, it signals the end of the
	     block.  */
	  if (peep2_current_count == 0)
	    break;

	  /* The buffer filled to the current maximum, so try to match.  */

	  pos = peep2_buf_position (peep2_current + peep2_current_count);
	  peep2_insn_data[pos].insn = PEEP2_EOB;
	  COPY_REG_SET (peep2_insn_data[pos].live_before, live);

	  /* Match the peephole.  */
	  head = peep2_insn_data[peep2_current].insn;
	  attempt = peephole2_insns (PATTERN (head), head, &match_len);
	  if (attempt != NULL)
	    {
	      rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
	      if (last)
		{
		  peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
		  continue;
		}
	    }

	  /* No match: advance the buffer by one insn.  */
	  peep2_current = peep2_buf_position (peep2_current + 1);
	  peep2_current_count--;
	}
    }

  default_rtl_profile ();
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    BITMAP_FREE (peep2_insn_data[i].live_before);
  BITMAP_FREE (live);
  if (peep2_do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());
  if (peep2_do_cleanup_cfg)
    cleanup_cfg (CLEANUP_CFG_CHANGED);
}

/* Common predicates for use with define_bypass.  */

/* Helper function for store_data_bypass_p; handle just a single SET
   IN_SET.  */

static bool
store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
{
  if (!MEM_P (SET_DEST (in_set)))
    return false;

  rtx out_set = single_set (out_insn);
  if (out_set)
    return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));

  rtx out_pat = PATTERN (out_insn);
  if (GET_CODE (out_pat) != PARALLEL)
    return false;

  for (int i = 0; i < XVECLEN (out_pat, 0); i++)
    {
      rtx out_exp = XVECEXP (out_pat, 0, i);

      if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
	continue;

      gcc_assert (GET_CODE (out_exp) == SET);

      if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
	return false;
    }

  return true;
}

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data, not the address operand(s) of the store.  IN_INSN and OUT_INSN
   must each be either a single_set or a PARALLEL with SETs inside.  */

int
store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
{
  rtx in_set = single_set (in_insn);
  if (in_set)
    return store_data_bypass_p_1 (out_insn, in_set);

  rtx in_pat = PATTERN (in_insn);
  if (GET_CODE (in_pat) != PARALLEL)
    return false;

  for (int i = 0; i < XVECLEN (in_pat, 0); i++)
    {
      rtx in_exp = XVECEXP (in_pat, 0, i);

      if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
	continue;

      gcc_assert (GET_CODE (in_exp) == SET);

      if (!store_data_bypass_p_1 (out_insn, in_exp))
	return false;
    }

  return true;
}
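
/* store_data_bypass_p is meant to be used as the guard of a
   define_bypass in a target's scheduling description.  A sketch, with
   made-up insn reservation names:

     (define_bypass 1 "my_alu_insn" "my_store_insn" "store_data_bypass_p")

   This grants the reduced latency of 1 only when the store depends on
   the ALU result for the data being stored, not for computing the
   store address, where the value is typically needed earlier.  */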

/* True if the dependency between OUT_INSN and IN_INSN is in the
   IF_THEN_ELSE condition, and not in the THEN or ELSE branch.
   OUT_INSN may be either a single or a multiple set; for the result
   to be meaningful IN_INSN should be a single_set, but for convenience
   of insn categorization it may be any JUMP or CALL insn.  */

int
if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (exp) == SET);

	  /* OUT_SET is null in this branch, so test the destination
	     of each SET in the PARALLEL instead.  */
	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
	    return false;
	}
    }

  return true;
}
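
/* Likewise, if_test_bypass_p can guard a define_bypass so that a
   special latency applies only when the consumer uses the producer's
   result in its IF_THEN_ELSE condition rather than in either arm.
   Again with made-up reservation names:

     (define_bypass 0 "my_compare_insn" "my_cmov_insn" "if_test_bypass_p")  */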

static unsigned int
rest_of_handle_peephole2 (void)
{
  if (HAVE_peephole2)
    peephole2_optimize ();

  return 0;
}

namespace {

const pass_data pass_data_peephole2 =
{
  RTL_PASS, /* type */
  "peephole2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_PEEPHOLE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_peephole2 : public rtl_opt_pass
{
public:
  pass_peephole2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_peephole2, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so we need
     a clone method.  */
  opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
  virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_peephole2 ();
    }

}; // class pass_peephole2

} // anon namespace

rtl_opt_pass *
make_pass_peephole2 (gcc::context *ctxt)
{
  return new pass_peephole2 (ctxt);
}

namespace {

const pass_data pass_data_split_all_insns =
{
  RTL_PASS, /* type */
  "split1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  PROP_rtl_split_insns, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_all_insns : public rtl_opt_pass
{
public:
  pass_split_all_insns (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_all_insns, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so
     we need a clone method.  */
  opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_all_insns

} // anon namespace

rtl_opt_pass *
make_pass_split_all_insns (gcc::context *ctxt)
{
  return new pass_split_all_insns (ctxt);
}

namespace {

const pass_data pass_data_split_after_reload =
{
  RTL_PASS, /* type */
  "split2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_after_reload : public rtl_opt_pass
{
public:
  pass_split_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* If optimizing, then go ahead and split insns now.  */
      return optimize > 0;
    }

  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_after_reload

} // anon namespace

rtl_opt_pass *
make_pass_split_after_reload (gcc::context *ctxt)
{
  return new pass_split_after_reload (ctxt);
}

static bool
enable_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  return optimize > 0 && flag_schedule_insns_after_reload;
#else
  return false;
#endif
}

namespace {

const pass_data pass_data_split_before_sched2 =
{
  RTL_PASS, /* type */
  "split3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_sched2 : public rtl_opt_pass
{
public:
  pass_split_before_sched2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return enable_split_before_sched2 ();
    }

  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_before_sched2

} // anon namespace

rtl_opt_pass *
make_pass_split_before_sched2 (gcc::context *ctxt)
{
  return new pass_split_before_sched2 (ctxt);
}

namespace {

const pass_data pass_data_split_before_regstack =
{
  RTL_PASS, /* type */
  "split4", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_regstack : public rtl_opt_pass
{
public:
  pass_split_before_regstack (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_before_regstack

bool
pass_split_before_regstack::gate (function *)
{
#if HAVE_ATTR_length && defined (STACK_REGS)
  /* If flow2 creates new instructions that need splitting, and
     scheduling after reload is not done, they might not be split
     until final, which does not allow splitting when HAVE_ATTR_length
     is defined.  */
  return !enable_split_before_sched2 ();
#else
  return false;
#endif
}

} // anon namespace

rtl_opt_pass *
make_pass_split_before_regstack (gcc::context *ctxt)
{
  return new pass_split_before_regstack (ctxt);
}

namespace {

const pass_data pass_data_split_for_shorten_branches =
{
  RTL_PASS, /* type */
  "split5", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_for_shorten_branches : public rtl_opt_pass
{
public:
  pass_split_for_shorten_branches (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* The placement of the splitting that we do for shorten_branches
	 depends on whether regstack is used by the target or not.  */
#if HAVE_ATTR_length && !defined (STACK_REGS)
      return true;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      return split_all_insns_noflow ();
    }

}; // class pass_split_for_shorten_branches

} // anon namespace

rtl_opt_pass *
make_pass_split_for_shorten_branches (gcc::context *ctxt)
{
  return new pass_split_for_shorten_branches (ctxt);
}
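
/* Summarizing the split passes above by their gates in this file:
   "split1" has no gate of its own; "split2" runs after reload whenever
   optimizing; "split3" runs before sched2 when scheduling after reload
   is enabled; "split4" runs before reg-stack conversion on STACK_REGS
   targets with HAVE_ATTR_length when "split3" does not run; and
   "split5" runs for shorten_branches, without updating flow
   information, on targets with HAVE_ATTR_length but no STACK_REGS.  */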

/* (Re)initialize the target information after a change in target.  */

void
recog_init ()
{
  /* The information is zero-initialized, so we don't need to do anything
     first time round.  */
  if (!this_target_recog->x_initialized)
    {
      this_target_recog->x_initialized = true;
      return;
    }
  memset (this_target_recog->x_bool_attr_masks, 0,
	  sizeof (this_target_recog->x_bool_attr_masks));
  for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
    if (this_target_recog->x_op_alt[i])
      {
	free (this_target_recog->x_op_alt[i]);
	this_target_recog->x_op_alt[i] = 0;
      }
}