/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "insn-attr.h"
#include "addresses.h"
#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "reload.h"
#include "tree-pass.h"
#include "function-abi.h"

#ifndef STACK_POP_CODE
#if STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx_insn *split_insn (rtx_insn *);

struct target_recog default_target_recog;
#if SWITCHABLE_TARGET
struct target_recog *this_target_recog = &default_target_recog;
#endif

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
				      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}


/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      rtx_insn *insn = make_insn_raw (x);
      extract_insn (insn);
      constrain_operands (1, get_enabled_alternatives (insn));
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (! asm_operand_ok (operands[i], c, constraints))
	return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

struct change_t
{
  rtx object;
  int old_code;
  int old_len;
  bool unshare;
  rtx *loc;
  rtx old;
};

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
static int temporarily_undone_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If NEW_LEN is >= 0, XVECLEN (NEW_RTX, 0)
   will also be changed to NEW_LEN, which is no greater than the current
   XVECLEN.  If OBJECT is zero, no validation is done, the change is
   simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group,
		   bool unshare, int new_len = -1)
{
  gcc_assert (temporarily_undone_changes == 0);
  rtx old = *loc;

  /* Single-element parallels aren't valid and won't match anything.
     Replace them with the single element.  */
  if (new_len == 1 && GET_CODE (new_rtx) == PARALLEL)
    {
      new_rtx = XVECEXP (new_rtx, 0, 0);
      new_len = -1;
    }

  if ((old == new_rtx || rtx_equal_p (old, new_rtx))
      && (new_len < 0 || XVECLEN (new_rtx, 0) == new_len))
    return 1;

  gcc_assert ((in_group != 0 || num_changes == 0)
	      && (new_len < 0 || new_rtx == *loc));

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].old_len = (new_len >= 0 ? XVECLEN (new_rtx, 0) : -1);
  changes[num_changes].unshare = unshare;

  if (new_len >= 0)
    XVECLEN (new_rtx, 0) = new_len;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
   UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
   UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}

/* Change XVECLEN (*LOC, 0) to NEW_LEN.  OBJECT, IN_GROUP and the return
   value are as for validate_change_1.  */

bool
validate_change_xveclen (rtx object, rtx *loc, int new_len, bool in_group)
{
  return validate_change_1 (object, loc, *loc, in_group, false, new_len);
}

/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx_insn *insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
	 Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}


/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, clobbers that have to be added in order to
   match the instruction will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed
		      && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
	validate_change (insn, &PATTERN (insn), newpat, 1);
      else
	PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_addr_space_p (GET_MODE (object),
					     XEXP (object, 0),
					     MEM_ADDR_SPACE (object)))
	    break;
	}
      else if (/* changes[i].old might be zero, e.g. when putting a
	       REG_FRAME_RELATED_EXPR into a previously empty list.  */
	       changes[i].old
	       && REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && register_asm_p (changes[i].old))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (DEBUG_INSN_P (object))
	continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		   || GET_CODE (pat) == VAR_LOCATION)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  gcc_assert (temporarily_undone_changes == 0);
  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
	*changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
	{
	  if (object != last_object && last_object && INSN_P (last_object))
	    df_insn_rescan (as_a <rtx_insn *> (last_object));
	  last_object = object;
	}
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
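
/* For example (an illustrative sketch only; INSN, LOC0, LOC1, NEW0 and
   NEW1 are hypothetical names, not code used elsewhere in GCC), a caller
   that must change two locations atomically queues both changes and then
   validates them as one unit:

	validate_change (insn, loc0, new0, 1);
	validate_change (insn, loc1, new1, 1);
	if (!apply_change_group ())
	  ... both changes have been backed out; INSN is unchanged ...

   Passing IN_GROUP == 0 to validate_change would instead validate and
   either keep or revert that single change immediately.  */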


/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  gcc_assert (temporarily_undone_changes == 0);
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      if (changes[i].old_len >= 0)
	XVECLEN (*changes[i].loc, 0) = changes[i].old_len;
      else
	*changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}

/* Swap the status of change NUM from being applied to not being applied,
   or vice versa.  */

static void
swap_change (int num)
{
  if (changes[num].old_len >= 0)
    std::swap (XVECLEN (*changes[num].loc, 0), changes[num].old_len);
  else
    std::swap (*changes[num].loc, changes[num].old);
  if (changes[num].object && !MEM_P (changes[num].object))
    std::swap (INSN_CODE (changes[num].object), changes[num].old_code);
}

/* Temporarily undo all the changes numbered NUM and up, with a view
   to reapplying them later.  The next call to the changes machinery
   must be:

      redo_changes (NUM)

   otherwise things will end up in an invalid state.  */

void
temporarily_undo_changes (int num)
{
  gcc_assert (temporarily_undone_changes == 0 && num <= num_changes);
  for (int i = num_changes - 1; i >= num; i--)
    swap_change (i);
  temporarily_undone_changes = num_changes - num;
}

/* Redo the changes that were temporarily undone by:

      temporarily_undo_changes (NUM).  */

void
redo_changes (int num)
{
  gcc_assert (temporarily_undone_changes == num_changes - num);
  for (int i = num; i < num_changes; ++i)
    swap_change (i);
  temporarily_undone_changes = 0;
}
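
/* A sketch of the intended pairing (hypothetical caller code): a pass
   that wants to examine INSN in its original form while a group of
   changes is pending can write

	temporarily_undo_changes (0);
	... inspect the original INSN ...
	redo_changes (0);

   with no other use of the changes machinery in between.  */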

/* Reduce conditional compilation elsewhere.  */
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
			  machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;
  scalar_int_mode is_mode;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
					       : swap_condition (code),
					       GET_MODE (x), XEXP (x, 1),
					       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
	new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
					    op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
					     XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
						 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }
  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
					op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new_rtx)
	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new_rtx, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
				 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
	validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (x, 2))
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
					MEM_ADDR_SPACE (XEXP (x, 0)))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  int pos = INTVAL (XEXP (x, 2));
	  machine_mode new_mode = is_mode;
	  if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
	    new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
	  else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
	    new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
	  scalar_int_mode wanted_mode = (new_mode == VOIDmode
					 ? word_mode
					 : as_a <scalar_int_mode> (new_mode));

	  /* If we have a narrower mode, we can do something.  */
	  if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
	         must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      gcc_assert (GET_MODE_PRECISION (wanted_mode)
			  == GET_MODE_BITSIZE (wanted_mode));
	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
			bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside an already-replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X)),
     so we must special-case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object, simplify);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
				    simplify);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
				    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove pass is no more, so is this aberration still
     necessary?  */
  /* Allow the substituted expression to have a different mode.  This is
     used by regmove to change the mode of a pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
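
/* For instance (an illustrative sketch; REG_A and REG_B are hypothetical),
   replacing one register by another and keeping INSN only if the result
   is still recognized:

	if (validate_replace_rtx (reg_a, reg_b, insn))
	  ... INSN now uses REG_B and has been re-recognized ...
	else
	  ... all substitutions were backed out; INSN is unchanged ...  */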

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
				      rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
	|| REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx_insn *insn;		/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx_insn *insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
	validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
	validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx s = XVECEXP (pat, 0, i);

	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
	  {
	    newpat = simplify_rtx (SET_SRC (s));
	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
	      validate_change (insn, &SET_SRC (s), newpat, 1);
	    newpat = simplify_rtx (SET_DEST (s));
	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
	      validate_change (insn, &SET_DEST (s), newpat, 1);
	  }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

/* Try to process the address of memory expression MEM.  Return true on
   success; leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_mem_1 (rtx mem)
{
  auto old_num_changes = num_validated_changes ();
  mem_depth += 1;
  bool res = apply_to_rvalue_1 (&XEXP (mem, 0));
  mem_depth -= 1;
  if (!res)
    return false;

  if (old_num_changes != num_validated_changes ()
      && should_check_mems
      && !check_mem (old_num_changes, mem))
    return false;

  return true;
}

/* Try to process the rvalue expression at *LOC.  Return true on success;
   leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_rvalue_1 (rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  machine_mode mode = GET_MODE (x);

  auto old_num_changes = num_validated_changes ();
  if (from && GET_CODE (x) == GET_CODE (from) && rtx_equal_p (x, from))
    {
      /* Don't replace register asms in asm statements; we mustn't
	 change the user's register allocation.  */
      if (REG_P (x)
	  && HARD_REGISTER_P (x)
	  && register_asm_p (x)
	  && asm_noperands (PATTERN (insn)) > 0)
	return false;

      if (should_unshare)
	validate_unshare_change (insn, loc, to, 1);
      else
	validate_change (insn, loc, to, 1);
      if (mem_depth && !REG_P (to) && !CONSTANT_P (to))
	{
	  /* We're substituting into an address, but TO will have the
	     form expected outside an address.  Canonicalize it if
	     necessary.  */
	  insn_propagation subprop (insn);
	  subprop.mem_depth += 1;
	  if (!subprop.apply_to_rvalue (loc))
	    gcc_unreachable ();
	  if (should_unshare
	      && num_validated_changes () != old_num_changes + 1)
	    {
	      /* TO is owned by someone else, so create a copy and
		 return TO to its original form.  */
	      rtx to = copy_rtx (*loc);
	      cancel_changes (old_num_changes);
	      validate_change (insn, loc, to, 1);
	    }
	}
      num_replacements += 1;
      should_unshare = true;
      result_flags |= UNSIMPLIFIED;
      return true;
    }

  /* Recursively apply the substitution and see if we can simplify
     the result.  This specifically shouldn't use simplify_gen_* for
     speculative simplifications, since we want to avoid generating new
     expressions where possible.  */
  auto old_result_flags = result_flags;
  rtx newx = NULL_RTX;
  bool recurse_p = false;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      {
	machine_mode op0_mode = GET_MODE (XEXP (x, 0));
	if (!apply_to_rvalue_1 (&XEXP (x, 0)))
	  return false;
	if (from && old_num_changes == num_validated_changes ())
	  return true;

	newx = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
	break;
      }

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      {
	if (!apply_to_rvalue_1 (&XEXP (x, 0))
	    || !apply_to_rvalue_1 (&XEXP (x, 1)))
	  return false;
	if (from && old_num_changes == num_validated_changes ())
	  return true;

	if (GET_RTX_CLASS (code) == RTX_COMM_ARITH
	    && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
	  newx = simplify_gen_binary (code, mode, XEXP (x, 1), XEXP (x, 0));
	else
	  newx = simplify_binary_operation (code, mode,
					    XEXP (x, 0), XEXP (x, 1));
	break;
      }

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      {
	machine_mode op_mode = (GET_MODE (XEXP (x, 0)) != VOIDmode
				? GET_MODE (XEXP (x, 0))
				: GET_MODE (XEXP (x, 1)));
	if (!apply_to_rvalue_1 (&XEXP (x, 0))
	    || !apply_to_rvalue_1 (&XEXP (x, 1)))
	  return false;
	if (from && old_num_changes == num_validated_changes ())
	  return true;

	newx = simplify_relational_operation (code, mode, op_mode,
					      XEXP (x, 0), XEXP (x, 1));
	break;
      }

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      {
	machine_mode op0_mode = GET_MODE (XEXP (x, 0));
	if (!apply_to_rvalue_1 (&XEXP (x, 0))
	    || !apply_to_rvalue_1 (&XEXP (x, 1))
	    || !apply_to_rvalue_1 (&XEXP (x, 2)))
	  return false;
	if (from && old_num_changes == num_validated_changes ())
	  return true;

	newx = simplify_ternary_operation (code, mode, op0_mode,
					   XEXP (x, 0), XEXP (x, 1),
					   XEXP (x, 2));
	break;
      }

    case RTX_EXTRA:
      if (code == SUBREG)
	{
	  machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
	  if (!apply_to_rvalue_1 (&SUBREG_REG (x)))
	    return false;
	  if (from && old_num_changes == num_validated_changes ())
	    return true;

	  rtx inner = SUBREG_REG (x);
	  newx = simplify_subreg (mode, inner, inner_mode, SUBREG_BYTE (x));
	  /* Reject the same cases that simplify_gen_subreg would.  */
	  if (!newx
	      && (GET_CODE (inner) == SUBREG
		  || GET_CODE (inner) == CONCAT
		  || GET_MODE (inner) == VOIDmode
		  || !validate_subreg (mode, inner_mode,
				       inner, SUBREG_BYTE (x))))
	    {
	      failure_reason = "would create an invalid subreg";
	      return false;
	    }
	  break;
	}
      else
	recurse_p = true;
      break;

    case RTX_OBJ:
      if (code == LO_SUM)
	{
	  if (!apply_to_rvalue_1 (&XEXP (x, 0))
	      || !apply_to_rvalue_1 (&XEXP (x, 1)))
	    return false;
	  if (from && old_num_changes == num_validated_changes ())
	    return true;

	  /* (lo_sum (high x) y) -> y where x and y have the same base.  */
	  rtx op0 = XEXP (x, 0);
	  rtx op1 = XEXP (x, 1);
	  if (GET_CODE (op0) == HIGH)
	    {
	      rtx base0, base1, offset0, offset1;
	      split_const (XEXP (op0, 0), &base0, &offset0);
	      split_const (op1, &base1, &offset1);
	      if (rtx_equal_p (base0, base1))
		newx = op1;
	    }
	}
      else if (code == REG)
	{
	  if (from && REG_P (from) && reg_overlap_mentioned_p (x, from))
	    {
	      failure_reason = "inexact register overlap";
	      return false;
	    }
	}
      else if (code == MEM)
	return apply_to_mem_1 (x);
      else
	recurse_p = true;
      break;

    case RTX_CONST_OBJ:
      break;

    case RTX_AUTOINC:
      if (from && reg_overlap_mentioned_p (XEXP (x, 0), from))
	{
	  failure_reason = "is subject to autoinc";
	  return false;
	}
      recurse_p = true;
      break;

    case RTX_MATCH:
    case RTX_INSN:
      gcc_unreachable ();
    }

  if (recurse_p)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (int i = 0; fmt[i]; i++)
	switch (fmt[i])
	  {
	  case 'E':
	    for (int j = 0; j < XVECLEN (x, i); j++)
	      if (!apply_to_rvalue_1 (&XVECEXP (x, i, j)))
		return false;
	    break;

	  case 'e':
	    if (XEXP (x, i) && !apply_to_rvalue_1 (&XEXP (x, i)))
	      return false;
	    break;
	  }
    }
  else if (newx && !rtx_equal_p (x, newx))
    {
      /* All substitutions made by OLD_NUM_CHANGES onwards have been
	 simplified.  */
      result_flags = ((result_flags & ~UNSIMPLIFIED)
		      | (old_result_flags & UNSIMPLIFIED));

      if (should_note_simplifications)
	note_simplification (old_num_changes, old_result_flags, x, newx);

      /* There's no longer any point unsharing the substitutions made
	 for subexpressions, since we'll just copy this one instead.  */
      bool unshare = false;
      for (int i = old_num_changes; i < num_changes; ++i)
	{
	  unshare |= changes[i].unshare;
	  changes[i].unshare = false;
	}
      if (unshare)
	validate_unshare_change (insn, loc, newx, 1);
      else
	validate_change (insn, loc, newx, 1);
    }

  return true;
}

/* Try to process the lvalue expression at *LOC.  Return true on success;
   leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_lvalue_1 (rtx dest)
{
  rtx old_dest = dest;
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    {
      if (GET_CODE (dest) == ZERO_EXTRACT
	  && (!apply_to_rvalue_1 (&XEXP (dest, 1))
	      || !apply_to_rvalue_1 (&XEXP (dest, 2))))
	return false;
      dest = XEXP (dest, 0);
    }

  if (MEM_P (dest))
    return apply_to_mem_1 (dest);

  /* Check whether the substitution is safe in the presence of this lvalue.  */
  if (!from
      || dest == old_dest
      || !REG_P (dest)
      || !reg_overlap_mentioned_p (dest, from))
    return true;

  if (SUBREG_P (old_dest)
      && SUBREG_REG (old_dest) == dest
      && !read_modify_subreg_p (old_dest))
    return true;

  failure_reason = "is part of a read-write destination";
  return false;
}

/* Try to process the instruction pattern at *LOC.  Return true on success;
   leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_pattern_1 (rtx *loc)
{
  rtx body = *loc;
  switch (GET_CODE (body))
    {
    case COND_EXEC:
      return (apply_to_rvalue_1 (&COND_EXEC_TEST (body))
	      && apply_to_pattern_1 (&COND_EXEC_CODE (body)));

    case PARALLEL:
      {
	int last = XVECLEN (body, 0) - 1;
	for (int i = 0; i < last; ++i)
	  if (!apply_to_pattern_1 (&XVECEXP (body, 0, i)))
	    return false;
	return apply_to_pattern_1 (&XVECEXP (body, 0, last));
      }

    case ASM_OPERANDS:
      for (int i = 0, len = ASM_OPERANDS_INPUT_LENGTH (body); i < len; ++i)
	if (!apply_to_rvalue_1 (&ASM_OPERANDS_INPUT (body, i)))
	  return false;
      return true;

    case CLOBBER:
      return apply_to_lvalue_1 (XEXP (body, 0));

    case SET:
      return (apply_to_lvalue_1 (SET_DEST (body))
	      && apply_to_rvalue_1 (&SET_SRC (body)));

    default:
      /* All the other possibilities never store and can use a normal
	 rtx walk.  This includes:

	 - USE
	 - TRAP_IF
	 - PREFETCH
	 - UNSPEC
	 - UNSPEC_VOLATILE.  */
      return apply_to_rvalue_1 (loc);
    }
}

/* Apply this insn_propagation object's simplification or substitution
   to the instruction pattern at LOC.  */

bool
insn_propagation::apply_to_pattern (rtx *loc)
{
  unsigned int num_changes = num_validated_changes ();
  bool res = apply_to_pattern_1 (loc);
  if (!res)
    cancel_changes (num_changes);
  return res;
}

/* Apply this insn_propagation object's simplification or substitution
   to the rvalue expression at LOC.  */

bool
insn_propagation::apply_to_rvalue (rtx *loc)
{
  unsigned int num_changes = num_validated_changes ();
  bool res = apply_to_rvalue_1 (loc);
  if (!res)
    cancel_changes (num_changes);
  return res;
}
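
/* A typical use of this class (a sketch only, assuming the
   (insn, from, to) constructor declared in recog.h): substitute DEF_SRC
   for DEF_REG throughout USE_INSN and commit the pending changes only if
   the result is still a valid instruction.  DEF_REG, DEF_SRC and
   USE_INSN are hypothetical names.

	insn_propagation prop (use_insn, def_reg, def_src);
	if (prop.apply_to_pattern (&PATTERN (use_insn))
	    && (prop.num_replacements == 0 || valid_insn_p (use_insn)))
	  confirm_change_group ();
	else
	  cancel_changes (0);

   On failure apply_to_pattern has already cancelled its own changes and
   prop.failure_reason describes what went wrong.  */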

/* Check whether INSN matches a specific alternative of an .md pattern.  */

bool
valid_insn_p (rtx_insn *insn)
{
  recog_memoized (insn);
  if (INSN_CODE (insn) < 0)
    return false;
  extract_insn (insn);
  /* We don't know whether the insn will be in code that is optimized
     for size or speed, so consider all enabled alternatives.  */
  if (!constrain_operands (1, get_enabled_alternatives (insn)))
    return false;
  return true;
}

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && targetm.legitimate_constant_p (mode == VOIDmode
					      ? GET_MODE (op)
					      : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && paradoxical_subreg_p (op))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed
	  && maybe_ne (SUBREG_BYTE (op), 0)
	  && MEM_P (sub))
	return 0;

      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
	  /* LRA can generate some invalid SUBREGS just for matched
	     operand reload presentation.  LRA needs to treat them as
	     valid.  */
	  && ! LRA_SUBREG_P (op))
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  /* LRA can use subreg to store a floating point value in an
	     integer mode.  Although the floating point and the
	     integer modes need the same number of hard registers, the
	     size of floating point mode can be less than the integer
	     mode.  */
	  && ! lra_in_progress
	  && paradoxical_subreg_p (op))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
	 generate move insns with invalid addresses, which are made valid
	 and efficiently calculated by LRA through further numerous
	 transformations.  */
      if (lra_in_progress
	  || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
	return 1;
    }

  return 0;
}
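
/* In a machine description this predicate typically appears as, e.g.
   (an illustrative fragment, not taken from any particular target):

	(match_operand:SI 1 "general_operand" "g")

   which accepts a register, a valid memory reference, or a legitimate
   constant of SImode.  */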
1517 
1518 /* Return 1 if OP is a valid memory address for a memory reference
1519    of mode MODE.
1520 
1521    The main use of this function is as a predicate in match_operand
1522    expressions in the machine description.  */
1523 
1524 int
address_operand(rtx op,machine_mode mode)1525 address_operand (rtx op, machine_mode mode)
1526 {
1527   /* Wrong mode for an address expr.  */
1528   if (GET_MODE (op) != VOIDmode
1529       && ! SCALAR_INT_MODE_P (GET_MODE (op)))
1530     return false;
1531 
1532   return memory_address_p (mode, op);
1533 }
1534 
1535 /* Return 1 if OP is a register reference of mode MODE.
1536    If MODE is VOIDmode, accept a register in any mode.
1537 
1538    The main use of this function is as a predicate in match_operand
1539    expressions in the machine description.  */
1540 
1541 int
register_operand(rtx op,machine_mode mode)1542 register_operand (rtx op, machine_mode mode)
1543 {
1544   if (GET_CODE (op) == SUBREG)
1545     {
1546       rtx sub = SUBREG_REG (op);
1547 
1548       /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1549 	 because it is guaranteed to be reloaded into one.
1550 	 Just make sure the MEM is valid in itself.
1551 	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1552 	 but currently it does result from (SUBREG (REG)...) where the
1553 	 reg went on the stack.)  */
1554       if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
1555 	return 0;
1556     }
1557   else if (!REG_P (op))
1558     return 0;
1559   return general_operand (op, mode);
1560 }
1561 
1562 /* Return 1 for a register in Pmode; ignore the tested mode.  */
1563 
1564 int
pmode_register_operand(rtx op,machine_mode mode ATTRIBUTE_UNUSED)1565 pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
1566 {
1567   return register_operand (op, Pmode);
1568 }
1569 
1570 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1571    or a hard register.  */
1572 
1573 int
scratch_operand(rtx op,machine_mode mode)1574 scratch_operand (rtx op, machine_mode mode)
1575 {
1576   if (GET_MODE (op) != mode && mode != VOIDmode)
1577     return 0;
1578 
1579   return (GET_CODE (op) == SCRATCH
1580 	  || (REG_P (op)
1581 	      && (lra_in_progress
1582 		  || (REGNO (op) < FIRST_PSEUDO_REGISTER
1583 		      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
1584 }
1585 
1586 /* Return 1 if OP is a valid immediate operand for mode MODE.
1587 
1588    The main use of this function is as a predicate in match_operand
1589    expressions in the machine description.  */
1590 
1591 int
immediate_operand(rtx op,machine_mode mode)1592 immediate_operand (rtx op, machine_mode mode)
1593 {
1594   /* Don't accept CONST_INT or anything similar
1595      if the caller wants something floating.  */
1596   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1597       && GET_MODE_CLASS (mode) != MODE_INT
1598       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1599     return 0;
1600 
1601   if (CONST_INT_P (op)
1602       && mode != VOIDmode
1603       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1604     return 0;
1605 
1606   return (CONSTANT_P (op)
1607 	  && (GET_MODE (op) == mode || mode == VOIDmode
1608 	      || GET_MODE (op) == VOIDmode)
1609 	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1610 	  && targetm.legitimate_constant_p (mode == VOIDmode
1611 					    ? GET_MODE (op)
1612 					    : mode, op));
1613 }

/* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */

int
const_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */
int
const_scalar_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return 0;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
      int prec = GET_MODE_PRECISION (int_mode);
      int bitsize = GET_MODE_BITSIZE (int_mode);

      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
	return 0;

      if (prec == bitsize)
	return 1;
      else
	{
	  /* Multiword partial int.  */
	  HOST_WIDE_INT x
	    = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
	  return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
	}
    }
  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  return (GET_CODE (op) == CONST_DOUBLE)
	  && (GET_MODE (op) == mode || mode == VOIDmode);
}
#else
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}
#endif
/* Return 1 if OP is a general operand that is not an immediate
   operand of mode MODE.  */

int
nonimmediate_operand (rtx op, machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);
  return register_operand (op, mode);
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  poly_int64 rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
#endif

  op = XEXP (op, 0);

  if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      poly_int64 offset;
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
	  || (STACK_GROWS_DOWNWARD
	      ? maybe_ne (offset, -rounded_size)
	      : maybe_ne (offset, rounded_size)))
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
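
/* For illustration (assuming STACK_GROWS_DOWNWARD and a pre-decrement
   STACK_PUSH_CODE; the exact codes are target-dependent, and "sp"
   stands for the stack pointer): on such a target a simple word push
   matches as

     (mem:SI (pre_dec:SI (reg:SI sp)))

   while a push whose slot is padded by PUSH_ROUNDING, say a 2-byte
   value in a 4-byte slot, must instead use the PRE_MODIFY form

     (mem:HI (pre_modify:SI (reg:SI sp)
			    (plus:SI (reg:SI sp) (const_int -4))))  */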

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
			     rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */
      poly_int64 offset;
      rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
      return (known_eq (offset + SUBREG_BYTE (op), 0)
	      && general_operand (addr, Pmode));
    }

  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      if (GET_CODE (tmp) == SET)
	{
	  tmp = SET_SRC (tmp);
	  if (GET_CODE (tmp) == ASM_OPERANDS)
	    return tmp;
	}
      break;

    default:
      break;
    }
  return NULL;
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
   return 0.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int i, n_sets = 0;

  if (asm_op == NULL)
    {
      if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
	  && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
	{
	  /* body is [(asm_input ...) (clobber (reg ...))...].  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	  return 0;
	}
      return -1;
    }

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is
	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
	         then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (asm_op))
		return -1;
	    }
	}
      else
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	}
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
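
/* For illustration (hypothetical asm, not from these sources):

     asm ("foo %1,%0" : "=r" (x) : "g" (y) : "cc");

   is represented as a PARALLEL of one SET (whose source is the
   ASM_OPERANDS) followed by a CLOBBER, so asm_noperands returns 2:
   one output plus one input.  Clobbers are not counted as operands.  */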

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Write the location info into LOC.
   Return the assembler-template.
   If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
   return the basic assembly string.

   If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, machine_mode *modes,
		     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

	asmop = XVECEXP (body, 0, 0);
	if (GET_CODE (asmop) == SET)
	  {
	    asmop = SET_SRC (asmop);

	    /* At least one output, plus some CLOBBERs.  The outputs are in
	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
	    for (i = 0; i < nparallel; i++)
	      {
		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
		  break;		/* Past last SET */
		gcc_assert (GET_CODE (XVECEXP (body, 0, i)) == SET);
		if (operands)
		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
		if (operand_locs)
		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
		if (constraints)
		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
		if (modes)
		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	      }
	    nbase = i;
	  }
	else if (GET_CODE (asmop) == ASM_INPUT)
	  {
	    if (loc)
	      *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
	    return XSTR (asmop, 0);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
	constraints[nbase + i] = "";
      if (modes)
	modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
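
/* A minimal usage sketch (hedged; the caller below is hypothetical):

     rtx operands[MAX_RECOG_OPERANDS];
     const char *constraints[MAX_RECOG_OPERANDS];
     machine_mode modes[MAX_RECOG_OPERANDS];
     location_t loc;
     const char *templ
       = decode_asm_operands (body, operands, NULL, constraints,
			      modes, &loc);

   On return the arrays hold the outputs first, then the inputs, then
   the labels -- the same order in which asm_noperands counts them.  */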

/* Parse inline assembly string STRING and determine which operands are
   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
   to true if operand I is referenced.

   This is intended to distinguish barrier-like asms such as:

      asm ("" : "=m" (...));

   from real references such as:

      asm ("sw\t$0, %0" : "=m" (...));  */

void
get_referenced_operands (const char *string, bool *used,
			 unsigned int noperands)
{
  memset (used, 0, sizeof (bool) * noperands);
  const char *p = string;
  while (*p)
    switch (*p)
      {
      case '%':
	p += 1;
	/* A letter followed by a digit indicates an operand number.  */
	if (ISALPHA (p[0]) && ISDIGIT (p[1]))
	  p += 1;
	if (ISDIGIT (*p))
	  {
	    char *endptr;
	    unsigned long opnum = strtoul (p, &endptr, 10);
	    if (endptr != p && opnum < noperands)
	      used[opnum] = true;
	    p = endptr;
	  }
	else
	  p += 1;
	break;

      default:
	p++;
	break;
      }
}
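
/* For illustration (continuing the examples in the comment above):
   with STRING "sw\t$0, %0" and NOPERANDS 1, USED[0] becomes true,
   while with STRING "" every element of USED stays false, marking
   the asm as one that references none of its operands.  */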

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
  bool incdec_ok = false;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      enum constraint_num cn;
      char c = *constraint;
      int len;
      switch (c)
	{
	case ',':
	  constraint++;
	  continue;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* If caller provided constraints pointer, look up
	     the matching constraint.  Otherwise, our caller should have
	     given us the proper matching constraint, but we can't
	     actually fail the check if they didn't.  Indicate that
	     results are inconclusive.  */
	  if (constraints)
	    {
	      char *end;
	      unsigned long match;

	      match = strtoul (constraint, &end, 10);
	      if (!result)
		result = asm_operand_ok (op, constraints[match], NULL);
	      constraint = (const char *) end;
	    }
	  else
	    {
	      do
		constraint++;
	      while (ISDIGIT (*constraint));
	      if (! result)
		result = -1;
	    }
	  continue;

	  /* The rest of the compiler assumes that reloading the address
	     of a MEM into a register will make it fit an 'o' constraint.
	     That is, if it sees a MEM operand for an 'o' constraint,
	     it assumes that (mem (base-reg)) will fit.

	     That assumption fails on targets that don't have offsettable
	     addresses at all.  We therefore need to treat 'o' asm
	     constraints as a special case and only accept operands that
	     are already offsettable, thus proving that at least one
	     offsettable address exists.  */
	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    result = 1;
	  break;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    result = 1;
	  break;

	case '<':
	case '>':
	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
	     to exist, excepting those that expand_call created.  Further,
	     on some machines which do not have generalized auto inc/dec,
	     an inc/dec is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */
	  incdec_ok = true;
	  /* FALLTHRU */
	default:
	  cn = lookup_constraint (constraint);
	  rtx mem = NULL;
	  switch (get_constraint_type (cn))
	    {
	    case CT_REGISTER:
	      if (!result
		  && reg_class_for_constraint (cn) != NO_REGS
		  && GET_MODE (op) != BLKmode
		  && register_operand (op, VOIDmode))
		result = 1;
	      break;

	    case CT_CONST_INT:
	      if (!result
		  && CONST_INT_P (op)
		  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
		result = 1;
	      break;

	    case CT_MEMORY:
	    case CT_RELAXED_MEMORY:
	      mem = op;
	      /* Fall through.  */
	    case CT_SPECIAL_MEMORY:
	      /* Every memory operand can be reloaded to fit.  */
	      if (!mem)
		mem = extract_mem_from_operand (op);
	      result = result || memory_operand (mem, VOIDmode);
	      break;

	    case CT_ADDRESS:
	      /* Every address operand can be reloaded to fit.  */
	      result = result || address_operand (op, VOIDmode);
	      break;

	    case CT_FIXED_FORM:
	      result = result || constraint_satisfied_p (op, cn);
	      break;
	    }
	  break;
	}
      len = CONSTRAINT_LEN (c, constraint);
      do
	constraint++;
      while (--len && *constraint && *constraint != ',');
      if (len)
	return 0;
    }

  /* For operands without < or > constraints reject side-effects.  */
  if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
	return 0;
      default:
	break;
      }

  return result;
}

/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
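
/* For illustration: if *P is (plus:SI (reg:SI 100) (const_int 8)),
   the recursion bottoms out at the second summand and the result is
   &XEXP (*P, 1); if *P is just (reg:SI 100), the result is null.  */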

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
				  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
	     : memory_address_addr_space_p);
  poly_int64 mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return 0;

  machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (known_eq (mode_sz, 0))
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
			plus_constant (address_mode, XEXP (y, 1),
				       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
	   && GET_CODE (y) == ZERO_EXTEND
	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
			     plus_constant (pointer_mode, XEXP (y, 0),
					    mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}
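
/* For illustration (assuming SImode on a byte-addressed target and the
   generic address space): for Y == (plus (reg) (const_int 100)) the
   function asks whether (plus (reg) (const_int 103)) is also a valid
   QImode address -- i.e. whether the largest in-object offset,
   GET_MODE_SIZE (SImode) - 1 == 3, can be added without invalidating
   the address.  */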

/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   ADDRSPACE is the address space associated with the address.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

bool
mode_dependent_address_p (rtx addr, addr_space_t addrspace)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return true;

  return targetm.mode_dependent_address_p (addr, addrspace);
}
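
/* For illustration: (post_inc:SI (reg:SI 100)) is always treated as
   mode-dependent here, since the implied increment equals the size of
   the mode being accessed; whether something like a reg+offset
   address is mode-dependent is left to the target hook.  */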

/* Return true if boolean attribute ATTR is supported.  */

static bool
have_bool_attr (bool_attr attr)
{
  switch (attr)
    {
    case BA_ENABLED:
      return HAVE_ATTR_enabled;
    case BA_PREFERRED_FOR_SIZE:
      return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
    case BA_PREFERRED_FOR_SPEED:
      return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
    }
  gcc_unreachable ();
}

/* Return the value of ATTR for instruction INSN.  */

static bool
get_bool_attr (rtx_insn *insn, bool_attr attr)
{
  switch (attr)
    {
    case BA_ENABLED:
      return get_attr_enabled (insn);
    case BA_PREFERRED_FOR_SIZE:
      return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
    case BA_PREFERRED_FOR_SPEED:
      return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
    }
  gcc_unreachable ();
}

/* Like get_bool_attr_mask, but don't use the cache.  */

static alternative_mask
get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
{
  /* Temporarily install enough information for get_attr_<foo> to assume
     that the insn operands are already cached.  As above, the attribute
     mustn't depend on the values of operands, so we don't provide their
     real values here.  */
  rtx_insn *old_insn = recog_data.insn;
  int old_alternative = which_alternative;

  recog_data.insn = insn;
  alternative_mask mask = ALL_ALTERNATIVES;
  int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
  for (int i = 0; i < n_alternatives; i++)
    {
      which_alternative = i;
      if (!get_bool_attr (insn, attr))
	mask &= ~ALTERNATIVE_BIT (i);
    }

  recog_data.insn = old_insn;
  which_alternative = old_alternative;
  return mask;
}

/* Return the mask of operand alternatives that are allowed for INSN
   by boolean attribute ATTR.  This mask depends only on INSN and on
   the current target; it does not depend on things like the values of
   operands.  */

static alternative_mask
get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
{
  /* Quick exit for asms and for targets that don't use these attributes.  */
  int code = INSN_CODE (insn);
  if (code < 0 || !have_bool_attr (attr))
    return ALL_ALTERNATIVES;

  /* Calling get_attr_<foo> can be expensive, so cache the mask
     for speed.  */
  if (!this_target_recog->x_bool_attr_masks[code][attr])
    this_target_recog->x_bool_attr_masks[code][attr]
      = get_bool_attr_mask_uncached (insn, attr);
  return this_target_recog->x_bool_attr_masks[code][attr];
}

/* Return the set of alternatives of INSN that are allowed by the current
   target.  */

alternative_mask
get_enabled_alternatives (rtx_insn *insn)
{
  return get_bool_attr_mask (insn, BA_ENABLED);
}

/* Return the set of alternatives of INSN that are allowed by the current
   target and are preferred for the current size/speed optimization
   choice.  */

alternative_mask
get_preferred_alternatives (rtx_insn *insn)
{
  if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
  else
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
}

/* Return the set of alternatives of INSN that are allowed by the current
   target and are preferred for the size/speed optimization choice
   associated with BB.  Passing a separate BB is useful if INSN has not
   been emitted yet or if we are considering moving it to a different
   block.  */

alternative_mask
get_preferred_alternatives (rtx_insn *insn, basic_block bb)
{
  if (optimize_bb_for_speed_p (bb))
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
  else
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
}

/* Assert that the cached boolean attributes for INSN are still accurate.
   The backend is required to define these attributes in a way that only
   depends on the current target (rather than operands, compiler phase,
   etc.).  */

bool
check_bool_attrs (rtx_insn *insn)
{
  int code = INSN_CODE (insn);
  if (code >= 0)
    for (int i = 0; i <= BA_LAST; ++i)
      {
	enum bool_attr attr = (enum bool_attr) i;
	if (this_target_recog->x_bool_attr_masks[code][attr])
	  gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
		      == get_bool_attr_mask_uncached (insn, attr));
      }
  return true;
}

/* Like extract_insn, but save the insn extracted and don't extract again
   when called again for the same insn, expecting that recog_data still
   contains the valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn again and again.  */
void
extract_insn_cached (rtx_insn *insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do uncached extract_insn, constrain_operands and complain about failures.
   This should be used when extracting a pre-existing constrained instruction
   if the caller wants to know which alternative was chosen.  */
void
extract_constrain_insn (rtx_insn *insn)
{
  extract_insn (insn);
  if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
    fatal_insn_not_found (insn);
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx_insn *insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed,
			      get_enabled_alternatives (insn)))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands on INSN and complain about failures.  */
int
constrain_operands_cached (rtx_insn *insn, int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict, get_enabled_alternatives (insn));
  else
    return 1;
}

/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx_insn *insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  recog_data.is_asm = false;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case VAR_LOCATION:
    case DEBUG_MARKER:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
	goto asm_insn;
      else
	goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode, NULL);
	  memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
	  if (noperands > 0)
	    {
	      const char *p = recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  recog_data.is_asm = true;
	  break;
	}
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* VOIDmode match_operands gets mode from their real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
	}
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  recog_data.insn = NULL;
  which_alternative = -1;
}
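
/* A minimal usage sketch (hedged; note_output below is a hypothetical
   helper, not part of these sources):

     extract_insn (insn);
     for (int i = 0; i < recog_data.n_operands; i++)
       if (recog_data.operand_type[i] == OP_OUT)
	 note_output (recog_data.operand[i]);

   i.e. after extract_insn, the operands together with their modes,
   constraints and in/out classification are available via recog_data.  */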

/* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
   operands, N_ALTERNATIVES alternatives and constraint strings
   CONSTRAINTS.  OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
   and CONSTRAINTS has N_OPERANDS entries.  OPLOC should be passed in
   if the insn is an asm statement and preprocessing should take the
   asm operands into account, e.g. to determine whether they could be
   addresses in constraints that require addresses; it should then
   point to an array of pointers to each operand.  */

void
preprocess_constraints (int n_operands, int n_alternatives,
			const char **constraints,
			operand_alternative *op_alt_base,
			rtx **oploc)
{
  for (int i = 0; i < n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = constraints[i];

      op_alt = op_alt_base;

      for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
	{
	  op_alt[i].cl = NO_REGS;
	  op_alt[i].constraint = p;
	  op_alt[i].matches = -1;
	  op_alt[i].matched = -1;

	  if (*p == '\0' || *p == ',')
	    {
	      op_alt[i].anything_ok = 1;
	      continue;
	    }

	  for (;;)
	    {
	      char c = *p;
	      if (c == '#')
		do
		  c = *++p;
		while (c != ',' && c != '\0');
	      if (c == ',' || c == '\0')
		{
		  p++;
		  break;
		}

	      switch (c)
		{
		case '?':
		  op_alt[i].reject += 6;
		  break;
		case '!':
		  op_alt[i].reject += 600;
		  break;
		case '&':
		  op_alt[i].earlyclobber = 1;
		  break;

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
		  {
		    char *end;
		    op_alt[i].matches = strtoul (p, &end, 10);
		    op_alt[op_alt[i].matches].matched = i;
		    p = end;
		  }
		  continue;

		case 'X':
		  op_alt[i].anything_ok = 1;
		  break;

		case 'g':
		  op_alt[i].cl =
		   reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
		  break;

		default:
		  enum constraint_num cn = lookup_constraint (p);
		  enum reg_class cl;
		  switch (get_constraint_type (cn))
		    {
		    case CT_REGISTER:
		      cl = reg_class_for_constraint (cn);
		      if (cl != NO_REGS)
			op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
		      break;

		    case CT_CONST_INT:
		      break;

		    case CT_MEMORY:
		    case CT_SPECIAL_MEMORY:
		    case CT_RELAXED_MEMORY:
		      op_alt[i].memory_ok = 1;
		      break;

		    case CT_ADDRESS:
		      if (oploc && !address_operand (*oploc[i], VOIDmode))
			break;

		      op_alt[i].is_address = 1;
		      op_alt[i].cl
			= (reg_class_subunion
			   [(int) op_alt[i].cl]
			   [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
						  ADDRESS, SCRATCH)]);
		      break;

		    case CT_FIXED_FORM:
		      break;
		    }
		  break;
		}
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
    }
}
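
/* For illustration (hypothetical constraints, not from these sources):
   for a two-operand insn with constraint strings "=r,m" and "r,0",
   the 'r' letters give the operands a register class in alternative 0
   (on typical targets, GENERAL_REGS), the 'm' in alternative 1 sets
   memory_ok on operand 0, and the '0' in alternative 1 records
   operand 1 as matching operand 0.  */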

/* Return an array of operand_alternative structures for
   instruction ICODE.  */

const operand_alternative *
preprocess_insn_constraints (unsigned int icode)
{
  gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
  if (this_target_recog->x_op_alt[icode])
    return this_target_recog->x_op_alt[icode];

  int n_operands = insn_data[icode].n_operands;
  if (n_operands == 0)
    return 0;
  /* Always provide at least one alternative so that which_op_alt ()
     works correctly.  If the instruction has 0 alternatives (i.e. all
     constraint strings are empty) then each operand in this alternative
     will have anything_ok set.  */
  int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
  int n_entries = n_operands * n_alternatives;

  operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
  const char **constraints = XALLOCAVEC (const char *, n_operands);

  for (int i = 0; i < n_operands; ++i)
    constraints[i] = insn_data[icode].operand[i].constraint;
  preprocess_constraints (n_operands, n_alternatives, constraints, op_alt,
			  NULL);

  this_target_recog->x_op_alt[icode] = op_alt;
  return op_alt;
}

/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */

void
preprocess_constraints (rtx_insn *insn)
{
  int icode = INSN_CODE (insn);
  if (icode >= 0)
    recog_op_alt = preprocess_insn_constraints (icode);
  else
    {
      int n_operands = recog_data.n_operands;
      int n_alternatives = recog_data.n_alternatives;
      int n_entries = n_operands * n_alternatives;
      memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
      preprocess_constraints (n_operands, n_alternatives,
			      recog_data.constraints, asm_op_alt,
			      NULL);
      recog_op_alt = asm_op_alt;
    }
}

/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they match any of the alternatives in ALTERNATIVES.

   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code and by
   the routines that determine an insn's attribute.

   If STRICT is a positive value, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */

struct funny_match
{
  int this_op, other;
};

int
constrain_operands (int strict, alternative_mask alternatives)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    constraints[c] = recog_data.constraints[c];

  do
    {
      int seen_earlyclobber_at = -1;
      int opno;
      int lose = 0;
      funny_match_index = 0;

      if (!TEST_BIT (alternatives, which_alternative))
	{
	  int i;

	  for (i = 0; i < recog_data.n_operands; i++)
	    constraints[i] = skip_alternative (constraints[i]);

	  which_alternative++;
	  continue;
	}

      for (opno = 0; opno < recog_data.n_operands; opno++)
	matching_operands[opno] = -1;

      for (opno = 0; opno < recog_data.n_operands; opno++)
	{
	  rtx op = recog_data.operand[opno];
	  machine_mode mode = GET_MODE (op);
	  const char *p = constraints[opno];
	  int offset = 0;
	  int win = 0;
	  int val;
	  int len;

	  earlyclobber[opno] = 0;

	  /* A unary operator may be accepted by the predicate, but it
	     is irrelevant for matching constraints.  */
	  /* For special_memory_operand, there could be a memory operand inside,
	     and it would cause a mismatch for constraint_satisfied_p.  */
	  if (UNARY_P (op) && op == extract_mem_from_operand (op))
	    op = XEXP (op, 0);

	  if (GET_CODE (op) == SUBREG)
	    {
	      if (REG_P (SUBREG_REG (op))
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
					      GET_MODE (SUBREG_REG (op)),
					      SUBREG_BYTE (op),
					      GET_MODE (op));
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = 1;

	  do
	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
	      {
	      case '\0':
		len = 0;
		break;
	      case ',':
		c = '\0';
		break;

	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
		do
		  p++;
		while (*p && *p != ',');
		len = 0;
		break;

	      case '&':
		earlyclobber[opno] = 1;
		if (seen_earlyclobber_at < 0)
		  seen_earlyclobber_at = opno;
		break;

	      case '0':  case '1':  case '2':  case '3':  case '4':
	      case '5':  case '6':  case '7':  case '8':  case '9':
		{
		  /* This operand must be the same as a previous one.
		     This kind of constraint is used for instructions such
		     as add when they take only two operands.

		     Note that the lower-numbered operand is passed first.

		     If we are not testing strictly, assume that this
		     constraint will be satisfied.  */

		  char *end;
		  int match;

		  match = strtoul (p, &end, 10);
		  p = end;

		  if (strict < 0)
		    val = 1;
		  else
		    {
		      rtx op1 = recog_data.operand[match];
		      rtx op2 = recog_data.operand[opno];

		      /* A unary operator may be accepted by the predicate,
			 but it is irrelevant for matching constraints.  */
		      if (UNARY_P (op1))
			op1 = XEXP (op1, 0);
		      if (UNARY_P (op2))
			op2 = XEXP (op2, 0);

		      val = operands_match_p (op1, op2);
		    }

		  matching_operands[opno] = match;
		  matching_operands[match] = opno;

		  if (val != 0)
		    win = 1;

		  /* If output is *x and input is *--x, arrange later
		     to change the output to *--x as well, since the
		     output op is the one that will be printed.  */
		  if (val == 2 && strict > 0)
		    {
		      funny_match[funny_match_index].this_op = opno;
		      funny_match[funny_match_index++].other = match;
		    }
		}
		len = 0;
		break;

	      case 'p':
		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  We also want to make sure we have a
		   valid mode.  */
		if ((GET_MODE (op) == VOIDmode
		     || SCALAR_INT_MODE_P (GET_MODE (op)))
		    && (strict <= 0
			|| (strict_memory_address_p
			     (recog_data.operand_mode[opno], op))))
		  win = 1;
		break;

		/* No need to check general_operand again;
		   it was done in insn-recog.c.  Well, except that reload
		   doesn't check the validity of its replacements, but
		   that should only matter when there's a bug.  */
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		if (REG_P (op))
		  {
		    if (strict < 0
			|| GENERAL_REGS == ALL_REGS
			|| (reload_in_progress
			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		      win = 1;
		  }
		else if (strict < 0 || general_operand (op, mode))
		  win = 1;
		break;

	      default:
		{
		  enum constraint_num cn = lookup_constraint (p);
		  enum reg_class cl = reg_class_for_constraint (cn);
		  if (cl != NO_REGS)
		    {
		      if (strict < 0
			  || (strict == 0
			      && REG_P (op)
			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			  || (strict == 0 && GET_CODE (op) == SCRATCH)
			  || (REG_P (op)
			      && reg_fits_class_p (op, cl, offset, mode)))
		        win = 1;
		    }

		  else if (constraint_satisfied_p (op, cn))
		    win = 1;

		  else if (insn_extra_memory_constraint (cn)
			   /* Every memory operand can be reloaded to fit.  */
			   && ((strict < 0 && MEM_P (op))
			       /* Before reload, accept what reload can turn
				  into a mem.  */
			       || (strict < 0 && CONSTANT_P (op))
			       /* Before reload, accept a pseudo or hard register,
				  since LRA can turn it into a mem.  */
			       || (strict < 0 && targetm.lra_p () && REG_P (op))
			       /* During reload, accept a pseudo  */
			       || (reload_in_progress && REG_P (op)
				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
		    win = 1;
		  else if (insn_extra_address_constraint (cn)
			   /* Every address operand can be reloaded to fit.  */
			   && strict < 0)
		    win = 1;
		  /* Cater to architectures like IA-64 that define extra memory
		     constraints without using define_memory_constraint.  */
		  else if (reload_in_progress
			   && REG_P (op)
			   && REGNO (op) >= FIRST_PSEUDO_REGISTER
			   && reg_renumber[REGNO (op)] < 0
			   && reg_equiv_mem (REGNO (op)) != 0
			   && constraint_satisfied_p
			      (reg_equiv_mem (REGNO (op)), cn))
		    win = 1;
		  break;
		}
	      }
	  while (p += len, c);

	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = 1;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
	  int opno, eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

	  if (strict > 0  && seen_earlyclobber_at >= 0)
	    for (eopno = seen_earlyclobber_at;
		 eopno < recog_data.n_operands;
		 eopno++)
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
		  && REG_P (recog_data.operand[eopno]))
		for (opno = 0; opno < recog_data.n_operands; opno++)
		  if ((MEM_P (recog_data.operand[opno])
		       || recog_data.operand_type[opno] != OP_OUT)
		      && opno != eopno
		      /* Ignore things like match_operator operands.  */
		      && *recog_data.constraints[opno] != 0
		      && ! (matching_operands[opno] == eopno
			    && operands_match_p (recog_data.operand[opno],
						 recog_data.operand[eopno]))
		      && ! safe_from_earlyclobber (recog_data.operand[opno],
						   recog_data.operand[eopno]))
		    lose = 1;

	  if (! lose)
	    {
	      while (--funny_match_index >= 0)
		{
		  recog_data.operand[funny_match[funny_match_index].other]
		    = recog_data.operand[funny_match[funny_match_index].this_op];
		}

	      /* For operands without < or > constraints reject side-effects.  */
	      if (AUTO_INC_DEC && recog_data.is_asm)
		{
		  for (opno = 0; opno < recog_data.n_operands; opno++)
		    if (MEM_P (recog_data.operand[opno]))
		      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
			{
			case PRE_INC:
			case POST_INC:
			case PRE_DEC:
			case POST_DEC:
			case PRE_MODIFY:
			case POST_MODIFY:
			  if (strchr (recog_data.constraints[opno], '<') == NULL
			      && strchr (recog_data.constraints[opno], '>')
				 == NULL)
			    return 0;
			  break;
			default:
			  break;
			}
		}

	      return 1;
	    }
	}

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1, alternatives);
  else
    return 0;
}
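
/* A typical call sequence (a sketch mirroring extract_constrain_insn
   above, not an additional API):

     extract_insn (insn);
     if (!constrain_operands (reload_completed,
			      get_enabled_alternatives (insn)))
       fatal_insn_not_found (insn);

   On success, which_alternative identifies the alternative that
   matched.  */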
3332 
3333 /* Return true iff OPERAND (assumed to be a REG rtx)
3334    is a hard reg in class CLASS when its regno is offset by OFFSET
3335    and changed to mode MODE.
3336    If REG occupies multiple hard regs, all of them must be in CLASS.  */
3337 
3338 bool
3339 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
3340 		  machine_mode mode)
3341 {
3342   unsigned int regno = REGNO (operand);
3343 
3344   if (cl == NO_REGS)
3345     return false;
3346 
3347   /* Regno must not be a pseudo register.  Offset may be negative.  */
3348   return (HARD_REGISTER_NUM_P (regno)
3349 	  && HARD_REGISTER_NUM_P (regno + offset)
3350 	  && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
3351 				regno + offset));
3352 }
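
/* For illustration (a hypothetical use, not taken from this file): to
   ask whether the hard register one past OP can hold an SImode value
   in GENERAL_REGS, a caller could write

     bool ok = (REG_P (op)
		&& reg_fits_class_p (op, GENERAL_REGS, 1, SImode));

   which checks REGNO (op) + 1, and any further hard regs that SImode
   occupies, for membership in GENERAL_REGS.  */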
3353 
3354 /* Split a single instruction.  Helper function for split_all_insns and
3355    split_all_insns_noflow.  Return last insn in the sequence if successful,
3356    or NULL if unsuccessful.  */
3357 
3358 static rtx_insn *
3359 split_insn (rtx_insn *insn)
3360 {
3361   /* Split insns here to get max fine-grain parallelism.  */
3362   rtx_insn *first = PREV_INSN (insn);
3363   rtx_insn *last = try_split (PATTERN (insn), insn, 1);
3364   rtx insn_set, last_set, note;
3365 
3366   if (last == insn)
3367     return NULL;
3368 
3369   /* If the original instruction was a single set that was known to be
3370      equivalent to a constant, see if we can say the same about the last
3371      instruction in the split sequence.  The two instructions must set
3372      the same destination.  */
3373   insn_set = single_set (insn);
3374   if (insn_set)
3375     {
3376       last_set = single_set (last);
3377       if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
3378 	{
3379 	  note = find_reg_equal_equiv_note (insn);
3380 	  if (note && CONSTANT_P (XEXP (note, 0)))
3381 	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
3382 	  else if (CONSTANT_P (SET_SRC (insn_set)))
3383 	    set_unique_reg_note (last, REG_EQUAL,
3384 				 copy_rtx (SET_SRC (insn_set)));
3385 	}
3386     }
3387 
3388   /* try_split returns the NOTE that INSN became.  */
3389   SET_INSN_DELETED (insn);
3390 
3391   /* ??? Coddle to md files that generate subregs in post-reload
3392      splitters instead of computing the proper hard register.  */
3393   if (reload_completed && first != last)
3394     {
3395       first = NEXT_INSN (first);
3396       for (;;)
3397 	{
3398 	  if (INSN_P (first))
3399 	    cleanup_subreg_operands (first);
3400 	  if (first == last)
3401 	    break;
3402 	  first = NEXT_INSN (first);
3403 	}
3404     }
3405 
3406   return last;
3407 }
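
/* To illustrate the REG_EQUAL handling above with made-up RTL (no
   particular target): if

     (set (reg:SI 100) (const_int 70000))

   is split into

     (set (reg:SI 100) (const_int 65536))
     (set (reg:SI 100) (ior:SI (reg:SI 100) (const_int 4464)))

   then the last insn still sets the same destination, so it receives a
   (REG_EQUAL (const_int 70000)) note and later passes can keep treating
   reg 100 as a known constant.  */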
3408 
3409 /* Split all insns in the function.  */
3410 
3411 void
3412 split_all_insns (void)
3413 {
3414   bool changed;
3415   bool need_cfg_cleanup = false;
3416   basic_block bb;
3417 
3418   auto_sbitmap blocks (last_basic_block_for_fn (cfun));
3419   bitmap_clear (blocks);
3420   changed = false;
3421 
3422   FOR_EACH_BB_REVERSE_FN (bb, cfun)
3423     {
3424       rtx_insn *insn, *next;
3425       bool finish = false;
3426 
3427       rtl_profile_for_bb (bb);
3428       for (insn = BB_HEAD (bb); !finish ; insn = next)
3429 	{
3430 	  /* Can't use `next_real_insn' because that might go across
3431 	     CODE_LABELs and short-circuit basic blocks.  */
3432 	  next = NEXT_INSN (insn);
3433 	  finish = (insn == BB_END (bb));
3434 
3435 	  /* If INSN has a REG_EH_REGION note and we split INSN, the
3436 	     resulting split may not have/need REG_EH_REGION notes.
3437 
3438 	     If that happens and INSN was the last reference to the
3439 	     given EH region, then the EH region will become unreachable.
3440 	     We cannot leave the unreachable blocks in the CFG as that
3441 	     will trigger a checking failure.
3442 
3443 	     So track if INSN has a REG_EH_REGION note.  If so and we
3444 	     split INSN, then trigger a CFG cleanup.  */
3445 	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3446 	  if (INSN_P (insn))
3447 	    {
3448 	      rtx set = single_set (insn);
3449 
3450 	      /* Don't split no-op move insns.  These should silently
3451 		 disappear later in final.  Splitting such insns would
3452 		 break the code that handles LIBCALL blocks.  */
3453 	      if (set && set_noop_p (set))
3454 		{
3455 		  /* Nops get in the way while scheduling, so delete them
3456 		     now if register allocation has already been done.  It
3457 		     is too risky to try to do this before register
3458 		     allocation, and there are unlikely to be very many
3459 		     nops then anyway.  */
3460 		  if (reload_completed)
3461 		      delete_insn_and_edges (insn);
3462 		  if (note)
3463 		    need_cfg_cleanup = true;
3464 		}
3465 	      else
3466 		{
3467 		  if (split_insn (insn))
3468 		    {
3469 		      bitmap_set_bit (blocks, bb->index);
3470 		      changed = true;
3471 		      if (note)
3472 			need_cfg_cleanup = true;
3473 		    }
3474 		}
3475 	    }
3476 	}
3477     }
3478 
3479   default_rtl_profile ();
3480   if (changed)
3481     {
3482       find_many_sub_basic_blocks (blocks);
3483 
3484       /* Splitting could drop a REG_EH_REGION note if the insn potentially
3485 	 trapped in its original form but does not in its split form.
3486 	 Consider -fnon-call-exceptions and a FLOAT_TRUNCATE that splits
3487 	 into a memory store/load pair.  */
3488       if (need_cfg_cleanup)
3489 	cleanup_cfg (0);
3490     }
3491 
3492   checking_verify_flow_info ();
3493 }
3494 
3495 /* Same as split_all_insns, but do not expect CFG to be available.
3496    Used by machine dependent reorg passes.  */
3497 
3498 unsigned int
3499 split_all_insns_noflow (void)
3500 {
3501   rtx_insn *next, *insn;
3502 
3503   for (insn = get_insns (); insn; insn = next)
3504     {
3505       next = NEXT_INSN (insn);
3506       if (INSN_P (insn))
3507 	{
3508 	  /* Don't split no-op move insns.  These should silently
3509 	     disappear later in final.  Splitting such insns would
3510 	     break the code that handles LIBCALL blocks.  */
3511 	  rtx set = single_set (insn);
3512 	  if (set && set_noop_p (set))
3513 	    {
3514 	      /* Nops get in the way while scheduling, so delete them
3515 		 now if register allocation has already been done.  It
3516 		 is too risky to try to do this before register
3517 		 allocation, and there are unlikely to be very many
3518 		 nops then anyway.
3519 
3520 		 ??? Should we use delete_insn when the CFG isn't valid?  */
3521 	      if (reload_completed)
3522 		delete_insn_and_edges (insn);
3523 	    }
3524 	  else
3525 	    split_insn (insn);
3526 	}
3527     }
3528   return 0;
3529 }
3530 
3531 struct peep2_insn_data
3532 {
3533   rtx_insn *insn;
3534   regset live_before;
3535 };
3536 
3537 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3538 static int peep2_current;
3539 
3540 static bool peep2_do_rebuild_jump_labels;
3541 static bool peep2_do_cleanup_cfg;
3542 
3543 /* The number of instructions available to match a peep2.  */
3544 int peep2_current_count;
3545 
3546 /* A marker indicating the last insn of the block.  The live_before regset
3547    for this element is correct, indicating DF_LIVE_OUT for the block.  */
3548 #define PEEP2_EOB invalid_insn_rtx
3549 
3550 /* Wrap N to fit into the peep2_insn_data buffer.  */
3551 
3552 static int
3553 peep2_buf_position (int n)
3554 {
3555   if (n >= MAX_INSNS_PER_PEEP2 + 1)
3556     n -= MAX_INSNS_PER_PEEP2 + 1;
3557   return n;
3558 }
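
/* Every caller passes peep2_current plus a small offset, so N is
   always below 2 * (MAX_INSNS_PER_PEEP2 + 1) and the single
   conditional subtraction above is equivalent to

     n % (MAX_INSNS_PER_PEEP2 + 1)

   on the circular buffer.  */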
3559 
3560 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3561    does not exist.  Used by the recognizer to find the next insn to match
3562    in a multi-insn pattern.  */
3563 
3564 rtx_insn *
3565 peep2_next_insn (int n)
3566 {
3567   gcc_assert (n <= peep2_current_count);
3568 
3569   n = peep2_buf_position (peep2_current + n);
3570 
3571   return peep2_insn_data[n].insn;
3572 }
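
/* As an illustration (hypothetical; the generated peephole2 recognizer
   fetches its candidate insns the same way):

     rtx_insn *first = peep2_next_insn (0);
     rtx_insn *second = peep2_next_insn (1);

   FIRST is the insn at the start of the potential match and SECOND the
   buffered insn after it.  */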
3573 
3574 /* Return true if REGNO is dead before the Nth non-note insn
3575    after `current'.  */
3576 
3577 int
3578 peep2_regno_dead_p (int ofs, int regno)
3579 {
3580   gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3581 
3582   ofs = peep2_buf_position (peep2_current + ofs);
3583 
3584   gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3585 
3586   return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3587 }
3588 
3589 /* Similarly for a REG.  */
3590 
3591 int
3592 peep2_reg_dead_p (int ofs, rtx reg)
3593 {
3594   gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3595 
3596   ofs = peep2_buf_position (peep2_current + ofs);
3597 
3598   gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3599 
3600   unsigned int end_regno = END_REGNO (reg);
3601   for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3602     if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3603       return 0;
3604   return 1;
3605 }
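
/* These predicates are designed to be used from define_peephole2
   conditions.  A hypothetical pattern might, for instance, demand that
   operand 1 dies with the match via a condition string such as

     "peep2_reg_dead_p (2, operands[1])"

   meaning that the REG operands[1] is no longer live before the insn
   that follows a two-insn match.  */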
3606 
3607 /* Regno offset to be used in the register search.  */
3608 static int search_ofs;
3609 
3610 /* Try to find a hard register of mode MODE, matching the register class in
3611    CLASS_STR, which is available at the beginning of the insn at peep2
3612    position FROM and remains available until the end of the insn at peep2
3613    position TO.  Both positions are relative to `current', as for
3614    peep2_next_insn.
3615    Registers that already have bits set in REG_SET will not be considered.
3616 
3617    If an appropriate register is available, it will be returned and the
3618    corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3619    returned.  */
3620 
3621 rtx
3622 peep2_find_free_register (int from, int to, const char *class_str,
3623 			  machine_mode mode, HARD_REG_SET *reg_set)
3624 {
3625   enum reg_class cl;
3626   HARD_REG_SET live;
3627   df_ref def;
3628   int i;
3629 
3630   gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3631   gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3632 
3633   from = peep2_buf_position (peep2_current + from);
3634   to = peep2_buf_position (peep2_current + to);
3635 
3636   gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3637   REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3638 
3639   while (from != to)
3640     {
3641       gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3642 
3643       /* Don't use registers set or clobbered by the insn.  */
3644       FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3645 	SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3646 
3647       from = peep2_buf_position (from + 1);
3648     }
3649 
3650   cl = reg_class_for_constraint (lookup_constraint (class_str));
3651 
3652   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3653     {
3654       int raw_regno, regno, success, j;
3655 
3656       /* Distribute the free registers as much as possible.  */
3657       raw_regno = search_ofs + i;
3658       if (raw_regno >= FIRST_PSEUDO_REGISTER)
3659 	raw_regno -= FIRST_PSEUDO_REGISTER;
3660 #ifdef REG_ALLOC_ORDER
3661       regno = reg_alloc_order[raw_regno];
3662 #else
3663       regno = raw_regno;
3664 #endif
3665 
3666       /* Can it support the mode we need?  */
3667       if (!targetm.hard_regno_mode_ok (regno, mode))
3668 	continue;
3669 
3670       success = 1;
3671       for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
3672 	{
3673 	  /* Don't allocate fixed registers.  */
3674 	  if (fixed_regs[regno + j])
3675 	    {
3676 	      success = 0;
3677 	      break;
3678 	    }
3679 	  /* Don't allocate global registers.  */
3680 	  if (global_regs[regno + j])
3681 	    {
3682 	      success = 0;
3683 	      break;
3684 	    }
3685 	  /* Make sure the register is of the right class.  */
3686 	  if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3687 	    {
3688 	      success = 0;
3689 	      break;
3690 	    }
3691 	  /* And that we don't create an extra save/restore.  */
3692 	  if (! crtl->abi->clobbers_full_reg_p (regno + j)
3693 	      && ! df_regs_ever_live_p (regno + j))
3694 	    {
3695 	      success = 0;
3696 	      break;
3697 	    }
3698 
3699 	  if (! targetm.hard_regno_scratch_ok (regno + j))
3700 	    {
3701 	      success = 0;
3702 	      break;
3703 	    }
3704 
3705 	  /* And we don't clobber traceback for noreturn functions.  */
3706 	  if ((regno + j == FRAME_POINTER_REGNUM
3707 	       || regno + j == HARD_FRAME_POINTER_REGNUM)
3708 	      && (! reload_completed || frame_pointer_needed))
3709 	    {
3710 	      success = 0;
3711 	      break;
3712 	    }
3713 
3714 	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3715 	      || TEST_HARD_REG_BIT (live, regno + j))
3716 	    {
3717 	      success = 0;
3718 	      break;
3719 	    }
3720 	}
3721 
3722       if (success)
3723 	{
3724 	  add_to_hard_reg_set (reg_set, mode, regno);
3725 
3726 	  /* Start the next search with the next register.  */
3727 	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3728 	    raw_regno = 0;
3729 	  search_ofs = raw_regno;
3730 
3731 	  return gen_rtx_REG (mode, regno);
3732 	}
3733     }
3734 
3735   search_ofs = 0;
3736   return NULL_RTX;
3737 }
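
/* A hypothetical define_peephole2 could use this from its preparation
   statements to obtain a scratch register, along these lines:

     HARD_REG_SET used;
     CLEAR_HARD_REG_SET (used);
     rtx scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
     if (scratch == NULL_RTX)
       FAIL;

   asking for a general register that is free across both matched
   insns.  (Targets more commonly write match_scratch patterns, which
   come down to a similar call.)  */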
3738 
3739 /* Forget all currently tracked instructions, remembering only the
3740    current LIVE regset.  */
3741 
3742 static void
3743 peep2_reinit_state (regset live)
3744 {
3745   int i;
3746 
3747   /* Indicate that all slots except the last hold invalid data.  */
3748   for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3749     peep2_insn_data[i].insn = NULL;
3750   peep2_current_count = 0;
3751 
3752   /* Indicate that the last slot contains live_after data.  */
3753   peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3754   peep2_current = MAX_INSNS_PER_PEEP2;
3755 
3756   COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3757 }
3758 
3759 /* Copy frame-related info from an insn (OLD_INSN) to the single
3760    insn (NEW_INSN) that was obtained by splitting OLD_INSN.  */
3761 
3762 void
3763 copy_frame_info_to_split_insn (rtx_insn *old_insn, rtx_insn *new_insn)
3764 {
3765   bool any_note = false;
3766   rtx note;
3767 
3768   if (!RTX_FRAME_RELATED_P (old_insn))
3769     return;
3770 
3771   RTX_FRAME_RELATED_P (new_insn) = 1;
3772 
3773   /* Allow the backend to fill in a note during the split.  */
3774   for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3775     switch (REG_NOTE_KIND (note))
3776       {
3777       case REG_FRAME_RELATED_EXPR:
3778       case REG_CFA_DEF_CFA:
3779       case REG_CFA_ADJUST_CFA:
3780       case REG_CFA_OFFSET:
3781       case REG_CFA_REGISTER:
3782       case REG_CFA_EXPRESSION:
3783       case REG_CFA_RESTORE:
3784       case REG_CFA_SET_VDRAP:
3785         any_note = true;
3786         break;
3787       default:
3788         break;
3789       }
3790 
3791   /* If the backend didn't supply a note, copy one over.  */
3792   if (!any_note)
3793     for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3794       switch (REG_NOTE_KIND (note))
3795         {
3796         case REG_FRAME_RELATED_EXPR:
3797         case REG_CFA_DEF_CFA:
3798         case REG_CFA_ADJUST_CFA:
3799         case REG_CFA_OFFSET:
3800         case REG_CFA_REGISTER:
3801         case REG_CFA_EXPRESSION:
3802         case REG_CFA_RESTORE:
3803         case REG_CFA_SET_VDRAP:
3804           add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3805           any_note = true;
3806           break;
3807         default:
3808           break;
3809         }
3810 
3811   /* If there still isn't a note, make sure the unwind info sees the
3812      same expression as before the split.  */
3813   if (!any_note)
3814     {
3815       rtx old_set, new_set;
3816 
3817       /* The old insn had better have been simple, or annotated.  */
3818       old_set = single_set (old_insn);
3819       gcc_assert (old_set != NULL);
3820 
3821       new_set = single_set (new_insn);
3822       if (!new_set || !rtx_equal_p (new_set, old_set))
3823         add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3824     }
3825 
3826   /* Copy prologue/epilogue status.  This is required in order to keep
3827      proper placement of EPILOGUE_BEG and the DW_CFA_remember_state.  */
3828   maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3829 }
3830 
3831 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3832    starting at INSN.  Perform the replacement, removing the old insns and
3833    replacing them with ATTEMPT.  Returns the last insn emitted, or NULL
3834    if the replacement is rejected.  */
3835 
3836 static rtx_insn *
3837 peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3838 {
3839   int i;
3840   rtx_insn *last, *before_try, *x;
3841   rtx eh_note, as_note;
3842   rtx_insn *old_insn;
3843   rtx_insn *new_insn;
3844   bool was_call = false;
3845 
3846   /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3847      match more than one insn, or to be split into more than one insn.  */
3848   old_insn = peep2_insn_data[peep2_current].insn;
3849   if (RTX_FRAME_RELATED_P (old_insn))
3850     {
3851       if (match_len != 0)
3852 	return NULL;
3853 
3854       /* Look for one "active" insn.  I.e. ignore any "clobber" insns that
3855 	 may be in the stream for the purpose of register allocation.  */
3856       if (active_insn_p (attempt))
3857 	new_insn = attempt;
3858       else
3859 	new_insn = next_active_insn (attempt);
3860       if (next_active_insn (new_insn))
3861 	return NULL;
3862 
3863       /* We have a 1-1 replacement.  Copy over any frame-related info.  */
3864       copy_frame_info_to_split_insn (old_insn, new_insn);
3865     }
3866 
3867   /* If we are replacing a CALL_INSN, look for the CALL_INSN
3868      in ATTEMPT and copy our CALL_INSN_FUNCTION_USAGE and other
3869      cfg-related call notes over to it.  */
3870   for (i = 0; i <= match_len; ++i)
3871     {
3872       int j;
3873       rtx note;
3874 
3875       j = peep2_buf_position (peep2_current + i);
3876       old_insn = peep2_insn_data[j].insn;
3877       if (!CALL_P (old_insn))
3878 	continue;
3879       was_call = true;
3880 
3881       new_insn = attempt;
3882       while (new_insn != NULL_RTX)
3883 	{
3884 	  if (CALL_P (new_insn))
3885 	    break;
3886 	  new_insn = NEXT_INSN (new_insn);
3887 	}
3888 
3889       gcc_assert (new_insn != NULL_RTX);
3890 
3891       CALL_INSN_FUNCTION_USAGE (new_insn)
3892 	= CALL_INSN_FUNCTION_USAGE (old_insn);
3893       SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3894 
3895       for (note = REG_NOTES (old_insn);
3896 	   note;
3897 	   note = XEXP (note, 1))
3898 	switch (REG_NOTE_KIND (note))
3899 	  {
3900 	  case REG_NORETURN:
3901 	  case REG_SETJMP:
3902 	  case REG_TM:
3903 	  case REG_CALL_NOCF_CHECK:
3904 	    add_reg_note (new_insn, REG_NOTE_KIND (note),
3905 			  XEXP (note, 0));
3906 	    break;
3907 	  default:
3908 	    /* Discard all other reg notes.  */
3909 	    break;
3910 	  }
3911 
3912       /* Croak if there is another call in the sequence.  */
3913       while (++i <= match_len)
3914 	{
3915 	  j = peep2_buf_position (peep2_current + i);
3916 	  old_insn = peep2_insn_data[j].insn;
3917 	  gcc_assert (!CALL_P (old_insn));
3918 	}
3919       break;
3920     }
3921 
3922   /* If we matched any instruction that had a REG_ARGS_SIZE, then
3923      move those notes over to the new sequence.  */
3924   as_note = NULL;
3925   for (i = match_len; i >= 0; --i)
3926     {
3927       int j = peep2_buf_position (peep2_current + i);
3928       old_insn = peep2_insn_data[j].insn;
3929 
3930       as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3931       if (as_note)
3932 	break;
3933     }
3934 
3935   i = peep2_buf_position (peep2_current + match_len);
3936   eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3937 
3938   /* Replace the old sequence with the new.  */
3939   rtx_insn *peepinsn = peep2_insn_data[i].insn;
3940   last = emit_insn_after_setloc (attempt,
3941 				 peep2_insn_data[i].insn,
3942 				 INSN_LOCATION (peepinsn));
3943   if (JUMP_P (peepinsn) && JUMP_P (last))
3944     CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn);
3945   before_try = PREV_INSN (insn);
3946   delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3947 
3948   /* Re-insert the EH_REGION notes.  */
3949   if (eh_note || (was_call && nonlocal_goto_handler_labels))
3950     {
3951       edge eh_edge;
3952       edge_iterator ei;
3953 
3954       FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3955 	if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3956 	  break;
3957 
3958       if (eh_note)
3959 	copy_reg_eh_region_note_backward (eh_note, last, before_try);
3960 
3961       if (eh_edge)
3962 	for (x = last; x != before_try; x = PREV_INSN (x))
3963 	  if (x != BB_END (bb)
3964 	      && (can_throw_internal (x)
3965 		  || can_nonlocal_goto (x)))
3966 	    {
3967 	      edge nfte, nehe;
3968 	      int flags;
3969 
3970 	      nfte = split_block (bb, x);
3971 	      flags = (eh_edge->flags
3972 		       & (EDGE_EH | EDGE_ABNORMAL));
3973 	      if (CALL_P (x))
3974 		flags |= EDGE_ABNORMAL_CALL;
3975 	      nehe = make_edge (nfte->src, eh_edge->dest,
3976 				flags);
3977 
3978 	      nehe->probability = eh_edge->probability;
3979 	      nfte->probability = nehe->probability.invert ();
3980 
3981 	      peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3982 	      bb = nfte->src;
3983 	      eh_edge = nehe;
3984 	    }
3985 
3986       /* The replacement may have turned a possibly trapping insn into
3987 	 a non-trapping one.  Zap dummy outgoing edges.  */
3988       peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3989     }
3990 
3991   /* Re-insert the ARGS_SIZE notes.  */
3992   if (as_note)
3993     fixup_args_size_notes (before_try, last, get_args_size (as_note));
3994 
3995   /* Scan the new insns for embedded side effects and add appropriate
3996      REG_INC notes.  */
3997   if (AUTO_INC_DEC)
3998     for (x = last; x != before_try; x = PREV_INSN (x))
3999       if (NONDEBUG_INSN_P (x))
4000 	add_auto_inc_notes (x, PATTERN (x));
4001 
4002   /* If we generated a jump instruction, it won't have
4003      JUMP_LABEL set.  Recompute after we're done.  */
4004   for (x = last; x != before_try; x = PREV_INSN (x))
4005     if (JUMP_P (x))
4006       {
4007 	peep2_do_rebuild_jump_labels = true;
4008 	break;
4009       }
4010 
4011   return last;
4012 }
4013 
4014 /* After performing a replacement in basic block BB, fix up the life
4015    information in our buffer.  LAST is the last of the insns that we
4016    emitted as a replacement.  PREV is the insn before the start of
4017    the replacement.  MATCH_LEN is the number of instructions that were
4018    matched, and which now need to be replaced in the buffer.  */
4019 
4020 static void
4021 peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
4022 		   rtx_insn *prev)
4023 {
4024   int i = peep2_buf_position (peep2_current + match_len + 1);
4025   rtx_insn *x;
4026   regset_head live;
4027 
4028   INIT_REG_SET (&live);
4029   COPY_REG_SET (&live, peep2_insn_data[i].live_before);
4030 
4031   gcc_assert (peep2_current_count >= match_len + 1);
4032   peep2_current_count -= match_len + 1;
4033 
4034   x = last;
4035   do
4036     {
4037       if (INSN_P (x))
4038 	{
4039 	  df_insn_rescan (x);
4040 	  if (peep2_current_count < MAX_INSNS_PER_PEEP2)
4041 	    {
4042 	      peep2_current_count++;
4043 	      if (--i < 0)
4044 		i = MAX_INSNS_PER_PEEP2;
4045 	      peep2_insn_data[i].insn = x;
4046 	      df_simulate_one_insn_backwards (bb, x, &live);
4047 	      COPY_REG_SET (peep2_insn_data[i].live_before, &live);
4048 	    }
4049 	}
4050       x = PREV_INSN (x);
4051     }
4052   while (x != prev);
4053   CLEAR_REG_SET (&live);
4054 
4055   peep2_current = i;
4056 }
4057 
4058 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
4059    Return true if we added it, false otherwise.  The caller will try to match
4060    peepholes against the buffer if we return false; otherwise it will try to
4061    add more instructions to the buffer.  */
4062 
4063 static bool
4064 peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
4065 {
4066   int pos;
4067 
4068   /* Once we have filled the maximum number of insns the buffer can hold,
4069      allow the caller to match the insns against peepholes.  We wait until
4070      the buffer is full in case the target has similar peepholes of different
4071      length; we always want to match the longest if possible.  */
4072   if (peep2_current_count == MAX_INSNS_PER_PEEP2)
4073     return false;
4074 
4075   /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
4076      any other pattern, lest it change the semantics of the frame info.  */
4077   if (RTX_FRAME_RELATED_P (insn))
4078     {
4079       /* Let the buffer drain first.  */
4080       if (peep2_current_count > 0)
4081 	return false;
4082       /* Now the insn will be the only thing in the buffer.  */
4083     }
4084 
4085   pos = peep2_buf_position (peep2_current + peep2_current_count);
4086   peep2_insn_data[pos].insn = insn;
4087   COPY_REG_SET (peep2_insn_data[pos].live_before, live);
4088   peep2_current_count++;
4089 
4090   df_simulate_one_insn_forwards (bb, insn, live);
4091   return true;
4092 }
4093 
4094 /* Perform the peephole2 optimization pass.  */
4095 
4096 static void
4097 peephole2_optimize (void)
4098 {
4099   rtx_insn *insn;
4100   bitmap live;
4101   int i;
4102   basic_block bb;
4103 
4104   peep2_do_cleanup_cfg = false;
4105   peep2_do_rebuild_jump_labels = false;
4106 
4107   df_set_flags (DF_LR_RUN_DCE);
4108   df_note_add_problem ();
4109   df_analyze ();
4110 
4111   /* Initialize the regsets we're going to use.  */
4112   for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
4113     peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
4114   search_ofs = 0;
4115   live = BITMAP_ALLOC (&reg_obstack);
4116 
4117   FOR_EACH_BB_REVERSE_FN (bb, cfun)
4118     {
4119       bool past_end = false;
4120       int pos;
4121 
4122       rtl_profile_for_bb (bb);
4123 
4124       /* Start up propagation.  */
4125       bitmap_copy (live, DF_LR_IN (bb));
4126       df_simulate_initialize_forwards (bb, live);
4127       peep2_reinit_state (live);
4128 
4129       insn = BB_HEAD (bb);
4130       for (;;)
4131 	{
4132 	  rtx_insn *attempt, *head;
4133 	  int match_len;
4134 
4135 	  if (!past_end && !NONDEBUG_INSN_P (insn))
4136 	    {
4137 	    next_insn:
4138 	      insn = NEXT_INSN (insn);
4139 	      if (insn == NEXT_INSN (BB_END (bb)))
4140 		past_end = true;
4141 	      continue;
4142 	    }
4143 	  if (!past_end && peep2_fill_buffer (bb, insn, live))
4144 	    goto next_insn;
4145 
4146 	  /* If we did not fill an empty buffer, it signals the end of the
4147 	     block.  */
4148 	  if (peep2_current_count == 0)
4149 	    break;
4150 
4151 	  /* The buffer filled to the current maximum, so try to match.  */
4152 
4153 	  pos = peep2_buf_position (peep2_current + peep2_current_count);
4154 	  peep2_insn_data[pos].insn = PEEP2_EOB;
4155 	  COPY_REG_SET (peep2_insn_data[pos].live_before, live);
4156 
4157 	  /* Match the peephole.  */
4158 	  head = peep2_insn_data[peep2_current].insn;
4159 	  attempt = peephole2_insns (PATTERN (head), head, &match_len);
4160 	  if (attempt != NULL)
4161 	    {
4162 	      rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
4163 	      if (last)
4164 		{
4165 		  peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
4166 		  continue;
4167 		}
4168 	    }
4169 
4170 	  /* No match: advance the buffer by one insn.  */
4171 	  peep2_current = peep2_buf_position (peep2_current + 1);
4172 	  peep2_current_count--;
4173 	}
4174     }
4175 
4176   default_rtl_profile ();
4177   for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
4178     BITMAP_FREE (peep2_insn_data[i].live_before);
4179   BITMAP_FREE (live);
4180   if (peep2_do_rebuild_jump_labels)
4181     rebuild_jump_labels (get_insns ());
4182   if (peep2_do_cleanup_cfg)
4183     cleanup_cfg (CLEANUP_CFG_CHANGED);
4184 }
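
/* For reference, the patterns driven by this pass look like the
   following made-up example (md syntax, no real target):

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand")
	     (match_operand:SI 1 "register_operand"))
	(set (match_dup 0)
	     (plus:SI (match_dup 0) (const_int 1)))]
       "peep2_reg_dead_p (2, operands[1])"
       [(set (match_dup 0) (plus:SI (match_dup 1) (const_int 1)))])

   peephole2_insns, called above, is the recognizer generated from all
   such patterns in the machine description.  */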
4185 
4186 /* Common predicates for use with define_bypass.  */
4187 
4188 /* Helper function for store_data_bypass_p, handle just a single SET
4189    IN_SET.  */
4190 
4191 static bool
4192 store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
4193 {
4194   if (!MEM_P (SET_DEST (in_set)))
4195     return false;
4196 
4197   rtx out_set = single_set (out_insn);
4198   if (out_set)
4199     return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));
4200 
4201   rtx out_pat = PATTERN (out_insn);
4202   if (GET_CODE (out_pat) != PARALLEL)
4203     return false;
4204 
4205   for (int i = 0; i < XVECLEN (out_pat, 0); i++)
4206     {
4207       rtx out_exp = XVECEXP (out_pat, 0, i);
4208 
4209       if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
4210 	continue;
4211 
4212       gcc_assert (GET_CODE (out_exp) == SET);
4213 
4214       if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
4215 	return false;
4216     }
4217 
4218   return true;
4219 }
4220 
4221 /* True if the dependency between OUT_INSN and IN_INSN is on the store
4222    data, not the address operand(s) of the store.  IN_INSN and OUT_INSN
4223    must each be either a single_set or a PARALLEL with SETs inside.  */
4224 
4225 int
4226 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
4227 {
4228   rtx in_set = single_set (in_insn);
4229   if (in_set)
4230     return store_data_bypass_p_1 (out_insn, in_set);
4231 
4232   rtx in_pat = PATTERN (in_insn);
4233   if (GET_CODE (in_pat) != PARALLEL)
4234     return false;
4235 
4236   for (int i = 0; i < XVECLEN (in_pat, 0); i++)
4237     {
4238       rtx in_exp = XVECEXP (in_pat, 0, i);
4239 
4240       if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
4241 	continue;
4242 
4243       gcc_assert (GET_CODE (in_exp) == SET);
4244 
4245       if (!store_data_bypass_p_1 (out_insn, in_exp))
4246 	return false;
4247     }
4248 
4249   return true;
4250 }
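
/* This predicate is meant to be named directly as the guard of a
   define_bypass.  A hypothetical pipeline description might contain

     (define_bypass 1 "alu_insn" "store_insn" "store_data_bypass_p")

   granting a latency of one cycle when an ALU result feeds a store
   only through the stored data and not through the address.  */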
4251 
4252 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
4253    condition, and not the THEN or ELSE branch.  OUT_INSN may be a single
4254    set or a PARALLEL of sets; IN_INSN should be single_set for truth, but
4255    for convenience of insn categorization may be any JUMP or CALL insn.  */
4256 
4257 int
4258 if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
4259 {
4260   rtx out_set, in_set;
4261 
4262   in_set = single_set (in_insn);
4263   if (! in_set)
4264     {
4265       gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
4266       return false;
4267     }
4268 
4269   if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
4270     return false;
4271   in_set = SET_SRC (in_set);
4272 
4273   out_set = single_set (out_insn);
4274   if (out_set)
4275     {
4276       if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
4277 	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
4278 	return false;
4279     }
4280   else
4281     {
4282       rtx out_pat;
4283       int i;
4284 
4285       out_pat = PATTERN (out_insn);
4286       gcc_assert (GET_CODE (out_pat) == PARALLEL);
4287 
4288       for (i = 0; i < XVECLEN (out_pat, 0); i++)
4289 	{
4290 	  rtx exp = XVECEXP (out_pat, 0, i);
4291 
4292 	  if (GET_CODE (exp) == CLOBBER)
4293 	    continue;
4294 
4295 	  gcc_assert (GET_CODE (exp) == SET);
4296 
4297 	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
4298 	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
4299 	    return false;
4300 	}
4301     }
4302 
4303   return true;
4304 }
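
/* Likewise for if_test_bypass_p: a hypothetical description could
   shorten the latency into a conditional move only when the producer
   feeds the test, e.g.

     (define_bypass 1 "alu_insn" "cmov_insn" "if_test_bypass_p")

   while uses in the THEN or ELSE arms keep the full latency.  */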
4305 
4306 static unsigned int
4307 rest_of_handle_peephole2 (void)
4308 {
4309   if (HAVE_peephole2)
4310     peephole2_optimize ();
4311 
4312   return 0;
4313 }
4314 
4315 namespace {
4316 
4317 const pass_data pass_data_peephole2 =
4318 {
4319   RTL_PASS, /* type */
4320   "peephole2", /* name */
4321   OPTGROUP_NONE, /* optinfo_flags */
4322   TV_PEEPHOLE2, /* tv_id */
4323   0, /* properties_required */
4324   0, /* properties_provided */
4325   0, /* properties_destroyed */
4326   0, /* todo_flags_start */
4327   TODO_df_finish, /* todo_flags_finish */
4328 };
4329 
4330 class pass_peephole2 : public rtl_opt_pass
4331 {
4332 public:
4333   pass_peephole2 (gcc::context *ctxt)
4334     : rtl_opt_pass (pass_data_peephole2, ctxt)
4335   {}
4336 
4337   /* opt_pass methods: */
4338   /* The epiphany backend creates a second instance of this pass, so we need
4339      a clone method.  */
4340   opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
4341   virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
4342   virtual unsigned int execute (function *)
4343     {
4344       return rest_of_handle_peephole2 ();
4345     }
4346 
4347 }; // class pass_peephole2
4348 
4349 } // anon namespace
4350 
4351 rtl_opt_pass *
4352 make_pass_peephole2 (gcc::context *ctxt)
4353 {
4354   return new pass_peephole2 (ctxt);
4355 }
4356 
4357 namespace {
4358 
4359 const pass_data pass_data_split_all_insns =
4360 {
4361   RTL_PASS, /* type */
4362   "split1", /* name */
4363   OPTGROUP_NONE, /* optinfo_flags */
4364   TV_NONE, /* tv_id */
4365   0, /* properties_required */
4366   PROP_rtl_split_insns, /* properties_provided */
4367   0, /* properties_destroyed */
4368   0, /* todo_flags_start */
4369   0, /* todo_flags_finish */
4370 };
4371 
4372 class pass_split_all_insns : public rtl_opt_pass
4373 {
4374 public:
4375   pass_split_all_insns (gcc::context *ctxt)
4376     : rtl_opt_pass (pass_data_split_all_insns, ctxt)
4377   {}
4378 
4379   /* opt_pass methods: */
4380   /* The epiphany backend creates a second instance of this pass, so
4381      we need a clone method.  */
4382   opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
4383   virtual unsigned int execute (function *)
4384     {
4385       split_all_insns ();
4386       return 0;
4387     }
4388 
4389 }; // class pass_split_all_insns
4390 
4391 } // anon namespace
4392 
4393 rtl_opt_pass *
4394 make_pass_split_all_insns (gcc::context *ctxt)
4395 {
4396   return new pass_split_all_insns (ctxt);
4397 }
4398 
4399 namespace {
4400 
4401 const pass_data pass_data_split_after_reload =
4402 {
4403   RTL_PASS, /* type */
4404   "split2", /* name */
4405   OPTGROUP_NONE, /* optinfo_flags */
4406   TV_NONE, /* tv_id */
4407   0, /* properties_required */
4408   0, /* properties_provided */
4409   0, /* properties_destroyed */
4410   0, /* todo_flags_start */
4411   0, /* todo_flags_finish */
4412 };
4413 
4414 class pass_split_after_reload : public rtl_opt_pass
4415 {
4416 public:
4417   pass_split_after_reload (gcc::context *ctxt)
4418     : rtl_opt_pass (pass_data_split_after_reload, ctxt)
4419   {}
4420 
4421   /* opt_pass methods: */
4422   virtual bool gate (function *)
4423     {
4424       /* If optimizing, then go ahead and split insns now.  */
4425       return optimize > 0;
4426     }
4427 
4428   virtual unsigned int execute (function *)
4429     {
4430       split_all_insns ();
4431       return 0;
4432     }
4433 
4434 }; // class pass_split_after_reload
4435 
4436 } // anon namespace
4437 
4438 rtl_opt_pass *
4439 make_pass_split_after_reload (gcc::context *ctxt)
4440 {
4441   return new pass_split_after_reload (ctxt);
4442 }
4443 
4444 static bool
4445 enable_split_before_sched2 (void)
4446 {
4447 #ifdef INSN_SCHEDULING
4448   return optimize > 0 && flag_schedule_insns_after_reload;
4449 #else
4450   return false;
4451 #endif
4452 }
4453 
4454 namespace {
4455 
4456 const pass_data pass_data_split_before_sched2 =
4457 {
4458   RTL_PASS, /* type */
4459   "split3", /* name */
4460   OPTGROUP_NONE, /* optinfo_flags */
4461   TV_NONE, /* tv_id */
4462   0, /* properties_required */
4463   0, /* properties_provided */
4464   0, /* properties_destroyed */
4465   0, /* todo_flags_start */
4466   0, /* todo_flags_finish */
4467 };
4468 
4469 class pass_split_before_sched2 : public rtl_opt_pass
4470 {
4471 public:
4472   pass_split_before_sched2 (gcc::context *ctxt)
4473     : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4474   {}
4475 
4476   /* opt_pass methods: */
4477   virtual bool gate (function *)
4478     {
4479       return enable_split_before_sched2 ();
4480     }
4481 
4482   virtual unsigned int execute (function *)
4483     {
4484       split_all_insns ();
4485       return 0;
4486     }
4487 
4488 }; // class pass_split_before_sched2
4489 
4490 } // anon namespace
4491 
4492 rtl_opt_pass *
4493 make_pass_split_before_sched2 (gcc::context *ctxt)
4494 {
4495   return new pass_split_before_sched2 (ctxt);
4496 }
4497 
4498 namespace {
4499 
4500 const pass_data pass_data_split_before_regstack =
4501 {
4502   RTL_PASS, /* type */
4503   "split4", /* name */
4504   OPTGROUP_NONE, /* optinfo_flags */
4505   TV_NONE, /* tv_id */
4506   0, /* properties_required */
4507   0, /* properties_provided */
4508   0, /* properties_destroyed */
4509   0, /* todo_flags_start */
4510   0, /* todo_flags_finish */
4511 };
4512 
4513 class pass_split_before_regstack : public rtl_opt_pass
4514 {
4515 public:
4516   pass_split_before_regstack (gcc::context *ctxt)
4517     : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
4518   {}
4519 
4520   /* opt_pass methods: */
4521   virtual bool gate (function *);
4522   virtual unsigned int execute (function *)
4523     {
4524       split_all_insns ();
4525       return 0;
4526     }
4527 
4528 }; // class pass_split_before_regstack
4529 
4530 bool
4531 pass_split_before_regstack::gate (function *)
4532 {
4533 #if HAVE_ATTR_length && defined (STACK_REGS)
4534   /* If flow2 creates new instructions which need splitting
4535      and scheduling after reload is not done, they might not be
4536      split until final which doesn't allow splitting
4537      if HAVE_ATTR_length.  Selective scheduling can result in
4538      further instructions that need splitting.  */
4539 #ifdef INSN_SCHEDULING
4540   return !enable_split_before_sched2 () || flag_selective_scheduling2;
4541 #else
4542   return !enable_split_before_sched2 ();
4543 #endif
4544 #else
4545   return false;
4546 #endif
4547 }
4548 
4549 } // anon namespace
4550 
4551 rtl_opt_pass *
4552 make_pass_split_before_regstack (gcc::context *ctxt)
4553 {
4554   return new pass_split_before_regstack (ctxt);
4555 }
4556 
4557 namespace {
4558 
4559 const pass_data pass_data_split_for_shorten_branches =
4560 {
4561   RTL_PASS, /* type */
4562   "split5", /* name */
4563   OPTGROUP_NONE, /* optinfo_flags */
4564   TV_NONE, /* tv_id */
4565   0, /* properties_required */
4566   0, /* properties_provided */
4567   0, /* properties_destroyed */
4568   0, /* todo_flags_start */
4569   0, /* todo_flags_finish */
4570 };
4571 
4572 class pass_split_for_shorten_branches : public rtl_opt_pass
4573 {
4574 public:
4575   pass_split_for_shorten_branches (gcc::context *ctxt)
4576     : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4577   {}
4578 
4579   /* opt_pass methods: */
4580   virtual bool gate (function *)
4581     {
4582       /* The placement of the splitting that we do for shorten_branches
4583 	 depends on whether regstack is used by the target or not.  */
4584 #if HAVE_ATTR_length && !defined (STACK_REGS)
4585       return true;
4586 #else
4587       return false;
4588 #endif
4589     }
4590 
4591   virtual unsigned int execute (function *)
4592     {
4593       return split_all_insns_noflow ();
4594     }
4595 
4596 }; // class pass_split_for_shorten_branches
4597 
4598 } // anon namespace
4599 
4600 rtl_opt_pass *
4601 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4602 {
4603   return new pass_split_for_shorten_branches (ctxt);
4604 }
4605 
4606 /* (Re)initialize the target information after a change in target.  */
4607 
4608 void
4609 recog_init ()
4610 {
4611   /* The information is zero-initialized, so we don't need to do anything
4612      first time round.  */
4613   if (!this_target_recog->x_initialized)
4614     {
4615       this_target_recog->x_initialized = true;
4616       return;
4617     }
4618   memset (this_target_recog->x_bool_attr_masks, 0,
4619 	  sizeof (this_target_recog->x_bool_attr_masks));
4620   for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
4621     if (this_target_recog->x_op_alt[i])
4622       {
4623 	free (this_target_recog->x_op_alt[i]);
4624 	this_target_recog->x_op_alt[i] = 0;
4625       }
4626 }
4627