1 /* Post-reload compare elimination.
2    Copyright (C) 2010-2020 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* There is a set of targets whose general-purpose move or addition
21    instructions clobber the flags.  These targets cannot split their
22    CBRANCH/CSTORE etc patterns before reload is complete, lest reload
23    itself insert these instructions in between the flags setter and user.
24    Because these targets cannot split the compare from the use, they
25    cannot make use of the comparison elimination offered by the combine pass.
26 
27    This is a small pass intended to provide comparison elimination similar to
28    what is available via NOTICE_UPDATE_CC for cc0 targets.  This should help
29    encourage cc0 targets to convert to an explicit post-reload representation
30    of the flags.
31 
32    This pass assumes:
33 
34    (0) CBRANCH/CSTORE etc have been split in pass_split_after_reload.
35 
36    (1) All comparison patterns are represented as
37 
38 	[(set (reg:CC) (compare:CC (reg) (reg_or_immediate)))]
39 
40    (2) All insn patterns that modify the flags are represented as
41 
42 	[(set (reg) (operation)
43 	 (clobber (reg:CC))]
44 
45    (3) If an insn of form (2) can usefully set the flags, there is
46        another pattern of the form
47 
48 	[(set (reg:CCM) (compare:CCM (operation) (immediate)))
49 	 (set (reg) (operation)]
50 
51        The mode CCM will be chosen as if by SELECT_CC_MODE.
52 
53    Note that unlike NOTICE_UPDATE_CC, we do not handle memory operands.
54    This could be handled as a future enhancement.
55 */
56 
57 #include "config.h"
58 #include "system.h"
59 #include "coretypes.h"
60 #include "backend.h"
61 #include "target.h"
62 #include "rtl.h"
63 #include "df.h"
64 #include "memmodel.h"
65 #include "tm_p.h"
66 #include "insn-config.h"
67 #include "recog.h"
68 #include "emit-rtl.h"
69 #include "cfgrtl.h"
70 #include "tree-pass.h"
71 #include "domwalk.h"
72 
73 
74 /* These structures describe a comparison and how it is used.  */
75 
76 /* The choice of maximum 3 uses comes from wanting to eliminate the two
77    duplicate compares from a three-way branch on the sign of a value.
78    This is also sufficient to eliminate the duplicate compare against the
79    high-part of a double-word comparison.  */
80 #define MAX_CMP_USE 3
81 
/* One recorded use of the flags register produced by a comparison.  */

struct comparison_use
{
  /* The instruction in which the result of the compare is used.  */
  rtx_insn *insn;
  /* The location of the flags register within the use.  */
  rtx *loc;
  /* The comparison code applied against the flags register.  */
  enum rtx_code code;
};
91 
/* A comparison instruction found in the function, together with the
   context needed to decide whether it can be eliminated.  */

struct comparison
{
  /* The comparison instruction.  */
  rtx_insn *insn;

  /* The insn prior to the comparison insn that clobbers the flags.  */
  rtx_insn *prev_clobber;

  /* The insn prior to the comparison insn that sets in_a REG.
     Only filled in when IN_B is const0_rtx (see before_dom_children),
     which is the shape try_merge_compare handles.  */
  rtx_insn *in_a_setter;

  /* The two values being compared.  These will be either REGs or
     constants.  */
  rtx in_a, in_b;

  /* The REG_EH_REGION of the comparison.  */
  rtx eh_note;

  /* Information about how this comparison is used.  */
  struct comparison_use uses[MAX_CMP_USE];

  /* The original CC_MODE for this comparison.  */
  machine_mode orig_mode;

  /* The number of uses identified for this comparison.  */
  unsigned short n_uses;

  /* True if not all uses of this comparison have been identified.
     This can happen either for overflowing the array above, or if
     the flags register is used in some unusual context.  */
  bool missing_uses;

  /* True if its inputs are still valid at the end of the block.  */
  bool inputs_valid;

  /* Whether IN_A is wrapped in a NOT before being compared.  */
  bool not_in_a;
};
130 
131 static vec<comparison *> all_compares;
132 
133 /* Return whether X is a NOT unary expression.  */
134 
135 static bool
is_not(rtx x)136 is_not (rtx x)
137 {
138   return GET_CODE (x) == NOT;
139 }
140 
141 /* Strip a NOT unary expression around X, if any.  */
142 
143 static rtx
strip_not(rtx x)144 strip_not (rtx x)
145 {
146   if (is_not (x))
147     return XEXP (x, 0);
148 
149   return x;
150 }
151 
152 /* Look for a "conforming" comparison, as defined above.  If valid, return
153    the rtx for the COMPARE itself.  */
154 
155 static rtx
conforming_compare(rtx_insn * insn)156 conforming_compare (rtx_insn *insn)
157 {
158   rtx set, src, dest;
159 
160   set = single_set (insn);
161   if (set == NULL)
162     return NULL;
163 
164   src = SET_SRC (set);
165   if (GET_CODE (src) != COMPARE)
166     return NULL;
167 
168   dest = SET_DEST (set);
169   if (!REG_P (dest) || REGNO (dest) != targetm.flags_regnum)
170     return NULL;
171 
172   if (!REG_P (strip_not (XEXP (src, 0))))
173     return NULL;
174 
175   if (CONSTANT_P (XEXP (src, 1)) || REG_P (XEXP (src, 1)))
176     return src;
177 
178   if (GET_CODE (XEXP (src, 1)) == UNSPEC)
179     {
180       for (int i = 0; i < XVECLEN (XEXP (src, 1), 0); i++)
181 	if (!REG_P (XVECEXP (XEXP (src, 1), 0, i)))
182 	  return NULL;
183       return src;
184     }
185 
186   return NULL;
187 }
188 
189 /* Look for a pattern of the "correct" form for an insn with a flags clobber
190    for which we may be able to eliminate a compare later.  We're not looking
191    to validate any inputs at this time, merely see that the basic shape is
192    correct.  The term "arithmetic" may be somewhat misleading...  */
193 
194 static bool
arithmetic_flags_clobber_p(rtx_insn * insn)195 arithmetic_flags_clobber_p (rtx_insn *insn)
196 {
197   rtx pat, x;
198 
199   if (!NONJUMP_INSN_P (insn))
200     return false;
201   pat = PATTERN (insn);
202   if (asm_noperands (pat) >= 0)
203     return false;
204 
205   if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) == 2)
206     {
207       x = XVECEXP (pat, 0, 0);
208       if (GET_CODE (x) != SET)
209 	return false;
210       x = SET_DEST (x);
211       if (!REG_P (x))
212 	return false;
213 
214       x = XVECEXP (pat, 0, 1);
215       if (GET_CODE (x) == CLOBBER)
216 	{
217 	  x = XEXP (x, 0);
218 	  if (REG_P (x) && REGNO (x) == targetm.flags_regnum)
219 	    return true;
220 	}
221     }
222 
223   return false;
224 }
225 
/* Look for uses of FLAGS in INSN.  If we find one we can analyze, record
   it in CMP; otherwise indicate that we've missed a use by setting
   CMP->missing_uses.  */

static void
find_flags_uses_in_insn (struct comparison *cmp, rtx_insn *insn)
{
  df_ref use;

  /* If we've already lost track of uses, don't bother collecting more.  */
  if (cmp->missing_uses)
    return;

  /* Find a USE of the flags register.  */
  FOR_EACH_INSN_USE (use, insn)
    if (DF_REF_REGNO (use) == targetm.flags_regnum)
      {
	rtx x, *loc;

	/* If this is an unusual use, quit.  */
	if (DF_REF_TYPE (use) != DF_REF_REG_USE)
	  goto fail;

	/* If we've run out of slots to record uses, quit.  */
	if (cmp->n_uses == MAX_CMP_USE)
	  goto fail;

	/* Unfortunately the location of the flags register, while present
	   in the reference structure, doesn't help.  We need to find the
	   comparison code that is outer to the actual flags use.
	   For a PARALLEL the use is expected in the first element, and
	   for a conditional branch the flags appear as the condition of
	   an IF_THEN_ELSE.  */
	loc = DF_REF_LOC (use);
	x = PATTERN (insn);
	if (GET_CODE (x) == PARALLEL)
	  x = XVECEXP (x, 0, 0);
	x = SET_SRC (x);
	if (GET_CODE (x) == IF_THEN_ELSE)
	  x = XEXP (x, 0);
	/* Only accept the canonical shape (code (reg:CC) (const_int 0));
	   the DF_REF_LOC pointer identity check guarantees the flags reg
	   really is operand 0 of this comparison.  */
	if (COMPARISON_P (x)
	    && loc == &XEXP (x, 0)
	    && XEXP (x, 1) == const0_rtx)
	  {
	    /* We've found a use of the flags that we understand.  */
	    struct comparison_use *cuse = &cmp->uses[cmp->n_uses++];
	    cuse->insn = insn;
	    cuse->loc = loc;
	    cuse->code = GET_CODE (x);
	  }
	else
	  goto fail;
      }
  return;

 fail:
  /* We failed to recognize this use of the flags register.  */
  cmp->missing_uses = true;
}
281 
/* Dominator walker that visits each basic block and collects comparison
   insns into ALL_COMPARES; see before_dom_children for the per-block
   work.  */

class find_comparison_dom_walker : public dom_walker
{
public:
  /* DIRECTION is expected to be CDI_DOMINATORS; see find_comparisons.  */
  find_comparison_dom_walker (cdi_direction direction)
    : dom_walker (direction) {}

  virtual edge before_dom_children (basic_block);
};
290 
/* Return true if conforming COMPARE with EH_NOTE is redundant with comparison
   CMP and can thus be eliminated.  May modify CMP->insn's pattern and
   CMP->orig_mode as a side effect when a compatible-but-different CC mode
   is required.  */

static bool
can_eliminate_compare (rtx compare, rtx eh_note, struct comparison *cmp)
{
  /* Take care that it's in the same EH region.  */
  if (cfun->can_throw_non_call_exceptions
      && !rtx_equal_p (eh_note, cmp->eh_note))
    return false;

  /* Make sure the compare is redundant with the previous.  */
  if (!rtx_equal_p (strip_not (XEXP (compare, 0)), cmp->in_a)
      || !rtx_equal_p (XEXP (compare, 1), cmp->in_b))
    return false;

  /* Both compares must agree on whether operand 0 is wrapped in a NOT.  */
  if (is_not (XEXP (compare, 0)) != cmp->not_in_a)
    return false;

  /* New mode must be compatible with the previous compare mode.  */
  machine_mode new_mode
    = targetm.cc_modes_compatible (GET_MODE (compare), cmp->orig_mode);

  if (new_mode == VOIDmode)
    return false;

  if (cmp->orig_mode != new_mode)
    {
      /* Generate new comparison for substitution.
	 NOTE(review): this rebuilds the compare from CMP->in_a, which has
	 had any NOT stripped; if CMP->not_in_a is set the NOT is not
	 re-applied here.  Presumably targets using the NOT form never
	 reach the mode-change path — confirm.  */
      rtx flags = gen_rtx_REG (new_mode, targetm.flags_regnum);
      rtx x = gen_rtx_COMPARE (new_mode, cmp->in_a, cmp->in_b);
      x = gen_rtx_SET (flags, x);

      /* With IN_GROUP false, validate_change applies the replacement
	 immediately if the insn remains recognizable.  */
      if (!validate_change (cmp->insn, &PATTERN (cmp->insn), x, false))
	return false;

      cmp->orig_mode = new_mode;
    }

  return true;
}
332 
/* Identify comparison instructions within BB.  If the flags from the last
   compare in the BB is live at the end of the block, install the compare
   in BB->AUX so that blocks extending this one can reuse it.  Duplicate
   compares found along the way are deleted.  Called via
   dom_walker.walk ().  */

edge
find_comparison_dom_walker::before_dom_children (basic_block bb)
{
  rtx_insn *insn, *next;
  bool need_purge = false;
  /* For each hard register, the last insn in this block that set it;
     used to fill in comparison::in_a_setter.  */
  rtx_insn *last_setter[FIRST_PSEUDO_REGISTER];

  /* The last comparison that was made.  Will be reset to NULL
     once the flags are clobbered.  */
  struct comparison *last_cmp = NULL;

  /* True iff the last comparison has not been clobbered, nor
     have its inputs.  Used to eliminate duplicate compares.  */
  bool last_cmp_valid = false;

  /* The last insn that clobbered the flags, if that insn is of
     a form that may be valid for eliminating a following compare.
     To be reset to NULL once the flags are set otherwise.  */
  rtx_insn *last_clobber = NULL;

  /* Propagate the last live comparison throughout the extended basic block. */
  if (single_pred_p (bb))
    {
      last_cmp = (struct comparison *) single_pred (bb)->aux;
      if (last_cmp)
	last_cmp_valid = last_cmp->inputs_valid;
    }

  memset (last_setter, 0, sizeof (last_setter));
  for (insn = BB_HEAD (bb); insn; insn = next)
    {
      rtx src;

      next = (insn == BB_END (bb) ? NULL : NEXT_INSN (insn));
      if (!NONDEBUG_INSN_P (insn))
	continue;

      src = conforming_compare (insn);
      if (src)
	{
	  rtx eh_note = NULL;

	  if (cfun->can_throw_non_call_exceptions)
	    eh_note = find_reg_note (insn, REG_EH_REGION, NULL);

	  /* A compare identical to the last one, with its inputs still
	     intact, is redundant and can be deleted on the spot.  */
	  if (last_cmp_valid && can_eliminate_compare (src, eh_note, last_cmp))
	    {
	      if (eh_note)
		need_purge = true;
	      delete_insn (insn);
	      continue;
	    }

	  /* Record this comparison for later elimination attempts.  */
	  last_cmp = XCNEW (struct comparison);
	  last_cmp->insn = insn;
	  last_cmp->prev_clobber = last_clobber;
	  last_cmp->in_a = strip_not (XEXP (src, 0));
	  last_cmp->in_b = XEXP (src, 1);
	  last_cmp->not_in_a = is_not (XEXP (src, 0));
	  last_cmp->eh_note = eh_note;
	  last_cmp->orig_mode = GET_MODE (src);
	  /* For a compare against zero, remember the in-block insn that
	     set IN_A, for use by try_merge_compare.  */
	  if (last_cmp->in_b == const0_rtx
	      && last_setter[REGNO (last_cmp->in_a)])
	    {
	      rtx set = single_set (last_setter[REGNO (last_cmp->in_a)]);
	      if (set && rtx_equal_p (SET_DEST (set), last_cmp->in_a))
		last_cmp->in_a_setter = last_setter[REGNO (last_cmp->in_a)];
	    }
	  all_compares.safe_push (last_cmp);

	  /* It's unusual, but be prepared for comparison patterns that
	     also clobber an input, or perhaps a scratch.  */
	  last_clobber = NULL;
	  last_cmp_valid = true;
	}

      else
	{
	  /* Notice if this instruction uses the flags register.  */
	  if (last_cmp)
	    find_flags_uses_in_insn (last_cmp, insn);

	  /* Notice if this instruction kills the flags register.  */
	  df_ref def;
	  FOR_EACH_INSN_DEF (def, insn)
	    if (DF_REF_REGNO (def) == targetm.flags_regnum)
	      {
		/* See if this insn could be the "clobber" that eliminates
		   a future comparison.   */
		last_clobber = (arithmetic_flags_clobber_p (insn)
				? insn : NULL);

		/* In either case, the previous compare is no longer valid.  */
		last_cmp = NULL;
		last_cmp_valid = false;
		break;
	      }
	}

      /* Notice if any of the inputs to the comparison have changed
	 and remember last insn that sets each register.  */
      df_ref def;
      FOR_EACH_INSN_DEF (def, insn)
	{
	  if (last_cmp_valid
	      && (DF_REF_REGNO (def) == REGNO (last_cmp->in_a)
		  || (REG_P (last_cmp->in_b)
		      && DF_REF_REGNO (def) == REGNO (last_cmp->in_b))))
	    last_cmp_valid = false;
	  last_setter[DF_REF_REGNO (def)] = insn;
	}
    }

  /* Remember the live comparison for subsequent members of
     the extended basic block.  */
  if (last_cmp)
    {
      bb->aux = last_cmp;
      last_cmp->inputs_valid = last_cmp_valid;

      /* Look to see if the flags register is live outgoing here, and
	 incoming to any successor not part of the extended basic block.  */
      if (bitmap_bit_p (df_get_live_out (bb), targetm.flags_regnum))
	{
	  edge e;
	  edge_iterator ei;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      basic_block dest = e->dest;
	      /* NOTE(review): testing DF_GET_LIVE_IN of BB here looks odd
		 given the comment above speaks of liveness incoming to the
		 successor; DEST would seem the intended argument.  The
		 current form is at worst conservative only when flags are
		 live into BB — confirm against the pass's history before
		 changing.  */
	      if (bitmap_bit_p (df_get_live_in (bb), targetm.flags_regnum)
		  && !single_pred_p (dest))
		{
		  last_cmp->missing_uses = true;
		  break;
		}
	    }
	}
    }

  /* If we deleted a compare with a REG_EH_REGION note, we may need to
     remove EH edges.  */
  if (need_purge)
    purge_dead_edges (bb);

  return NULL;
}
484 
485 /* Find all comparisons in the function.  */
486 
487 static void
find_comparisons(void)488 find_comparisons (void)
489 {
490   calculate_dominance_info (CDI_DOMINATORS);
491 
492   find_comparison_dom_walker (CDI_DOMINATORS)
493     .walk (cfun->cfg->x_entry_block_ptr);
494 
495   clear_aux_for_blocks ();
496   free_dominance_info (CDI_DOMINATORS);
497 }
498 
/* Select an alternate CC_MODE for a comparison insn comparing A and B.
   Note that inputs are almost certainly different than the IN_A and IN_B
   stored in CMP -- we're called while attempting to eliminate the compare
   after all.  Return the new FLAGS rtx if successful, else return NULL.
   Note that this function may start a change group (via queued
   validate_change calls) that the caller must apply or cancel.  */

static rtx
maybe_select_cc_mode (struct comparison *cmp, rtx a ATTRIBUTE_UNUSED,
		      rtx b ATTRIBUTE_UNUSED)
{
  machine_mode sel_mode;
  const int n = cmp->n_uses;
  rtx flags = NULL;

#ifndef SELECT_CC_MODE
  /* Minimize code differences when this target macro is undefined.
     The dummy definition below is never executed (we return first);
     it only keeps the remainder of the function compilable.  */
  return NULL;
#define SELECT_CC_MODE(A,B,C) (gcc_unreachable (), VOIDmode)
#endif

  /* If we don't have access to all of the uses, we can't validate.  */
  if (cmp->missing_uses || n == 0)
    return NULL;

  /* Find a new mode that works for all of the uses.  Special case the
     common case of exactly one use.  */
  if (n == 1)
    {
      sel_mode = SELECT_CC_MODE (cmp->uses[0].code, a, b);
      if (sel_mode != cmp->orig_mode)
	{
	  /* Queue (IN_GROUP == true) the flags-mode replacement at the
	     single use site.  */
	  flags = gen_rtx_REG (sel_mode, targetm.flags_regnum);
	  validate_change (cmp->uses[0].insn, cmp->uses[0].loc, flags, true);
	}
    }
  else
    {
      int i;

      /* Fold the per-use modes together with cc_modes_compatible;
	 fail if any pair has no common mode.  */
      sel_mode = SELECT_CC_MODE (cmp->uses[0].code, a, b);
      for (i = 1; i < n; ++i)
	{
	  machine_mode new_mode = SELECT_CC_MODE (cmp->uses[i].code, a, b);
	  if (new_mode != sel_mode)
	    {
	      sel_mode = targetm.cc_modes_compatible (sel_mode, new_mode);
	      if (sel_mode == VOIDmode)
		return NULL;
	    }
	}

      if (sel_mode != cmp->orig_mode)
	{
	  /* Queue the replacement at every use site.  */
	  flags = gen_rtx_REG (sel_mode, targetm.flags_regnum);
	  for (i = 0; i < n; ++i)
	    validate_change (cmp->uses[i].insn, cmp->uses[i].loc, flags, true);
	}
    }

  return flags;
}
560 
/* Return a register RTX holding the same value at START as REG at END, or
   NULL_RTX if there is none.  Scans backward from END (exclusive) to
   START (exclusive), bailing out at the head of END's basic block, and
   looks through simple register copies along the way.  */

static rtx
equivalent_reg_at_start (rtx reg, rtx_insn *end, rtx_insn *start)
{
  machine_mode orig_mode = GET_MODE (reg);
  rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (end));

  for (rtx_insn *insn = PREV_INSN (end);
       insn != start;
       insn = PREV_INSN (insn))
    {
      /* DF reference flags that mark a def as something other than a
	 plain full-register set; any of these defeats the equivalence.  */
      const int abnormal_flags
	= (DF_REF_CONDITIONAL | DF_REF_PARTIAL | DF_REF_MAY_CLOBBER
	   | DF_REF_MUST_CLOBBER | DF_REF_SIGN_EXTRACT
	   | DF_REF_ZERO_EXTRACT | DF_REF_STRICT_LOW_PART
	   | DF_REF_PRE_POST_MODIFY);
      df_ref def;

      /* Note that the BB_HEAD is always either a note or a label, but in
	 any case it means that REG is defined outside the block.  */
      if (insn == bb_head)
	return NULL_RTX;
      if (NOTE_P (insn) || DEBUG_INSN_P (insn))
	continue;

      /* Find a possible def of REG in INSN.  */
      FOR_EACH_INSN_DEF (def, insn)
	if (DF_REF_REGNO (def) == REGNO (reg))
	  break;

      /* No definitions of REG; continue searching.  */
      if (def == NULL)
	continue;

      /* Bail if this is not a totally normal set of REG.  */
      if (DF_REF_IS_ARTIFICIAL (def))
	return NULL_RTX;
      if (DF_REF_FLAGS (def) & abnormal_flags)
	return NULL_RTX;

      /* We've found an insn between the compare and the clobber that sets
	 REG.  Given that pass_cprop_hardreg has not yet run, we still find
	 situations in which we can usefully look through a copy insn.
	 From here on we track the copy's source register instead.  */
      rtx x = single_set (insn);
      if (x == NULL_RTX)
	return NULL_RTX;
      reg = SET_SRC (x);
      if (!REG_P (reg))
	return NULL_RTX;
    }

  /* Looking through copies must not have changed the machine mode.  */
  if (GET_MODE (reg) != orig_mode)
    return NULL_RTX;

  return reg;
}
619 
620 /* Return true if it is okay to merge the comparison CMP_INSN with
621    the instruction ARITH_INSN.  Both instructions are assumed to be in the
622    same basic block with ARITH_INSN appearing before CMP_INSN.  This checks
623    that there are no uses or defs of the condition flags or control flow
624    changes between the two instructions.  */
625 
626 static bool
can_merge_compare_into_arith(rtx_insn * cmp_insn,rtx_insn * arith_insn)627 can_merge_compare_into_arith (rtx_insn *cmp_insn, rtx_insn *arith_insn)
628 {
629   for (rtx_insn *insn = PREV_INSN (cmp_insn);
630        insn && insn != arith_insn;
631        insn = PREV_INSN (insn))
632     {
633       if (!NONDEBUG_INSN_P (insn))
634 	continue;
635       /* Bail if there are jumps or calls in between.  */
636       if (!NONJUMP_INSN_P (insn))
637 	return false;
638 
639       /* Bail on old-style asm statements because they lack
640 	 data flow information.  */
641       if (GET_CODE (PATTERN (insn)) == ASM_INPUT)
642 	return false;
643 
644       df_ref ref;
645       /* Find a USE of the flags register.  */
646       FOR_EACH_INSN_USE (ref, insn)
647 	if (DF_REF_REGNO (ref) == targetm.flags_regnum)
648 	  return false;
649 
650       /* Find a DEF of the flags register.  */
651       FOR_EACH_INSN_DEF (ref, insn)
652 	if (DF_REF_REGNO (ref) == targetm.flags_regnum)
653 	  return false;
654     }
655   return true;
656 }
657 
/* Given two SET expressions, SET_A and SET_B determine whether they form
   a recognizable pattern when emitted in parallel.  Return that parallel
   (as a detached insn) if so.  Otherwise return NULL.  */

static rtx
try_validate_parallel (rtx set_a, rtx set_b)
{
  rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set_a, set_b));
  rtx_insn *insn = make_insn_raw (par);

  if (insn_invalid_p (insn, false))
    {
      /* Give back the insn uid that make_insn_raw consumed, since the
	 scratch insn is being discarded.  */
      crtl->emit.x_cur_insn_uid--;
      return NULL_RTX;
    }

  /* Leave the insn fully detached so the caller can emit it wherever
     it chooses.  */
  SET_PREV_INSN (insn) = NULL_RTX;
  SET_NEXT_INSN (insn) = NULL_RTX;
  INSN_LOCATION (insn) = 0;
  return insn;
}
679 
/* For a comparison instruction described by CMP check if it compares a
   register with zero i.e. it is of the form CC := CMP R1, 0.
   If it is, find the instruction defining R1 (say I1) and try to create a
   PARALLEL consisting of I1 and the comparison, representing a flag-setting
   arithmetic instruction.  Example:
   I1: R1 := R2 + R3
   <instructions that don't read the condition register>
   I2: CC := CMP R1 0
   I2 can be merged with I1 into:
   I1: { CC := CMP (R2 + R3) 0 ; R1 := R2 + R3 }
   This catches cases where R1 is used between I1 and I2 and therefore
   combine and other RTL optimisations will not try to propagate it into
   I2.  Return true if we succeeded in merging CMP.  */

static bool
try_merge_compare (struct comparison *cmp)
{
  rtx_insn *cmp_insn = cmp->insn;

  /* Only handle compare-against-zero with a known in-block setter
     (recorded by before_dom_children).  */
  if (cmp->in_b != const0_rtx || cmp->in_a_setter == NULL)
    return false;
  rtx in_a = cmp->in_a;
  df_ref use;

  /* Confirm via DF that the compare really reads IN_A.  */
  FOR_EACH_INSN_USE (use, cmp_insn)
    if (DF_REF_REGNO (use) == REGNO (in_a))
      break;
  if (!use)
    return false;

  rtx_insn *def_insn = cmp->in_a_setter;
  rtx set = single_set (def_insn);
  if (!set)
    return false;

  if (!can_merge_compare_into_arith (cmp_insn, def_insn))
    return false;

  rtx src = SET_SRC (set);

  /* If the source uses addressing modes with side effects, we can't
     do the merge because we'd end up with a PARALLEL that has two
     instances of that side effect in it.  */
  if (side_effects_p (src))
    return false;

  /* This may queue replacements of the flags mode at the use sites.  */
  rtx flags = maybe_select_cc_mode (cmp, src, CONST0_RTX (GET_MODE (src)));
  if (!flags)
    {
    /* We may already have a change group going through maybe_select_cc_mode.
       Discard it properly.  */
      cancel_changes (0);
      return false;
    }

  /* Build { CC := CMP (operation) 0 ; reg := operation }.  */
  rtx flag_set
    = gen_rtx_SET (flags, gen_rtx_COMPARE (GET_MODE (flags),
					   copy_rtx (src),
					   CONST0_RTX (GET_MODE (src))));
  rtx arith_set = copy_rtx (PATTERN (def_insn));
  rtx par = try_validate_parallel (flag_set, arith_set);
  if (!par)
    {
      /* We may already have a change group going through maybe_select_cc_mode.
	 Discard it properly.  */
      cancel_changes (0);
      return false;
    }
  /* Commit the queued flags-mode replacements before rewriting insns.  */
  if (!apply_change_group ())
    return false;
  emit_insn_after (par, def_insn);
  delete_insn (def_insn);
  delete_insn (cmp->insn);
  return true;
}
755 
/* Attempt to replace a comparison with a prior arithmetic insn that can
   compute the same flags value as the comparison itself.  Return true if
   successful, having made all rtl modifications necessary.  */

static bool
try_eliminate_compare (struct comparison *cmp)
{
  rtx flags, in_a, in_b, cmp_a, cmp_b;

  /* First try the compare-against-zero merge with the setter insn.  */
  if (try_merge_compare (cmp))
    return true;

  /* We must have found an interesting "clobber" preceding the compare.  */
  if (cmp->prev_clobber == NULL)
    return false;

  /* Verify that IN_A is not clobbered in between CMP and PREV_CLOBBER.
     Given that this target requires this pass, we can assume that most
     insns do clobber the flags, and so the distance between the compare
     and the clobber is likely to be small.  */
  /* ??? This is one point at which one could argue that DF_REF_CHAIN would
     be useful, but it is thought to be too heavy-weight a solution here.  */
  in_a = equivalent_reg_at_start (cmp->in_a, cmp->insn, cmp->prev_clobber);
  if (!in_a)
    return false;

  /* Likewise for IN_B if need be.  */
  if (CONSTANT_P (cmp->in_b))
    in_b = cmp->in_b;
  else if (REG_P (cmp->in_b))
    {
      in_b = equivalent_reg_at_start (cmp->in_b, cmp->insn, cmp->prev_clobber);
      if (!in_b)
	return false;
    }
  else if (GET_CODE (cmp->in_b) == UNSPEC)
    {
      /* Rebuild the UNSPEC with each element traced back to the
	 equivalent register at PREV_CLOBBER.  */
      const int len = XVECLEN (cmp->in_b, 0);
      rtvec v = rtvec_alloc (len);
      for (int i = 0; i < len; i++)
	{
	  rtx r = equivalent_reg_at_start (XVECEXP (cmp->in_b, 0, i),
					   cmp->insn, cmp->prev_clobber);
	  if (!r)
	    return false;
	  RTVEC_ELT (v, i) = r;
	}
      in_b = gen_rtx_UNSPEC (GET_MODE (cmp->in_b), v, XINT (cmp->in_b, 1));
    }
  else
    gcc_unreachable ();

  /* We've reached PREV_CLOBBER without finding a modification of IN_A.
     Validate that PREV_CLOBBER itself does in fact refer to IN_A.  Do
     recall that we've already validated the shape of PREV_CLOBBER.  */
  rtx_insn *insn = cmp->prev_clobber;

  /* X is the (set (reg) (operation)) half of the clobber parallel.  */
  rtx x = XVECEXP (PATTERN (insn), 0, 0);
  if (rtx_equal_p (SET_DEST (x), in_a))
    cmp_a = SET_SRC (x);

  /* Also check operations with implicit extensions, e.g.:
     [(set (reg:DI)
	   (zero_extend:DI (plus:SI (reg:SI) (reg:SI))))
      (set (reg:CCZ flags)
	   (compare:CCZ (plus:SI (reg:SI) (reg:SI))
			(const_int 0)))] */
  else if (REG_P (SET_DEST (x))
	   && REG_P (in_a)
	   && REGNO (SET_DEST (x)) == REGNO (in_a)
	   && (GET_CODE (SET_SRC (x)) == ZERO_EXTEND
	       || GET_CODE (SET_SRC (x)) == SIGN_EXTEND)
	   && GET_MODE (XEXP (SET_SRC (x), 0)) == GET_MODE (in_a))
    cmp_a = XEXP (SET_SRC (x), 0);

  /* Also check fully redundant comparisons, e.g.:
     [(set (reg:SI)
	   (minus:SI (reg:SI) (reg:SI))))
      (set (reg:CC flags)
	   (compare:CC (reg:SI) (reg:SI)))] */
  else if (REG_P (in_b)
	   && GET_CODE (SET_SRC (x)) == MINUS
	   && rtx_equal_p (XEXP (SET_SRC (x), 0), in_a)
	   && rtx_equal_p (XEXP (SET_SRC (x), 1), in_b))
    cmp_a = in_a;

  else
    return false;

  /* If the source uses addressing modes with side effects, we can't
     do the merge because we'd end up with a PARALLEL that has two
     instances of that side effect in it.  */
  if (side_effects_p (cmp_a))
    return false;

  /* Pick the second compare operand analogously.  */
  if (in_a == in_b)
    cmp_b = cmp_a;
  else if (rtx_equal_p (SET_DEST (x), in_b))
    cmp_b = SET_SRC (x);
  else
    cmp_b = in_b;
  if (side_effects_p (cmp_b))
    return false;

  /* Determine if we ought to use a different CC_MODE here.  */
  flags = maybe_select_cc_mode (cmp, cmp_a, cmp_b);
  if (flags == NULL)
    flags = gen_rtx_REG (cmp->orig_mode, targetm.flags_regnum);

  /* Generate a new comparison for installation in the setter,
     re-applying the NOT around operand 0 if the original had one.  */
  rtx y = cmp->not_in_a
	  ? gen_rtx_NOT (GET_MODE (cmp_a), copy_rtx (cmp_a))
	  : copy_rtx (cmp_a);
  y = gen_rtx_COMPARE (GET_MODE (flags), y, copy_rtx (cmp_b));
  y = gen_rtx_SET (flags, y);

  /* Canonicalize instruction to:
     [(set (reg:CCM) (compare:CCM (operation) (immediate)))
      (set (reg) (operation)]  */

  rtvec v = rtvec_alloc (2);
  RTVEC_ELT (v, 0) = y;
  RTVEC_ELT (v, 1) = x;

  rtx pat = gen_rtx_PARALLEL (VOIDmode, v);

  /* Succeed if the new instruction is valid.  Note that we may have started
     a change group within maybe_select_cc_mode, therefore we must continue. */
  validate_change (insn, &PATTERN (insn), pat, true);

  if (!apply_change_group ())
    return false;

  /* Success.  Delete the compare insn...  */
  delete_insn (cmp->insn);

  /* ... and any notes that are now invalid due to multiple sets.  */
  x = find_regno_note (insn, REG_UNUSED, targetm.flags_regnum);
  if (x)
    remove_note (insn, x);
  x = find_reg_note (insn, REG_EQUAL, NULL);
  if (x)
    remove_note (insn, x);
  x = find_reg_note (insn, REG_EQUIV, NULL);
  if (x)
    remove_note (insn, x);

  return true;
}
905 
906 /* Main entry point to the pass.  */
907 
908 static unsigned int
execute_compare_elim_after_reload(void)909 execute_compare_elim_after_reload (void)
910 {
911   df_analyze ();
912 
913   gcc_checking_assert (!all_compares.exists ());
914 
915   /* Locate all comparisons and their uses, and eliminate duplicates.  */
916   find_comparisons ();
917   if (all_compares.exists ())
918     {
919       struct comparison *cmp;
920       size_t i;
921 
922       /* Eliminate comparisons that are redundant with flags computation.  */
923       FOR_EACH_VEC_ELT (all_compares, i, cmp)
924 	{
925 	  try_eliminate_compare (cmp);
926 	  XDELETE (cmp);
927 	}
928 
929       all_compares.release ();
930     }
931 
932   return 0;
933 }
934 
935 namespace {
936 
/* Pass-manager metadata for the cmpelim RTL pass.  */

const pass_data pass_data_compare_elim_after_reload =
{
  RTL_PASS, /* type */
  "cmpelim", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_finish | TODO_df_verify ), /* todo_flags_finish */
};
949 
/* Pass object wrapping execute_compare_elim_after_reload; gated on the
   target declaring a flags register and on -fcompare-elim.  */

class pass_compare_elim_after_reload : public rtl_opt_pass
{
public:
  pass_compare_elim_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_compare_elim_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* Setting this target hook value is how a backend indicates the need.  */
      if (targetm.flags_regnum == INVALID_REGNUM)
	return false;
      return flag_compare_elim_after_reload;
    }

  virtual unsigned int execute (function *)
    {
      return execute_compare_elim_after_reload ();
    }

}; // class pass_compare_elim_after_reload
972 
973 } // anon namespace
974 
/* Factory used by the pass manager to instantiate the cmpelim pass.  */

rtl_opt_pass *
make_pass_compare_elim_after_reload (gcc::context *ctxt)
{
  return new pass_compare_elim_after_reload (ctxt);
}
980