1 /* IRA processing allocno lives to build allocno live ranges.
2    Copyright (C) 2006-2016 Free Software Foundation, Inc.
3    Contributed by Vladimir Makarov <vmakarov@redhat.com>.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "predict.h"
28 #include "df.h"
29 #include "tm_p.h"
30 #include "insn-config.h"
31 #include "regs.h"
32 #include "ira.h"
33 #include "ira-int.h"
34 #include "sparseset.h"
35 
/* The code in this file is similar to the code in the old global.c,
   but it works on the allocno basis and creates live ranges instead
   of pseudo-register conflicts.  */
39 
/* Program points are enumerated by numbers from the range
   0..IRA_MAX_POINT-1.  There are approximately two times more program
   points than insns.  Program points are places in the program where
   liveness info can change.  In the most general case (there are more
   complicated cases too), some program points correspond to places
   where an input operand dies and others correspond to places where
   output operands are born.  */
47 int ira_max_point;
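
/* For example (purely illustrative), a single insn such as
   "r3 = r1 + r2" typically gets two program points: one at which the
   inputs r1 and r2 may die, and a later one at which the output r3 is
   born.  This is why there are roughly twice as many program points
   as insns.  */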
48 
49 /* Arrays of size IRA_MAX_POINT mapping a program point to the allocno
50    live ranges with given start/finish point.  */
51 live_range_t *ira_start_point_ranges, *ira_finish_point_ranges;
52 
53 /* Number of the current program point.  */
54 static int curr_point;
55 
/* Point where register pressure excess started, or -1 if there is no
   register pressure excess.  Excess pressure for a register class at
   some point means that more allocnos of the given register class are
   living at that point than there are hard registers of the class
   available for allocation.  It is defined only for pressure
   classes.  */
62 static int high_pressure_start_point[N_REG_CLASSES];
63 
64 /* Objects live at current point in the scan.  */
65 static sparseset objects_live;
66 
67 /* A temporary bitmap used in functions that wish to avoid visiting an allocno
68    multiple times.  */
69 static sparseset allocnos_processed;
70 
71 /* Set of hard regs (except eliminable ones) currently live.  */
72 static HARD_REG_SET hard_regs_live;
73 
74 /* The loop tree node corresponding to the current basic block.  */
75 static ira_loop_tree_node_t curr_bb_node;
76 
77 /* The number of the last processed call.  */
78 static int last_call_num;
/* The number of the last call at which the given allocno was saved.  */
80 static int *allocno_saved_at_call;
81 
82 /* The value of get_preferred_alternatives for the current instruction,
83    supplemental to recog_data.  */
84 static alternative_mask preferred_alternatives;
85 
86 /* Record the birth of hard register REGNO, updating hard_regs_live and
87    hard reg conflict information for living allocnos.  */
88 static void
make_hard_regno_born (int regno)
90 {
91   unsigned int i;
92 
93   SET_HARD_REG_BIT (hard_regs_live, regno);
94   EXECUTE_IF_SET_IN_SPARSESET (objects_live, i)
95     {
96       ira_object_t obj = ira_object_id_map[i];
97 
98       SET_HARD_REG_BIT (OBJECT_CONFLICT_HARD_REGS (obj), regno);
99       SET_HARD_REG_BIT (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), regno);
100     }
101 }
102 
103 /* Process the death of hard register REGNO.  This updates
104    hard_regs_live.  */
105 static void
make_hard_regno_dead (int regno)
107 {
108   CLEAR_HARD_REG_BIT (hard_regs_live, regno);
109 }
110 
111 /* Record the birth of object OBJ.  Set a bit for it in objects_live,
112    start a new live range for it if necessary and update hard register
113    conflicts.  */
114 static void
make_object_born (ira_object_t obj)
116 {
117   live_range_t lr = OBJECT_LIVE_RANGES (obj);
118 
119   sparseset_set_bit (objects_live, OBJECT_CONFLICT_ID (obj));
120   IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), hard_regs_live);
121   IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), hard_regs_live);
122 
123   if (lr == NULL
124       || (lr->finish != curr_point && lr->finish + 1 != curr_point))
125     ira_add_live_range_to_object (obj, curr_point, -1);
126 }
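
/* Note that if OBJ's last range finished at the current point or at
   the point just before it, make_object_born does not start a new
   range; the following death of OBJ simply extends the existing range.
   For example (illustrative points), an object dying at point 10,
   reborn at point 11 and dying again at point 15 keeps one range
   ending at 15 instead of two adjacent ranges.  */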
127 
128 /* Update ALLOCNO_EXCESS_PRESSURE_POINTS_NUM for the allocno
129    associated with object OBJ.  */
130 static void
update_allocno_pressure_excess_length (ira_object_t obj)
132 {
133   ira_allocno_t a = OBJECT_ALLOCNO (obj);
134   int start, i;
135   enum reg_class aclass, pclass, cl;
136   live_range_t p;
137 
138   aclass = ALLOCNO_CLASS (a);
139   pclass = ira_pressure_class_translate[aclass];
140   for (i = 0;
141        (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
142        i++)
143     {
144       if (! ira_reg_pressure_class_p[cl])
145 	continue;
146       if (high_pressure_start_point[cl] < 0)
147 	continue;
148       p = OBJECT_LIVE_RANGES (obj);
149       ira_assert (p != NULL);
150       start = (high_pressure_start_point[cl] > p->start
151 	       ? high_pressure_start_point[cl] : p->start);
152       ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) += curr_point - start + 1;
153     }
154 }
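
/* For instance (illustrative numbers), if pressure for a class became
   excessive at point 20 and the allocno's current range started at
   point 25, then when the allocno dies at point 30 the code above adds
   30 - 25 + 1 = 6 to ALLOCNO_EXCESS_PRESSURE_POINTS_NUM.  */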
155 
156 /* Process the death of object OBJ, which is associated with allocno
157    A.  This finishes the current live range for it.  */
158 static void
make_object_dead (ira_object_t obj)
160 {
161   live_range_t lr;
162 
163   sparseset_clear_bit (objects_live, OBJECT_CONFLICT_ID (obj));
164   lr = OBJECT_LIVE_RANGES (obj);
165   ira_assert (lr != NULL);
166   lr->finish = curr_point;
167   update_allocno_pressure_excess_length (obj);
168 }
169 
170 /* The current register pressures for each pressure class for the current
171    basic block.  */
172 static int curr_reg_pressure[N_REG_CLASSES];
173 
174 /* Record that register pressure for PCLASS increased by N registers.
175    Update the current register pressure, maximal register pressure for
176    the current BB and the start point of the register pressure
177    excess.  */
178 static void
inc_register_pressure (enum reg_class pclass, int n)
180 {
181   int i;
182   enum reg_class cl;
183 
184   for (i = 0;
185        (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
186        i++)
187     {
188       if (! ira_reg_pressure_class_p[cl])
189 	continue;
190       curr_reg_pressure[cl] += n;
191       if (high_pressure_start_point[cl] < 0
192 	  && (curr_reg_pressure[cl] > ira_class_hard_regs_num[cl]))
193 	high_pressure_start_point[cl] = curr_point;
194       if (curr_bb_node->reg_pressure[cl] < curr_reg_pressure[cl])
195 	curr_bb_node->reg_pressure[cl] = curr_reg_pressure[cl];
196     }
197 }
198 
199 /* Record that register pressure for PCLASS has decreased by NREGS
200    registers; update current register pressure, start point of the
201    register pressure excess, and register pressure excess length for
202    living allocnos.  */
203 
204 static void
dec_register_pressure (enum reg_class pclass, int nregs)
206 {
207   int i;
208   unsigned int j;
209   enum reg_class cl;
210   bool set_p = false;
211 
212   for (i = 0;
213        (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
214        i++)
215     {
216       if (! ira_reg_pressure_class_p[cl])
217 	continue;
218       curr_reg_pressure[cl] -= nregs;
219       ira_assert (curr_reg_pressure[cl] >= 0);
220       if (high_pressure_start_point[cl] >= 0
221 	  && curr_reg_pressure[cl] <= ira_class_hard_regs_num[cl])
222 	set_p = true;
223     }
224   if (set_p)
225     {
226       EXECUTE_IF_SET_IN_SPARSESET (objects_live, j)
227 	update_allocno_pressure_excess_length (ira_object_id_map[j]);
228       for (i = 0;
229 	   (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
230 	   i++)
231 	{
232 	  if (! ira_reg_pressure_class_p[cl])
233 	    continue;
234 	  if (high_pressure_start_point[cl] >= 0
235 	      && curr_reg_pressure[cl] <= ira_class_hard_regs_num[cl])
236 	    high_pressure_start_point[cl] = -1;
237 	}
238     }
239 }
240 
241 /* Determine from the objects_live bitmap whether REGNO is currently live,
242    and occupies only one object.  Return false if we have no information.  */
243 static bool
pseudo_regno_single_word_and_live_p (int regno)
245 {
246   ira_allocno_t a = ira_curr_regno_allocno_map[regno];
247   ira_object_t obj;
248 
249   if (a == NULL)
250     return false;
251   if (ALLOCNO_NUM_OBJECTS (a) > 1)
252     return false;
253 
254   obj = ALLOCNO_OBJECT (a, 0);
255 
256   return sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj));
257 }
258 
259 /* Mark the pseudo register REGNO as live.  Update all information about
260    live ranges and register pressure.  */
261 static void
mark_pseudo_regno_live (int regno)
263 {
264   ira_allocno_t a = ira_curr_regno_allocno_map[regno];
265   enum reg_class pclass;
266   int i, n, nregs;
267 
268   if (a == NULL)
269     return;
270 
271   /* Invalidate because it is referenced.  */
272   allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
273 
274   n = ALLOCNO_NUM_OBJECTS (a);
275   pclass = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
276   nregs = ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)];
277   if (n > 1)
278     {
279       /* We track every subobject separately.  */
280       gcc_assert (nregs == n);
281       nregs = 1;
282     }
283 
284   for (i = 0; i < n; i++)
285     {
286       ira_object_t obj = ALLOCNO_OBJECT (a, i);
287 
288       if (sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
289 	continue;
290 
291       inc_register_pressure (pclass, nregs);
292       make_object_born (obj);
293     }
294 }
295 
296 /* Like mark_pseudo_regno_live, but try to only mark one subword of
297    the pseudo as live.  SUBWORD indicates which; a value of 0
298    indicates the low part.  */
299 static void
mark_pseudo_regno_subword_live (int regno, int subword)
301 {
302   ira_allocno_t a = ira_curr_regno_allocno_map[regno];
303   int n;
304   enum reg_class pclass;
305   ira_object_t obj;
306 
307   if (a == NULL)
308     return;
309 
310   /* Invalidate because it is referenced.  */
311   allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
312 
313   n = ALLOCNO_NUM_OBJECTS (a);
314   if (n == 1)
315     {
316       mark_pseudo_regno_live (regno);
317       return;
318     }
319 
320   pclass = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
321   gcc_assert
322     (n == ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)]);
323   obj = ALLOCNO_OBJECT (a, subword);
324 
325   if (sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
326     return;
327 
328   inc_register_pressure (pclass, 1);
329   make_object_born (obj);
330 }
331 
332 /* Mark the register REG as live.  Store a 1 in hard_regs_live for
333    this register, record how many consecutive hardware registers it
334    actually needs.  */
335 static void
mark_hard_reg_live (rtx reg)
337 {
338   int regno = REGNO (reg);
339 
340   if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
341     {
342       int last = END_REGNO (reg);
343       enum reg_class aclass, pclass;
344 
345       while (regno < last)
346 	{
347 	  if (! TEST_HARD_REG_BIT (hard_regs_live, regno)
348 	      && ! TEST_HARD_REG_BIT (eliminable_regset, regno))
349 	    {
350 	      aclass = ira_hard_regno_allocno_class[regno];
351 	      pclass = ira_pressure_class_translate[aclass];
352 	      inc_register_pressure (pclass, 1);
353 	      make_hard_regno_born (regno);
354 	    }
355 	  regno++;
356 	}
357     }
358 }
359 
360 /* Mark a pseudo, or one of its subwords, as live.  REGNO is the pseudo's
361    register number; ORIG_REG is the access in the insn, which may be a
362    subreg.  */
363 static void
mark_pseudo_reg_live (rtx orig_reg, unsigned regno)
365 {
366   if (df_read_modify_subreg_p (orig_reg))
367     {
368       mark_pseudo_regno_subword_live (regno,
369 				      subreg_lowpart_p (orig_reg) ? 0 : 1);
370     }
371   else
372     mark_pseudo_regno_live (regno);
373 }
374 
375 /* Mark the register referenced by use or def REF as live.  */
376 static void
mark_ref_live (df_ref ref)
378 {
379   rtx reg = DF_REF_REG (ref);
380   rtx orig_reg = reg;
381 
382   if (GET_CODE (reg) == SUBREG)
383     reg = SUBREG_REG (reg);
384 
385   if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
386     mark_pseudo_reg_live (orig_reg, REGNO (reg));
387   else
388     mark_hard_reg_live (reg);
389 }
390 
391 /* Mark the pseudo register REGNO as dead.  Update all information about
392    live ranges and register pressure.  */
393 static void
mark_pseudo_regno_dead (int regno)
395 {
396   ira_allocno_t a = ira_curr_regno_allocno_map[regno];
397   int n, i, nregs;
398   enum reg_class cl;
399 
400   if (a == NULL)
401     return;
402 
403   /* Invalidate because it is referenced.  */
404   allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
405 
406   n = ALLOCNO_NUM_OBJECTS (a);
407   cl = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
408   nregs = ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)];
409   if (n > 1)
410     {
411       /* We track every subobject separately.  */
412       gcc_assert (nregs == n);
413       nregs = 1;
414     }
415   for (i = 0; i < n; i++)
416     {
417       ira_object_t obj = ALLOCNO_OBJECT (a, i);
418       if (!sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
419 	continue;
420 
421       dec_register_pressure (cl, nregs);
422       make_object_dead (obj);
423     }
424 }
425 
426 /* Like mark_pseudo_regno_dead, but called when we know that only part of the
427    register dies.  SUBWORD indicates which; a value of 0 indicates the low part.  */
428 static void
mark_pseudo_regno_subword_dead (int regno, int subword)
430 {
431   ira_allocno_t a = ira_curr_regno_allocno_map[regno];
432   int n;
433   enum reg_class cl;
434   ira_object_t obj;
435 
436   if (a == NULL)
437     return;
438 
439   /* Invalidate because it is referenced.  */
440   allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
441 
442   n = ALLOCNO_NUM_OBJECTS (a);
443   if (n == 1)
444     /* The allocno as a whole doesn't die in this case.  */
445     return;
446 
447   cl = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
448   gcc_assert
449     (n == ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)]);
450 
451   obj = ALLOCNO_OBJECT (a, subword);
452   if (!sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
453     return;
454 
455   dec_register_pressure (cl, 1);
456   make_object_dead (obj);
457 }
458 
459 /* Mark the hard register REG as dead.  Store a 0 in hard_regs_live for the
460    register.  */
461 static void
mark_hard_reg_dead (rtx reg)
463 {
464   int regno = REGNO (reg);
465 
466   if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
467     {
468       int last = END_REGNO (reg);
469       enum reg_class aclass, pclass;
470 
471       while (regno < last)
472 	{
473 	  if (TEST_HARD_REG_BIT (hard_regs_live, regno))
474 	    {
475 	      aclass = ira_hard_regno_allocno_class[regno];
476 	      pclass = ira_pressure_class_translate[aclass];
477 	      dec_register_pressure (pclass, 1);
478 	      make_hard_regno_dead (regno);
479 	    }
480 	  regno++;
481 	}
482     }
483 }
484 
485 /* Mark a pseudo, or one of its subwords, as dead.  REGNO is the pseudo's
486    register number; ORIG_REG is the access in the insn, which may be a
487    subreg.  */
488 static void
mark_pseudo_reg_dead (rtx orig_reg, unsigned regno)
490 {
491   if (df_read_modify_subreg_p (orig_reg))
492     {
493       mark_pseudo_regno_subword_dead (regno,
494 				      subreg_lowpart_p (orig_reg) ? 0 : 1);
495     }
496   else
497     mark_pseudo_regno_dead (regno);
498 }
499 
500 /* Mark the register referenced by definition DEF as dead, if the
501    definition is a total one.  */
502 static void
mark_ref_dead (df_ref def)
504 {
505   rtx reg = DF_REF_REG (def);
506   rtx orig_reg = reg;
507 
508   if (DF_REF_FLAGS_IS_SET (def, DF_REF_CONDITIONAL))
509     return;
510 
511   if (GET_CODE (reg) == SUBREG)
512     reg = SUBREG_REG (reg);
513 
514   if (DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL)
515       && (GET_CODE (orig_reg) != SUBREG
516 	  || REGNO (reg) < FIRST_PSEUDO_REGISTER
517 	  || !df_read_modify_subreg_p (orig_reg)))
518     return;
519 
520   if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
521     mark_pseudo_reg_dead (orig_reg, REGNO (reg));
522   else
523     mark_hard_reg_dead (reg);
524 }
525 
/* If REG is a pseudo or a subreg of it, and the class of its allocno
   intersects CL, make a conflict with pseudo DREG.  ORIG_DREG is the
   rtx actually accessed; it may be identical to DREG or a subreg of
   it.  Advance the current program point before making the conflict if
   ADVANCE_P.  Return TRUE if we will need to advance the current
   program point.  */
532 static bool
make_pseudo_conflict (rtx reg, enum reg_class cl, rtx dreg, rtx orig_dreg,
534 		      bool advance_p)
535 {
536   rtx orig_reg = reg;
537   ira_allocno_t a;
538 
539   if (GET_CODE (reg) == SUBREG)
540     reg = SUBREG_REG (reg);
541 
542   if (! REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
543     return advance_p;
544 
545   a = ira_curr_regno_allocno_map[REGNO (reg)];
546   if (! reg_classes_intersect_p (cl, ALLOCNO_CLASS (a)))
547     return advance_p;
548 
549   if (advance_p)
550     curr_point++;
551 
552   mark_pseudo_reg_live (orig_reg, REGNO (reg));
553   mark_pseudo_reg_live (orig_dreg, REGNO (dreg));
554   mark_pseudo_reg_dead (orig_reg, REGNO (reg));
555   mark_pseudo_reg_dead (orig_dreg, REGNO (dreg));
556 
557   return false;
558 }
559 
/* Check for and, if necessary, make conflicts between pseudo DREG of
   class DEF_CL of the current insn and input operand USE of class
   USE_CL.  ORIG_DREG is the rtx actually accessed; it may be identical
   to DREG or a subreg of it.  Advance the current program point before
   making the conflict if ADVANCE_P.  Return TRUE if we will need to
   advance the current program point.  */
566 static bool
check_and_make_def_use_conflict (rtx dreg, rtx orig_dreg,
568 				 enum reg_class def_cl, int use,
569 				 enum reg_class use_cl, bool advance_p)
570 {
571   if (! reg_classes_intersect_p (def_cl, use_cl))
572     return advance_p;
573 
574   advance_p = make_pseudo_conflict (recog_data.operand[use],
575 				    use_cl, dreg, orig_dreg, advance_p);
576 
577   /* Reload may end up swapping commutative operands, so you
578      have to take both orderings into account.  The
579      constraints for the two operands can be completely
580      different.  (Indeed, if the constraints for the two
581      operands are the same for all alternatives, there's no
582      point marking them as commutative.)  */
583   if (use < recog_data.n_operands - 1
584       && recog_data.constraints[use][0] == '%')
585     advance_p
586       = make_pseudo_conflict (recog_data.operand[use + 1],
587 			      use_cl, dreg, orig_dreg, advance_p);
588   if (use >= 1
589       && recog_data.constraints[use - 1][0] == '%')
590     advance_p
591       = make_pseudo_conflict (recog_data.operand[use - 1],
592 			      use_cl, dreg, orig_dreg, advance_p);
593   return advance_p;
594 }
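
/* Sketch of the commutativity handling above: for a hypothetical
   pattern whose operand 1 has constraint "%r" and operand 2 has
   constraint "r", reload may swap operands 1 and 2, so a conflict
   recorded against operand 1 is also recorded against operand 2 (and
   vice versa) by the two extra make_pseudo_conflict calls.  */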
595 
/* Check for and, if necessary, make conflicts between definition DEF
   of class DEF_CL of the current insn and its input operands.  Process
   only the constraints of alternative ALT.  */
599 static void
check_and_make_def_conflict (int alt, int def, enum reg_class def_cl)
601 {
602   int use, use_match;
603   ira_allocno_t a;
604   enum reg_class use_cl, acl;
605   bool advance_p;
606   rtx dreg = recog_data.operand[def];
607   rtx orig_dreg = dreg;
608 
609   if (def_cl == NO_REGS)
610     return;
611 
612   if (GET_CODE (dreg) == SUBREG)
613     dreg = SUBREG_REG (dreg);
614 
615   if (! REG_P (dreg) || REGNO (dreg) < FIRST_PSEUDO_REGISTER)
616     return;
617 
618   a = ira_curr_regno_allocno_map[REGNO (dreg)];
619   acl = ALLOCNO_CLASS (a);
620   if (! reg_classes_intersect_p (acl, def_cl))
621     return;
622 
623   advance_p = true;
624 
625   int n_operands = recog_data.n_operands;
626   const operand_alternative *op_alt = &recog_op_alt[alt * n_operands];
627   for (use = 0; use < n_operands; use++)
628     {
629       int alt1;
630 
631       if (use == def || recog_data.operand_type[use] == OP_OUT)
632 	continue;
633 
634       if (op_alt[use].anything_ok)
635 	use_cl = ALL_REGS;
636       else
637 	use_cl = op_alt[use].cl;
638 
639       /* If there's any alternative that allows USE to match DEF, do not
640 	 record a conflict.  If that causes us to create an invalid
641 	 instruction due to the earlyclobber, reload must fix it up.  */
642       for (alt1 = 0; alt1 < recog_data.n_alternatives; alt1++)
643 	{
644 	  if (!TEST_BIT (preferred_alternatives, alt1))
645 	    continue;
646 	  const operand_alternative *op_alt1
647 	    = &recog_op_alt[alt1 * n_operands];
648 	  if (op_alt1[use].matches == def
649 	      || (use < n_operands - 1
650 		  && recog_data.constraints[use][0] == '%'
651 		  && op_alt1[use + 1].matches == def)
652 	      || (use >= 1
653 		  && recog_data.constraints[use - 1][0] == '%'
654 		  && op_alt1[use - 1].matches == def))
655 	    break;
656 	}
657 
658       if (alt1 < recog_data.n_alternatives)
659 	continue;
660 
661       advance_p = check_and_make_def_use_conflict (dreg, orig_dreg, def_cl,
662 						   use, use_cl, advance_p);
663 
664       if ((use_match = op_alt[use].matches) >= 0)
665 	{
666 	  if (use_match == def)
667 	    continue;
668 
669 	  if (op_alt[use_match].anything_ok)
670 	    use_cl = ALL_REGS;
671 	  else
672 	    use_cl = op_alt[use_match].cl;
673 	  advance_p = check_and_make_def_use_conflict (dreg, orig_dreg, def_cl,
674 						       use, use_cl, advance_p);
675 	}
676     }
677 }
678 
679 /* Make conflicts of early clobber pseudo registers of the current
680    insn with its inputs.  Avoid introducing unnecessary conflicts by
681    checking classes of the constraints and pseudos because otherwise
682    significant code degradation is possible for some targets.  */
683 static void
make_early_clobber_and_input_conflicts (void)
685 {
686   int alt;
687   int def, def_match;
688   enum reg_class def_cl;
689 
690   int n_alternatives = recog_data.n_alternatives;
691   int n_operands = recog_data.n_operands;
692   const operand_alternative *op_alt = recog_op_alt;
693   for (alt = 0; alt < n_alternatives; alt++, op_alt += n_operands)
694     if (TEST_BIT (preferred_alternatives, alt))
695       for (def = 0; def < n_operands; def++)
696 	{
697 	  def_cl = NO_REGS;
698 	  if (op_alt[def].earlyclobber)
699 	    {
700 	      if (op_alt[def].anything_ok)
701 		def_cl = ALL_REGS;
702 	      else
703 		def_cl = op_alt[def].cl;
704 	      check_and_make_def_conflict (alt, def, def_cl);
705 	    }
706 	  if ((def_match = op_alt[def].matches) >= 0
707 	      && (op_alt[def_match].earlyclobber
708 		  || op_alt[def].earlyclobber))
709 	    {
710 	      if (op_alt[def_match].anything_ok)
711 		def_cl = ALL_REGS;
712 	      else
713 		def_cl = op_alt[def_match].cl;
714 	      check_and_make_def_conflict (alt, def, def_cl);
715 	    }
716 	}
717 }
718 
719 /* Mark early clobber hard registers of the current INSN as live (if
720    LIVE_P) or dead.  Return true if there are such registers.  */
721 static bool
mark_hard_reg_early_clobbers (rtx_insn *insn, bool live_p)
723 {
724   df_ref def;
725   bool set_p = false;
726 
727   FOR_EACH_INSN_DEF (def, insn)
728     if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
729       {
730 	rtx dreg = DF_REF_REG (def);
731 
732 	if (GET_CODE (dreg) == SUBREG)
733 	  dreg = SUBREG_REG (dreg);
734 	if (! REG_P (dreg) || REGNO (dreg) >= FIRST_PSEUDO_REGISTER)
735 	  continue;
736 
737 	/* Hard register clobbers are believed to be early clobber
738 	   because there is no way to say that non-operand hard
739 	   register clobbers are not early ones.  */
740 	if (live_p)
741 	  mark_ref_live (def);
742 	else
743 	  mark_ref_dead (def);
744 	set_p = true;
745       }
746 
747   return set_p;
748 }
749 
/* Check whether CONSTRAINTS permits only one hard register to be
   used.  If so, return the class of that hard register; otherwise
   return NO_REGS.  */
753 static enum reg_class
single_reg_class (const char *constraints, rtx op, rtx equiv_const)
755 {
756   int c;
757   enum reg_class cl, next_cl;
758   enum constraint_num cn;
759 
760   cl = NO_REGS;
761   alternative_mask preferred = preferred_alternatives;
762   for (; (c = *constraints); constraints += CONSTRAINT_LEN (c, constraints))
763     if (c == '#')
764       preferred &= ~ALTERNATIVE_BIT (0);
765     else if (c == ',')
766       preferred >>= 1;
767     else if (preferred & 1)
768       switch (c)
769 	{
770 	case 'g':
771 	  return NO_REGS;
772 
773 	default:
774 	  /* ??? Is this the best way to handle memory constraints?  */
775 	  cn = lookup_constraint (constraints);
776 	  if (insn_extra_memory_constraint (cn)
777 	      || insn_extra_special_memory_constraint (cn)
778 	      || insn_extra_address_constraint (cn))
779 	    return NO_REGS;
780 	  if (constraint_satisfied_p (op, cn)
781 	      || (equiv_const != NULL_RTX
782 		  && CONSTANT_P (equiv_const)
783 		  && constraint_satisfied_p (equiv_const, cn)))
784 	    return NO_REGS;
785 	  next_cl = reg_class_for_constraint (cn);
786 	  if (next_cl == NO_REGS)
787 	    break;
788 	  if (cl == NO_REGS
789 	      ? ira_class_singleton[next_cl][GET_MODE (op)] < 0
790 	      : (ira_class_singleton[cl][GET_MODE (op)]
791 		 != ira_class_singleton[next_cl][GET_MODE (op)]))
792 	    return NO_REGS;
793 	  cl = next_cl;
794 	  break;
795 
796 	case '0': case '1': case '2': case '3': case '4':
797 	case '5': case '6': case '7': case '8': case '9':
798 	  next_cl
799 	    = single_reg_class (recog_data.constraints[c - '0'],
800 				recog_data.operand[c - '0'], NULL_RTX);
801 	  if (cl == NO_REGS
802 	      ? ira_class_singleton[next_cl][GET_MODE (op)] < 0
803 	      : (ira_class_singleton[cl][GET_MODE (op)]
804 		 != ira_class_singleton[next_cl][GET_MODE (op)]))
805 	    return NO_REGS;
806 	  cl = next_cl;
807 	  break;
808 	}
809   return cl;
810 }
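
/* For example, on a target where constraint "a" maps to a class
   containing a single hard register in the operand's mode (such as
   AREG/%eax on x86), single_reg_class returns that class for the
   constraint string "a", while strings like "r" or "g" yield NO_REGS
   because they allow more than one hard register.  */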
811 
/* Check whether operand OP_NUM of the current insn can use only one
   hard register.  If so, return the class of that hard register;
   otherwise return NO_REGS.  */
815 static enum reg_class
single_reg_operand_class (int op_num)
817 {
818   if (op_num < 0 || recog_data.n_alternatives == 0)
819     return NO_REGS;
820   return single_reg_class (recog_data.constraints[op_num],
821 			   recog_data.operand[op_num], NULL_RTX);
822 }
823 
/* Set up hard register set *SET to the hard registers that might be
   used by insn reloads because the constraints are too strict.  */
827 void
ira_implicitly_set_insn_hard_regs (HARD_REG_SET *set,
829 				   alternative_mask preferred)
830 {
831   int i, c, regno = 0;
832   enum reg_class cl;
833   rtx op;
834   machine_mode mode;
835 
836   CLEAR_HARD_REG_SET (*set);
837   for (i = 0; i < recog_data.n_operands; i++)
838     {
839       op = recog_data.operand[i];
840 
841       if (GET_CODE (op) == SUBREG)
842 	op = SUBREG_REG (op);
843 
844       if (GET_CODE (op) == SCRATCH
845 	  || (REG_P (op) && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER))
846 	{
847 	  const char *p = recog_data.constraints[i];
848 
849 	  mode = (GET_CODE (op) == SCRATCH
850 		  ? GET_MODE (op) : PSEUDO_REGNO_MODE (regno));
851 	  cl = NO_REGS;
852 	  for (; (c = *p); p += CONSTRAINT_LEN (c, p))
853 	    if (c == '#')
854 	      preferred &= ~ALTERNATIVE_BIT (0);
855 	    else if (c == ',')
856 	      preferred >>= 1;
857 	    else if (preferred & 1)
858 	      {
859 		cl = reg_class_for_constraint (lookup_constraint (p));
860 		if (cl != NO_REGS)
861 		  {
862 		    /* There is no register pressure problem if all of the
863 		       regs in this class are fixed.  */
864 		    int regno = ira_class_singleton[cl][mode];
865 		    if (regno >= 0)
866 		      add_to_hard_reg_set (set, mode, regno);
867 		  }
868 	      }
869 	}
870     }
871 }
/* Process the input operands (if IN_P) or the output operands
   (otherwise) of the current insn, which has frequency FREQ, to find
   allocnos that can use only one hard register, and make the other
   currently living allocnos conflict with that hard register.  */
876 static void
process_single_reg_class_operands (bool in_p, int freq)
878 {
879   int i, regno;
880   unsigned int px;
881   enum reg_class cl;
882   rtx operand;
883   ira_allocno_t operand_a, a;
884 
885   for (i = 0; i < recog_data.n_operands; i++)
886     {
887       operand = recog_data.operand[i];
888       if (in_p && recog_data.operand_type[i] != OP_IN
889 	  && recog_data.operand_type[i] != OP_INOUT)
890 	continue;
891       if (! in_p && recog_data.operand_type[i] != OP_OUT
892 	  && recog_data.operand_type[i] != OP_INOUT)
893 	continue;
894       cl = single_reg_operand_class (i);
895       if (cl == NO_REGS)
896 	continue;
897 
898       operand_a = NULL;
899 
900       if (GET_CODE (operand) == SUBREG)
901 	operand = SUBREG_REG (operand);
902 
903       if (REG_P (operand)
904 	  && (regno = REGNO (operand)) >= FIRST_PSEUDO_REGISTER)
905 	{
906 	  enum reg_class aclass;
907 
908 	  operand_a = ira_curr_regno_allocno_map[regno];
909 	  aclass = ALLOCNO_CLASS (operand_a);
910 	  if (ira_class_subset_p[cl][aclass])
911 	    {
912 	      /* View the desired allocation of OPERAND as:
913 
914 		    (REG:YMODE YREGNO),
915 
916 		 a simplification of:
917 
918 		    (subreg:YMODE (reg:XMODE XREGNO) OFFSET).  */
919 	      machine_mode ymode, xmode;
920 	      int xregno, yregno;
921 	      HOST_WIDE_INT offset;
922 
923 	      xmode = recog_data.operand_mode[i];
924 	      xregno = ira_class_singleton[cl][xmode];
925 	      gcc_assert (xregno >= 0);
926 	      ymode = ALLOCNO_MODE (operand_a);
927 	      offset = subreg_lowpart_offset (ymode, xmode);
928 	      yregno = simplify_subreg_regno (xregno, xmode, offset, ymode);
929 	      if (yregno >= 0
930 		  && ira_class_hard_reg_index[aclass][yregno] >= 0)
931 		{
932 		  int cost;
933 
934 		  ira_allocate_and_set_costs
935 		    (&ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a),
936 		     aclass, 0);
937 		  ira_init_register_move_cost_if_necessary (xmode);
938 		  cost = freq * (in_p
939 				 ? ira_register_move_cost[xmode][aclass][cl]
940 				 : ira_register_move_cost[xmode][cl][aclass]);
941 		  ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a)
942 		    [ira_class_hard_reg_index[aclass][yregno]] -= cost;
943 		}
944 	    }
945 	}
946 
947       EXECUTE_IF_SET_IN_SPARSESET (objects_live, px)
948         {
949 	  ira_object_t obj = ira_object_id_map[px];
950 	  a = OBJECT_ALLOCNO (obj);
951 	  if (a != operand_a)
952 	    {
	      /* We could increase costs of A instead of making it
		 conflicting with the hard register.  But it works worse
		 because it will be spilled in reload anyway.  */
956 	      IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
957 				reg_class_contents[cl]);
958 	      IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
959 				reg_class_contents[cl]);
960 	    }
961 	}
962     }
963 }
964 
965 /* Look through the CALL_INSN_FUNCTION_USAGE of a call insn INSN, and see if
966    we find a SET rtx that we can use to deduce that a register can be cheaply
967    caller-saved.  Return such a register, or NULL_RTX if none is found.  */
968 static rtx
find_call_crossed_cheap_reg (rtx_insn *insn)
970 {
971   rtx cheap_reg = NULL_RTX;
972   rtx exp = CALL_INSN_FUNCTION_USAGE (insn);
973 
974   while (exp != NULL)
975     {
976       rtx x = XEXP (exp, 0);
977       if (GET_CODE (x) == SET)
978 	{
979 	  exp = x;
980 	  break;
981 	}
982       exp = XEXP (exp, 1);
983     }
984   if (exp != NULL)
985     {
986       basic_block bb = BLOCK_FOR_INSN (insn);
987       rtx reg = SET_SRC (exp);
988       rtx_insn *prev = PREV_INSN (insn);
989       while (prev && !(INSN_P (prev)
990 		       && BLOCK_FOR_INSN (prev) != bb))
991 	{
992 	  if (NONDEBUG_INSN_P (prev))
993 	    {
994 	      rtx set = single_set (prev);
995 
996 	      if (set && rtx_equal_p (SET_DEST (set), reg))
997 		{
998 		  rtx src = SET_SRC (set);
999 		  if (!REG_P (src) || HARD_REGISTER_P (src)
1000 		      || !pseudo_regno_single_word_and_live_p (REGNO (src)))
1001 		    break;
1002 		  if (!modified_between_p (src, prev, insn))
1003 		    cheap_reg = src;
1004 		  break;
1005 		}
1006 	      if (set && rtx_equal_p (SET_SRC (set), reg))
1007 		{
1008 		  rtx dest = SET_DEST (set);
1009 		  if (!REG_P (dest) || HARD_REGISTER_P (dest)
1010 		      || !pseudo_regno_single_word_and_live_p (REGNO (dest)))
1011 		    break;
1012 		  if (!modified_between_p (dest, prev, insn))
1013 		    cheap_reg = dest;
1014 		  break;
1015 		}
1016 
1017 	      if (reg_set_p (reg, prev))
1018 		break;
1019 	    }
1020 	  prev = PREV_INSN (prev);
1021 	}
1022     }
1023   return cheap_reg;
1024 }
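
/* A typical case (illustrative): for a call to a function known to
   return one of its arguments, CALL_INSN_FUNCTION_USAGE contains a SET
   describing the returned value.  If an insn shortly before the call
   copied a single-word live pseudo into the corresponding register and
   nothing modified it since, that pseudo is the cheap register found
   above: after the call it can be recovered from the return value
   instead of being saved around the call.  */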
1025 
/* Process insns of the basic block given by its LOOP_TREE_NODE to
   update allocno live ranges, allocno hard register conflicts,
   intersected calls, and register pressure info for allocnos, both for
   the basic block and for the regions containing the basic block.  */
1030 static void
process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
1032 {
1033   int i, freq;
1034   unsigned int j;
1035   basic_block bb;
1036   rtx_insn *insn;
1037   bitmap_iterator bi;
1038   bitmap reg_live_out;
1039   unsigned int px;
1040   bool set_p;
1041 
1042   bb = loop_tree_node->bb;
1043   if (bb != NULL)
1044     {
1045       for (i = 0; i < ira_pressure_classes_num; i++)
1046 	{
1047 	  curr_reg_pressure[ira_pressure_classes[i]] = 0;
1048 	  high_pressure_start_point[ira_pressure_classes[i]] = -1;
1049 	}
1050       curr_bb_node = loop_tree_node;
1051       reg_live_out = df_get_live_out (bb);
1052       sparseset_clear (objects_live);
1053       REG_SET_TO_HARD_REG_SET (hard_regs_live, reg_live_out);
1054       AND_COMPL_HARD_REG_SET (hard_regs_live, eliminable_regset);
1055       AND_COMPL_HARD_REG_SET (hard_regs_live, ira_no_alloc_regs);
1056       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1057 	if (TEST_HARD_REG_BIT (hard_regs_live, i))
1058 	  {
1059 	    enum reg_class aclass, pclass, cl;
1060 
1061 	    aclass = ira_allocno_class_translate[REGNO_REG_CLASS (i)];
1062 	    pclass = ira_pressure_class_translate[aclass];
1063 	    for (j = 0;
1064 		 (cl = ira_reg_class_super_classes[pclass][j])
1065 		   != LIM_REG_CLASSES;
1066 		 j++)
1067 	      {
1068 		if (! ira_reg_pressure_class_p[cl])
1069 		  continue;
1070 		curr_reg_pressure[cl]++;
1071 		if (curr_bb_node->reg_pressure[cl] < curr_reg_pressure[cl])
1072 		  curr_bb_node->reg_pressure[cl] = curr_reg_pressure[cl];
1073 		ira_assert (curr_reg_pressure[cl]
1074 			    <= ira_class_hard_regs_num[cl]);
1075 	      }
1076 	  }
1077       EXECUTE_IF_SET_IN_BITMAP (reg_live_out, FIRST_PSEUDO_REGISTER, j, bi)
1078 	mark_pseudo_regno_live (j);
1079 
1080       freq = REG_FREQ_FROM_BB (bb);
1081       if (freq == 0)
1082 	freq = 1;
1083 
1084       /* Invalidate all allocno_saved_at_call entries.  */
1085       last_call_num++;
1086 
1087       /* Scan the code of this basic block, noting which allocnos and
1088 	 hard regs are born or die.
1089 
1090 	 Note that this loop treats uninitialized values as live until
1091 	 the beginning of the block.  For example, if an instruction
1092 	 uses (reg:DI foo), and only (subreg:SI (reg:DI foo) 0) is ever
1093 	 set, FOO will remain live until the beginning of the block.
1094 	 Likewise if FOO is not set at all.  This is unnecessarily
1095 	 pessimistic, but it probably doesn't matter much in practice.  */
1096       FOR_BB_INSNS_REVERSE (bb, insn)
1097 	{
1098 	  ira_allocno_t a;
1099 	  df_ref def, use;
1100 	  bool call_p;
1101 
1102 	  if (!NONDEBUG_INSN_P (insn))
1103 	    continue;
1104 
1105 	  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
1106 	    fprintf (ira_dump_file, "   Insn %u(l%d): point = %d\n",
1107 		     INSN_UID (insn), loop_tree_node->parent->loop_num,
1108 		     curr_point);
1109 
1110 	  call_p = CALL_P (insn);
1111 #ifdef REAL_PIC_OFFSET_TABLE_REGNUM
1112 	  int regno;
1113 	  bool clear_pic_use_conflict_p = false;
	  /* Processing insn usage in a call insn can create a conflict
	     between the PIC pseudo and the PIC hard reg, which is wrong.
	     Check for this situation and fix it at the end of the insn
	     processing.  */
1118 	  if (call_p && pic_offset_table_rtx != NULL_RTX
1119 	      && (regno = REGNO (pic_offset_table_rtx)) >= FIRST_PSEUDO_REGISTER
1120 	      && (a = ira_curr_regno_allocno_map[regno]) != NULL)
1121 	    clear_pic_use_conflict_p
1122 		= (find_regno_fusage (insn, USE, REAL_PIC_OFFSET_TABLE_REGNUM)
1123 		   && ! TEST_HARD_REG_BIT (OBJECT_CONFLICT_HARD_REGS
1124 					   (ALLOCNO_OBJECT (a, 0)),
1125 					   REAL_PIC_OFFSET_TABLE_REGNUM));
1126 #endif
1127 
1128 	  /* Mark each defined value as live.  We need to do this for
1129 	     unused values because they still conflict with quantities
1130 	     that are live at the time of the definition.
1131 
1132 	     Ignore DF_REF_MAY_CLOBBERs on a call instruction.  Such
1133 	     references represent the effect of the called function
1134 	     on a call-clobbered register.  Marking the register as
1135 	     live would stop us from allocating it to a call-crossing
1136 	     allocno.  */
1137 	  FOR_EACH_INSN_DEF (def, insn)
1138 	    if (!call_p || !DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
1139 	      mark_ref_live (def);
1140 
1141 	  /* If INSN has multiple outputs, then any value used in one
1142 	     of the outputs conflicts with the other outputs.  Model this
1143 	     by making the used value live during the output phase.
1144 
1145 	     It is unsafe to use !single_set here since it will ignore
1146 	     an unused output.  Just because an output is unused does
1147 	     not mean the compiler can assume the side effect will not
1148 	     occur.  Consider if ALLOCNO appears in the address of an
1149 	     output and we reload the output.  If we allocate ALLOCNO
1150 	     to the same hard register as an unused output we could
1151 	     set the hard register before the output reload insn.  */
1152 	  if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
1153 	    FOR_EACH_INSN_USE (use, insn)
1154 	      {
1155 		int i;
1156 		rtx reg;
1157 
1158 		reg = DF_REF_REG (use);
1159 		for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
1160 		  {
1161 		    rtx set;
1162 
1163 		    set = XVECEXP (PATTERN (insn), 0, i);
1164 		    if (GET_CODE (set) == SET
1165 			&& reg_overlap_mentioned_p (reg, SET_DEST (set)))
1166 		      {
1167 			/* After the previous loop, this is a no-op if
1168 			   REG is contained within SET_DEST (SET).  */
1169 			mark_ref_live (use);
1170 			break;
1171 		      }
1172 		  }
1173 	      }
1174 
1175 	  extract_insn (insn);
1176 	  preferred_alternatives = get_preferred_alternatives (insn);
1177 	  preprocess_constraints (insn);
1178 	  process_single_reg_class_operands (false, freq);
1179 
1180 	  /* See which defined values die here.  */
1181 	  FOR_EACH_INSN_DEF (def, insn)
1182 	    if (!call_p || !DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
1183 	      mark_ref_dead (def);
1184 
1185 	  if (call_p)
1186 	    {
1187 	      /* Try to find a SET in the CALL_INSN_FUNCTION_USAGE, and from
1188 		 there, try to find a pseudo that is live across the call but
1189 		 can be cheaply reconstructed from the return value.  */
1190 	      rtx cheap_reg = find_call_crossed_cheap_reg (insn);
1191 	      if (cheap_reg != NULL_RTX)
1192 		add_reg_note (insn, REG_RETURNED, cheap_reg);
1193 
1194 	      last_call_num++;
1195 	      sparseset_clear (allocnos_processed);
1196 	      /* The current set of live allocnos are live across the call.  */
1197 	      EXECUTE_IF_SET_IN_SPARSESET (objects_live, i)
1198 	        {
1199 		  ira_object_t obj = ira_object_id_map[i];
1200 		  a = OBJECT_ALLOCNO (obj);
1201 		  int num = ALLOCNO_NUM (a);
1202 		  HARD_REG_SET this_call_used_reg_set;
1203 
1204 		  get_call_reg_set_usage (insn, &this_call_used_reg_set,
1205 					  call_used_reg_set);
1206 
1207 		  /* Don't allocate allocnos that cross setjmps or any
1208 		     call, if this function receives a nonlocal
1209 		     goto.  */
1210 		  if (cfun->has_nonlocal_label
1211 		      || find_reg_note (insn, REG_SETJMP,
1212 					NULL_RTX) != NULL_RTX)
1213 		    {
1214 		      SET_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj));
1215 		      SET_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj));
1216 		    }
1217 		  if (can_throw_internal (insn))
1218 		    {
1219 		      IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
1220 					this_call_used_reg_set);
1221 		      IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
1222 					this_call_used_reg_set);
1223 		    }
1224 
1225 		  if (sparseset_bit_p (allocnos_processed, num))
1226 		    continue;
1227 		  sparseset_set_bit (allocnos_processed, num);
1228 
1229 		  if (allocno_saved_at_call[num] != last_call_num)
		    /* Here we are mimicking caller-save.c behavior,
		       which does not save a hard register at a call if
		       it was saved at a previous call in the same basic
		       block and the hard register was not mentioned
		       between the two calls.  */
1235 		    ALLOCNO_CALL_FREQ (a) += freq;
1236 		  /* Mark it as saved at the next call.  */
1237 		  allocno_saved_at_call[num] = last_call_num + 1;
1238 		  ALLOCNO_CALLS_CROSSED_NUM (a)++;
1239 		  IOR_HARD_REG_SET (ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a),
1240 				    this_call_used_reg_set);
1241 		  if (cheap_reg != NULL_RTX
1242 		      && ALLOCNO_REGNO (a) == (int) REGNO (cheap_reg))
1243 		    ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a)++;
1244 		}
1245 	    }
1246 
1247 	  make_early_clobber_and_input_conflicts ();
1248 
1249 	  curr_point++;
1250 
1251 	  /* Mark each used value as live.  */
1252 	  FOR_EACH_INSN_USE (use, insn)
1253 	    mark_ref_live (use);
1254 
1255 	  process_single_reg_class_operands (true, freq);
1256 
1257 	  set_p = mark_hard_reg_early_clobbers (insn, true);
1258 
1259 	  if (set_p)
1260 	    {
1261 	      mark_hard_reg_early_clobbers (insn, false);
1262 
1263 	      /* Mark each hard reg as live again.  For example, a
1264 		 hard register can be in clobber and in an insn
1265 		 input.  */
1266 	      FOR_EACH_INSN_USE (use, insn)
1267 		{
1268 		  rtx ureg = DF_REF_REG (use);
1269 
1270 		  if (GET_CODE (ureg) == SUBREG)
1271 		    ureg = SUBREG_REG (ureg);
1272 		  if (! REG_P (ureg) || REGNO (ureg) >= FIRST_PSEUDO_REGISTER)
1273 		    continue;
1274 
1275 		  mark_ref_live (use);
1276 		}
1277 	    }
1278 
1279 #ifdef REAL_PIC_OFFSET_TABLE_REGNUM
1280 	  if (clear_pic_use_conflict_p)
1281 	    {
1282 	      regno = REGNO (pic_offset_table_rtx);
1283 	      a = ira_curr_regno_allocno_map[regno];
1284 	      CLEAR_HARD_REG_BIT (OBJECT_CONFLICT_HARD_REGS (ALLOCNO_OBJECT (a, 0)),
1285 				  REAL_PIC_OFFSET_TABLE_REGNUM);
1286 	      CLEAR_HARD_REG_BIT (OBJECT_TOTAL_CONFLICT_HARD_REGS
1287 				  (ALLOCNO_OBJECT (a, 0)),
1288 				  REAL_PIC_OFFSET_TABLE_REGNUM);
1289 	    }
1290 #endif
1291 	  curr_point++;
1292 	}
1293 
1294       if (bb_has_eh_pred (bb))
1295 	for (j = 0; ; ++j)
1296 	  {
1297 	    unsigned int regno = EH_RETURN_DATA_REGNO (j);
1298 	    if (regno == INVALID_REGNUM)
1299 	      break;
1300 	    make_hard_regno_born (regno);
1301 	  }
1302 
1303       /* Allocnos can't go in stack regs at the start of a basic block
1304 	 that is reached by an abnormal edge. Likewise for call
1305 	 clobbered regs, because caller-save, fixup_abnormal_edges and
1306 	 possibly the table driven EH machinery are not quite ready to
1307 	 handle such allocnos live across such edges.  */
1308       if (bb_has_abnormal_pred (bb))
1309 	{
1310 #ifdef STACK_REGS
1311 	  EXECUTE_IF_SET_IN_SPARSESET (objects_live, px)
1312 	    {
1313 	      ira_allocno_t a = OBJECT_ALLOCNO (ira_object_id_map[px]);
1314 
1315 	      ALLOCNO_NO_STACK_REG_P (a) = true;
1316 	      ALLOCNO_TOTAL_NO_STACK_REG_P (a) = true;
1317 	    }
1318 	  for (px = FIRST_STACK_REG; px <= LAST_STACK_REG; px++)
1319 	    make_hard_regno_born (px);
1320 #endif
1321 	  /* No need to record conflicts for call clobbered regs if we
1322 	     have nonlocal labels around, as we don't ever try to
1323 	     allocate such regs in this case.  */
1324 	  if (!cfun->has_nonlocal_label
1325 	      && has_abnormal_call_or_eh_pred_edge_p (bb))
1326 	    for (px = 0; px < FIRST_PSEUDO_REGISTER; px++)
1327 	      if (call_used_regs[px]
1328 #ifdef REAL_PIC_OFFSET_TABLE_REGNUM
		  /* We should create a conflict of the PIC pseudo with
		     the PIC hard reg, as the PIC hard reg can have a
		     wrong value after the jump described by the abnormal
		     edge.  In this case we cannot allocate the PIC hard
		     reg to the PIC pseudo, as the PIC pseudo will also
		     have a wrong value.  This code is not critical, as
		     LRA can fix it, but it is better to have the right
		     allocation earlier.  */
1337 		  || (px == REAL_PIC_OFFSET_TABLE_REGNUM
1338 		      && pic_offset_table_rtx != NULL_RTX
1339 		      && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
1340 #endif
1341 		  )
1342 		make_hard_regno_born (px);
1343 	}
1344 
1345       EXECUTE_IF_SET_IN_SPARSESET (objects_live, i)
1346 	make_object_dead (ira_object_id_map[i]);
1347 
1348       curr_point++;
1349 
1350     }
1351   /* Propagate register pressure to upper loop tree nodes.  */
1352   if (loop_tree_node != ira_loop_tree_root)
1353     for (i = 0; i < ira_pressure_classes_num; i++)
1354       {
1355 	enum reg_class pclass;
1356 
1357 	pclass = ira_pressure_classes[i];
1358 	if (loop_tree_node->reg_pressure[pclass]
1359 	    > loop_tree_node->parent->reg_pressure[pclass])
1360 	  loop_tree_node->parent->reg_pressure[pclass]
1361 	    = loop_tree_node->reg_pressure[pclass];
1362       }
1363 }
1364 
1365 /* Create and set up IRA_START_POINT_RANGES and
1366    IRA_FINISH_POINT_RANGES.  */
1367 static void
create_start_finish_chains (void)
1369 {
1370   ira_object_t obj;
1371   ira_object_iterator oi;
1372   live_range_t r;
1373 
1374   ira_start_point_ranges
1375     = (live_range_t *) ira_allocate (ira_max_point * sizeof (live_range_t));
1376   memset (ira_start_point_ranges, 0, ira_max_point * sizeof (live_range_t));
1377   ira_finish_point_ranges
1378     = (live_range_t *) ira_allocate (ira_max_point * sizeof (live_range_t));
1379   memset (ira_finish_point_ranges, 0, ira_max_point * sizeof (live_range_t));
1380   FOR_EACH_OBJECT (obj, oi)
1381     for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
1382       {
1383 	r->start_next = ira_start_point_ranges[r->start];
1384 	ira_start_point_ranges[r->start] = r;
1385 	r->finish_next = ira_finish_point_ranges[r->finish];
	ira_finish_point_ranges[r->finish] = r;
1387       }
1388 }
1389 
/* Rebuild IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES after
   new live ranges and program points were added as a result of new
   insn generation.  */
1393 void
ira_rebuild_start_finish_chains (void)
1395 {
1396   ira_free (ira_finish_point_ranges);
1397   ira_free (ira_start_point_ranges);
1398   create_start_finish_chains ();
1399 }
1400 
1401 /* Compress allocno live ranges by removing program points where
1402    nothing happens.  */
1403 static void
remove_some_program_points_and_update_live_ranges (void)
1405 {
1406   unsigned i;
1407   int n;
1408   int *map;
1409   ira_object_t obj;
1410   ira_object_iterator oi;
1411   live_range_t r, prev_r, next_r;
1412   sbitmap born_or_dead, born, dead;
1413   sbitmap_iterator sbi;
1414   bool born_p, dead_p, prev_born_p, prev_dead_p;
1415 
1416   born = sbitmap_alloc (ira_max_point);
1417   dead = sbitmap_alloc (ira_max_point);
1418   bitmap_clear (born);
1419   bitmap_clear (dead);
1420   FOR_EACH_OBJECT (obj, oi)
1421     for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
1422       {
1423 	ira_assert (r->start <= r->finish);
1424 	bitmap_set_bit (born, r->start);
1425 	bitmap_set_bit (dead, r->finish);
1426       }
1427 
1428   born_or_dead = sbitmap_alloc (ira_max_point);
1429   bitmap_ior (born_or_dead, born, dead);
1430   map = (int *) ira_allocate (sizeof (int) * ira_max_point);
1431   n = -1;
1432   prev_born_p = prev_dead_p = false;
1433   EXECUTE_IF_SET_IN_BITMAP (born_or_dead, 0, i, sbi)
1434     {
1435       born_p = bitmap_bit_p (born, i);
1436       dead_p = bitmap_bit_p (dead, i);
1437       if ((prev_born_p && ! prev_dead_p && born_p && ! dead_p)
1438 	  || (prev_dead_p && ! prev_born_p && dead_p && ! born_p))
1439 	map[i] = n;
1440       else
1441 	map[i] = ++n;
1442       prev_born_p = born_p;
1443       prev_dead_p = dead_p;
1444     }
1445   sbitmap_free (born_or_dead);
1446   sbitmap_free (born);
1447   sbitmap_free (dead);
1448   n++;
1449   if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1450     fprintf (ira_dump_file, "Compressing live ranges: from %d to %d - %d%%\n",
1451 	     ira_max_point, n, 100 * n / ira_max_point);
1452   ira_max_point = n;
1453 
1454   FOR_EACH_OBJECT (obj, oi)
1455     for (r = OBJECT_LIVE_RANGES (obj), prev_r = NULL; r != NULL; r = next_r)
1456       {
1457 	next_r = r->next;
1458 	r->start = map[r->start];
1459 	r->finish = map[r->finish];
1460 	if (prev_r == NULL || prev_r->start > r->finish + 1)
1461 	  {
1462 	    prev_r = r;
1463 	    continue;
1464 	  }
1465 	prev_r->start = r->start;
1466 	prev_r->next = next_r;
1467 	ira_finish_live_range (r);
1468       }
1469 
1470   ira_free (map);
1471 }
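
/* Sketch of the compression (illustrative points): if objects are only
   born at points 3 and 4 and only die at points 9 and 10, the
   consecutive birth-only points map to one compressed point and the
   consecutive death-only points to another, so ranges such as [3..10]
   and [4..9] both become [0..1] while their overlap relations are
   preserved.  */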
1472 
1473 /* Print live ranges R to file F.  */
1474 void
ira_print_live_range_list (FILE *f, live_range_t r)
1476 {
1477   for (; r != NULL; r = r->next)
1478     fprintf (f, " [%d..%d]", r->start, r->finish);
1479   fprintf (f, "\n");
1480 }
1481 
1482 DEBUG_FUNCTION void
debug (live_range &ref)
1484 {
1485   ira_print_live_range_list (stderr, &ref);
1486 }
1487 
1488 DEBUG_FUNCTION void
debug (live_range *ptr)
1490 {
1491   if (ptr)
1492     debug (*ptr);
1493   else
1494     fprintf (stderr, "<nil>\n");
1495 }
1496 
1497 /* Print live ranges R to stderr.  */
1498 void
ira_debug_live_range_list (live_range_t r)
1500 {
1501   ira_print_live_range_list (stderr, r);
1502 }
1503 
1504 /* Print live ranges of object OBJ to file F.  */
1505 static void
print_object_live_ranges (FILE *f, ira_object_t obj)
1507 {
1508   ira_print_live_range_list (f, OBJECT_LIVE_RANGES (obj));
1509 }
1510 
1511 /* Print live ranges of allocno A to file F.  */
1512 static void
print_allocno_live_ranges (FILE *f, ira_allocno_t a)
1514 {
1515   int n = ALLOCNO_NUM_OBJECTS (a);
1516   int i;
1517 
1518   for (i = 0; i < n; i++)
1519     {
1520       fprintf (f, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
1521       if (n > 1)
1522 	fprintf (f, " [%d]", i);
1523       fprintf (f, "):");
1524       print_object_live_ranges (f, ALLOCNO_OBJECT (a, i));
1525     }
1526 }
1527 
1528 /* Print live ranges of allocno A to stderr.  */
1529 void
ira_debug_allocno_live_ranges (ira_allocno_t a)
1531 {
1532   print_allocno_live_ranges (stderr, a);
1533 }
1534 
1535 /* Print live ranges of all allocnos to file F.  */
1536 static void
print_live_ranges (FILE *f)
1538 {
1539   ira_allocno_t a;
1540   ira_allocno_iterator ai;
1541 
1542   FOR_EACH_ALLOCNO (a, ai)
1543     print_allocno_live_ranges (f, a);
1544 }
1545 
1546 /* Print live ranges of all allocnos to stderr.  */
1547 void
ira_debug_live_ranges (void)
1549 {
1550   print_live_ranges (stderr);
1551 }
1552 
/* The main entry function creates live ranges, sets up
   CONFLICT_HARD_REGS and TOTAL_CONFLICT_HARD_REGS for objects, and
   calculates register pressure info.  */
1556 void
ira_create_allocno_live_ranges (void)
1558 {
1559   objects_live = sparseset_alloc (ira_objects_num);
1560   allocnos_processed = sparseset_alloc (ira_allocnos_num);
1561   curr_point = 0;
1562   last_call_num = 0;
1563   allocno_saved_at_call
1564     = (int *) ira_allocate (ira_allocnos_num * sizeof (int));
1565   memset (allocno_saved_at_call, 0, ira_allocnos_num * sizeof (int));
1566   ira_traverse_loop_tree (true, ira_loop_tree_root, NULL,
1567 			  process_bb_node_lives);
1568   ira_max_point = curr_point;
1569   create_start_finish_chains ();
1570   if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
1571     print_live_ranges (ira_dump_file);
1572   /* Clean up.  */
1573   ira_free (allocno_saved_at_call);
1574   sparseset_free (objects_live);
1575   sparseset_free (allocnos_processed);
1576 }
1577 
1578 /* Compress allocno live ranges.  */
1579 void
ira_compress_allocno_live_ranges (void)
1581 {
1582   remove_some_program_points_and_update_live_ranges ();
1583   ira_rebuild_start_finish_chains ();
1584   if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
1585     {
1586       fprintf (ira_dump_file, "Ranges after the compression:\n");
1587       print_live_ranges (ira_dump_file);
1588     }
1589 }
1590 
1591 /* Free arrays IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES.  */
1592 void
ira_finish_allocno_live_ranges (void)
1594 {
1595   ira_free (ira_finish_point_ranges);
1596   ira_free (ira_start_point_ranges);
1597 }
1598