//! Implementation of the linear scan allocator algorithm.
//!
//! This tries to follow the implementation as suggested by:
//!   Optimized Interval Splitting in a Linear Scan Register Allocator,
//!     by Wimmer et al., 2005

use log::{info, log_enabled, trace, Level};

use std::default;
use std::env;
use std::fmt;

use crate::data_structures::{BlockIx, InstIx, InstPoint, Point, RealReg, RegVecsAndBounds};
use crate::inst_stream::{add_spills_reloads_and_moves, InstToInsertAndExtPoint};
use crate::{
    checker::CheckerContext, reg_maps::MentionRegUsageMapper, Function, RealRegUniverse,
    RegAllocError, RegAllocResult, RegClass, Set, SpillSlot, VirtualReg, NUM_REG_CLASSES,
};

use analysis::{AnalysisInfo, RangeFrag};
use smallvec::SmallVec;

mod analysis;
mod assign_registers;
mod resolve_moves;

#[derive(Default)]
pub(crate) struct Statistics {
    only_large: bool,

    num_fixed: usize,
    num_vregs: usize,
    num_virtual_ranges: usize,

    peak_active: usize,
    peak_inactive: usize,

    num_try_allocate_reg: usize,
    num_try_allocate_reg_success: usize,

    num_reg_splits: usize,
    num_reg_splits_success: usize,
}

impl Drop for Statistics {
    fn drop(&mut self) {
        if self.only_large && self.num_vregs < 1000 {
            return;
        }
        println!(
            "stats: {} fixed; {} vreg; {} vranges; {} peak-active; {} peak-inactive; {} direct-alloc; {} total-alloc; {} partial-splits; {} partial-splits-attempts",
            self.num_fixed,
            self.num_vregs,
            self.num_virtual_ranges,
            self.peak_active,
            self.peak_inactive,
            self.num_try_allocate_reg_success,
            self.num_try_allocate_reg,
            self.num_reg_splits_success,
            self.num_reg_splits,
        );
    }
}

/// Which strategy should we use when trying to find the best split position?
/// TODO Consider loop depth to avoid splitting in the middle of a loop
/// whenever possible.
#[derive(Copy, Clone, Debug)]
enum OptimalSplitStrategy {
    From,
    To,
    NextFrom,
    NextNextFrom,
    PrevTo,
    PrevPrevTo,
    Mid,
}

#[derive(Clone)]
pub struct LinearScanOptions {
    split_strategy: OptimalSplitStrategy,
    partial_split: bool,
    partial_split_near_end: bool,
    stats: bool,
    large_stats: bool,
}

impl default::Default for LinearScanOptions {
    fn default() -> Self {
        // Useful for debugging.
        let optimal_split_strategy = match env::var("LSRA_SPLIT") {
            Ok(s) => match s.as_str() {
                "t" | "to" => OptimalSplitStrategy::To,
                "n" => OptimalSplitStrategy::NextFrom,
                "nn" => OptimalSplitStrategy::NextNextFrom,
                "p" => OptimalSplitStrategy::PrevTo,
                "pp" => OptimalSplitStrategy::PrevPrevTo,
                "m" | "mid" => OptimalSplitStrategy::Mid,
                _ => OptimalSplitStrategy::From,
            },
            Err(_) => OptimalSplitStrategy::From,
        };

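        // For the following boolean options, only the presence of the environment variable
        // matters: any value (e.g. `LSRA_STATS=1`, illustrative) enables the option.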
        let large_stats = env::var("LSRA_LARGE_STATS").is_ok();
        let stats = env::var("LSRA_STATS").is_ok() || large_stats;

        let partial_split = env::var("LSRA_PARTIAL").is_ok();
        let partial_split_near_end = env::var("LSRA_PARTIAL_END").is_ok();

        Self {
            split_strategy: optimal_split_strategy,
            partial_split,
            partial_split_near_end,
            stats,
            large_stats,
        }
    }
}

impl fmt::Debug for LinearScanOptions {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        writeln!(fmt, "linear scan")?;
        write!(fmt, "  split: {:?}", self.split_strategy)
    }
}

// Local shorthands.
type RegUses = RegVecsAndBounds;

/// A unique identifier for an interval.
#[derive(Clone, Copy, PartialEq, Eq)]
struct IntId(pub(crate) usize);

impl fmt::Debug for IntId {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "int{}", self.0)
    }
}

#[derive(Clone)]
struct FixedInterval {
    reg: RealReg,
    frags: Vec<RangeFrag>,
}

impl fmt::Display for FixedInterval {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "fixed {:?} [", self.reg)?;
        for (i, frag) in self.frags.iter().enumerate() {
            if i > 0 {
                write!(f, ", ")?;
            }
            write!(f, "({:?}, {:?})", frag.first, frag.last)?;
        }
        write!(f, "]")
    }
}

#[derive(Clone)]
pub(crate) struct VirtualInterval {
    id: IntId,
    vreg: VirtualReg,

    /// Parent interval in the split tree.
    parent: Option<IntId>,
    /// Ancestor interval in the split tree, if this interval results from a split.
    ancestor: Option<IntId>,
    /// Child interval, if it has one, in the split tree.
    child: Option<IntId>,

    /// Location assigned to this live interval.
    location: Location,

    mentions: MentionMap,
    start: InstPoint,
    end: InstPoint,
}

impl fmt::Display for VirtualInterval {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(fmt, "virtual {:?}", self.id)?;
        if let Some(ref p) = self.parent {
            write!(fmt, " (parent={:?})", p)?;
        }
        write!(
            fmt,
            ": {:?} {} [{:?}; {:?}]",
            self.vreg, self.location, self.start, self.end
        )
    }
}

impl VirtualInterval {
    fn new(
        id: IntId,
        vreg: VirtualReg,
        start: InstPoint,
        end: InstPoint,
        mentions: MentionMap,
    ) -> Self {
        Self {
            id,
            vreg,
            parent: None,
            ancestor: None,
            child: None,
            location: Location::None,
            mentions,
            start,
            end,
        }
    }
    fn mentions(&self) -> &MentionMap {
        &self.mentions
    }
    fn mentions_mut(&mut self) -> &mut MentionMap {
        &mut self.mentions
    }
    fn covers(&self, pos: InstPoint) -> bool {
        self.start <= pos && pos <= self.end
    }
}

/// This data structure tracks the mentions of a register (virtual or real) at a precise
/// instruction point. It's a set encoded as three flags, one for each of use/mod/def.
#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub struct Mention(u8);

impl fmt::Debug for Mention {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let mut comma = false;
        if self.0 & 1 == 1 {
            write!(fmt, "use")?;
            comma = true;
        }
        if (self.0 >> 1) & 1 == 1 {
            if comma {
                write!(fmt, ",")?;
            }
            write!(fmt, "mod")?;
            comma = true;
        }
        if (self.0 >> 2) & 1 == 1 {
            if comma {
                write!(fmt, ",")?;
            }
            write!(fmt, "def")?;
        }
        Ok(())
    }
}

impl Mention {
    fn new() -> Self {
        Self(0)
    }

    // Setters.
    fn add_use(&mut self) {
        self.0 |= 1 << 0;
    }
    fn add_mod(&mut self) {
        self.0 |= 1 << 1;
    }
    fn add_def(&mut self) {
        self.0 |= 1 << 2;
    }

    fn remove_use(&mut self) {
        self.0 &= !(1 << 0);
    }

    // Getters.
    fn is_use(&self) -> bool {
        (self.0 & 0b001) != 0
    }
    fn is_mod(&self) -> bool {
        (self.0 & 0b010) != 0
    }
    fn is_def(&self) -> bool {
        (self.0 & 0b100) != 0
    }
    fn is_use_or_mod(&self) -> bool {
        (self.0 & 0b011) != 0
    }
    fn is_mod_or_def(&self) -> bool {
        (self.0 & 0b110) != 0
    }
}
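
// A minimal sketch of the flag encoding above, as a unit test: a "mod" mention answers both the
// use-or-mod and the mod-or-def queries, and clearing the "use" flag leaves the others intact.
#[cfg(test)]
mod mention_encoding_tests {
    use super::Mention;

    #[test]
    fn flags_roundtrip() {
        let mut m = Mention::new();
        m.add_use();
        m.add_mod();
        assert!(m.is_use() && m.is_mod() && !m.is_def());
        assert!(m.is_use_or_mod() && m.is_mod_or_def());
        m.remove_use();
        assert!(!m.is_use() && m.is_use_or_mod() && m.is_mod_or_def());
    }
}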

pub type MentionMap = SmallVec<[(InstIx, Mention); 2]>;

#[derive(Debug, Clone, Copy)]
pub(crate) enum Location {
    None,
    Reg(RealReg),
    Stack(SpillSlot),
}

impl Location {
    pub(crate) fn reg(&self) -> Option<RealReg> {
        match self {
            Location::Reg(reg) => Some(*reg),
            _ => None,
        }
    }
    pub(crate) fn spill(&self) -> Option<SpillSlot> {
        match self {
            Location::Stack(slot) => Some(*slot),
            _ => None,
        }
    }
    pub(crate) fn is_none(&self) -> bool {
        match self {
            Location::None => true,
            _ => false,
        }
    }
}

impl fmt::Display for Location {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Location::None => write!(fmt, "none"),
            Location::Reg(reg) => write!(fmt, "{:?}", reg),
            Location::Stack(slot) => write!(fmt, "{:?}", slot),
        }
    }
}

/// A group of live intervals.
pub struct Intervals {
    virtuals: Vec<VirtualInterval>,
    fixeds: Vec<FixedInterval>,
}

impl Intervals {
    fn get(&self, int_id: IntId) -> &VirtualInterval {
        &self.virtuals[int_id.0]
    }
    fn get_mut(&mut self, int_id: IntId) -> &mut VirtualInterval {
        &mut self.virtuals[int_id.0]
    }
    fn num_virtual_intervals(&self) -> usize {
        self.virtuals.len()
    }

    // Mutators.
    fn set_reg(&mut self, int_id: IntId, reg: RealReg) {
        let int = self.get_mut(int_id);
        debug_assert!(int.location.is_none());
        int.location = Location::Reg(reg);
    }
    fn set_spill(&mut self, int_id: IntId, slot: SpillSlot) {
        let int = self.get_mut(int_id);
        debug_assert!(int.location.spill().is_none());
        int.location = Location::Stack(slot);
    }
    fn push_interval(&mut self, int: VirtualInterval) {
        debug_assert!(int.id.0 == self.virtuals.len());
        self.virtuals.push(int);
    }
    fn set_child(&mut self, int_id: IntId, child_id: IntId) {
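        // If `int_id` already has a child in the split tree, splice the new child in between:
        // int_id -> child_id -> previous child.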
        if let Some(prev_child) = self.virtuals[int_id.0].child.clone() {
            self.virtuals[child_id.0].child = Some(prev_child);
            self.virtuals[prev_child.0].parent = Some(child_id);
        }
        self.virtuals[int_id.0].child = Some(child_id);
    }
}

/// Finds the first use of the given interval at or after the given `pos` (`pos` included), in a
/// broad sense of "use" (any of use, mod or def).
///
/// This extends to the left, that is, a "mod" mention counts as a use: for instance, if `pos` is
/// the use point of an instruction that only modifies the register, that same use point is
/// returned.
#[inline(never)]
fn next_use(interval: &VirtualInterval, pos: InstPoint, _reg_uses: &RegUses) -> Option<InstPoint> {
    if log_enabled!(Level::Trace) {
        trace!("find next use of {} after {:?}", interval, pos);
    }

    let mentions = interval.mentions();
    let target = InstPoint::max(pos, interval.start);

    let ret = match mentions.binary_search_by_key(&target.iix(), |mention| mention.0) {
        Ok(index) => {
            // Either the selected index is a perfect match, or the next mention is
            // the correct answer.
            let mention = &mentions[index];
            if target.pt() == Point::Use {
                if mention.1.is_use_or_mod() {
                    Some(InstPoint::new_use(mention.0))
                } else {
                    Some(InstPoint::new_def(mention.0))
                }
            } else if target.pt() == Point::Def && mention.1.is_mod_or_def() {
                Some(target)
            } else if index == mentions.len() - 1 {
                None
            } else {
                let mention = &mentions[index + 1];
                if mention.1.is_use_or_mod() {
                    Some(InstPoint::new_use(mention.0))
                } else {
                    Some(InstPoint::new_def(mention.0))
                }
            }
        }

        Err(index) => {
            if index == mentions.len() {
                None
            } else {
                let mention = &mentions[index];
                if mention.1.is_use_or_mod() {
                    Some(InstPoint::new_use(mention.0))
                } else {
                    Some(InstPoint::new_def(mention.0))
                }
            }
        }
    };

    // TODO once the mentions are properly split, this could be removed, in
    // theory.
    let ret = match ret {
        Some(pos) => {
            if pos <= interval.end {
                Some(pos)
            } else {
                None
            }
        }
        None => None,
    };

    ret
}

/// Finds the last use of the given interval at or before the given `pos` (`pos` included in the
/// possible return values).
///
/// This extends to the right, that is, a "mod" mention counts as a def: for instance, if `pos` is
/// the def point of an instruction that only modifies the register, that same def point is
/// returned.
#[inline(never)]
fn last_use(interval: &VirtualInterval, pos: InstPoint, _reg_uses: &RegUses) -> Option<InstPoint> {
    if log_enabled!(Level::Trace) {
        trace!("searching last use of {} before {:?}", interval, pos);
    }

    let mentions = interval.mentions();

    let target = InstPoint::min(pos, interval.end);

    let ret = match mentions.binary_search_by_key(&target.iix(), |mention| mention.0) {
        Ok(index) => {
            // Either the selected index is a perfect match, or the previous mention
            // is the correct answer.
            let mention = &mentions[index];
            if target.pt() == Point::Def {
                if mention.1.is_mod_or_def() {
                    Some(InstPoint::new_def(mention.0))
                } else {
                    Some(InstPoint::new_use(mention.0))
                }
            } else if target.pt() == Point::Use && mention.1.is_use() {
                Some(target)
            } else if index == 0 {
                None
            } else {
                let mention = &mentions[index - 1];
                if mention.1.is_mod_or_def() {
                    Some(InstPoint::new_def(mention.0))
                } else {
                    Some(InstPoint::new_use(mention.0))
                }
            }
        }

        Err(index) => {
            if index == 0 {
                None
            } else {
                let mention = &mentions[index - 1];
                if mention.1.is_mod_or_def() {
                    Some(InstPoint::new_def(mention.0))
                } else {
                    Some(InstPoint::new_use(mention.0))
                }
            }
        }
    };

    // TODO once the mentions are properly split, this could be removed, in
    // theory.
    let ret = match ret {
        Some(pos) => {
            if pos >= interval.start {
                Some(pos)
            } else {
                None
            }
        }
        None => None,
    };

    trace!("mentions: {:?}", mentions);
    trace!("found: {:?}", ret);

    ret
}

/// Checks that each allocatable register class has a scratch register in addition to at least one
/// other allocatable register, and creates a mapping of register class -> scratch register.
fn compute_scratches(
    reg_universe: &RealRegUniverse,
) -> Result<Vec<Option<RealReg>>, RegAllocError> {
    let mut scratches_by_rc = vec![None; NUM_REG_CLASSES];
    for i in 0..NUM_REG_CLASSES {
        if let Some(info) = &reg_universe.allocable_by_class[i] {
            if info.first == info.last {
                return Err(RegAllocError::Other(
                    "at least 2 registers required for linear scan".into(),
                ));
            }
            let scratch = if let Some(suggested_reg) = info.suggested_scratch {
                reg_universe.regs[suggested_reg].0
            } else {
                return Err(RegAllocError::MissingSuggestedScratchReg(
                    RegClass::rc_from_u32(i as u32),
                ));
            };
            scratches_by_rc[i] = Some(scratch);
        }
    }
    Ok(scratches_by_rc)
}

/// Allocator top level.
///
/// `func` is modified so that, when this function returns, it will contain no VirtualReg uses.
///
/// Allocation can fail if there are insufficient registers to even generate spill/reload code, or
/// if the function appears to have any undefined VirtualReg/RealReg uses.
#[inline(never)]
pub(crate) fn run<F: Function>(
    func: &mut F,
    reg_universe: &RealRegUniverse,
    use_checker: bool,
    opts: &LinearScanOptions,
) -> Result<RegAllocResult<F>, RegAllocError> {
    let AnalysisInfo {
        reg_vecs_and_bounds: reg_uses,
        intervals,
        liveins,
        liveouts,
        ..
    } = analysis::run(func, reg_universe).map_err(|err| RegAllocError::Analysis(err))?;

    let scratches_by_rc = compute_scratches(reg_universe)?;

    let stats = if opts.stats {
        let mut stats = Statistics::default();
        stats.num_fixed = intervals.fixeds.len();
        stats.num_virtual_ranges = intervals.virtuals.len();
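        // Use the largest virtual register index mentioned as a proxy for the number of vregs.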
        stats.num_vregs = intervals
            .virtuals
            .iter()
            .map(|virt| virt.vreg.get_index())
            .fold(0, |a, b| usize::max(a, b));
        stats.only_large = opts.large_stats;
        Some(stats)
    } else {
        None
    };

    if log_enabled!(Level::Trace) {
        trace!("fixed intervals:");
        for int in &intervals.fixeds {
            trace!("{}", int);
        }
        trace!("");
        trace!("unassigned intervals:");
        for int in &intervals.virtuals {
            trace!("{}", int);
            for mention in &int.mentions {
                trace!("  mention @ {:?}: {:?}", mention.0, mention.1);
            }
        }
        trace!("");
    }

    let (intervals, mut num_spill_slots) = assign_registers::run(
        opts,
        func,
        &reg_uses,
        reg_universe,
        &scratches_by_rc,
        intervals,
        stats,
    )?;

    let virtuals = &intervals.virtuals;

    let memory_moves = resolve_moves::run(
        func,
        &reg_uses,
        virtuals,
        &liveins,
        &liveouts,
        &mut num_spill_slots,
        &scratches_by_rc,
    );

    apply_registers(
        func,
        virtuals,
        memory_moves,
        reg_universe,
        num_spill_slots,
        use_checker,
    )
}

#[inline(never)]
fn set_registers<F: Function>(
    func: &mut F,
    virtual_intervals: &Vec<VirtualInterval>,
    reg_universe: &RealRegUniverse,
    use_checker: bool,
    memory_moves: &Vec<InstToInsertAndExtPoint>,
) -> Set<RealReg> {
    info!("set_registers");

    // Set up checker state, if indicated by our configuration.
    let mut checker: Option<CheckerContext> = None;
    let mut insn_blocks: Vec<BlockIx> = vec![];
    if use_checker {
        checker = Some(CheckerContext::new(
            func,
            reg_universe,
            memory_moves,
            &[],
            &[],
            &[],
        ));
        insn_blocks.resize(func.insns().len(), BlockIx::new(0));
        for block_ix in func.blocks() {
            for insn_ix in func.block_insns(block_ix) {
                insn_blocks[insn_ix.get() as usize] = block_ix;
            }
        }
    }

    let mut clobbered_registers = Set::empty();

    // Collect all the regs per instruction and mention set.
    let capacity = virtual_intervals
        .iter()
        .map(|int| int.mentions.len())
        .fold(0, |a, b| a + b);

    if capacity == 0 {
        // No virtual registers have been allocated, exit early.
        return clobbered_registers;
    }

    let mut mention_map = Vec::with_capacity(capacity);

    for int in virtual_intervals {
        let rreg = match int.location.reg() {
            Some(rreg) => rreg,
            _ => continue,
        };
        trace!("int: {}", int);
        trace!("  {:?}", int.mentions);
        for &mention in &int.mentions {
            mention_map.push((mention.0, mention.1, int.vreg, rreg));
        }
    }

    // Sort by instruction index.
    mention_map.sort_unstable_by_key(|quad| quad.0);

    // Iterate over all the mentions.
    let mut mapper = MentionRegUsageMapper::new();

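    // Applies the vreg -> rreg mappings accumulated in `mapper` to the instruction at `iix`,
    // feeds the result to the checker if one is set up, then clears the mapper for the next
    // instruction.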
    let flush_inst = |func: &mut F,
                      mapper: &mut MentionRegUsageMapper,
                      iix: InstIx,
                      checker: Option<&mut CheckerContext>| {
        trace!("map_regs for {:?}", iix);
        let mut inst = func.get_insn_mut(iix);
        F::map_regs(&mut inst, mapper);

        if let Some(checker) = checker {
            let block_ix = insn_blocks[iix.get() as usize];
            checker
                .handle_insn(reg_universe, func, block_ix, iix, mapper)
                .unwrap();
        }

        mapper.clear();
    };

    let mut prev_iix = mention_map[0].0;
    for (iix, mention_set, vreg, rreg) in mention_map {
        if prev_iix != iix {
            // Flush previous instruction.
            flush_inst(func, &mut mapper, prev_iix, checker.as_mut());
            prev_iix = iix;
        }

        trace!(
            "{:?}: {:?} is in {:?} at {:?}",
            iix,
            vreg,
            rreg,
            mention_set
        );

        // Fill in new information at the given index.
        if mention_set.is_use() {
            if let Some(prev_rreg) = mapper.lookup_use(vreg) {
                debug_assert_eq!(prev_rreg, rreg, "different use allocs for {:?}", vreg);
            }
            mapper.set_use(vreg, rreg);
        }

        let included_in_clobbers = func.is_included_in_clobbers(func.get_insn(iix));
        if mention_set.is_mod() {
            if let Some(prev_rreg) = mapper.lookup_use(vreg) {
                debug_assert_eq!(prev_rreg, rreg, "different use allocs for {:?}", vreg);
            }
            if let Some(prev_rreg) = mapper.lookup_def(vreg) {
                debug_assert_eq!(prev_rreg, rreg, "different def allocs for {:?}", vreg);
            }

            mapper.set_use(vreg, rreg);
            mapper.set_def(vreg, rreg);
            if included_in_clobbers {
                clobbered_registers.insert(rreg);
            }
        }

        if mention_set.is_def() {
            if let Some(prev_rreg) = mapper.lookup_def(vreg) {
                debug_assert_eq!(prev_rreg, rreg, "different def allocs for {:?}", vreg);
            }

            mapper.set_def(vreg, rreg);
            if included_in_clobbers {
                clobbered_registers.insert(rreg);
            }
        }
    }

    // Flush last instruction.
    flush_inst(func, &mut mapper, prev_iix, checker.as_mut());

    clobbered_registers
}

/// Fills in the register assignments into instructions.
#[inline(never)]
fn apply_registers<F: Function>(
    func: &mut F,
    virtual_intervals: &Vec<VirtualInterval>,
    memory_moves: Vec<InstToInsertAndExtPoint>,
    reg_universe: &RealRegUniverse,
    num_spill_slots: u32,
    use_checker: bool,
) -> Result<RegAllocResult<F>, RegAllocError> {
    info!("apply_registers");

    let clobbered_registers = set_registers(
        func,
        virtual_intervals,
        reg_universe,
        use_checker,
        &memory_moves,
    );

    let safepoint_insns = vec![];
    let (final_insns, target_map, new_to_old_insn_map, new_safepoint_insns) =
        add_spills_reloads_and_moves(func, &safepoint_insns, memory_moves)
            .map_err(|e| RegAllocError::Other(e))?;
    assert!(new_safepoint_insns.is_empty()); // because `safepoint_insns` is also empty.

    // Now remove from the clobbered registers set all those that are not available to the
    // allocator, but keep the reserved regs, since we might have modified those.
    let clobbered_registers = clobbered_registers.filter_map(|&reg| {
        if reg.get_index() >= reg_universe.allocable {
            None
        } else {
            Some(reg)
        }
    });

    Ok(RegAllocResult {
        insns: final_insns,
        target_map,
        orig_insn_map: new_to_old_insn_map,
        clobbered_registers,
        num_spill_slots,
        block_annotations: None,
        stackmaps: vec![],
        new_safepoint_insns,
    })
}