use super::{next_use, Fragments, IntId, Intervals, Location, MentionMap, RegUses};
use crate::{
    data_structures::{BlockIx, InstPoint, Point},
    inst_stream::{InstToInsert, InstToInsertAndPoint},
    sparse_set::SparseSet,
    Function, RealReg, Reg, SpillSlot, TypedIxVec, VirtualReg, Writable,
};

use log::{debug, info, trace};
use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet};
use std::env;
use std::fmt;

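/// Inserts the moves, spills and reloads required to reconcile the locations
/// assigned to the virtual intervals after register allocation.
///
/// This proceeds in two phases:
/// 1. in-block fixups: when a split interval and its parent were assigned
///    different locations, insert a move/spill/reload between them;
/// 2. block-boundary fixups: for each vreg live out of a block, if the
///    locations assigned in the block and in a successor differ, insert a
///    fixup at the end of the block (single successor) or at the start of
///    the successor (otherwise, relying on critical edges having been split).
///
/// The returned instructions are tagged with the `InstPoint` at which they
/// must be inserted into the instruction stream.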
#[inline(never)]
pub(crate) fn run<F: Function>(
    func: &F,
    reg_uses: &RegUses,
    mention_map: &HashMap<Reg, MentionMap>,
    intervals: &Intervals,
    virtual_intervals: &Vec<IntId>,
    fragments: &Fragments,
    liveouts: &TypedIxVec<BlockIx, SparseSet<Reg>>,
    spill_slot: &mut u32,
    scratches_by_rc: &[Option<RealReg>],
) -> Vec<InstToInsertAndPoint> {
    let mut memory_moves = HashMap::default();

    let mut parallel_reloads = HashMap::default();
    let mut spills = HashMap::default();

    info!("resolve_moves");

    let mut block_ends = HashSet::default();
    let mut block_starts = HashSet::default();
    for bix in func.blocks() {
        let insts = func.block_insns(bix);
        block_ends.insert(insts.last());
        block_starts.insert(insts.first());
    }

    for &int_id in virtual_intervals {
        let (parent_end, parent_loc, loc) = {
            let interval = intervals.get(int_id);
            let loc = interval.location;

            let parent_id = match interval.parent {
                Some(pid) => pid,
                None => {
                    // In unreachable code, it's possible that a given interval has no
                    // parent and is assigned to a stack location for its whole lifetime.
                    //
                    // In reachable code, the analysis only creates intervals for virtual
                    // registers with at least one register use, so a parentless interval
                    // (i.e., one that has never been split) can't live in a stack slot.
                    debug_assert!(
                        loc.spill().is_none()
                            || (next_use(
                                mention_map,
                                intervals,
                                int_id,
                                InstPoint::min_value(),
                                reg_uses,
                                fragments
                            )
                            .is_none())
                    );
                    continue;
                }
            };

            let parent = intervals.get(parent_id);

            // If this is a move between blocks, handle it as such.
            if parent.end.pt == Point::Def
                && interval.start.pt == Point::Use
                && block_ends.contains(&parent.end.iix)
                && block_starts.contains(&interval.start.iix)
            {
                continue;
            }

            (parent.end, parent.location, loc)
        };

        let child_start = intervals.get(int_id).start;
        let vreg = intervals.vreg(int_id);

        match loc {
            Location::None => panic!("interval has no location after regalloc!"),

            Location::Reg(rreg) => {
                // Reconnect with the parent location by adding a move if needed.
                if let Some(next_use) = next_use(
                    mention_map,
                    intervals,
                    int_id,
                    child_start,
                    reg_uses,
                    fragments,
                ) {
                    // No need to reload before a new definition.
                    if next_use.pt == Point::Def {
                        continue;
                    }
                }

                let mut at_inst = child_start;
                match at_inst.pt {
                    Point::Use => {
                        at_inst.pt = Point::Reload;
                    }
                    Point::Def => {
                        at_inst.pt = Point::Spill;
                    }
                    _ => unreachable!(),
                }
                let entry = parallel_reloads.entry(at_inst).or_insert_with(Vec::new);

                match parent_loc {
                    Location::None => unreachable!(),

                    Location::Reg(from_rreg) => {
                        if from_rreg != rreg {
                            debug!(
                                "inblock fixup: {:?} move {:?} -> {:?} at {:?}",
                                int_id, from_rreg, rreg, at_inst
                            );
                            entry.push(MoveOp::new_move(from_rreg, rreg, vreg));
                        }
                    }

                    Location::Stack(spill) => {
                        debug!(
                            "inblock fixup: {:?} reload {:?} -> {:?} at {:?}",
                            int_id, spill, rreg, at_inst
                        );
                        entry.push(MoveOp::new_reload(spill, rreg, vreg));
                    }
                }
            }

            Location::Stack(spill) => {
                // This interval has been spilled (i.e. split). Spill after the last def
                // or before the last use.
                let mut at_inst = parent_end;
                at_inst.pt = if at_inst.pt == Point::Use {
                    Point::Reload
                } else {
                    debug_assert!(at_inst.pt == Point::Def);
                    Point::Spill
                };

                match parent_loc {
                    Location::None => unreachable!(),

                    Location::Reg(rreg) => {
                        debug!(
                            "inblock fixup: {:?} spill {:?} -> {:?} at {:?}",
                            int_id, rreg, spill, at_inst
                        );
                        spills
                            .entry(at_inst)
                            .or_insert_with(Vec::new)
                            .push(InstToInsert::Spill {
                                to_slot: spill,
                                from_reg: rreg,
                                for_vreg: vreg,
                            });
                    }

                    Location::Stack(parent_spill) => {
                        debug_assert_eq!(parent_spill, spill);
                    }
                }
            }
        }
    }

    // Flush the memory moves caused by in-block fixups. Conceptually, the spills
    // must happen after the right locations have been set, that is, after the
    // reloads. Reloads may include several moves that must happen in parallel
    // (e.g. if two real regs must be swapped), so process them first. Once all
    // the parallel assignments have been done, push forward all the spills.
    for (at_inst, mut parallel_moves) in parallel_reloads {
        let ordered_moves = schedule_moves(&mut parallel_moves);
        let insts = emit_moves(ordered_moves, spill_slot, scratches_by_rc);
        memory_moves.insert(at_inst, insts);
    }
    for (at_inst, mut spills) in spills {
        memory_moves
            .entry(at_inst)
            .or_insert_with(Vec::new)
            .append(&mut spills);
    }

    let mut parallel_move_map = HashMap::default();
    enum BlockPos {
        Start,
        End,
    }

    // Figure out the sequence of parallel moves to insert at block boundaries:
    // - for each block
    //   - for each liveout vreg in this block
    //     - for each successor of this block
    //       - if the locations allocated in the block and its successor don't
    //         match, insert a pending move from one location to the other.
    //
    // Once that's done:
    // - resolve cycles in the pending moves
    // - generate real moves from the pending moves.
    let mut seen_successors = HashSet::default();
    for block in func.blocks() {
        let successors = func.block_succs(block);
        seen_successors.clear();

        // Where should the fixup move be inserted, if one is needed? If the
        // current block has more than one successor, inserting it in the
        // current block would impact all of them.
        //
        // We assume critical edges have been split, so if the current block
        // has more than one successor, then each successor has exactly one
        // predecessor, and the fixup can go at the start of the successor.
        let cur_has_one_succ = successors.len() == 1;

        for succ in successors {
            if !seen_successors.insert(succ) {
                continue;
            }

            for &reg in liveouts[block].iter() {
                let vreg = if let Some(vreg) = reg.as_virtual_reg() {
                    vreg
                } else {
                    continue;
                };

                // Find the interval for this (vreg, inst) pair.
                let (succ_first_inst, succ_id) = {
                    let first_inst = InstPoint::new_use(func.block_insns(succ).first());
                    let found = match find_enclosing_interval(
                        vreg,
                        first_inst,
                        intervals,
                        &virtual_intervals,
                    ) {
                        Some(found) => found,
                        // The vreg is unused in this successor; no need to update its
                        // location.
                        None => continue,
                    };
                    (first_inst, found)
                };

                let (cur_last_inst, cur_id) = {
                    let last_inst = func.block_insns(block).last();
                    // see XXX above
                    let last_inst = InstPoint::new_def(last_inst);
                    let cur_id =
                        find_enclosing_interval(vreg, last_inst, intervals, &virtual_intervals)
                            .unwrap_or_else(|| {
                                panic!(
                                    "no interval for given {:?}:{:?} pair in current {:?}",
                                    vreg, last_inst, block
                                )
                            });
                    (last_inst, cur_id)
                };

                if succ_id == cur_id {
                    continue;
                }

                let (at_inst, block_pos) = if cur_has_one_succ {
                    let mut pos = cur_last_inst;
                    // Before the control flow instruction.
                    pos.pt = Point::Reload;
                    (pos, BlockPos::End)
                } else {
                    let mut pos = succ_first_inst;
                    pos.pt = Point::Reload;
                    (pos, BlockPos::Start)
                };

                let pending_moves = parallel_move_map
                    .entry(at_inst)
                    .or_insert((Vec::new(), block_pos));

                match (
                    intervals.get(cur_id).location,
                    intervals.get(succ_id).location,
                ) {
                    (Location::Reg(cur_rreg), Location::Reg(succ_rreg)) => {
                        if cur_rreg == succ_rreg {
                            continue;
                        }
                        debug!(
                            "boundary fixup: move {:?} -> {:?} at {:?} for {:?} between {:?} and {:?}",
                            cur_rreg, succ_rreg, at_inst, vreg, block, succ
                        );
                        pending_moves
                            .0
                            .push(MoveOp::new_move(cur_rreg, succ_rreg, vreg));
                    }

                    (Location::Reg(cur_rreg), Location::Stack(spillslot)) => {
                        debug!(
                            "boundary fixup: spill {:?} -> {:?} at {:?} for {:?} between {:?} and {:?}",
                            cur_rreg, spillslot, at_inst, vreg, block, succ
                        );
                        pending_moves
                            .0
                            .push(MoveOp::new_spill(cur_rreg, spillslot, vreg));
                    }

                    (Location::Stack(spillslot), Location::Reg(rreg)) => {
                        debug!(
                            "boundary fixup: reload {:?} -> {:?} at {:?} for {:?} between {:?} and {:?}",
                            spillslot, rreg, at_inst, vreg, block, succ
                        );
                        pending_moves
                            .0
                            .push(MoveOp::new_reload(spillslot, rreg, vreg));
                    }

                    (Location::Stack(left_spill_slot), Location::Stack(right_spill_slot)) => {
                        // Stack-to-stack moves should not happen here: two ranges for the
                        // same vreg can't intersect, so the same stack slot must have been
                        // reused in this case.
                        debug_assert_eq!(
                            left_spill_slot, right_spill_slot,
                            "Moves from stack to stack only happen on the same vreg, thus the same stack slot"
                        );
                        continue;
                    }

                    (_, _) => {
                        panic!("register or stack slots must have been allocated.");
                    }
                };
            }
        }

        // Flush the memory moves caused by block fixups for this block.
        for (at_inst, parallel_moves) in parallel_move_map.iter_mut() {
            let ordered_moves = schedule_moves(&mut parallel_moves.0);
            let mut insts = emit_moves(ordered_moves, spill_slot, scratches_by_rc);

            // If at_inst pointed to a block start, insert the block fixups *before*
            // the in-block fixups; if it pointed to a block end, insert them *after*
            // the in-block fixups.
            let mut entry = memory_moves.entry(*at_inst).or_insert_with(Vec::new);
            match parallel_moves.1 {
                BlockPos::Start => {
                    insts.append(&mut entry);
                    *entry = insts;
                }
                BlockPos::End => {
                    entry.append(&mut insts);
                }
            }
        }
        parallel_move_map.clear();
    }
    debug!("");

    let mut insts_and_points = Vec::<InstToInsertAndPoint>::new();
    for (at, insts) in memory_moves {
        for inst in insts {
            insts_and_points.push(InstToInsertAndPoint::new(inst, at));
        }
    }

    insts_and_points
}

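/// Finds the interval for `vreg` that covers `inst`, if any.
///
/// `virtual_intervals` is sorted by vreg, so a binary search locates one of
/// the intervals for this vreg; we then rewind to the first such interval and
/// scan forward until one containing `inst` is found.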
#[inline(never)]
fn find_enclosing_interval(
    vreg: VirtualReg,
    inst: InstPoint,
    intervals: &Intervals,
    virtual_intervals: &Vec<IntId>,
) -> Option<IntId> {
    // The list of virtual intervals is sorted by vreg; find one interval for
    // this vreg.
    let mut index = virtual_intervals
        .binary_search_by_key(&vreg, |&int_id| intervals.vreg(int_id))
        .expect("should find at least one virtual interval for this vreg");

    // Rewind back to the first interval for this vreg, since there may be
    // several of them.
    while index > 0 && intervals.vreg(virtual_intervals[index - 1]) == vreg {
        index -= 1;
    }

    // Now iterate over all the intervals for this virtual register, until one
    // of them contains `inst`.
    let mut int_id = virtual_intervals[index];
    loop {
        let int = intervals.get(int_id);
        if int.start <= inst && inst <= int.end {
            return Some(int_id);
        }
        // TODO reenable this if there are several fragments per interval again.
        //if intervals.covers(int_id, inst, fragments) {
        //return Some(int_id);
        //}

        index += 1;
        if index == virtual_intervals.len() {
            return None;
        }

        int_id = virtual_intervals[index];
        if intervals.vreg(int_id) != vreg {
            return None;
        }
    }
}

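/// Either side of a move: a real register or a spill slot on the stack.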
#[derive(PartialEq, Debug)]
enum MoveOperand {
    Reg(RealReg),
    Stack(SpillSlot),
}

impl MoveOperand {
    fn aliases(&self, other: &Self) -> bool {
        self == other
    }
}

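/// A pending move, spill or reload for `vreg`. `cycle_begin` and `cycle_end`
/// are filled in by `schedule_moves` when this move respectively starts or
/// ends a cycle of conflicting moves (e.g. a register swap).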
struct MoveOp {
    from: MoveOperand,
    to: MoveOperand,
    vreg: VirtualReg,
    cycle_begin: Option<usize>,
    cycle_end: Option<usize>,
}

impl fmt::Debug for MoveOp {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "{:?}: {:?} -> {:?}", self.vreg, self.from, self.to)?;
        if let Some(ref begin) = self.cycle_begin {
            write!(fmt, ", start of cycle #{}", begin)?;
        }
        if let Some(ref end) = self.cycle_end {
            write!(fmt, ", end of cycle #{}", end)?;
        }
        Ok(())
    }
}

impl MoveOp {
    fn new_move(from: RealReg, to: RealReg, vreg: VirtualReg) -> Self {
        Self {
            from: MoveOperand::Reg(from),
            to: MoveOperand::Reg(to),
            vreg,
            cycle_begin: None,
            cycle_end: None,
        }
    }

    fn new_spill(from: RealReg, to: SpillSlot, vreg: VirtualReg) -> Self {
        Self {
            from: MoveOperand::Reg(from),
            to: MoveOperand::Stack(to),
            vreg,
            cycle_begin: None,
            cycle_end: None,
        }
    }

    fn new_reload(from: SpillSlot, to: RealReg, vreg: VirtualReg) -> Self {
        Self {
            from: MoveOperand::Stack(from),
            to: MoveOperand::Reg(to),
            vreg,
            cycle_begin: None,
            cycle_end: None,
        }
    }

    fn gen_inst(&self) -> InstToInsert {
        match self.from {
            MoveOperand::Reg(from) => match self.to {
                MoveOperand::Reg(to) => InstToInsert::Move {
                    to_reg: Writable::from_reg(to),
                    from_reg: from,
                    for_vreg: self.vreg,
                },
                MoveOperand::Stack(to) => InstToInsert::Spill {
                    to_slot: to,
                    from_reg: from,
                    for_vreg: self.vreg,
                },
            },
            MoveOperand::Stack(from) => match self.to {
                MoveOperand::Reg(to) => InstToInsert::Reload {
                    to_reg: Writable::from_reg(to),
                    from_slot: from,
                    for_vreg: self.vreg,
                },
                MoveOperand::Stack(_to) => unreachable!("stack to stack move"),
            },
        }
    }
}

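/// Finds a move in `pending` whose source aliases the destination of `last`,
/// i.e. a move that must execute before `last` may clobber its destination.
/// Returns its index and a mutable reference to it.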
fn find_blocking_move<'a>(
    pending: &'a mut Vec<MoveOp>,
    last: &MoveOp,
) -> Option<(usize, &'a mut MoveOp)> {
    for (i, other) in pending.iter_mut().enumerate() {
        if other.from.aliases(&last.to) {
            return Some((i, other));
        }
    }
    None
}

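/// Finds a move already on the `stack` (scanning from index `*from`) whose
/// source aliases the destination of `last`: such a move closes a cycle.
/// `*from` is advanced so that repeated calls keep scanning forward.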
fn find_cycled_move<'a>(
    stack: &'a mut Vec<MoveOp>,
    from: &mut usize,
    last: &MoveOp,
) -> Option<&'a mut MoveOp> {
    for i in *from..stack.len() {
        *from += 1;
        let other = &stack[i];
        if other.from.aliases(&last.to) {
            return Some(&mut stack[i]);
        }
    }
    None
}

/// Given a pending list of parallel moves, returns the same moves in an order
/// such that no move clobbers the source of a later one.
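///
/// Cycles (e.g. the parallel swap {A -> B, B -> A}) can't be linearized by
/// mere reordering; the moves involved are tagged with `cycle_begin` and
/// `cycle_end` so that `emit_moves` can later break the cycle through a
/// spill slot.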
#[inline(never)]
fn schedule_moves(pending: &mut Vec<MoveOp>) -> Vec<MoveOp> {
    let mut ordered_moves = Vec::new();

    let mut num_cycles = 0;
    let mut cur_cycles = 0;

    let show_debug = env::var("MOVES").is_ok();
    if show_debug {
        trace!("pending moves: {:#?}", pending);
    }

    while let Some(pm) = pending.pop() {
        if show_debug {
            trace!("handling pending move {:?}", pm);
        }
        debug_assert!(
            pm.from != pm.to,
            "spurious moves should not have been inserted"
        );

        let mut stack = vec![pm];

        while !stack.is_empty() {
            let blocking_pair = find_blocking_move(pending, stack.last().unwrap());

            if let Some((blocking_idx, blocking)) = blocking_pair {
                if show_debug {
                    trace!("found blocker: {:?}", blocking);
                }
                let mut stack_cur = 0;

                let has_cycles = if let Some(cycled) =
                    find_cycled_move(&mut stack, &mut stack_cur, blocking)
                {
                    if show_debug {
                        trace!("found cycle: {:?}", cycled);
                    }
                    debug_assert!(cycled.cycle_end.is_none());
                    cycled.cycle_end = Some(cur_cycles);
                    true
                } else {
                    false
                };

                if has_cycles {
                    while let Some(cycled) =
                        find_cycled_move(&mut stack, &mut stack_cur, blocking)
                    {
                        if show_debug {
                            trace!("found more cycles ending on blocker: {:?}", cycled);
                        }
                        debug_assert!(cycled.cycle_end.is_none());
                        cycled.cycle_end = Some(cur_cycles);
                    }

                    debug_assert!(blocking.cycle_begin.is_none());
                    blocking.cycle_begin = Some(cur_cycles);
                    cur_cycles += 1;
                }

                let blocking = pending.remove(blocking_idx);
                stack.push(blocking);
            } else {
                // There's no blocking move! We can push this one onto the ordered
                // list of moves.
                // TODO IonMonkey has more optimizations for this case.
                let last = stack.pop().unwrap();
                ordered_moves.push(last);
            }
        }

        if num_cycles < cur_cycles {
            num_cycles = cur_cycles;
        }
        cur_cycles = 0;
    }

    ordered_moves
}

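/// Lowers the ordered moves into actual instructions.
///
/// Moves that are not part of a cycle translate directly into a move, spill
/// or reload. For a cycle, the first move (tagged `cycle_begin`) saves its
/// destination into a dedicated spill slot (allocated from `num_spill_slots`
/// on first use), the moves in between execute normally, and the last move
/// (tagged `cycle_end`) restores its destination from that slot instead of
/// from the now-clobbered source. Stack-to-stack copies go through the
/// per-class scratch register.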
#[inline(never)]
fn emit_moves(
    ordered_moves: Vec<MoveOp>,
    num_spill_slots: &mut u32,
    scratches_by_rc: &[Option<RealReg>],
) -> Vec<InstToInsert> {
    let mut spill_slot = None;
    let mut in_cycle = false;

    let mut insts = Vec::new();

    let show_debug = env::var("MOVES").is_ok();
    if show_debug {
        trace!("emit_moves");
    }

    for mov in ordered_moves {
        if mov.cycle_end.is_some() {
            debug_assert!(in_cycle);

            // We're handling a pattern like:
            //   (A -> B)
            //   (B -> A)
            // This case handles (B -> A), which we reach last. We emit a move from
            // the saved value of B to A.
            match mov.to {
                MoveOperand::Reg(dst_reg) => {
                    let inst = InstToInsert::Reload {
                        to_reg: Writable::from_reg(dst_reg),
                        from_slot: spill_slot.expect("should have a cycle spill slot"),
                        for_vreg: mov.vreg,
                    };
                    insts.push(inst);
                    if show_debug {
                        trace!(
                            "finishing cycle: {:?} -> {:?}",
                            spill_slot.unwrap(),
                            dst_reg
                        );
                    }
                }
                MoveOperand::Stack(dst_spill) => {
                    let scratch = scratches_by_rc[mov.vreg.get_class() as usize]
                        .expect("missing scratch reg");
                    let inst = InstToInsert::Reload {
                        to_reg: Writable::from_reg(scratch),
                        from_slot: spill_slot.expect("should have a cycle spill slot"),
                        for_vreg: mov.vreg,
                    };
                    insts.push(inst);
                    let inst = InstToInsert::Spill {
                        to_slot: dst_spill,
                        from_reg: scratch,
                        for_vreg: mov.vreg,
                    };
                    insts.push(inst);
                    if show_debug {
                        trace!(
                            "finishing cycle: {:?} -> {:?} -> {:?}",
                            spill_slot.unwrap(),
                            scratch,
                            dst_spill
                        );
                    }
                }
            };

            in_cycle = false;
            continue;
        }

        if mov.cycle_begin.is_some() {
            debug_assert!(!in_cycle);

            // We're handling a pattern like:
            //   (A -> B)
            //   (B -> A)
            // This case handles (A -> B), which we reach first. We save B, then let
            // the original move proceed.
            if spill_slot.is_none() {
                spill_slot = Some(SpillSlot::new(*num_spill_slots));
                *num_spill_slots += 1;
            }

            match mov.to {
                MoveOperand::Reg(src_reg) => {
                    let inst = InstToInsert::Spill {
                        to_slot: spill_slot.unwrap(),
                        from_reg: src_reg,
                        for_vreg: mov.vreg,
                    };
                    insts.push(inst);
                    if show_debug {
                        trace!("starting cycle: {:?} -> {:?}", src_reg, spill_slot.unwrap());
                    }
                }
                MoveOperand::Stack(src_spill) => {
                    let scratch = scratches_by_rc[mov.vreg.get_class() as usize]
                        .expect("missing scratch reg");
                    let inst = InstToInsert::Reload {
                        to_reg: Writable::from_reg(scratch),
                        from_slot: src_spill,
                        for_vreg: mov.vreg,
                    };
                    insts.push(inst);
                    let inst = InstToInsert::Spill {
                        to_slot: spill_slot.expect("should have a cycle spill slot"),
                        from_reg: scratch,
                        for_vreg: mov.vreg,
                    };
                    insts.push(inst);
                    if show_debug {
                        trace!(
                            "starting cycle: {:?} -> {:?} -> {:?}",
                            src_spill,
                            scratch,
                            spill_slot.unwrap()
                        );
                    }
                }
            };

            in_cycle = true;
        }

        // Emit the move itself; for a cycle-begin move, this is the original
        // move, now that its destination has been saved.
        insts.push(mov.gen_inst());
        if show_debug {
            trace!("moving {:?} -> {:?}", mov.from, mov.to);
        }
    }

    insts
}