//! Spilling pass.
//!
//! The spilling pass is the first to run after the liveness analysis. Its primary function is to
//! ensure that the register pressure never exceeds the number of available registers by moving
//! some SSA values to spill slots on the stack. This is encoded in the affinity of the value's
//! live range.
//!
//! Some instruction operand constraints may require additional registers to resolve. Since this
//! can cause spilling, the spilling pass is also responsible for resolving those constraints by
//! inserting copies. The extra constraints are:
//!
//! 1. A value used by a tied operand must be killed by the instruction. This is resolved by
//!    inserting a copy to a temporary value when necessary, as sketched after this list.
//! 2. When the same value is used more than once by an instruction, the operand constraints must
//!    be compatible. Otherwise, the value must be copied into a new register for some of the
//!    operands.
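//!
//! As a schematic sketch of constraint 1 (the instruction and value numbers here are purely
//! illustrative), a tied use of a value that is *not* killed:
//!
//! ```text
//! v1 = iadd v0, v2   ; encoding ties v0 to v1, but v0 is still live afterwards
//! ```
//!
//! is rewritten using a copy that the instruction can kill:
//!
//! ```text
//! v10 = copy v0
//! v1 = iadd v10, v2
//! ```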

use crate::cursor::{Cursor, EncCursor};
use crate::dominator_tree::DominatorTree;
use crate::ir::{ArgumentLoc, Block, Function, Inst, InstBuilder, SigRef, Value, ValueLoc};
use crate::isa::registers::{RegClass, RegClassIndex, RegClassMask, RegUnit};
use crate::isa::{ConstraintKind, EncInfo, RecipeConstraints, RegInfo, TargetIsa};
use crate::regalloc::affinity::Affinity;
use crate::regalloc::live_value_tracker::{LiveValue, LiveValueTracker};
use crate::regalloc::liveness::Liveness;
use crate::regalloc::pressure::Pressure;
use crate::regalloc::virtregs::VirtRegs;
use crate::timing;
use crate::topo_order::TopoOrder;
use alloc::vec::Vec;
use core::fmt;

/// Return a top-level register class which contains `unit`.
fn toprc_containing_regunit(unit: RegUnit, reginfo: &RegInfo) -> RegClass {
    let bank = reginfo.bank_containing_regunit(unit).unwrap();
    reginfo.classes[bank.first_toprc..(bank.first_toprc + bank.num_toprcs)]
        .iter()
        .find(|&rc| rc.contains(unit))
        .expect("reg unit should be in a toprc")
}

/// Persistent data structures for the spilling pass.
pub struct Spilling {
    spills: Vec<Value>,
    reg_uses: Vec<RegUse>,
}

/// Context data structure that gets instantiated once per pass.
struct Context<'a> {
    // Current instruction as well as reference to function and ISA.
    cur: EncCursor<'a>,

    // Cached ISA information.
    reginfo: RegInfo,
    encinfo: EncInfo,

    // References to contextual data structures we need.
    domtree: &'a DominatorTree,
    liveness: &'a mut Liveness,
    virtregs: &'a VirtRegs,
    topo: &'a mut TopoOrder,

    // Current register pressure.
    pressure: Pressure,

    // Values spilled for the current instruction. These values have already been removed from the
    // pressure tracker, but they are still present in the live value tracker and their affinity
    // hasn't been changed yet.
    spills: &'a mut Vec<Value>,

    // Uses of register values in the current instruction.
    reg_uses: &'a mut Vec<RegUse>,
}

impl Spilling {
    /// Create a new spilling data structure.
    pub fn new() -> Self {
        Self {
            spills: Vec::new(),
            reg_uses: Vec::new(),
        }
    }

    /// Clear all data structures in this spilling pass.
    pub fn clear(&mut self) {
        self.spills.clear();
        self.reg_uses.clear();
    }

    /// Run the spilling algorithm over `func`.
    pub fn run(
        &mut self,
        isa: &dyn TargetIsa,
        func: &mut Function,
        domtree: &DominatorTree,
        liveness: &mut Liveness,
        virtregs: &VirtRegs,
        topo: &mut TopoOrder,
        tracker: &mut LiveValueTracker,
    ) {
        let _tt = timing::ra_spilling();
        log::trace!("Spilling for:\n{}", func.display(isa));
        let reginfo = isa.register_info();
        let usable_regs = isa.allocatable_registers(func);
        let mut ctx = Context {
            cur: EncCursor::new(func, isa),
            reginfo: isa.register_info(),
            encinfo: isa.encoding_info(),
            domtree,
            liveness,
            virtregs,
            topo,
            pressure: Pressure::new(&reginfo, &usable_regs),
            spills: &mut self.spills,
            reg_uses: &mut self.reg_uses,
        };
        ctx.run(tracker)
    }
}

impl<'a> Context<'a> {
    fn run(&mut self, tracker: &mut LiveValueTracker) {
        self.topo.reset(self.cur.func.layout.blocks());
        while let Some(block) = self.topo.next(&self.cur.func.layout, self.domtree) {
            self.visit_block(block, tracker);
        }
    }

    fn visit_block(&mut self, block: Block, tracker: &mut LiveValueTracker) {
        log::trace!("Spilling {}:", block);
        self.cur.goto_top(block);
        self.visit_block_header(block, tracker);
        tracker.drop_dead_params();
        self.process_spills(tracker);

        while let Some(inst) = self.cur.next_inst() {
            if !self.cur.func.dfg[inst].opcode().is_ghost() {
                self.visit_inst(inst, block, tracker);
            } else {
                let (_throughs, kills) = tracker.process_ghost(inst);
                self.free_regs(kills);
            }
            tracker.drop_dead(inst);
            self.process_spills(tracker);
        }
    }

    // Take all live registers in `regs` from the pressure set.
    // This doesn't cause any spilling; it is assumed there are enough registers.
    fn take_live_regs(&mut self, regs: &[LiveValue]) {
        for lv in regs {
            if !lv.is_dead {
                if let Affinity::Reg(rci) = lv.affinity {
                    let rc = self.reginfo.rc(rci);
                    self.pressure.take(rc);
                }
            }
        }
    }

    // Free all registers in `kills` from the pressure set.
    fn free_regs(&mut self, kills: &[LiveValue]) {
        for lv in kills {
            if let Affinity::Reg(rci) = lv.affinity {
                if !self.spills.contains(&lv.value) {
                    let rc = self.reginfo.rc(rci);
                    self.pressure.free(rc);
                }
            }
        }
    }

    // Free all dead registers in `regs` from the pressure set.
    fn free_dead_regs(&mut self, regs: &[LiveValue]) {
        for lv in regs {
            if lv.is_dead {
                if let Affinity::Reg(rci) = lv.affinity {
                    if !self.spills.contains(&lv.value) {
                        let rc = self.reginfo.rc(rci);
                        self.pressure.free(rc);
                    }
                }
            }
        }
    }

    fn visit_block_header(&mut self, block: Block, tracker: &mut LiveValueTracker) {
        let (liveins, params) = tracker.block_top(
            block,
            &self.cur.func.dfg,
            self.liveness,
            &self.cur.func.layout,
            self.domtree,
        );

        // Count the live-in registers. These should already fit in registers; they did at the
        // dominator.
        self.pressure.reset();
        self.take_live_regs(liveins);

        // A block can have an arbitrary (up to 2^16...) number of parameters, so they are not
        // guaranteed to fit in registers.
        for lv in params {
            if let Affinity::Reg(rci) = lv.affinity {
                let rc = self.reginfo.rc(rci);
                'try_take: while let Err(mask) = self.pressure.take_transient(rc) {
                    log::trace!("Need {} reg for block param {}", rc, lv.value);
                    match self.spill_candidate(mask, liveins) {
                        Some(cand) => {
                            log::trace!(
                                "Spilling live-in {} to make room for {} block param {}",
                                cand,
                                rc,
                                lv.value
                            );
                            self.spill_reg(cand);
                        }
                        None => {
                            // We can't spill any of the live-in registers, so we have to spill a
                            // block argument. Since the current spill metric would consider all
                            // the block arguments equal, just spill the present register.
                            log::trace!("Spilling {} block argument {}", rc, lv.value);

                            // Since `spill_reg` will free a register, add the current one here.
                            self.pressure.take(rc);
                            self.spill_reg(lv.value);
                            break 'try_take;
                        }
                    }
                }
            }
        }

        // The transient pressure counts for the block arguments are accurate. Just preserve them.
        self.pressure.preserve_transient();
        self.free_dead_regs(params);
    }

    fn visit_inst(&mut self, inst: Inst, block: Block, tracker: &mut LiveValueTracker) {
        log::trace!("Inst {}, {}", self.cur.display_inst(inst), self.pressure);
        debug_assert_eq!(self.cur.current_inst(), Some(inst));
        debug_assert_eq!(self.cur.current_block(), Some(block));

        let constraints = self
            .encinfo
            .operand_constraints(self.cur.func.encodings[inst]);

        // We may need to resolve register constraints if there are any noteworthy uses.
        debug_assert!(self.reg_uses.is_empty());
        self.collect_reg_uses(inst, block, constraints);

        // Calls usually have fixed register uses.
        let call_sig = self.cur.func.dfg.call_signature(inst);
        if let Some(sig) = call_sig {
            self.collect_abi_reg_uses(inst, sig);
        }

        if !self.reg_uses.is_empty() {
            self.process_reg_uses(inst, tracker);
        }

        // Update the live value tracker with this instruction.
        let (throughs, kills, defs) = tracker.process_inst(inst, &self.cur.func.dfg, self.liveness);

        // Remove kills from the pressure tracker.
        self.free_regs(kills);

        // If inst is a call, spill all register values that are live across the call.
        // This means that we don't currently take advantage of callee-saved registers.
        // TODO: Be more sophisticated.
        let opcode = self.cur.func.dfg[inst].opcode();
        if call_sig.is_some() || opcode.clobbers_all_regs() {
            for lv in throughs {
                if lv.affinity.is_reg() && !self.spills.contains(&lv.value) {
                    self.spill_reg(lv.value);
                }
            }
        }

        // Make sure we have enough registers for the register defs.
        // Dead defs are included here. They need a register too.
        // No need to process call return values, they are in fixed registers.
        if let Some(constraints) = constraints {
            for op in constraints.outs {
                if op.kind != ConstraintKind::Stack {
                    // Add register def to pressure, spill if needed.
                    while let Err(mask) = self.pressure.take_transient(op.regclass) {
                        log::trace!("Need {} reg from {} throughs", op.regclass, throughs.len());
                        match self.spill_candidate(mask, throughs) {
                            Some(cand) => self.spill_reg(cand),
                            None => panic!(
                                "Ran out of {} registers for {}",
                                op.regclass,
                                self.cur.display_inst(inst)
                            ),
                        }
                    }
                }
            }
            self.pressure.reset_transient();
        }

        // Restore pressure state, compute pressure with affinities from `defs`.
        // Exclude dead defs. Includes call return values.
        // This won't cause spilling.
        self.take_live_regs(defs);
    }

    // Collect register uses that are noteworthy in one of the following ways:
    //
    // 1. It's a fixed register constraint.
    // 2. It's a use of a spilled value.
    // 3. It's a tied register constraint and the value isn't killed.
    //
    // We are assuming here that if a value is used both by a fixed register operand and a register
    // class operand, the two are compatible. We are also assuming that two register class
    // operands are always compatible.
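    //
    // For example (a hypothetical case): if `v3` were used both by a `FixedReg` operand and by a
    // plain `Reg` operand, we assume the fixed register also satisfies the `Reg` operand's
    // register class, so only the fixed use is collected below.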
    fn collect_reg_uses(
        &mut self,
        inst: Inst,
        block: Block,
        constraints: Option<&RecipeConstraints>,
    ) {
        let args = self.cur.func.dfg.inst_args(inst);
        let num_fixed_ins = if let Some(constraints) = constraints {
            for (idx, (op, &arg)) in constraints.ins.iter().zip(args).enumerate() {
                let mut reguse = RegUse::new(arg, idx, op.regclass.into());
                let lr = &self.liveness[arg];
                match op.kind {
                    ConstraintKind::Stack => continue,
                    ConstraintKind::FixedReg(_) => reguse.fixed = true,
                    ConstraintKind::Tied(_) => {
                        // A tied operand must kill the used value.
                        reguse.tied = !lr.killed_at(inst, block, &self.cur.func.layout);
                    }
                    ConstraintKind::FixedTied(_) => {
                        reguse.fixed = true;
                        reguse.tied = !lr.killed_at(inst, block, &self.cur.func.layout);
                    }
                    ConstraintKind::Reg => {}
                }
                if lr.affinity.is_stack() {
                    reguse.spilled = true;
                }

                // Only collect the interesting register uses.
                if reguse.fixed || reguse.tied || reguse.spilled {
                    log::trace!("  reguse: {}", reguse);
                    self.reg_uses.push(reguse);
                }
            }
            constraints.ins.len()
        } else {
            // A non-ghost instruction with no constraints can't have any
            // fixed operands.
            0
        };

        // Similarly, for return instructions, collect uses of ABI-defined
        // return values.
        if self.cur.func.dfg[inst].opcode().is_return() {
            debug_assert_eq!(
                self.cur.func.dfg.inst_variable_args(inst).len(),
                self.cur.func.signature.returns.len(),
                "The non-fixed arguments in a return should follow the function's signature."
            );
            for (ret_idx, (ret, &arg)) in
                self.cur.func.signature.returns.iter().zip(args).enumerate()
            {
                let idx = num_fixed_ins + ret_idx;
                let unit = match ret.location {
                    ArgumentLoc::Unassigned => {
                        panic!("function return signature should be legalized")
                    }
                    ArgumentLoc::Reg(unit) => unit,
                    ArgumentLoc::Stack(_) => continue,
                };
                let toprc = toprc_containing_regunit(unit, &self.reginfo);
                let mut reguse = RegUse::new(arg, idx, toprc.into());
                reguse.fixed = true;

                log::trace!("  reguse: {}", reguse);
                self.reg_uses.push(reguse);
            }
        }
    }

    // Collect register uses from the ABI input constraints.
    fn collect_abi_reg_uses(&mut self, inst: Inst, sig: SigRef) {
        let num_fixed_args = self.cur.func.dfg[inst]
            .opcode()
            .constraints()
            .num_fixed_value_arguments();
        let args = self.cur.func.dfg.inst_variable_args(inst);
        for (idx, (abi, &arg)) in self.cur.func.dfg.signatures[sig]
            .params
            .iter()
            .zip(args)
            .enumerate()
        {
            if abi.location.is_reg() {
                let (rci, spilled) = match self.liveness[arg].affinity {
                    Affinity::Reg(rci) => (rci, false),
                    Affinity::Stack => (
                        self.cur.isa.regclass_for_abi_type(abi.value_type).into(),
                        true,
                    ),
                    Affinity::Unassigned => panic!("Missing affinity for {}", arg),
                };
                let mut reguse = RegUse::new(arg, num_fixed_args + idx, rci);
                reguse.fixed = true;
                reguse.spilled = spilled;
                self.reg_uses.push(reguse);
            }
        }
    }

    // Process multiple register uses to resolve potential conflicts.
    //
    // Look for multiple uses of the same value in `self.reg_uses` and insert copies as necessary.
    // Trigger spilling if any of the temporaries cause the register pressure to become too high.
    //
    // Leave `self.reg_uses` empty.
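    //
    // Sketch of the duplicate detection below (illustrative values): if `v5` appears as a fixed
    // use at operand 0 and again at operand 2, sorting yields `[v5@op0, v5@op2, ...]`, and the
    // second entry gets a copy because its predecessor in the sorted list uses the same value.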
    fn process_reg_uses(&mut self, inst: Inst, tracker: &LiveValueTracker) {
        // We're looking for multiple uses of the same value, so start by sorting by value. The
        // secondary `opidx` key makes it possible to use an unstable (non-allocating) sort.
        self.reg_uses.sort_unstable_by_key(|u| (u.value, u.opidx));

        self.cur.use_srcloc(inst);
        for i in 0..self.reg_uses.len() {
            let ru = self.reg_uses[i];

            // Do we need to insert a copy for this use?
            let need_copy = if ru.tied {
                true
            } else if ru.fixed {
                // This is a fixed register use which doesn't necessarily require a copy.
                // Make a copy only if this is not the first use of the value.
                self.reg_uses
                    .get(i.wrapping_sub(1))
                    .map_or(false, |ru2| ru2.value == ru.value)
            } else {
                false
            };

            if need_copy {
                let copy = self.insert_copy(ru.value, ru.rci);
                self.cur.func.dfg.inst_args_mut(inst)[ru.opidx as usize] = copy;
            }

            // Even if we don't insert a copy, we may need to account for register pressure for the
            // reload pass.
            if need_copy || ru.spilled {
                let rc = self.reginfo.rc(ru.rci);
                while let Err(mask) = self.pressure.take_transient(rc) {
                    log::trace!("Copy of {} reg causes spill", rc);
                    // Spill a live register that is *not* used by the current instruction.
                    // Spilling a use wouldn't help.
                    //
                    // Do allow spilling of block arguments on branches. This is safe since we spill
                    // the whole virtual register which includes the matching block parameter value
                    // at the branch destination. It is also necessary since there can be
                    // arbitrarily many block arguments.
                    match {
                        let args = if self.cur.func.dfg[inst].opcode().is_branch() {
                            self.cur.func.dfg.inst_fixed_args(inst)
                        } else {
                            self.cur.func.dfg.inst_args(inst)
                        };
                        self.spill_candidate(
                            mask,
                            tracker.live().iter().filter(|lv| !args.contains(&lv.value)),
                        )
                    } {
                        Some(cand) => self.spill_reg(cand),
                        None => panic!(
                            "Ran out of {} registers when inserting copy before {}",
                            rc,
                            self.cur.display_inst(inst)
                        ),
                    }
                }
            }
        }
        self.pressure.reset_transient();
        self.reg_uses.clear()
    }

    // Find a spill candidate from `candidates` whose top-level register class is in `mask`.
    fn spill_candidate<'ii, II>(&self, mask: RegClassMask, candidates: II) -> Option<Value>
    where
        II: IntoIterator<Item = &'ii LiveValue>,
    {
        // Find the best viable spill candidate.
        //
        // The very simple strategy implemented here is to spill the value with the earliest def in
        // the reverse post-order. This strategy depends on a good reload pass to generate good
        // code.
        //
        // We know that all candidate defs dominate the current instruction, so one of them will
        // dominate the others. That is the earliest def.
        candidates
            .into_iter()
            .filter_map(|lv| {
                // Viable candidates are registers in one of the `mask` classes, and not already in
                // the spill set.
                if let Affinity::Reg(rci) = lv.affinity {
                    let rc = self.reginfo.rc(rci);
                    if (mask & (1 << rc.toprc)) != 0 && !self.spills.contains(&lv.value) {
                        // Here, `lv` is a viable spill candidate.
                        return Some(lv.value);
                    }
                }
                None
            })
            .min_by(|&a, &b| {
                // Find the minimum candidate according to the RPO of their defs.
                self.domtree.rpo_cmp(
                    self.cur.func.dfg.value_def(a),
                    self.cur.func.dfg.value_def(b),
                    &self.cur.func.layout,
                )
            })
    }

    /// Spill `value` immediately by
    ///
    /// 1. Changing its affinity to `Stack` which marks the spill.
    /// 2. Removing the value from the pressure tracker.
    /// 3. Adding the value to `self.spills` for later reference by `process_spills`.
    ///
    /// Note that this does not update the cached affinity in the live value tracker. Call
    /// `process_spills` to do that.
    fn spill_reg(&mut self, value: Value) {
        if let Affinity::Reg(rci) = self.liveness.spill(value) {
            let rc = self.reginfo.rc(rci);
            self.pressure.free(rc);
            self.spills.push(value);
            log::trace!("Spilled {}:{} -> {}", value, rc, self.pressure);
        } else {
            panic!("Cannot spill {} that was already on the stack", value);
        }

        // Assign a spill slot for the whole virtual register.
        let ss = self
            .cur
            .func
            .stack_slots
            .make_spill_slot(self.cur.func.dfg.value_type(value));
        for &v in self.virtregs.congruence_class(&value) {
            self.liveness.spill(v);
            self.cur.func.locations[v] = ValueLoc::Stack(ss);
        }
    }

    /// Process any pending spills in the `self.spills` vector.
    ///
    /// It is assumed that spills are removed from the pressure tracker immediately; see
    /// `spill_reg` above.
    ///
    /// We also need to update the live range affinity and remove spilled values from the live
    /// value tracker.
    fn process_spills(&mut self, tracker: &mut LiveValueTracker) {
        if !self.spills.is_empty() {
            tracker.process_spills(|v| self.spills.contains(&v));
            self.spills.clear()
        }
    }

    /// Insert a `copy value` before the current instruction and give it a live range extending to
    /// the current instruction.
    ///
    /// Returns the new local value created.
    fn insert_copy(&mut self, value: Value, rci: RegClassIndex) -> Value {
        let copy = self.cur.ins().copy(value);
        let inst = self.cur.built_inst();

        // Update live ranges.
        self.liveness.create_dead(copy, inst, Affinity::Reg(rci));
        self.liveness.extend_locally(
            copy,
            self.cur.func.layout.pp_block(inst),
            self.cur.current_inst().expect("must be at an instruction"),
            &self.cur.func.layout,
        );

        copy
    }
}

/// Struct representing a register use of a value.
/// Used to detect multiple uses of the same value with incompatible register constraints.
#[derive(Clone, Copy)]
struct RegUse {
    value: Value,
    opidx: u16,

    // Register class required by the use.
    rci: RegClassIndex,

    // A use with a fixed register constraint.
    fixed: bool,

    // A register use of a spilled value.
    spilled: bool,

    // A use with a tied register constraint *and* the used value is not killed.
    tied: bool,
}

impl RegUse {
    fn new(value: Value, idx: usize, rci: RegClassIndex) -> Self {
        Self {
            value,
            opidx: idx as u16,
            rci,
            fixed: false,
            spilled: false,
            tied: false,
        }
    }
}
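// Displays as e.g. `v7@op1/fixed/spilled` (illustrative value and operand index); each flag
// suffix appears only when the corresponding field is set.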
impl fmt::Display for RegUse {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}@op{}", self.value, self.opidx)?;
        if self.fixed {
            write!(f, "/fixed")?;
        }
        if self.spilled {
            write!(f, "/spilled")?;
        }
        if self.tied {
            write!(f, "/tied")?;
        }
        Ok(())
    }
}