//! Spilling pass.
//!
//! The spilling pass is the first to run after the liveness analysis. Its primary function is to
//! ensure that the register pressure never exceeds the number of available registers by moving
//! some SSA values to spill slots on the stack. This is encoded in the affinity of the value's
//! live range.
//!
//! Some instruction operand constraints may require additional registers to resolve. Since this
//! can cause spilling, the spilling pass is also responsible for resolving those constraints by
//! inserting copies. The extra constraints are:
//!
//! 1. A value used by a tied operand must be killed by the instruction. This is resolved by
//!    inserting a copy to a temporary value when necessary.
//! 2. When the same value is used more than once by an instruction, the operand constraints must
//!    be compatible. Otherwise, the value must be copied into a new register for some of the
//!    operands.
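//!
//! As an illustrative sketch of constraint 1 (hypothetical CLIF, assuming a target whose
//! encoding ties the first `iadd` operand to the result, as two-operand ISAs do): when `v1` is
//! still live after the instruction, the pass rewrites the use through a copy:
//!
//! ```text
//! ;; before:                ;; after:
//! v3 = iadd v1, v2          v9 = copy v1
//! ;; ... v1 used below      v3 = iadd v9, v2
//!                           ;; ... v1 used below
//! ```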

use crate::cursor::{Cursor, EncCursor};
use crate::dominator_tree::DominatorTree;
use crate::ir::{ArgumentLoc, Block, Function, Inst, InstBuilder, SigRef, Value, ValueLoc};
use crate::isa::registers::{RegClass, RegClassIndex, RegClassMask, RegUnit};
use crate::isa::{ConstraintKind, EncInfo, RecipeConstraints, RegInfo, TargetIsa};
use crate::regalloc::affinity::Affinity;
use crate::regalloc::live_value_tracker::{LiveValue, LiveValueTracker};
use crate::regalloc::liveness::Liveness;
use crate::regalloc::pressure::Pressure;
use crate::regalloc::virtregs::VirtRegs;
use crate::timing;
use crate::topo_order::TopoOrder;
use alloc::vec::Vec;
use core::fmt;
use log::debug;

/// Return a top-level register class which contains `unit`.
fn toprc_containing_regunit(unit: RegUnit, reginfo: &RegInfo) -> RegClass {
    let bank = reginfo.bank_containing_regunit(unit).unwrap();
    reginfo.classes[bank.first_toprc..(bank.first_toprc + bank.num_toprcs)]
        .iter()
        .find(|&rc| rc.contains(unit))
        .expect("reg unit should be in a toprc")
}

/// Persistent data structures for the spilling pass.
pub struct Spilling {
    spills: Vec<Value>,
    reg_uses: Vec<RegUse>,
}

/// Context data structure that gets instantiated once per pass.
struct Context<'a> {
    // Current instruction as well as reference to function and ISA.
    cur: EncCursor<'a>,

    // Cached ISA information.
    reginfo: RegInfo,
    encinfo: EncInfo,

    // References to contextual data structures we need.
    domtree: &'a DominatorTree,
    liveness: &'a mut Liveness,
    virtregs: &'a VirtRegs,
    topo: &'a mut TopoOrder,

    // Current register pressure.
    pressure: Pressure,

    // Values spilled for the current instruction. These values have already been removed from the
    // pressure tracker, but they are still present in the live value tracker and their affinity
    // hasn't been changed yet.
    spills: &'a mut Vec<Value>,

    // Uses of register values in the current instruction.
    reg_uses: &'a mut Vec<RegUse>,
}

impl Spilling {
    /// Create a new spilling data structure.
    pub fn new() -> Self {
        Self {
            spills: Vec::new(),
            reg_uses: Vec::new(),
        }
    }

    /// Clear all data structures in this spilling pass.
    pub fn clear(&mut self) {
        self.spills.clear();
        self.reg_uses.clear();
    }

    /// Run the spilling algorithm over `func`.
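    ///
    /// A minimal usage sketch, assuming the analysis results have already been computed by the
    /// surrounding register allocator (the local names here are hypothetical):
    ///
    /// ```ignore
    /// let mut spilling = Spilling::new();
    /// spilling.run(isa, &mut func, &domtree, &mut liveness, &virtregs, &mut topo, &mut tracker);
    /// ```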
    pub fn run(
        &mut self,
        isa: &dyn TargetIsa,
        func: &mut Function,
        domtree: &DominatorTree,
        liveness: &mut Liveness,
        virtregs: &VirtRegs,
        topo: &mut TopoOrder,
        tracker: &mut LiveValueTracker,
    ) {
        let _tt = timing::ra_spilling();
        debug!("Spilling for:\n{}", func.display(isa));
        let reginfo = isa.register_info();
        let usable_regs = isa.allocatable_registers(func);
        let mut ctx = Context {
            cur: EncCursor::new(func, isa),
            reginfo: isa.register_info(),
            encinfo: isa.encoding_info(),
            domtree,
            liveness,
            virtregs,
            topo,
            pressure: Pressure::new(&reginfo, &usable_regs),
            spills: &mut self.spills,
            reg_uses: &mut self.reg_uses,
        };
        ctx.run(tracker)
    }
}

impl<'a> Context<'a> {
    fn run(&mut self, tracker: &mut LiveValueTracker) {
        self.topo.reset(self.cur.func.layout.blocks());
        while let Some(block) = self.topo.next(&self.cur.func.layout, self.domtree) {
            self.visit_block(block, tracker);
        }
    }

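    // Visit a single block: process the header (live-ins and parameters) first, then each
    // instruction in layout order, keeping the pressure tracker in sync with the live values.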
    fn visit_block(&mut self, block: Block, tracker: &mut LiveValueTracker) {
        debug!("Spilling {}:", block);
        self.cur.goto_top(block);
        self.visit_block_header(block, tracker);
        tracker.drop_dead_params();
        self.process_spills(tracker);

        while let Some(inst) = self.cur.next_inst() {
            if !self.cur.func.dfg[inst].opcode().is_ghost() {
                self.visit_inst(inst, block, tracker);
            } else {
                let (_throughs, kills) = tracker.process_ghost(inst);
                self.free_regs(kills);
            }
            tracker.drop_dead(inst);
            self.process_spills(tracker);
        }
    }

    // Take all live registers in `regs` from the pressure set.
    // This doesn't cause any spilling; it is assumed there are enough registers.
    fn take_live_regs(&mut self, regs: &[LiveValue]) {
        for lv in regs {
            if !lv.is_dead {
                if let Affinity::Reg(rci) = lv.affinity {
                    let rc = self.reginfo.rc(rci);
                    self.pressure.take(rc);
                }
            }
        }
    }

    // Free all registers in `kills` from the pressure set.
    fn free_regs(&mut self, kills: &[LiveValue]) {
        for lv in kills {
            if let Affinity::Reg(rci) = lv.affinity {
                if !self.spills.contains(&lv.value) {
                    let rc = self.reginfo.rc(rci);
                    self.pressure.free(rc);
                }
            }
        }
    }

    // Free all dead registers in `regs` from the pressure set.
    fn free_dead_regs(&mut self, regs: &[LiveValue]) {
        for lv in regs {
            if lv.is_dead {
                if let Affinity::Reg(rci) = lv.affinity {
                    if !self.spills.contains(&lv.value) {
                        let rc = self.reginfo.rc(rci);
                        self.pressure.free(rc);
                    }
                }
            }
        }
    }

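    // Enter `block` and ensure that the live-in values and block parameters fit in registers.
    // Live-ins fit by construction (they fit at the dominator), but a block can have arbitrarily
    // many parameters, so this may have to spill.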
    fn visit_block_header(&mut self, block: Block, tracker: &mut LiveValueTracker) {
        let (liveins, params) = tracker.block_top(
            block,
            &self.cur.func.dfg,
            self.liveness,
            &self.cur.func.layout,
            self.domtree,
        );

        // Count the live-in registers. These should already fit in registers; they did at the
        // dominator.
        self.pressure.reset();
        self.take_live_regs(liveins);

        // A block can have an arbitrary (up to 2^16...) number of parameters, so they are not
        // guaranteed to fit in registers.
        for lv in params {
            if let Affinity::Reg(rci) = lv.affinity {
                let rc = self.reginfo.rc(rci);
                'try_take: while let Err(mask) = self.pressure.take_transient(rc) {
                    debug!("Need {} reg for block param {}", rc, lv.value);
                    match self.spill_candidate(mask, liveins) {
                        Some(cand) => {
                            debug!(
                                "Spilling live-in {} to make room for {} block param {}",
                                cand, rc, lv.value
                            );
                            self.spill_reg(cand);
                        }
                        None => {
                            // We can't spill any of the live-in registers, so we have to spill a
                            // block argument. Since the current spill metric would consider all
                            // the block arguments equal, just spill the present register.
                            debug!("Spilling {} block argument {}", rc, lv.value);

                            // Since `spill_reg` will free a register, add the current one here.
                            self.pressure.take(rc);
                            self.spill_reg(lv.value);
                            break 'try_take;
                        }
                    }
                }
            }
        }

        // The transient pressure counts for the block arguments are accurate. Just preserve them.
        self.pressure.preserve_transient();
        self.free_dead_regs(params);
    }

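    // Process one normal (non-ghost) instruction: resolve register constraints on its uses,
    // release killed registers, spill values live across calls, and make room for the defs.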
    fn visit_inst(&mut self, inst: Inst, block: Block, tracker: &mut LiveValueTracker) {
        debug!("Inst {}, {}", self.cur.display_inst(inst), self.pressure);
        debug_assert_eq!(self.cur.current_inst(), Some(inst));
        debug_assert_eq!(self.cur.current_block(), Some(block));

        let constraints = self
            .encinfo
            .operand_constraints(self.cur.func.encodings[inst]);

        // We may need to resolve register constraints if there are any noteworthy uses.
        debug_assert!(self.reg_uses.is_empty());
        self.collect_reg_uses(inst, block, constraints);

        // Calls usually have fixed register uses.
        let call_sig = self.cur.func.dfg.call_signature(inst);
        if let Some(sig) = call_sig {
            self.collect_abi_reg_uses(inst, sig);
        }

        if !self.reg_uses.is_empty() {
            self.process_reg_uses(inst, tracker);
        }

        // Update the live value tracker with this instruction.
        let (throughs, kills, defs) = tracker.process_inst(inst, &self.cur.func.dfg, self.liveness);

        // Remove kills from the pressure tracker.
        self.free_regs(kills);

        // If inst is a call, spill all register values that are live across the call.
        // This means that we don't currently take advantage of callee-saved registers.
        // TODO: Be more sophisticated.
        let opcode = self.cur.func.dfg[inst].opcode();
        if call_sig.is_some() || opcode.clobbers_all_regs() {
            for lv in throughs {
                if lv.affinity.is_reg() && !self.spills.contains(&lv.value) {
                    self.spill_reg(lv.value);
                }
            }
        }

        // Make sure we have enough registers for the register defs.
        // Dead defs are included here. They need a register too.
        // No need to process call return values; they are in fixed registers.
        if let Some(constraints) = constraints {
            for op in constraints.outs {
                if op.kind != ConstraintKind::Stack {
                    // Add register def to pressure, spill if needed.
                    while let Err(mask) = self.pressure.take_transient(op.regclass) {
                        debug!("Need {} reg from {} throughs", op.regclass, throughs.len());
                        match self.spill_candidate(mask, throughs) {
                            Some(cand) => self.spill_reg(cand),
                            None => panic!(
                                "Ran out of {} registers for {}",
                                op.regclass,
                                self.cur.display_inst(inst)
                            ),
                        }
                    }
                }
            }
            self.pressure.reset_transient();
        }

        // Restore pressure state, compute pressure with affinities from `defs`.
        // Exclude dead defs. Includes call return values.
        // This won't cause spilling.
        self.take_live_regs(defs);
    }

    // Collect register uses that are noteworthy in one of the following ways:
    //
    // 1. It's a fixed register constraint.
    // 2. It's a use of a spilled value.
    // 3. It's a tied register constraint and the value isn't killed.
    //
    // We are assuming here that if a value is used both by a fixed register operand and a register
    // class operand, the two are compatible. We are also assuming that two register class
    // operands are always compatible.
    fn collect_reg_uses(
        &mut self,
        inst: Inst,
        block: Block,
        constraints: Option<&RecipeConstraints>,
    ) {
        let args = self.cur.func.dfg.inst_args(inst);
        let num_fixed_ins = if let Some(constraints) = constraints {
            for (idx, (op, &arg)) in constraints.ins.iter().zip(args).enumerate() {
                let mut reguse = RegUse::new(arg, idx, op.regclass.into());
                let lr = &self.liveness[arg];
                match op.kind {
                    ConstraintKind::Stack => continue,
                    ConstraintKind::FixedReg(_) => reguse.fixed = true,
                    ConstraintKind::Tied(_) => {
                        // A tied operand must kill the used value.
                        reguse.tied = !lr.killed_at(inst, block, &self.cur.func.layout);
                    }
                    ConstraintKind::FixedTied(_) => {
                        reguse.fixed = true;
                        reguse.tied = !lr.killed_at(inst, block, &self.cur.func.layout);
                    }
                    ConstraintKind::Reg => {}
                }
                if lr.affinity.is_stack() {
                    reguse.spilled = true;
                }

                // Only collect the interesting register uses.
                if reguse.fixed || reguse.tied || reguse.spilled {
                    debug!("  reguse: {}", reguse);
                    self.reg_uses.push(reguse);
                }
            }
            constraints.ins.len()
        } else {
            // A non-ghost instruction with no constraints can't have any
            // fixed operands.
            0
        };

        // Similarly, for return instructions, collect uses of ABI-defined
        // return values.
        if self.cur.func.dfg[inst].opcode().is_return() {
            debug_assert_eq!(
                self.cur.func.dfg.inst_variable_args(inst).len(),
                self.cur.func.signature.returns.len(),
                "The non-fixed arguments in a return should follow the function's signature."
            );
            for (ret_idx, (ret, &arg)) in
                self.cur.func.signature.returns.iter().zip(args).enumerate()
            {
                let idx = num_fixed_ins + ret_idx;
                let unit = match ret.location {
                    ArgumentLoc::Unassigned => {
                        panic!("function return signature should be legalized")
                    }
                    ArgumentLoc::Reg(unit) => unit,
                    ArgumentLoc::Stack(_) => continue,
                };
                let toprc = toprc_containing_regunit(unit, &self.reginfo);
                let mut reguse = RegUse::new(arg, idx, toprc.into());
                reguse.fixed = true;

                debug!("  reguse: {}", reguse);
                self.reg_uses.push(reguse);
            }
        }
    }

    // Collect register uses from the ABI input constraints.
    fn collect_abi_reg_uses(&mut self, inst: Inst, sig: SigRef) {
        let num_fixed_args = self.cur.func.dfg[inst]
            .opcode()
            .constraints()
            .num_fixed_value_arguments();
        let args = self.cur.func.dfg.inst_variable_args(inst);
        for (idx, (abi, &arg)) in self.cur.func.dfg.signatures[sig]
            .params
            .iter()
            .zip(args)
            .enumerate()
        {
            if abi.location.is_reg() {
                let (rci, spilled) = match self.liveness[arg].affinity {
                    Affinity::Reg(rci) => (rci, false),
                    Affinity::Stack => (
                        self.cur.isa.regclass_for_abi_type(abi.value_type).into(),
                        true,
                    ),
                    Affinity::Unassigned => panic!("Missing affinity for {}", arg),
                };
                let mut reguse = RegUse::new(arg, num_fixed_args + idx, rci);
                reguse.fixed = true;
                reguse.spilled = spilled;
                self.reg_uses.push(reguse);
            }
        }
    }

    // Process multiple register uses to resolve potential conflicts.
    //
    // Look for multiple uses of the same value in `self.reg_uses` and insert copies as necessary.
    // Trigger spilling if any of the temporaries cause the register pressure to become too high.
    //
    // Leave `self.reg_uses` empty.
    fn process_reg_uses(&mut self, inst: Inst, tracker: &LiveValueTracker) {
        // We're looking for multiple uses of the same value, so start by sorting by value. The
        // secondary `opidx` key makes it possible to use an unstable (non-allocating) sort. It
        // also means duplicate uses of a value end up adjacent, so a repeated use can be detected
        // by looking at the previous entry.
        self.reg_uses.sort_unstable_by_key(|u| (u.value, u.opidx));

        self.cur.use_srcloc(inst);
        for i in 0..self.reg_uses.len() {
            let ru = self.reg_uses[i];

            // Do we need to insert a copy for this use?
            let need_copy = if ru.tied {
                true
            } else if ru.fixed {
                // This is a fixed register use which doesn't necessarily require a copy.
                // Make a copy only if this is not the first use of the value.
                self.reg_uses
                    .get(i.wrapping_sub(1))
                    .map_or(false, |ru2| ru2.value == ru.value)
            } else {
                false
            };

            if need_copy {
                let copy = self.insert_copy(ru.value, ru.rci);
                self.cur.func.dfg.inst_args_mut(inst)[ru.opidx as usize] = copy;
            }

            // Even if we don't insert a copy, we may need to account for register pressure for the
            // reload pass.
            if need_copy || ru.spilled {
                let rc = self.reginfo.rc(ru.rci);
                while let Err(mask) = self.pressure.take_transient(rc) {
                    debug!("Copy of {} reg causes spill", rc);
                    // Spill a live register that is *not* used by the current instruction.
                    // Spilling a use wouldn't help.
                    //
                    // Do allow spilling of block arguments on branches. This is safe since we spill
                    // the whole virtual register which includes the matching block parameter value
                    // at the branch destination. It is also necessary since there can be
                    // arbitrarily many block arguments.
                    match {
                        let args = if self.cur.func.dfg[inst].opcode().is_branch() {
                            self.cur.func.dfg.inst_fixed_args(inst)
                        } else {
                            self.cur.func.dfg.inst_args(inst)
                        };
                        self.spill_candidate(
                            mask,
                            tracker.live().iter().filter(|lv| !args.contains(&lv.value)),
                        )
                    } {
                        Some(cand) => self.spill_reg(cand),
                        None => panic!(
                            "Ran out of {} registers when inserting copy before {}",
                            rc,
                            self.cur.display_inst(inst)
                        ),
                    }
                }
            }
        }
        self.pressure.reset_transient();
        self.reg_uses.clear()
    }

    // Find a spill candidate from `candidates` whose top-level register class is in `mask`.
    fn spill_candidate<'ii, II>(&self, mask: RegClassMask, candidates: II) -> Option<Value>
    where
        II: IntoIterator<Item = &'ii LiveValue>,
    {
        // Find the best viable spill candidate.
        //
        // The very simple strategy implemented here is to spill the value with the earliest def in
        // the reverse post-order. This strategy depends on a good reload pass to generate good
        // code.
        //
        // We know that all candidate defs dominate the current instruction, so one of them will
        // dominate the others. That is the earliest def.
        candidates
            .into_iter()
            .filter_map(|lv| {
                // Viable candidates are registers in one of the `mask` classes, and not already in
                // the spill set.
                if let Affinity::Reg(rci) = lv.affinity {
                    let rc = self.reginfo.rc(rci);
                    if (mask & (1 << rc.toprc)) != 0 && !self.spills.contains(&lv.value) {
                        // Here, `lv` is a viable spill candidate.
                        return Some(lv.value);
                    }
                }
                None
            })
            .min_by(|&a, &b| {
                // Find the minimum candidate according to the RPO of their defs.
                self.domtree.rpo_cmp(
                    self.cur.func.dfg.value_def(a),
                    self.cur.func.dfg.value_def(b),
                    &self.cur.func.layout,
                )
            })
    }

    /// Spill `value` immediately by
    ///
    /// 1. Changing its affinity to `Stack` which marks the spill.
    /// 2. Removing the value from the pressure tracker.
    /// 3. Adding the value to `self.spills` for later reference by `process_spills`.
    ///
    /// Note that this does not update the cached affinity in the live value tracker. Call
    /// `process_spills` to do that.
    fn spill_reg(&mut self, value: Value) {
        if let Affinity::Reg(rci) = self.liveness.spill(value) {
            let rc = self.reginfo.rc(rci);
            self.pressure.free(rc);
            self.spills.push(value);
            debug!("Spilled {}:{} -> {}", value, rc, self.pressure);
        } else {
            panic!("Cannot spill {} that was already on the stack", value);
        }

        // Assign a spill slot for the whole virtual register.
        let ss = self
            .cur
            .func
            .stack_slots
            .make_spill_slot(self.cur.func.dfg.value_type(value));
        for &v in self.virtregs.congruence_class(&value) {
            self.liveness.spill(v);
            self.cur.func.locations[v] = ValueLoc::Stack(ss);
        }
    }

    /// Process any pending spills in the `self.spills` vector.
    ///
    /// It is assumed that spills are removed from the pressure tracker immediately, see
    /// `spill_reg` above.
    ///
    /// We also need to update the live range affinity and remove spilled values from the live
    /// value tracker.
    fn process_spills(&mut self, tracker: &mut LiveValueTracker) {
        if !self.spills.is_empty() {
            tracker.process_spills(|v| self.spills.contains(&v));
            self.spills.clear()
        }
    }

    /// Insert a `copy value` before the current instruction and give it a live range extending to
    /// the current instruction.
    ///
    /// Returns the new local value created.
    fn insert_copy(&mut self, value: Value, rci: RegClassIndex) -> Value {
        let copy = self.cur.ins().copy(value);
        let inst = self.cur.built_inst();

        // Update live ranges.
        self.liveness.create_dead(copy, inst, Affinity::Reg(rci));
        self.liveness.extend_locally(
            copy,
            self.cur.func.layout.pp_block(inst),
            self.cur.current_inst().expect("must be at an instruction"),
            &self.cur.func.layout,
        );

        copy
    }
}

/// Struct representing a register use of a value.
/// Used to detect multiple uses of the same value with incompatible register constraints.
#[derive(Clone, Copy)]
struct RegUse {
    value: Value,
    opidx: u16,

    // Register class required by the use.
    rci: RegClassIndex,

    // A use with a fixed register constraint.
    fixed: bool,

    // A register use of a spilled value.
    spilled: bool,

    // A use with a tied register constraint *and* the used value is not killed.
    tied: bool,
}

impl RegUse {
    fn new(value: Value, idx: usize, rci: RegClassIndex) -> Self {
        Self {
            value,
            opidx: idx as u16,
            rci,
            fixed: false,
            spilled: false,
            tied: false,
        }
    }
}

impl fmt::Display for RegUse {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}@op{}", self.value, self.opidx)?;
        if self.fixed {
            write!(f, "/fixed")?;
        }
        if self.spilled {
            write!(f, "/spilled")?;
        }
        if self.tied {
            write!(f, "/tied")?;
        }
        Ok(())
    }
}