//! This implements the VCode container: a CFG of Insts that have been lowered.
//!
//! VCode is virtual-register code. An instruction in VCode is almost a machine
//! instruction; however, its register slots can refer to virtual registers in
//! addition to real machine registers.
//!
//! VCode is structured with traditional basic blocks, and
//! each block must be terminated by an unconditional branch (one target), a
//! conditional branch (two targets), or a return (no targets). Note that this
//! slightly differs from the machine code of most ISAs: in most ISAs, a
//! conditional branch has one target (and the not-taken case falls through).
//! However, we expect that machine backends will elide branches to the following
//! block (i.e., zero-offset jumps), and will be able to codegen a branch-cond /
//! branch-uncond pair if *both* targets are not fallthrough. This allows us to
//! play with layout prior to final binary emission, as well, if we want.
//!
//! See the main module comment in `mod.rs` for more details on the VCode-based
//! backend pipeline.
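//!
//! As a purely illustrative sketch (not tied to any particular ISA), a VCode
//! block ending in a two-target conditional branch might be emitted like this:
//!
//! ```text
//! ;; VCode block 0 terminator: cond-branch to block 1 (taken) / block 2 (not taken)
//! ;;
//! ;; If block 2 immediately follows block 0 in the final layout, only the
//! ;; conditional branch is emitted and the not-taken edge falls through:
//! ;;     b.cond  label_block1
//! ;;
//! ;; If neither target follows, a cond/uncond pair is emitted:
//! ;;     b.cond  label_block1
//! ;;     b       label_block2
//! ```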

use crate::ir::{self, types, Constant, ConstantData, SourceLoc};
use crate::machinst::*;
use crate::settings;
use crate::timing;
use regalloc::Function as RegallocFunction;
use regalloc::Set as RegallocSet;
use regalloc::{
    BlockIx, InstIx, PrettyPrint, Range, RegAllocResult, RegClass, RegUsageCollector,
    RegUsageMapper, SpillSlot, StackmapRequestInfo,
};

use alloc::boxed::Box;
use alloc::{borrow::Cow, vec::Vec};
use cranelift_entity::{entity_impl, Keys, PrimaryMap};
use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt;
use std::iter;
use std::string::String;

/// Index referring to an instruction in VCode.
pub type InsnIndex = u32;
/// Index referring to a basic block in VCode.
pub type BlockIndex = u32;
/// Range of instructions in VCode.
pub type InsnRange = core::ops::Range<InsnIndex>;

/// VCodeInst wraps all requirements for a MachInst to be in VCode: it must be
/// a `MachInst` and it must be able to emit itself via `MachInstEmit`.
pub trait VCodeInst: MachInst + MachInstEmit {}
impl<I: MachInst + MachInstEmit> VCodeInst for I {}

/// A function in "VCode" (virtualized-register code) form, after lowering.
/// This is essentially a standard CFG of basic blocks, where each basic block
/// consists of lowered instructions produced by the machine-specific backend.
pub struct VCode<I: VCodeInst> {
    /// Function liveins.
    liveins: RegallocSet<RealReg>,

    /// Function liveouts.
    liveouts: RegallocSet<RealReg>,

    /// VReg IR-level types.
    vreg_types: Vec<Type>,

    /// Do we have any ref values among our vregs?
    have_ref_values: bool,

    /// Lowered machine instructions in order corresponding to the original IR.
    insts: Vec<I>,

    /// Source locations for each instruction. (`SourceLoc` is a `u32`, so it is
    /// reasonable to keep one of these per instruction.)
    srclocs: Vec<SourceLoc>,

    /// Entry block.
    entry: BlockIndex,

    /// Block instruction indices.
    block_ranges: Vec<(InsnIndex, InsnIndex)>,

    /// Block successors: index range in the successor-list below.
    block_succ_range: Vec<(usize, usize)>,

    /// Block successor lists, concatenated into one Vec. The `block_succ_range`
    /// list of tuples above gives (start, end) ranges within this list that
    /// correspond to each basic block's successors.
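    ///
    /// For example (purely illustrative values), with `block_succs = [b1, b2, b3]`
    /// and `block_succ_range = [(0, 2), (2, 2), (2, 3)]`, block 0 has successors
    /// `b1, b2`, block 1 has no successors, and block 2 has successor `b3`.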
    block_succs: Vec<BlockIx>,

    /// Block-order information.
    block_order: BlockLoweringOrder,

    /// ABI object.
    abi: Box<dyn ABICallee<I = I>>,

    /// Constant information used during code emission. This should be
    /// immutable across function compilations within the same module.
    emit_info: I::Info,

    /// Safepoint instruction indices. Filled in post-regalloc. (Prior to
    /// regalloc, the safepoint instructions are listed in the
    /// `StackmapRequestInfo` held separately from the `VCode`.)
    safepoint_insns: Vec<InsnIndex>,

    /// For each safepoint entry in `safepoint_insns`, a list of `SpillSlot`s.
    /// These are used to generate actual stack maps at emission. Filled in
    /// post-regalloc.
    safepoint_slots: Vec<Vec<SpillSlot>>,

    /// Do we generate debug info?
    generate_debug_info: bool,

    /// Instruction end offsets, instruction indices at each label, and total
    /// buffer size. Only present if `generate_debug_info` is set.
    insts_layout: RefCell<(Vec<u32>, Vec<u32>, u32)>,

    /// Constants.
    constants: VCodeConstants,

    /// Are any debug value-labels present? If not, we can skip the
    /// post-emission analysis.
    has_value_labels: bool,
}

/// A builder for a VCode function body. This builder is designed for the
/// lowering approach that we take: we traverse basic blocks in forward
/// (original IR) order, but within each basic block, we generate code from
/// bottom to top; and within each IR instruction that we visit in this reverse
/// order, we emit machine instructions in *forward* order again.
///
/// Hence, to produce the final instructions in proper order, we perform two
/// reversals. First, the machine instructions (`I` instances) are produced in
/// forward order for an individual IR instruction. Then these are *reversed*
/// and concatenated to `bb_insns` at the end of the IR instruction lowering.
/// The `bb_insns` vec will thus contain all machine instructions for a basic
/// block, in reverse order. Finally, when we're done with a basic block, we
/// reverse the whole block's vec of instructions again, and concatenate onto
/// the VCode's insts.
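///
/// A schematic trace of the ordering described above (instruction names are
/// purely illustrative): suppose an IR block contains insts `a`, then `op`,
/// then `ret`, visited bottom-up, and each produces machine insts in forward
/// order:
///
/// ```text
/// visit ret: produces [R0, R1]; reversed and appended -> bb_insns = [R1, R0]
/// visit op:  produces [O0];     reversed and appended -> bb_insns = [R1, R0, O0]
/// visit a:   produces [A0, A1]; reversed and appended -> bb_insns = [R1, R0, O0, A1, A0]
/// end of block: reverse bb_insns and append to the VCode:
///     insts += [A0, A1, O0, R0, R1]
/// ```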
pub struct VCodeBuilder<I: VCodeInst> {
    /// In-progress VCode.
    vcode: VCode<I>,

    /// In-progress stack map-request info.
    stack_map_info: StackmapRequestInfo,

    /// Index of the last block-start in the vcode.
    block_start: InsnIndex,

    /// Start of succs for the current block in the concatenated succs list.
    succ_start: usize,

    /// Current source location.
    cur_srcloc: SourceLoc,
}

impl<I: VCodeInst> VCodeBuilder<I> {
    /// Create a new VCodeBuilder.
    pub fn new(
        abi: Box<dyn ABICallee<I = I>>,
        emit_info: I::Info,
        block_order: BlockLoweringOrder,
        constants: VCodeConstants,
    ) -> VCodeBuilder<I> {
        let reftype_class = I::ref_type_regclass(abi.flags());
        let vcode = VCode::new(
            abi,
            emit_info,
            block_order,
            constants,
            /* generate_debug_info = */ true,
        );
        let stack_map_info = StackmapRequestInfo {
            reftype_class,
            reftyped_vregs: vec![],
            safepoint_insns: vec![],
        };

        VCodeBuilder {
            vcode,
            stack_map_info,
            block_start: 0,
            succ_start: 0,
            cur_srcloc: SourceLoc::default(),
        }
    }

    /// Access the ABI object.
    pub fn abi(&mut self) -> &mut dyn ABICallee<I = I> {
        &mut *self.vcode.abi
    }

    /// Access to the BlockLoweringOrder object.
    pub fn block_order(&self) -> &BlockLoweringOrder {
        &self.vcode.block_order
    }

    /// Set the type of a VReg.
    pub fn set_vreg_type(&mut self, vreg: VirtualReg, ty: Type) {
        if self.vcode.vreg_types.len() <= vreg.get_index() {
            self.vcode
                .vreg_types
                .resize(vreg.get_index() + 1, ir::types::I8);
        }
        self.vcode.vreg_types[vreg.get_index()] = ty;
        if is_reftype(ty) {
            self.stack_map_info.reftyped_vregs.push(vreg);
            self.vcode.have_ref_values = true;
        }
    }

    /// Are there any reference-typed values at all among the vregs?
    pub fn have_ref_values(&self) -> bool {
        self.vcode.have_ref_values()
    }

    /// Set the current block as the entry block.
    pub fn set_entry(&mut self, block: BlockIndex) {
        self.vcode.entry = block;
    }

    /// End the current basic block. Must be called after emitting vcode insts
    /// for IR insts and prior to ending the function (building the VCode).
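    ///
    /// A schematic builder sequence (hypothetical; `inst_a` and `inst_b` stand
    /// for already-lowered machine instructions):
    ///
    /// ```ignore
    /// builder.push(inst_a, /* is_safepoint = */ false);
    /// builder.push(inst_b, /* is_safepoint = */ false);
    /// builder.end_bb(); // closes the block containing inst_a and inst_b
    /// ```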
    pub fn end_bb(&mut self) {
        let start_idx = self.block_start;
        let end_idx = self.vcode.insts.len() as InsnIndex;
        self.block_start = end_idx;
        // Add the instruction index range to the list of blocks.
        self.vcode.block_ranges.push((start_idx, end_idx));
        // End the successors list.
        let succ_end = self.vcode.block_succs.len();
        self.vcode
            .block_succ_range
            .push((self.succ_start, succ_end));
        self.succ_start = succ_end;
    }

    /// Push an instruction for the current BB and current IR inst within the BB.
    pub fn push(&mut self, insn: I, is_safepoint: bool) {
        match insn.is_term() {
            MachTerminator::None | MachTerminator::Ret => {}
            MachTerminator::Uncond(target) => {
                self.vcode.block_succs.push(BlockIx::new(target.get()));
            }
            MachTerminator::Cond(true_branch, false_branch) => {
                self.vcode.block_succs.push(BlockIx::new(true_branch.get()));
                self.vcode
                    .block_succs
                    .push(BlockIx::new(false_branch.get()));
            }
            MachTerminator::Indirect(targets) => {
                for target in targets {
                    self.vcode.block_succs.push(BlockIx::new(target.get()));
                }
            }
        }
        if insn.defines_value_label().is_some() {
            self.vcode.has_value_labels = true;
        }
        self.vcode.insts.push(insn);
        self.vcode.srclocs.push(self.cur_srcloc);
        if is_safepoint {
            self.stack_map_info
                .safepoint_insns
                .push(InstIx::new((self.vcode.insts.len() - 1) as u32));
        }
    }

    /// Get the current source location.
    pub fn get_srcloc(&self) -> SourceLoc {
        self.cur_srcloc
    }

    /// Set the current source location.
    pub fn set_srcloc(&mut self, srcloc: SourceLoc) {
        self.cur_srcloc = srcloc;
    }

    /// Access the constants.
    pub fn constants(&mut self) -> &mut VCodeConstants {
        &mut self.vcode.constants
    }

    /// Build the final VCode, returning the vcode itself as well as auxiliary
    /// information, such as the stack map request information.
    pub fn build(self) -> (VCode<I>, StackmapRequestInfo) {
        // TODO: come up with an abstraction for "vcode and auxiliary data". The
        // auxiliary data needs to be separate from the vcode so that it can be
        // referenced as the vcode is mutated (e.g. by the register allocator).
        (self.vcode, self.stack_map_info)
    }
}

fn is_redundant_move<I: VCodeInst>(insn: &I) -> bool {
    if let Some((to, from)) = insn.is_move() {
        to.to_reg() == from
    } else {
        false
    }
}

/// Is this type a reference type?
fn is_reftype(ty: Type) -> bool {
    ty == types::R64 || ty == types::R32
}

impl<I: VCodeInst> VCode<I> {
    /// New empty VCode.
    fn new(
        abi: Box<dyn ABICallee<I = I>>,
        emit_info: I::Info,
        block_order: BlockLoweringOrder,
        constants: VCodeConstants,
        generate_debug_info: bool,
    ) -> VCode<I> {
        VCode {
            liveins: abi.liveins(),
            liveouts: abi.liveouts(),
            vreg_types: vec![],
            have_ref_values: false,
            insts: vec![],
            srclocs: vec![],
            entry: 0,
            block_ranges: vec![],
            block_succ_range: vec![],
            block_succs: vec![],
            block_order,
            abi,
            emit_info,
            safepoint_insns: vec![],
            safepoint_slots: vec![],
            generate_debug_info,
            insts_layout: RefCell::new((vec![], vec![], 0)),
            constants,
            has_value_labels: false,
        }
    }

    /// Returns the flags controlling this function's compilation.
    pub fn flags(&self) -> &settings::Flags {
        self.abi.flags()
    }

    /// Get the IR-level type of a VReg.
    pub fn vreg_type(&self, vreg: VirtualReg) -> Type {
        self.vreg_types[vreg.get_index()]
    }

    /// Are there any reference-typed values at all among the vregs?
    pub fn have_ref_values(&self) -> bool {
        self.have_ref_values
    }

    /// Get the entry block.
    pub fn entry(&self) -> BlockIndex {
        self.entry
    }

    /// Get the number of blocks. Block indices will be in the range `0 ..
    /// self.num_blocks()`.
    pub fn num_blocks(&self) -> usize {
        self.block_ranges.len()
    }

    /// Stack frame size for the full function's body.
    pub fn frame_size(&self) -> u32 {
        self.abi.frame_size()
    }

    /// Inbound stack-args size.
    pub fn stack_args_size(&self) -> u32 {
        self.abi.stack_args_size()
    }

    /// Get the successors for a block.
    pub fn succs(&self, block: BlockIndex) -> &[BlockIx] {
        let (start, end) = self.block_succ_range[block as usize];
        &self.block_succs[start..end]
    }

    /// Take the results of register allocation, with a sequence of
    /// instructions including spliced fill/reload/move instructions, and replace
    /// the VCode with them.
    pub fn replace_insns_from_regalloc(&mut self, result: RegAllocResult<Self>) {
        // Record the spillslot count and clobbered registers for the ABI/stack
        // setup code.
        self.abi.set_num_spillslots(result.num_spill_slots as usize);
        self.abi
            .set_clobbered(result.clobbered_registers.map(|r| Writable::from_reg(*r)));

        let mut final_insns = vec![];
        let mut final_block_ranges = vec![(0, 0); self.num_blocks()];
        let mut final_srclocs = vec![];
        let mut final_safepoint_insns = vec![];
        let mut safept_idx = 0;

        assert!(result.target_map.elems().len() == self.num_blocks());
        for block in 0..self.num_blocks() {
            let start = result.target_map.elems()[block].get() as usize;
            let end = if block == self.num_blocks() - 1 {
                result.insns.len()
            } else {
                result.target_map.elems()[block + 1].get() as usize
            };
            let block = block as BlockIndex;
            let final_start = final_insns.len() as InsnIndex;

            if block == self.entry {
                // Start with the prologue.
                let prologue = self.abi.gen_prologue();
                let len = prologue.len();
                final_insns.extend(prologue.into_iter());
                final_srclocs.extend(iter::repeat(SourceLoc::default()).take(len));
            }

            for i in start..end {
                let insn = &result.insns[i];

                // Elide redundant moves at this point (we only know what is
                // redundant once registers are allocated).
                if is_redundant_move(insn) {
                    continue;
                }

                // Is there a srcloc associated with this insn? Look it up based on original
                // instruction index (if new insn corresponds to some original insn, i.e., is not
                // an inserted load/spill/move).
                let orig_iix = result.orig_insn_map[InstIx::new(i as u32)];
                let srcloc = if orig_iix.is_invalid() {
                    SourceLoc::default()
                } else {
                    self.srclocs[orig_iix.get() as usize]
                };

                // Whenever encountering a return instruction, replace it
                // with the epilogue.
                let is_ret = insn.is_term() == MachTerminator::Ret;
                if is_ret {
                    let epilogue = self.abi.gen_epilogue();
                    let len = epilogue.len();
                    final_insns.extend(epilogue.into_iter());
                    final_srclocs.extend(iter::repeat(srcloc).take(len));
                } else {
                    final_insns.push(insn.clone());
                    final_srclocs.push(srcloc);
                }

                // Was this instruction a safepoint instruction? Add its final
                // index to the safepoint insn-index list if so.
                if safept_idx < result.new_safepoint_insns.len()
                    && (result.new_safepoint_insns[safept_idx].get() as usize) == i
                {
                    let idx = final_insns.len() - 1;
                    final_safepoint_insns.push(idx as InsnIndex);
                    safept_idx += 1;
                }
            }

            let final_end = final_insns.len() as InsnIndex;
            final_block_ranges[block as usize] = (final_start, final_end);
        }

        debug_assert!(final_insns.len() == final_srclocs.len());

        self.insts = final_insns;
        self.srclocs = final_srclocs;
        self.block_ranges = final_block_ranges;
        self.safepoint_insns = final_safepoint_insns;

        // Save safepoint slot-lists. These will be passed to the `EmitState`
        // for the machine backend during emission so that it can do
        // target-specific translations of slot numbers to stack offsets.
        self.safepoint_slots = result.stackmaps;
    }

    /// Emit the instructions to a `MachBuffer`, containing fixed-up code and external
    /// reloc/trap/etc. records ready for use.
    pub fn emit(&self) -> MachBuffer<I>
    where
        I: MachInstEmit,
    {
        let _tt = timing::vcode_emit();
        let mut buffer = MachBuffer::new();
        let mut state = I::State::new(&*self.abi);

        // The first M MachLabels are reserved for block indices, the next N MachLabels for
        // constants.
        buffer.reserve_labels_for_blocks(self.num_blocks() as BlockIndex);
        buffer.reserve_labels_for_constants(&self.constants);

        let mut inst_ends = vec![0; self.insts.len()];
        let mut label_insn_iix = vec![0; self.num_blocks()];

        let mut safepoint_idx = 0;
        let mut cur_srcloc = None;
        for block in 0..self.num_blocks() {
            let block = block as BlockIndex;
            let new_offset = I::align_basic_block(buffer.cur_offset());
            while new_offset > buffer.cur_offset() {
                // Pad with NOPs up to the aligned block offset.
                let nop = I::gen_nop((new_offset - buffer.cur_offset()) as usize);
                nop.emit(&mut buffer, &self.emit_info, &mut Default::default());
            }
            assert_eq!(buffer.cur_offset(), new_offset);

            let (start, end) = self.block_ranges[block as usize];
            buffer.bind_label(MachLabel::from_block(block));
            label_insn_iix[block as usize] = start;
            for iix in start..end {
                let srcloc = self.srclocs[iix as usize];
                if cur_srcloc != Some(srcloc) {
                    if cur_srcloc.is_some() {
                        buffer.end_srcloc();
                    }
                    buffer.start_srcloc(srcloc);
                    cur_srcloc = Some(srcloc);
                }
                state.pre_sourceloc(cur_srcloc.unwrap_or(SourceLoc::default()));

                if safepoint_idx < self.safepoint_insns.len()
                    && self.safepoint_insns[safepoint_idx] == iix
                {
                    if self.safepoint_slots[safepoint_idx].len() > 0 {
                        let stack_map = self.abi.spillslots_to_stack_map(
                            &self.safepoint_slots[safepoint_idx][..],
                            &state,
                        );
                        state.pre_safepoint(stack_map);
                    }
                    safepoint_idx += 1;
                }

                self.insts[iix as usize].emit(&mut buffer, &self.emit_info, &mut state);

                if self.generate_debug_info {
                    // Buffer truncation may have happened since last inst append; trim inst-end
                    // layout info as appropriate.
                    let l = &mut inst_ends[0..iix as usize];
                    for end in l.iter_mut().rev() {
                        if *end > buffer.cur_offset() {
                            *end = buffer.cur_offset();
                        } else {
                            break;
                        }
                    }
                    inst_ends[iix as usize] = buffer.cur_offset();
                }
            }

            if cur_srcloc.is_some() {
                buffer.end_srcloc();
                cur_srcloc = None;
            }

            // Do we need an island? Get the worst-case size of the next BB and see if, having
            // emitted that many bytes, we will be beyond the deadline.
            if block < (self.num_blocks() - 1) as BlockIndex {
                let next_block = block + 1;
                let next_block_range = self.block_ranges[next_block as usize];
                let next_block_size = next_block_range.1 - next_block_range.0;
                let worst_case_next_bb = I::worst_case_size() * next_block_size;
                if buffer.island_needed(worst_case_next_bb) {
                    buffer.emit_island();
                }
            }
        }

        // Emit the constants used by the function.
        for (constant, data) in self.constants.iter() {
            let label = buffer.get_label_for_constant(constant);
            buffer.defer_constant(label, data.alignment(), data.as_slice(), u32::max_value());
        }

        if self.generate_debug_info {
            for end in inst_ends.iter_mut().rev() {
                if *end > buffer.cur_offset() {
                    *end = buffer.cur_offset();
                } else {
                    break;
                }
            }
            *self.insts_layout.borrow_mut() = (inst_ends, label_insn_iix, buffer.cur_offset());
        }

        buffer
    }

    /// Generates value-label ranges.
    pub fn value_labels_ranges(&self) -> ValueLabelsRanges {
        if !self.has_value_labels {
            return ValueLabelsRanges::default();
        }

        let layout = &self.insts_layout.borrow();
        debug::compute(&self.insts, &layout.0[..], &layout.1[..])
    }

    /// Get the offsets of stackslots.
    pub fn stackslot_offsets(&self) -> &PrimaryMap<StackSlot, u32> {
        self.abi.stackslot_offsets()
    }

    /// Get the IR block for a BlockIndex, if one exists.
    pub fn bindex_to_bb(&self, block: BlockIndex) -> Option<ir::Block> {
        self.block_order.lowered_order()[block as usize].orig_block()
    }
}

impl<I: VCodeInst> RegallocFunction for VCode<I> {
    type Inst = I;

    fn insns(&self) -> &[I] {
        &self.insts[..]
    }

    fn insns_mut(&mut self) -> &mut [I] {
        &mut self.insts[..]
    }

    fn get_insn(&self, insn: InstIx) -> &I {
        &self.insts[insn.get() as usize]
    }

    fn get_insn_mut(&mut self, insn: InstIx) -> &mut I {
        &mut self.insts[insn.get() as usize]
    }

    fn blocks(&self) -> Range<BlockIx> {
        Range::new(BlockIx::new(0), self.block_ranges.len())
    }

    fn entry_block(&self) -> BlockIx {
        BlockIx::new(self.entry)
    }

    fn block_insns(&self, block: BlockIx) -> Range<InstIx> {
        let (start, end) = self.block_ranges[block.get() as usize];
        Range::new(InstIx::new(start), (end - start) as usize)
    }

    fn block_succs(&self, block: BlockIx) -> Cow<[BlockIx]> {
        let (start, end) = self.block_succ_range[block.get() as usize];
        Cow::Borrowed(&self.block_succs[start..end])
    }

    fn is_ret(&self, insn: InstIx) -> bool {
        match self.insts[insn.get() as usize].is_term() {
            MachTerminator::Ret => true,
            _ => false,
        }
    }

    fn is_included_in_clobbers(&self, insn: &I) -> bool {
        insn.is_included_in_clobbers()
    }

    fn get_regs(insn: &I, collector: &mut RegUsageCollector) {
        insn.get_regs(collector)
    }

    fn map_regs<RUM: RegUsageMapper>(insn: &mut I, mapper: &RUM) {
        insn.map_regs(mapper);
    }

    fn is_move(&self, insn: &I) -> Option<(Writable<Reg>, Reg)> {
        insn.is_move()
    }

    fn get_num_vregs(&self) -> usize {
        self.vreg_types.len()
    }

    fn get_spillslot_size(&self, regclass: RegClass, vreg: VirtualReg) -> u32 {
        let ty = self.vreg_type(vreg);
        self.abi.get_spillslot_size(regclass, ty)
    }

    fn gen_spill(&self, to_slot: SpillSlot, from_reg: RealReg, vreg: Option<VirtualReg>) -> I {
        let ty = vreg.map(|v| self.vreg_type(v));
        self.abi.gen_spill(to_slot, from_reg, ty)
    }

    fn gen_reload(
        &self,
        to_reg: Writable<RealReg>,
        from_slot: SpillSlot,
        vreg: Option<VirtualReg>,
    ) -> I {
        let ty = vreg.map(|v| self.vreg_type(v));
        self.abi.gen_reload(to_reg, from_slot, ty)
    }

    fn gen_move(&self, to_reg: Writable<RealReg>, from_reg: RealReg, vreg: VirtualReg) -> I {
        let ty = self.vreg_type(vreg);
        I::gen_move(to_reg.map(|r| r.to_reg()), from_reg.to_reg(), ty)
    }

    fn gen_zero_len_nop(&self) -> I {
        I::gen_nop(0)
    }

    fn maybe_direct_reload(&self, insn: &I, reg: VirtualReg, slot: SpillSlot) -> Option<I> {
        insn.maybe_direct_reload(reg, slot)
    }

    fn func_liveins(&self) -> RegallocSet<RealReg> {
        self.liveins.clone()
    }

    fn func_liveouts(&self) -> RegallocSet<RealReg> {
        self.liveouts.clone()
    }
}

impl<I: VCodeInst> fmt::Debug for VCode<I> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        writeln!(f, "VCode_Debug {{")?;
        writeln!(f, " Entry block: {}", self.entry)?;

        for block in 0..self.num_blocks() {
            writeln!(f, "Block {}:", block)?;
            for succ in self.succs(block as BlockIndex) {
                writeln!(f, " (successor: Block {})", succ.get())?;
            }
            let (start, end) = self.block_ranges[block];
            writeln!(f, " (instruction range: {} .. {})", start, end)?;
            for inst in start..end {
                writeln!(f, " Inst {}: {:?}", inst, self.insts[inst as usize])?;
            }
        }

        writeln!(f, "}}")?;
        Ok(())
    }
}

/// Pretty-printing with `RealRegUniverse` context.
impl<I: VCodeInst> PrettyPrint for VCode<I> {
    fn show_rru(&self, mb_rru: Option<&RealRegUniverse>) -> String {
        use std::fmt::Write;

        let mut s = String::new();
        write!(&mut s, "VCode_ShowWithRRU {{{{\n").unwrap();
        write!(&mut s, " Entry block: {}\n", self.entry).unwrap();

        let mut state = Default::default();
        let mut safepoint_idx = 0;
        for i in 0..self.num_blocks() {
            let block = i as BlockIndex;

            write!(&mut s, "Block {}:\n", block).unwrap();
            if let Some(bb) = self.bindex_to_bb(block) {
                write!(&mut s, " (original IR block: {})\n", bb).unwrap();
            }
            for succ in self.succs(block) {
                write!(&mut s, " (successor: Block {})\n", succ.get()).unwrap();
            }
            let (start, end) = self.block_ranges[block as usize];
            write!(&mut s, " (instruction range: {} .. {})\n", start, end).unwrap();
            for inst in start..end {
                if safepoint_idx < self.safepoint_insns.len()
                    && self.safepoint_insns[safepoint_idx] == inst
                {
                    write!(
                        &mut s,
                        " (safepoint: slots {:?} with EmitState {:?})\n",
                        self.safepoint_slots[safepoint_idx], state,
                    )
                    .unwrap();
                    safepoint_idx += 1;
                }
                write!(
                    &mut s,
                    " Inst {}: {}\n",
                    inst,
                    self.insts[inst as usize].pretty_print(mb_rru, &mut state)
                )
                .unwrap();
            }
        }

        write!(&mut s, "}}}}\n").unwrap();

        s
    }
}

/// This structure tracks the large constants used in VCode that will be emitted separately by the
/// [MachBuffer].
///
/// First, during the lowering phase, constants are inserted using
/// [VCodeConstants::insert]; an intermediate handle, [VCodeConstant], tracks what constants are
/// used in this phase. Some deduplication is performed, when possible, as constant
/// values are inserted.
///
/// Secondly, during the emission phase, the [MachBuffer] assigns [MachLabel]s for each of the
/// constants so that instructions can refer to the value's memory location. The [MachBuffer]
/// then writes the constant values to the buffer.
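///
/// A minimal usage sketch (hypothetical values; marked `ignore` because it
/// depends on a lowering context):
///
/// ```ignore
/// let mut constants = VCodeConstants::with_capacity(2);
/// // Well-known data is deduplicated by its static address:
/// static MASK: [u8; 16] = [0x7f; 16];
/// let a = constants.insert(VCodeConstantData::WellKnown(&MASK[..]));
/// let b = constants.insert(VCodeConstantData::WellKnown(&MASK[..]));
/// assert_eq!(a, b);
/// // Generated data is never deduplicated:
/// let c = constants.insert(VCodeConstantData::Generated(vec![0u8; 4].into()));
/// let d = constants.insert(VCodeConstantData::Generated(vec![0u8; 4].into()));
/// assert_ne!(c, d);
/// ```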
#[derive(Default)]
pub struct VCodeConstants {
    constants: PrimaryMap<VCodeConstant, VCodeConstantData>,
    pool_uses: HashMap<Constant, VCodeConstant>,
    well_known_uses: HashMap<*const [u8], VCodeConstant>,
}
impl VCodeConstants {
    /// Initialize the structure with the expected number of constants.
    pub fn with_capacity(expected_num_constants: usize) -> Self {
        Self {
            constants: PrimaryMap::with_capacity(expected_num_constants),
            pool_uses: HashMap::with_capacity(expected_num_constants),
            well_known_uses: HashMap::new(),
        }
    }

    /// Insert a constant; using this method indicates that a constant value will be used and thus
    /// will be emitted to the `MachBuffer`. The current implementation can deduplicate constants
    /// that are [VCodeConstantData::Pool] or [VCodeConstantData::WellKnown] but not
    /// [VCodeConstantData::Generated].
    pub fn insert(&mut self, data: VCodeConstantData) -> VCodeConstant {
        match data {
            VCodeConstantData::Generated(_) => self.constants.push(data),
            VCodeConstantData::Pool(constant, _) => match self.pool_uses.get(&constant) {
                None => {
                    let vcode_constant = self.constants.push(data);
                    self.pool_uses.insert(constant, vcode_constant);
                    vcode_constant
                }
                Some(&vcode_constant) => vcode_constant,
            },
            VCodeConstantData::WellKnown(data_ref) => {
                match self.well_known_uses.get(&(data_ref as *const [u8])) {
                    None => {
                        let vcode_constant = self.constants.push(data);
                        self.well_known_uses
                            .insert(data_ref as *const [u8], vcode_constant);
                        vcode_constant
                    }
                    Some(&vcode_constant) => vcode_constant,
                }
            }
        }
    }

    /// Retrieve a byte slice for the given [VCodeConstant], if available.
    pub fn get(&self, constant: VCodeConstant) -> Option<&[u8]> {
        self.constants.get(constant).map(|d| d.as_slice())
    }

    /// Return the number of constants inserted.
    pub fn len(&self) -> usize {
        self.constants.len()
    }

    /// Iterate over the [VCodeConstant] keys inserted in this structure.
    pub fn keys(&self) -> Keys<VCodeConstant> {
        self.constants.keys()
    }

    /// Iterate over the [VCodeConstant] keys and the [VCodeConstantData] values inserted in this
    /// structure.
    pub fn iter(&self) -> impl Iterator<Item = (VCodeConstant, &VCodeConstantData)> {
        self.constants.iter()
    }
}

/// A use of a constant by one or more VCode instructions; see [VCodeConstants].
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct VCodeConstant(u32);
entity_impl!(VCodeConstant);

/// Identify the different types of constant that can be inserted into [VCodeConstants]. Tracking
/// these separately instead of as raw byte buffers allows us to avoid some duplication.
pub enum VCodeConstantData {
    /// A constant already present in the Cranelift IR
    /// [ConstantPool](crate::ir::constant::ConstantPool).
    Pool(Constant, ConstantData),
    /// A reference to a well-known constant value that is statically encoded within the compiler.
    WellKnown(&'static [u8]),
    /// A constant value generated during lowering; the value may depend on the instruction context,
    /// which makes it difficult to de-duplicate. Use the other variants if possible.
    Generated(ConstantData),
}
impl VCodeConstantData {
    /// Retrieve the constant data as a byte slice.
    pub fn as_slice(&self) -> &[u8] {
        match self {
            VCodeConstantData::Pool(_, d) | VCodeConstantData::Generated(d) => d.as_slice(),
            VCodeConstantData::WellKnown(d) => d,
        }
    }

    /// Calculate the alignment of the constant data.
    pub fn alignment(&self) -> u32 {
        if self.as_slice().len() <= 8 {
            8
        } else {
            16
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use std::mem::size_of;

    #[test]
    fn size_of_constant_structs() {
        assert_eq!(size_of::<Constant>(), 4);
        assert_eq!(size_of::<VCodeConstant>(), 4);
        assert_eq!(size_of::<ConstantData>(), 24);
        assert_eq!(size_of::<VCodeConstantData>(), 32);
        assert_eq!(
            size_of::<PrimaryMap<VCodeConstant, VCodeConstantData>>(),
            24
        );
        // TODO The VCodeConstants structure's memory size could be further optimized.
        // With certain versions of Rust, each `HashMap` in `VCodeConstants` occupied at
        // least 48 bytes, making an empty `VCodeConstants` cost 120 bytes.
    }
}