1 //! A simple GVN pass.
2 
3 use crate::cursor::{Cursor, FuncCursor};
4 use crate::dominator_tree::DominatorTree;
5 use crate::ir::{Function, Inst, InstructionData, Opcode, Type};
6 use crate::scoped_hash_map::ScopedHashMap;
7 use crate::timing;
8 use core::cell::{Ref, RefCell};
9 use core::hash::{Hash, Hasher};
10 use std::vec::Vec;
11 
12 /// Test whether the given opcode is unsafe to even consider for GVN.
trivially_unsafe_for_gvn(opcode: Opcode) -> bool13 fn trivially_unsafe_for_gvn(opcode: Opcode) -> bool {
14     opcode.is_call()
15         || opcode.is_branch()
16         || opcode.is_terminator()
17         || opcode.is_return()
18         || opcode.can_trap()
19         || opcode.other_side_effects()
20         || opcode.can_store()
21         || opcode.writes_cpu_flags()
22 }
23 
24 /// Test that, if the specified instruction is a load, it doesn't have the `readonly` memflag.
is_load_and_not_readonly(inst_data: &InstructionData) -> bool25 fn is_load_and_not_readonly(inst_data: &InstructionData) -> bool {
26     match *inst_data {
27         InstructionData::Load { flags, .. } | InstructionData::LoadComplex { flags, .. } => {
28             !flags.readonly()
29         }
30         _ => inst_data.opcode().can_load(),
31     }
32 }
33 
/// Wrapper around `InstructionData` which implements `Eq` and `Hash`
///
/// `InstructionData` can't be hashed/compared on its own here because its
/// value-list operands need the function's value-list pool
/// (`func.dfg.value_lists`); `pos` lets the trait impls below reach it.
#[derive(Clone)]
struct HashKey<'a, 'f: 'a> {
    // The instruction data acting as the map key.
    inst: InstructionData,
    // Controlling type variable, so otherwise-identical instructions with
    // different types don't compare equal.
    ty: Type,
    // Shared cursor into the function; borrowed only to access the DFG's
    // value lists during hashing/equality.
    pos: &'a RefCell<FuncCursor<'f>>,
}
41 impl<'a, 'f: 'a> Hash for HashKey<'a, 'f> {
hash<H: Hasher>(&self, state: &mut H)42     fn hash<H: Hasher>(&self, state: &mut H) {
43         let pool = &self.pos.borrow().func.dfg.value_lists;
44         self.inst.hash(state, pool);
45         self.ty.hash(state);
46     }
47 }
48 impl<'a, 'f: 'a> PartialEq for HashKey<'a, 'f> {
eq(&self, other: &Self) -> bool49     fn eq(&self, other: &Self) -> bool {
50         let pool = &self.pos.borrow().func.dfg.value_lists;
51         self.inst.eq(&other.inst, pool) && self.ty == other.ty
52     }
53 }
54 impl<'a, 'f: 'a> Eq for HashKey<'a, 'f> {}
55 
/// Perform simple GVN on `func`.
///
/// Visits EBBs in reverse post-order while maintaining a scoped hash map of
/// the instructions visible (dominating) at the current point. When an
/// instruction is identical to one already visible, its results are replaced
/// with aliases to the earlier instruction's results and it is removed.
pub fn do_simple_gvn(func: &mut Function, domtree: &mut DominatorTree) {
    let _tt = timing::gvn();
    debug_assert!(domtree.is_valid());

    // Visit EBBs in a reverse post-order.
    //
    // The RefCell here is a bit ugly since the HashKeys in the ScopedHashMap
    // need a reference to the function.
    let pos = RefCell::new(FuncCursor::new(func));

    // Maps (instruction data, controlling type) -> the dominating instruction
    // that first computed that value.
    let mut visible_values: ScopedHashMap<HashKey, Inst> = ScopedHashMap::new();
    // Each open scope is represented by one instruction inside it; the scope
    // stays open as long as that instruction dominates the current EBB.
    let mut scope_stack: Vec<Inst> = Vec::new();

    for &ebb in domtree.cfg_postorder().iter().rev() {
        {
            // Pop any scopes that we just exited.
            let layout = &pos.borrow().func.layout;
            loop {
                if let Some(current) = scope_stack.last() {
                    if domtree.dominates(*current, ebb, layout) {
                        break;
                    }
                } else {
                    break;
                }
                scope_stack.pop();
                visible_values.decrement_depth();
            }

            // Push a scope for the current block.
            scope_stack.push(layout.first_inst(ebb).unwrap());
            visible_values.increment_depth();
        }

        pos.borrow_mut().goto_top(ebb);
        while let Some(inst) = {
            // Scope this mutable borrow to the loop header so the body can
            // re-borrow `pos` freely.
            let mut pos = pos.borrow_mut();
            pos.next_inst()
        } {
            // Resolve aliases, particularly aliases we created earlier.
            pos.borrow_mut().func.dfg.resolve_aliases_in_arguments(inst);

            // Shared read-only view of the function for the checks below.
            // NOTE: must be explicitly dropped before mutating via the cursor.
            let func = Ref::map(pos.borrow(), |pos| &pos.func);

            let opcode = func.dfg[inst].opcode();

            // A branch in the middle of an EBB limits the dominance of the
            // instructions after it, so open a new scope starting right after
            // the branch.
            if opcode.is_branch() && !opcode.is_terminator() {
                scope_stack.push(func.layout.next_inst(inst).unwrap());
                visible_values.increment_depth();
            }

            // Skip instructions that can never be value-numbered (calls,
            // control flow, traps, stores, other side effects).
            if trivially_unsafe_for_gvn(opcode) {
                continue;
            }

            // These are split up to separate concerns.
            if is_load_and_not_readonly(&func.dfg[inst]) {
                continue;
            }

            let ctrl_typevar = func.dfg.ctrl_typevar(inst);
            let key = HashKey {
                inst: func.dfg[inst].clone(),
                ty: ctrl_typevar,
                pos: &pos,
            };
            use crate::scoped_hash_map::Entry::*;
            match visible_values.entry(key) {
                Occupied(entry) => {
                    debug_assert!(domtree.dominates(*entry.get(), inst, &func.layout));
                    // If the redundant instruction is representing the current
                    // scope, pick a new representative.
                    let old = scope_stack.last_mut().unwrap();
                    if *old == inst {
                        *old = func.layout.next_inst(inst).unwrap();
                    }
                    // Replace the redundant instruction and remove it.
                    // Drop the shared borrow first so we can borrow mutably.
                    drop(func);
                    let mut pos = pos.borrow_mut();
                    pos.func.dfg.replace_with_aliases(inst, *entry.get());
                    pos.remove_inst_and_step_back();
                }
                Vacant(entry) => {
                    entry.insert(inst);
                }
            }
        }
    }
}
147