//! Memory management for executable code.

use crate::unwind::UnwindRegistry;
use region;
use std::mem::ManuallyDrop;
use std::{cmp, mem};
use wasmtime_environ::{
    isa::{unwind::UnwindInfo, TargetIsa},
    Compilation, CompiledFunction,
};
use wasmtime_runtime::{Mmap, VMFunctionBody};

struct CodeMemoryEntry {
    mmap: ManuallyDrop<Mmap>,
    registry: ManuallyDrop<UnwindRegistry>,
}

impl CodeMemoryEntry {
    fn with_capacity(cap: usize) -> Result<Self, String> {
        let mmap = ManuallyDrop::new(Mmap::with_at_least(cap)?);
        let registry = ManuallyDrop::new(UnwindRegistry::new(mmap.as_ptr() as usize));
        Ok(Self { mmap, registry })
    }

    fn range(&self) -> (usize, usize) {
        let start = self.mmap.as_ptr() as usize;
        let end = start + self.mmap.len();
        (start, end)
    }
}

impl Drop for CodeMemoryEntry {
    fn drop(&mut self) {
        unsafe {
            // The registry needs to be dropped before the mmap.
            ManuallyDrop::drop(&mut self.registry);
            ManuallyDrop::drop(&mut self.mmap);
        }
    }
}

/// Memory manager for executable code.
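///
/// A rough usage sketch (illustrative only; `func` and `isa` stand in for a
/// `CompiledFunction` and a `&dyn TargetIsa` obtained elsewhere in the crate):
///
/// ```ignore
/// let mut code = CodeMemory::new();
/// // Copy the compiled body into writable JIT memory.
/// let body = code.allocate_for_function(&func)?;
/// // Make everything allocated so far read-only and executable.
/// code.publish(isa);
/// ```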
pub struct CodeMemory {
    current: Option<CodeMemoryEntry>,
    entries: Vec<CodeMemoryEntry>,
    position: usize,
    published: usize,
}

fn _assert() {
    fn _assert_send_sync<T: Send + Sync>() {}
    _assert_send_sync::<CodeMemory>();
}

impl CodeMemory {
    /// Create a new `CodeMemory` instance.
    pub fn new() -> Self {
        Self {
            current: None,
            entries: Vec::new(),
            position: 0,
            published: 0,
        }
    }

    /// Allocate a contiguous memory block for a single compiled function.
    /// TODO: Reorganize the code that calls this to emit code directly into the
    /// mmap region rather than into a Vec that we need to copy in.
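    ///
    /// Note that the returned slice is writable but not yet executable; the
    /// caller is expected to invoke [`CodeMemory::publish`] before running the
    /// copied code.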
    pub fn allocate_for_function(
        &mut self,
        func: &CompiledFunction,
    ) -> Result<&mut [VMFunctionBody], String> {
        let size = Self::function_allocation_size(func);

        let (buf, registry, start) = self.allocate(size)?;

        let (_, _, vmfunc) = Self::copy_function(func, start as u32, buf, registry);

        Ok(vmfunc)
    }

    /// Allocate a contiguous memory block for a compilation.
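    ///
    /// Illustrative sketch (assumes `compilation` was produced by the compiler
    /// elsewhere in this crate):
    ///
    /// ```ignore
    /// let bodies = code_memory.allocate_for_compilation(&compilation)?;
    /// // One body slice is returned per compiled function.
    /// assert_eq!(bodies.len(), compilation.len());
    /// ```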
    pub fn allocate_for_compilation(
        &mut self,
        compilation: &Compilation,
    ) -> Result<Box<[&mut [VMFunctionBody]]>, String> {
        let total_len = compilation
            .into_iter()
            .fold(0, |acc, func| acc + Self::function_allocation_size(func));

        let (mut buf, registry, start) = self.allocate(total_len)?;
        let mut result = Vec::with_capacity(compilation.len());
        let mut start = start as u32;

        for func in compilation.into_iter() {
            let (next_start, next_buf, vmfunc) = Self::copy_function(func, start, buf, registry);

            result.push(vmfunc);

            start = next_start;
            buf = next_buf;
        }

        Ok(result.into_boxed_slice())
    }

    /// Make all allocated memory executable.
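    ///
    /// This publishes unwind information for every not-yet-published entry and
    /// then flips those pages to read + execute, so previously returned
    /// function bodies may no longer be written to.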
    pub fn publish(&mut self, isa: &dyn TargetIsa) {
        self.push_current(0)
            .expect("failed to push current memory map");

        for CodeMemoryEntry {
            mmap: m,
            registry: r,
        } in &mut self.entries[self.published..]
        {
            // Remove write access to the pages due to the relocation fixups.
            r.publish(isa)
                .expect("failed to publish function unwind registry");

            if !m.is_empty() {
                unsafe {
                    region::protect(m.as_mut_ptr(), m.len(), region::Protection::READ_EXECUTE)
                }
                .expect("unable to make memory readonly and executable");
            }
        }

        self.published = self.entries.len();
    }

    /// Allocate `size` bytes of memory which can be made executable later by
    /// calling `publish()`. Note that we allocate the memory as writeable so
    /// that it can be written to and patched, though we make it readonly before
    /// actually executing from it.
    ///
    /// A few values are returned:
    ///
    /// * A mutable slice which references the allocated memory
    /// * A function table instance where unwind information is registered
    /// * The offset within the current mmap that the slice starts at
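    ///
    /// For example, the very first call with `size == 100` maps a fresh chunk
    /// of at least 0x10000 bytes and returns offset 0; a second call asking
    /// for 200 bytes is served from the same chunk starting at offset 100.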
    ///
    /// TODO: Add an alignment flag.
    fn allocate(&mut self, size: usize) -> Result<(&mut [u8], &mut UnwindRegistry, usize), String> {
        assert!(size > 0);

        if match &self.current {
            Some(e) => e.mmap.len() - self.position < size,
            None => true,
        } {
            self.push_current(cmp::max(0x10000, size))?;
        }

        let old_position = self.position;
        self.position += size;

        let e = self.current.as_mut().unwrap();

        Ok((
            &mut e.mmap.as_mut_slice()[old_position..self.position],
            &mut e.registry,
            old_position,
        ))
    }

    /// Calculates the allocation size of the given compiled function.
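    ///
    /// For example, a 10-byte body with Windows x64 unwind info reserves
    /// `(10 + 3) & !3 == 12` bytes for the body (rounding up to 4-byte
    /// alignment) plus `info.emit_size()` bytes for the unwind data; without
    /// unwind info the size is just the body length.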
    fn function_allocation_size(func: &CompiledFunction) -> usize {
        match &func.unwind_info {
            Some(UnwindInfo::WindowsX64(info)) => {
                // Windows unwind information is required to be emitted into code memory
                // This is because it must be a positive relative offset from the start of the memory
                // Account for necessary unwind information alignment padding (32-bit alignment)
                ((func.body.len() + 3) & !3) + info.emit_size()
            }
            _ => func.body.len(),
        }
    }

    /// Copies the data of the compiled function to the given buffer.
    ///
    /// This will also add the function to the current unwind registry.
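    ///
    /// On Windows x64 the layout written into `buf` is
    /// `[ function body | 0-3 bytes of padding | unwind info ]`; the unwind
    /// info (when present) is registered with `registry` for the body's range.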
    fn copy_function<'a>(
        func: &CompiledFunction,
        func_start: u32,
        buf: &'a mut [u8],
        registry: &mut UnwindRegistry,
    ) -> (u32, &'a mut [u8], &'a mut [VMFunctionBody]) {
        let func_len = func.body.len();
        let mut func_end = func_start + (func_len as u32);

        let (body, mut remainder) = buf.split_at_mut(func_len);
        body.copy_from_slice(&func.body);
        let vmfunc = Self::view_as_mut_vmfunc_slice(body);

        if let Some(UnwindInfo::WindowsX64(info)) = &func.unwind_info {
            // Windows unwind information is written following the function body
            // Keep unwind information 32-bit aligned (round up to the nearest 4 byte boundary)
            let unwind_start = (func_end + 3) & !3;
            let unwind_size = info.emit_size();
            let padding = (unwind_start - func_end) as usize;

            let (slice, r) = remainder.split_at_mut(padding + unwind_size);

            info.emit(&mut slice[padding..]);

            func_end = unwind_start + (unwind_size as u32);
            remainder = r;
        }

        if let Some(info) = &func.unwind_info {
            registry
                .register(func_start, func_len as u32, info)
                .expect("failed to register unwind information");
        }

        (func_end, remainder, vmfunc)
    }

    /// Converts a mutable `u8` slice to a mutable `VMFunctionBody` slice.
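    ///
    /// The cast preserves length and layout because `VMFunctionBody` is a
    /// single-byte type (an assumption about `wasmtime_runtime`'s definition).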
    fn view_as_mut_vmfunc_slice(slice: &mut [u8]) -> &mut [VMFunctionBody] {
        let byte_ptr: *mut [u8] = slice;
        let body_ptr = byte_ptr as *mut [VMFunctionBody];
        unsafe { &mut *body_ptr }
    }

    /// Pushes the current entry and allocates a new one with the given size.
    fn push_current(&mut self, new_size: usize) -> Result<(), String> {
        let previous = mem::replace(
            &mut self.current,
            if new_size == 0 {
                None
            } else {
                Some(CodeMemoryEntry::with_capacity(cmp::max(0x10000, new_size))?)
            },
        );

        if let Some(e) = previous {
            self.entries.push(e);
        }

        self.position = 0;

        Ok(())
    }

    /// Returns all published segment ranges.
    pub fn published_ranges<'a>(&'a self) -> impl Iterator<Item = (usize, usize)> + 'a {
        self.entries[..self.published]
            .iter()
            .map(|entry| entry.range())
    }
}