1 use crate::module::{MemoryPlan, MemoryStyle, ModuleLocal, TableStyle};
2 use crate::vmoffsets::VMOffsets;
3 use crate::{Tunables, INTERRUPTED, WASM_PAGE_SIZE};
4 use cranelift_codegen::cursor::FuncCursor;
5 use cranelift_codegen::ir;
6 use cranelift_codegen::ir::condcodes::*;
7 use cranelift_codegen::ir::immediates::{Offset32, Uimm64};
8 use cranelift_codegen::ir::types::*;
9 use cranelift_codegen::ir::{AbiParam, ArgumentPurpose, Function, InstBuilder, Signature};
10 use cranelift_codegen::isa::TargetFrontendConfig;
11 use cranelift_entity::EntityRef;
12 use cranelift_wasm::{
13 self, FuncIndex, GlobalIndex, GlobalVariable, MemoryIndex, SignatureIndex, TableIndex,
14 TargetEnvironment, WasmError, WasmResult,
15 };
16 #[cfg(feature = "lightbeam")]
17 use cranelift_wasm::{DefinedFuncIndex, DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex};
18 use std::convert::TryFrom;
19
20 /// Compute an `ir::ExternalName` for a given wasm function index.
get_func_name(func_index: FuncIndex) -> ir::ExternalName21 pub fn get_func_name(func_index: FuncIndex) -> ir::ExternalName {
22 ir::ExternalName::user(0, func_index.as_u32())
23 }
24
/// An index type for builtin functions.
#[derive(Copy, Clone, Debug)]
pub struct BuiltinFunctionIndex(u32);

impl BuiltinFunctionIndex {
    /// Returns an index for wasm's `memory.grow` builtin function.
    pub const fn get_memory32_grow_index() -> Self {
        BuiltinFunctionIndex(0)
    }
    /// Returns an index for wasm's imported `memory.grow` builtin function.
    pub const fn get_imported_memory32_grow_index() -> Self {
        BuiltinFunctionIndex(1)
    }
    /// Returns an index for wasm's `memory.size` builtin function.
    pub const fn get_memory32_size_index() -> Self {
        BuiltinFunctionIndex(2)
    }
    /// Returns an index for wasm's imported `memory.size` builtin function.
    pub const fn get_imported_memory32_size_index() -> Self {
        BuiltinFunctionIndex(3)
    }
    /// Returns an index for wasm's `table.copy` when both tables are locally
    /// defined.
    pub const fn get_table_copy_index() -> Self {
        BuiltinFunctionIndex(4)
    }
    /// Returns an index for wasm's `table.init`.
    pub const fn get_table_init_index() -> Self {
        BuiltinFunctionIndex(5)
    }
    /// Returns an index for wasm's `elem.drop`.
    pub const fn get_elem_drop_index() -> Self {
        BuiltinFunctionIndex(6)
    }
    /// Returns an index for wasm's `memory.copy` for locally defined memories.
    pub const fn get_defined_memory_copy_index() -> Self {
        BuiltinFunctionIndex(7)
    }
    /// Returns an index for wasm's `memory.copy` for imported memories.
    pub const fn get_imported_memory_copy_index() -> Self {
        BuiltinFunctionIndex(8)
    }
    /// Returns an index for wasm's `memory.fill` for locally defined memories.
    pub const fn get_memory_fill_index() -> Self {
        BuiltinFunctionIndex(9)
    }
    /// Returns an index for wasm's `memory.fill` for imported memories.
    pub const fn get_imported_memory_fill_index() -> Self {
        BuiltinFunctionIndex(10)
    }
    /// Returns an index for wasm's `memory.init` instruction.
    pub const fn get_memory_init_index() -> Self {
        BuiltinFunctionIndex(11)
    }
    /// Returns an index for wasm's `data.drop` instruction.
    pub const fn get_data_drop_index() -> Self {
        BuiltinFunctionIndex(12)
    }
    /// Returns the total number of builtin functions.
    ///
    /// NOTE(review): keep this in sync with the highest index above.
    pub const fn builtin_functions_total_number() -> u32 {
        13
    }

    /// Create a new `BuiltinFunctionIndex` from its index
    pub const fn from_u32(i: u32) -> Self {
        BuiltinFunctionIndex(i)
    }

    /// Return the index as an u32 number.
    pub const fn index(&self) -> u32 {
        self.0
    }
}
98
/// The `FuncEnvironment` implementation for use by the `ModuleEnvironment`.
///
/// This bundles everything function translation needs: the target's frontend
/// configuration, the module being compiled, lazily-created IR entities
/// (the vmctx global value, builtin-function signatures), and the precomputed
/// `VMContext` field offsets.
pub struct FuncEnvironment<'module_environment> {
    /// Target-specified configuration.
    target_config: TargetFrontendConfig,

    /// The module-level environment which this function-level environment belongs to.
    module: &'module_environment ModuleLocal,

    /// The Cranelift global holding the vmctx address.
    /// `None` until first requested via `vmctx()`.
    vmctx: Option<ir::GlobalValue>,

    /// The external function signature for implementing wasm's `memory.size`
    /// for locally-defined 32-bit memories.
    /// `None` until first requested; each `get_*_sig` helper caches here.
    memory32_size_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory.grow`
    /// for locally-defined memories.
    memory_grow_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `table.copy`
    /// (it's the same for both local and imported tables).
    table_copy_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `table.init`.
    table_init_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `elem.drop`.
    elem_drop_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory.copy`
    /// (it's the same for both local and imported memories).
    memory_copy_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory.fill`
    /// (it's the same for both local and imported memories).
    memory_fill_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory.init`.
    memory_init_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `data.drop`.
    data_drop_sig: Option<ir::SigRef>,

    /// Offsets to struct fields accessed by JIT code.
    pub(crate) offsets: VMOffsets,

    /// Compilation tunables shared with the module environment.
    /// NOTE(review): only stored in this chunk; which tunables influence
    /// translation is decided by code outside this view.
    tunables: &'module_environment Tunables,
}
147
148 impl<'module_environment> FuncEnvironment<'module_environment> {
new( target_config: TargetFrontendConfig, module: &'module_environment ModuleLocal, tunables: &'module_environment Tunables, ) -> Self149 pub fn new(
150 target_config: TargetFrontendConfig,
151 module: &'module_environment ModuleLocal,
152 tunables: &'module_environment Tunables,
153 ) -> Self {
154 Self {
155 target_config,
156 module,
157 vmctx: None,
158 memory32_size_sig: None,
159 memory_grow_sig: None,
160 table_copy_sig: None,
161 table_init_sig: None,
162 elem_drop_sig: None,
163 memory_copy_sig: None,
164 memory_fill_sig: None,
165 memory_init_sig: None,
166 data_drop_sig: None,
167 offsets: VMOffsets::new(target_config.pointer_bytes(), module),
168 tunables,
169 }
170 }
171
pointer_type(&self) -> ir::Type172 fn pointer_type(&self) -> ir::Type {
173 self.target_config.pointer_type()
174 }
175
vmctx(&mut self, func: &mut Function) -> ir::GlobalValue176 fn vmctx(&mut self, func: &mut Function) -> ir::GlobalValue {
177 self.vmctx.unwrap_or_else(|| {
178 let vmctx = func.create_global_value(ir::GlobalValueData::VMContext);
179 self.vmctx = Some(vmctx);
180 vmctx
181 })
182 }
183
get_memory_grow_sig(&mut self, func: &mut Function) -> ir::SigRef184 fn get_memory_grow_sig(&mut self, func: &mut Function) -> ir::SigRef {
185 let sig = self.memory_grow_sig.unwrap_or_else(|| {
186 func.import_signature(Signature {
187 params: vec![
188 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
189 AbiParam::new(I32),
190 AbiParam::new(I32),
191 ],
192 returns: vec![AbiParam::new(I32)],
193 call_conv: self.target_config.default_call_conv,
194 })
195 });
196 self.memory_grow_sig = Some(sig);
197 sig
198 }
199
200 /// Return the memory.grow function signature to call for the given index, along with the
201 /// translated index value to pass to it and its index in `VMBuiltinFunctionsArray`.
get_memory_grow_func( &mut self, func: &mut Function, index: MemoryIndex, ) -> (ir::SigRef, usize, BuiltinFunctionIndex)202 fn get_memory_grow_func(
203 &mut self,
204 func: &mut Function,
205 index: MemoryIndex,
206 ) -> (ir::SigRef, usize, BuiltinFunctionIndex) {
207 if self.module.is_imported_memory(index) {
208 (
209 self.get_memory_grow_sig(func),
210 index.index(),
211 BuiltinFunctionIndex::get_imported_memory32_grow_index(),
212 )
213 } else {
214 (
215 self.get_memory_grow_sig(func),
216 self.module.defined_memory_index(index).unwrap().index(),
217 BuiltinFunctionIndex::get_memory32_grow_index(),
218 )
219 }
220 }
221
get_memory32_size_sig(&mut self, func: &mut Function) -> ir::SigRef222 fn get_memory32_size_sig(&mut self, func: &mut Function) -> ir::SigRef {
223 let sig = self.memory32_size_sig.unwrap_or_else(|| {
224 func.import_signature(Signature {
225 params: vec![
226 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
227 AbiParam::new(I32),
228 ],
229 returns: vec![AbiParam::new(I32)],
230 call_conv: self.target_config.default_call_conv,
231 })
232 });
233 self.memory32_size_sig = Some(sig);
234 sig
235 }
236
237 /// Return the memory.size function signature to call for the given index, along with the
238 /// translated index value to pass to it and its index in `VMBuiltinFunctionsArray`.
get_memory_size_func( &mut self, func: &mut Function, index: MemoryIndex, ) -> (ir::SigRef, usize, BuiltinFunctionIndex)239 fn get_memory_size_func(
240 &mut self,
241 func: &mut Function,
242 index: MemoryIndex,
243 ) -> (ir::SigRef, usize, BuiltinFunctionIndex) {
244 if self.module.is_imported_memory(index) {
245 (
246 self.get_memory32_size_sig(func),
247 index.index(),
248 BuiltinFunctionIndex::get_imported_memory32_size_index(),
249 )
250 } else {
251 (
252 self.get_memory32_size_sig(func),
253 self.module.defined_memory_index(index).unwrap().index(),
254 BuiltinFunctionIndex::get_memory32_size_index(),
255 )
256 }
257 }
258
get_table_copy_sig(&mut self, func: &mut Function) -> ir::SigRef259 fn get_table_copy_sig(&mut self, func: &mut Function) -> ir::SigRef {
260 let sig = self.table_copy_sig.unwrap_or_else(|| {
261 func.import_signature(Signature {
262 params: vec![
263 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
264 // Destination table index.
265 AbiParam::new(I32),
266 // Source table index.
267 AbiParam::new(I32),
268 // Index within destination table.
269 AbiParam::new(I32),
270 // Index within source table.
271 AbiParam::new(I32),
272 // Number of elements to copy.
273 AbiParam::new(I32),
274 ],
275 returns: vec![],
276 call_conv: self.target_config.default_call_conv,
277 })
278 });
279 self.table_copy_sig = Some(sig);
280 sig
281 }
282
get_table_copy_func( &mut self, func: &mut Function, dst_table_index: TableIndex, src_table_index: TableIndex, ) -> (ir::SigRef, usize, usize, BuiltinFunctionIndex)283 fn get_table_copy_func(
284 &mut self,
285 func: &mut Function,
286 dst_table_index: TableIndex,
287 src_table_index: TableIndex,
288 ) -> (ir::SigRef, usize, usize, BuiltinFunctionIndex) {
289 let sig = self.get_table_copy_sig(func);
290 (
291 sig,
292 dst_table_index.as_u32() as usize,
293 src_table_index.as_u32() as usize,
294 BuiltinFunctionIndex::get_table_copy_index(),
295 )
296 }
297
get_table_init_sig(&mut self, func: &mut Function) -> ir::SigRef298 fn get_table_init_sig(&mut self, func: &mut Function) -> ir::SigRef {
299 let sig = self.table_init_sig.unwrap_or_else(|| {
300 func.import_signature(Signature {
301 params: vec![
302 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
303 // Table index.
304 AbiParam::new(I32),
305 // Segment index.
306 AbiParam::new(I32),
307 // Destination index within table.
308 AbiParam::new(I32),
309 // Source index within segment.
310 AbiParam::new(I32),
311 // Number of elements to initialize.
312 AbiParam::new(I32),
313 ],
314 returns: vec![],
315 call_conv: self.target_config.default_call_conv,
316 })
317 });
318 self.table_init_sig = Some(sig);
319 sig
320 }
321
get_table_init_func( &mut self, func: &mut Function, table_index: TableIndex, ) -> (ir::SigRef, usize, BuiltinFunctionIndex)322 fn get_table_init_func(
323 &mut self,
324 func: &mut Function,
325 table_index: TableIndex,
326 ) -> (ir::SigRef, usize, BuiltinFunctionIndex) {
327 let sig = self.get_table_init_sig(func);
328 let table_index = table_index.as_u32() as usize;
329 (
330 sig,
331 table_index,
332 BuiltinFunctionIndex::get_table_init_index(),
333 )
334 }
335
get_elem_drop_sig(&mut self, func: &mut Function) -> ir::SigRef336 fn get_elem_drop_sig(&mut self, func: &mut Function) -> ir::SigRef {
337 let sig = self.elem_drop_sig.unwrap_or_else(|| {
338 func.import_signature(Signature {
339 params: vec![
340 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
341 // Element index.
342 AbiParam::new(I32),
343 ],
344 returns: vec![],
345 call_conv: self.target_config.default_call_conv,
346 })
347 });
348 self.elem_drop_sig = Some(sig);
349 sig
350 }
351
get_elem_drop_func(&mut self, func: &mut Function) -> (ir::SigRef, BuiltinFunctionIndex)352 fn get_elem_drop_func(&mut self, func: &mut Function) -> (ir::SigRef, BuiltinFunctionIndex) {
353 let sig = self.get_elem_drop_sig(func);
354 (sig, BuiltinFunctionIndex::get_elem_drop_index())
355 }
356
get_memory_copy_sig(&mut self, func: &mut Function) -> ir::SigRef357 fn get_memory_copy_sig(&mut self, func: &mut Function) -> ir::SigRef {
358 let sig = self.memory_copy_sig.unwrap_or_else(|| {
359 func.import_signature(Signature {
360 params: vec![
361 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
362 // Memory index.
363 AbiParam::new(I32),
364 // Destination address.
365 AbiParam::new(I32),
366 // Source address.
367 AbiParam::new(I32),
368 // Length.
369 AbiParam::new(I32),
370 ],
371 returns: vec![],
372 call_conv: self.target_config.default_call_conv,
373 })
374 });
375 self.memory_copy_sig = Some(sig);
376 sig
377 }
378
get_memory_copy_func( &mut self, func: &mut Function, memory_index: MemoryIndex, ) -> (ir::SigRef, usize, BuiltinFunctionIndex)379 fn get_memory_copy_func(
380 &mut self,
381 func: &mut Function,
382 memory_index: MemoryIndex,
383 ) -> (ir::SigRef, usize, BuiltinFunctionIndex) {
384 let sig = self.get_memory_copy_sig(func);
385 if let Some(defined_memory_index) = self.module.defined_memory_index(memory_index) {
386 (
387 sig,
388 defined_memory_index.index(),
389 BuiltinFunctionIndex::get_defined_memory_copy_index(),
390 )
391 } else {
392 (
393 sig,
394 memory_index.index(),
395 BuiltinFunctionIndex::get_imported_memory_copy_index(),
396 )
397 }
398 }
399
get_memory_fill_sig(&mut self, func: &mut Function) -> ir::SigRef400 fn get_memory_fill_sig(&mut self, func: &mut Function) -> ir::SigRef {
401 let sig = self.memory_fill_sig.unwrap_or_else(|| {
402 func.import_signature(Signature {
403 params: vec![
404 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
405 // Memory index.
406 AbiParam::new(I32),
407 // Destination address.
408 AbiParam::new(I32),
409 // Value.
410 AbiParam::new(I32),
411 // Length.
412 AbiParam::new(I32),
413 ],
414 returns: vec![],
415 call_conv: self.target_config.default_call_conv,
416 })
417 });
418 self.memory_fill_sig = Some(sig);
419 sig
420 }
421
get_memory_fill_func( &mut self, func: &mut Function, memory_index: MemoryIndex, ) -> (ir::SigRef, usize, BuiltinFunctionIndex)422 fn get_memory_fill_func(
423 &mut self,
424 func: &mut Function,
425 memory_index: MemoryIndex,
426 ) -> (ir::SigRef, usize, BuiltinFunctionIndex) {
427 let sig = self.get_memory_fill_sig(func);
428 if let Some(defined_memory_index) = self.module.defined_memory_index(memory_index) {
429 (
430 sig,
431 defined_memory_index.index(),
432 BuiltinFunctionIndex::get_memory_fill_index(),
433 )
434 } else {
435 (
436 sig,
437 memory_index.index(),
438 BuiltinFunctionIndex::get_imported_memory_fill_index(),
439 )
440 }
441 }
442
get_memory_init_sig(&mut self, func: &mut Function) -> ir::SigRef443 fn get_memory_init_sig(&mut self, func: &mut Function) -> ir::SigRef {
444 let sig = self.memory_init_sig.unwrap_or_else(|| {
445 func.import_signature(Signature {
446 params: vec![
447 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
448 // Memory index.
449 AbiParam::new(I32),
450 // Data index.
451 AbiParam::new(I32),
452 // Destination address.
453 AbiParam::new(I32),
454 // Source index within the data segment.
455 AbiParam::new(I32),
456 // Length.
457 AbiParam::new(I32),
458 ],
459 returns: vec![],
460 call_conv: self.target_config.default_call_conv,
461 })
462 });
463 self.memory_init_sig = Some(sig);
464 sig
465 }
466
get_memory_init_func(&mut self, func: &mut Function) -> (ir::SigRef, BuiltinFunctionIndex)467 fn get_memory_init_func(&mut self, func: &mut Function) -> (ir::SigRef, BuiltinFunctionIndex) {
468 let sig = self.get_memory_init_sig(func);
469 (sig, BuiltinFunctionIndex::get_memory_init_index())
470 }
471
get_data_drop_sig(&mut self, func: &mut Function) -> ir::SigRef472 fn get_data_drop_sig(&mut self, func: &mut Function) -> ir::SigRef {
473 let sig = self.data_drop_sig.unwrap_or_else(|| {
474 func.import_signature(Signature {
475 params: vec![
476 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
477 // Data index.
478 AbiParam::new(I32),
479 ],
480 returns: vec![],
481 call_conv: self.target_config.default_call_conv,
482 })
483 });
484 self.data_drop_sig = Some(sig);
485 sig
486 }
487
get_data_drop_func(&mut self, func: &mut Function) -> (ir::SigRef, BuiltinFunctionIndex)488 fn get_data_drop_func(&mut self, func: &mut Function) -> (ir::SigRef, BuiltinFunctionIndex) {
489 let sig = self.get_data_drop_sig(func);
490 (sig, BuiltinFunctionIndex::get_data_drop_index())
491 }
492
493 /// Translates load of builtin function and returns a pair of values `vmctx`
494 /// and address of the loaded function.
translate_load_builtin_function_address( &mut self, pos: &mut FuncCursor<'_>, callee_func_idx: BuiltinFunctionIndex, ) -> (ir::Value, ir::Value)495 fn translate_load_builtin_function_address(
496 &mut self,
497 pos: &mut FuncCursor<'_>,
498 callee_func_idx: BuiltinFunctionIndex,
499 ) -> (ir::Value, ir::Value) {
500 // We use an indirect call so that we don't have to patch the code at runtime.
501 let pointer_type = self.pointer_type();
502 let vmctx = self.vmctx(&mut pos.func);
503 let base = pos.ins().global_value(pointer_type, vmctx);
504
505 let mut mem_flags = ir::MemFlags::trusted();
506 mem_flags.set_readonly();
507
508 // Load the callee address.
509 let body_offset =
510 i32::try_from(self.offsets.vmctx_builtin_function(callee_func_idx)).unwrap();
511 let func_addr = pos.ins().load(pointer_type, mem_flags, base, body_offset);
512
513 (base, func_addr)
514 }
515 }
516
// TODO: This is necessary as if Lightbeam used `FuncEnvironment` directly it would cause
// a circular dependency graph. We should extract common types out into a separate
// crate that Lightbeam can use but until then we need this trait.
//
// Every method here is a thin forwarder: the `u32`s Lightbeam works with are
// wrapped into this crate's typed indices and delegated either to
// `ModuleLocal` (index-space conversions) or to `VMOffsets` (field offsets).
#[cfg(feature = "lightbeam")]
impl lightbeam::ModuleContext for FuncEnvironment<'_> {
    type Signature = ir::Signature;
    type GlobalType = ir::Type;

    // --- index-space conversions, delegated to `ModuleLocal` ---

    fn func_index(&self, defined_func_index: u32) -> u32 {
        self.module
            .func_index(DefinedFuncIndex::from_u32(defined_func_index))
            .as_u32()
    }

    fn defined_func_index(&self, func_index: u32) -> Option<u32> {
        self.module
            .defined_func_index(FuncIndex::from_u32(func_index))
            .map(DefinedFuncIndex::as_u32)
    }

    fn defined_global_index(&self, global_index: u32) -> Option<u32> {
        self.module
            .defined_global_index(GlobalIndex::from_u32(global_index))
            .map(DefinedGlobalIndex::as_u32)
    }

    fn global_type(&self, global_index: u32) -> &Self::GlobalType {
        &self.module.globals[GlobalIndex::from_u32(global_index)].ty
    }

    fn func_type_index(&self, func_idx: u32) -> u32 {
        self.module.functions[FuncIndex::from_u32(func_idx)].as_u32()
    }

    fn signature(&self, index: u32) -> &Self::Signature {
        &self.module.signatures[SignatureIndex::from_u32(index)].1
    }

    fn defined_table_index(&self, table_index: u32) -> Option<u32> {
        self.module
            .defined_table_index(TableIndex::from_u32(table_index))
            .map(DefinedTableIndex::as_u32)
    }

    fn defined_memory_index(&self, memory_index: u32) -> Option<u32> {
        self.module
            .defined_memory_index(MemoryIndex::from_u32(memory_index))
            .map(DefinedMemoryIndex::as_u32)
    }

    // --- `VMContext` field offsets, delegated to the precomputed `VMOffsets` ---

    fn vmctx_builtin_function(&self, func_index: u32) -> u32 {
        self.offsets
            .vmctx_builtin_function(BuiltinFunctionIndex::from_u32(func_index))
    }

    fn vmctx_vmfunction_import_body(&self, func_index: u32) -> u32 {
        self.offsets
            .vmctx_vmfunction_import_body(FuncIndex::from_u32(func_index))
    }
    fn vmctx_vmfunction_import_vmctx(&self, func_index: u32) -> u32 {
        self.offsets
            .vmctx_vmfunction_import_vmctx(FuncIndex::from_u32(func_index))
    }

    fn vmctx_vmglobal_import_from(&self, global_index: u32) -> u32 {
        self.offsets
            .vmctx_vmglobal_import_from(GlobalIndex::from_u32(global_index))
    }
    fn vmctx_vmglobal_definition(&self, defined_global_index: u32) -> u32 {
        self.offsets
            .vmctx_vmglobal_definition(DefinedGlobalIndex::from_u32(defined_global_index))
    }
    fn vmctx_vmmemory_import_from(&self, memory_index: u32) -> u32 {
        self.offsets
            .vmctx_vmmemory_import_from(MemoryIndex::from_u32(memory_index))
    }
    fn vmctx_vmmemory_definition(&self, defined_memory_index: u32) -> u32 {
        self.offsets
            .vmctx_vmmemory_definition(DefinedMemoryIndex::from_u32(defined_memory_index))
    }
    fn vmctx_vmmemory_definition_base(&self, defined_memory_index: u32) -> u32 {
        self.offsets
            .vmctx_vmmemory_definition_base(DefinedMemoryIndex::from_u32(defined_memory_index))
    }
    fn vmctx_vmmemory_definition_current_length(&self, defined_memory_index: u32) -> u32 {
        self.offsets
            .vmctx_vmmemory_definition_current_length(DefinedMemoryIndex::from_u32(
                defined_memory_index,
            ))
    }
    fn vmmemory_definition_base(&self) -> u8 {
        self.offsets.vmmemory_definition_base()
    }
    fn vmmemory_definition_current_length(&self) -> u8 {
        self.offsets.vmmemory_definition_current_length()
    }
    fn vmctx_vmtable_import_from(&self, table_index: u32) -> u32 {
        self.offsets
            .vmctx_vmtable_import_from(TableIndex::from_u32(table_index))
    }
    fn vmctx_vmtable_definition(&self, defined_table_index: u32) -> u32 {
        self.offsets
            .vmctx_vmtable_definition(DefinedTableIndex::from_u32(defined_table_index))
    }
    fn vmctx_vmtable_definition_base(&self, defined_table_index: u32) -> u32 {
        self.offsets
            .vmctx_vmtable_definition_base(DefinedTableIndex::from_u32(defined_table_index))
    }
    fn vmctx_vmtable_definition_current_elements(&self, defined_table_index: u32) -> u32 {
        self.offsets
            .vmctx_vmtable_definition_current_elements(DefinedTableIndex::from_u32(
                defined_table_index,
            ))
    }
    fn vmtable_definition_base(&self) -> u8 {
        self.offsets.vmtable_definition_base()
    }
    fn vmtable_definition_current_elements(&self) -> u8 {
        self.offsets.vmtable_definition_current_elements()
    }
    fn vmcaller_checked_anyfunc_type_index(&self) -> u8 {
        self.offsets.vmcaller_checked_anyfunc_type_index()
    }
    fn vmcaller_checked_anyfunc_func_ptr(&self) -> u8 {
        self.offsets.vmcaller_checked_anyfunc_func_ptr()
    }
    fn vmcaller_checked_anyfunc_vmctx(&self) -> u8 {
        self.offsets.vmcaller_checked_anyfunc_vmctx()
    }
    fn size_of_vmcaller_checked_anyfunc(&self) -> u8 {
        self.offsets.size_of_vmcaller_checked_anyfunc()
    }
    fn vmctx_vmshared_signature_id(&self, signature_idx: u32) -> u32 {
        self.offsets
            .vmctx_vmshared_signature_id(SignatureIndex::from_u32(signature_idx))
    }

    // TODO: type of a global
}
656
657 impl<'module_environment> TargetEnvironment for FuncEnvironment<'module_environment> {
target_config(&self) -> TargetFrontendConfig658 fn target_config(&self) -> TargetFrontendConfig {
659 self.target_config
660 }
661
reference_type(&self) -> ir::Type662 fn reference_type(&self) -> ir::Type {
663 // For now, the only reference types we support are `externref`, which
664 // don't require tracing GC and stack maps. So we just use the target's
665 // pointer type. This will have to change once we move to tracing GC.
666 self.pointer_type()
667 }
668 }
669
670 impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'module_environment> {
is_wasm_parameter(&self, _signature: &ir::Signature, index: usize) -> bool671 fn is_wasm_parameter(&self, _signature: &ir::Signature, index: usize) -> bool {
672 // The first two parameters are the vmctx and caller vmctx. The rest are
673 // the wasm parameters.
674 index >= 2
675 }
676
    /// Set up an `ir::Table` describing the given wasm table.
    ///
    /// The table's base pointer and current element count are loaded from the
    /// `VMContext` — directly for locally-defined tables, through one extra
    /// (read-only) pointer load for imported tables.
    fn make_table(&mut self, func: &mut ir::Function, index: TableIndex) -> WasmResult<ir::Table> {
        let pointer_type = self.pointer_type();

        let (ptr, base_offset, current_elements_offset) = {
            let vmctx = self.vmctx(func);
            if let Some(def_index) = self.module.defined_table_index(index) {
                // Locally-defined table: the definition lives inline in the
                // vmctx, so offsets are relative to the vmctx itself.
                let base_offset =
                    i32::try_from(self.offsets.vmctx_vmtable_definition_base(def_index)).unwrap();
                let current_elements_offset = i32::try_from(
                    self.offsets
                        .vmctx_vmtable_definition_current_elements(def_index),
                )
                .unwrap();
                (vmctx, base_offset, current_elements_offset)
            } else {
                // Imported table: first load the pointer to the definition
                // (readonly — the import binding never changes), then use
                // offsets relative to that definition.
                let from_offset = self.offsets.vmctx_vmtable_import_from(index);
                let table = func.create_global_value(ir::GlobalValueData::Load {
                    base: vmctx,
                    offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                    global_type: pointer_type,
                    readonly: true,
                });
                let base_offset = i32::from(self.offsets.vmtable_definition_base());
                let current_elements_offset =
                    i32::from(self.offsets.vmtable_definition_current_elements());
                (table, base_offset, current_elements_offset)
            }
        };

        // Base and current-element-count are not readonly: they can be
        // updated behind JIT code's back.
        let base_gv = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(base_offset),
            global_type: pointer_type,
            readonly: false,
        });
        let bound_gv = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(current_elements_offset),
            global_type: self.offsets.type_of_vmtable_definition_current_elements(),
            readonly: false,
        });

        // Element size depends on the table's style; the only style today
        // stores a `VMCallerCheckedAnyfunc` per element.
        let element_size = match self.module.table_plans[index].style {
            TableStyle::CallerChecksSignature => {
                u64::from(self.offsets.size_of_vmcaller_checked_anyfunc())
            }
        };

        Ok(func.create_table(ir::TableData {
            base_gv,
            min_size: Uimm64::new(0),
            bound_gv,
            element_size: Uimm64::new(element_size),
            index_type: I32,
        }))
    }
733
translate_table_grow( &mut self, _: cranelift_codegen::cursor::FuncCursor<'_>, _: TableIndex, _: ir::Value, _: ir::Value, ) -> WasmResult<ir::Value>734 fn translate_table_grow(
735 &mut self,
736 _: cranelift_codegen::cursor::FuncCursor<'_>,
737 _: TableIndex,
738 _: ir::Value,
739 _: ir::Value,
740 ) -> WasmResult<ir::Value> {
741 Err(WasmError::Unsupported(
742 "the `table.grow` instruction is not supported yet".into(),
743 ))
744 }
745
translate_table_get( &mut self, _: cranelift_codegen::cursor::FuncCursor<'_>, _: TableIndex, _: ir::Value, ) -> WasmResult<ir::Value>746 fn translate_table_get(
747 &mut self,
748 _: cranelift_codegen::cursor::FuncCursor<'_>,
749 _: TableIndex,
750 _: ir::Value,
751 ) -> WasmResult<ir::Value> {
752 Err(WasmError::Unsupported(
753 "the `table.get` instruction is not supported yet".into(),
754 ))
755 }
756
translate_table_set( &mut self, _: cranelift_codegen::cursor::FuncCursor<'_>, _: TableIndex, _: ir::Value, _: ir::Value, ) -> WasmResult<()>757 fn translate_table_set(
758 &mut self,
759 _: cranelift_codegen::cursor::FuncCursor<'_>,
760 _: TableIndex,
761 _: ir::Value,
762 _: ir::Value,
763 ) -> WasmResult<()> {
764 Err(WasmError::Unsupported(
765 "the `table.set` instruction is not supported yet".into(),
766 ))
767 }
768
translate_table_fill( &mut self, _: cranelift_codegen::cursor::FuncCursor<'_>, _: TableIndex, _: ir::Value, _: ir::Value, _: ir::Value, ) -> WasmResult<()>769 fn translate_table_fill(
770 &mut self,
771 _: cranelift_codegen::cursor::FuncCursor<'_>,
772 _: TableIndex,
773 _: ir::Value,
774 _: ir::Value,
775 _: ir::Value,
776 ) -> WasmResult<()> {
777 Err(WasmError::Unsupported(
778 "the `table.fill` instruction is not supported yet".into(),
779 ))
780 }
781
translate_ref_func( &mut self, _: cranelift_codegen::cursor::FuncCursor<'_>, _: u32, ) -> WasmResult<ir::Value>782 fn translate_ref_func(
783 &mut self,
784 _: cranelift_codegen::cursor::FuncCursor<'_>,
785 _: u32,
786 ) -> WasmResult<ir::Value> {
787 Err(WasmError::Unsupported(
788 "the `ref.func` instruction is not supported yet".into(),
789 ))
790 }
791
translate_custom_global_get( &mut self, _: cranelift_codegen::cursor::FuncCursor<'_>, _: cranelift_wasm::GlobalIndex, ) -> WasmResult<ir::Value>792 fn translate_custom_global_get(
793 &mut self,
794 _: cranelift_codegen::cursor::FuncCursor<'_>,
795 _: cranelift_wasm::GlobalIndex,
796 ) -> WasmResult<ir::Value> {
797 unreachable!("we don't make any custom globals")
798 }
799
translate_custom_global_set( &mut self, _: cranelift_codegen::cursor::FuncCursor<'_>, _: cranelift_wasm::GlobalIndex, _: ir::Value, ) -> WasmResult<()>800 fn translate_custom_global_set(
801 &mut self,
802 _: cranelift_codegen::cursor::FuncCursor<'_>,
803 _: cranelift_wasm::GlobalIndex,
804 _: ir::Value,
805 ) -> WasmResult<()> {
806 unreachable!("we don't make any custom globals")
807 }
808
    /// Set up an `ir::Heap` describing the given wasm linear memory.
    ///
    /// The heap base (and, for dynamic heaps, the current length) are loaded
    /// from the `VMContext` — directly for locally-defined memories, through
    /// one extra (read-only) pointer load for imported memories.
    fn make_heap(&mut self, func: &mut ir::Function, index: MemoryIndex) -> WasmResult<ir::Heap> {
        let pointer_type = self.pointer_type();

        let (ptr, base_offset, current_length_offset) = {
            let vmctx = self.vmctx(func);
            if let Some(def_index) = self.module.defined_memory_index(index) {
                // Locally-defined memory: the `VMMemoryDefinition` is inline
                // in the vmctx, so offsets are relative to the vmctx itself.
                let base_offset =
                    i32::try_from(self.offsets.vmctx_vmmemory_definition_base(def_index)).unwrap();
                let current_length_offset = i32::try_from(
                    self.offsets
                        .vmctx_vmmemory_definition_current_length(def_index),
                )
                .unwrap();
                (vmctx, base_offset, current_length_offset)
            } else {
                // Imported memory: first load the pointer to the definition
                // (readonly — the import binding never changes), then use
                // offsets relative to that definition.
                let from_offset = self.offsets.vmctx_vmmemory_import_from(index);
                let memory = func.create_global_value(ir::GlobalValueData::Load {
                    base: vmctx,
                    offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                    global_type: pointer_type,
                    readonly: true,
                });
                let base_offset = i32::from(self.offsets.vmmemory_definition_base());
                let current_length_offset =
                    i32::from(self.offsets.vmmemory_definition_current_length());
                (memory, base_offset, current_length_offset)
            }
        };

        // If we have a declared maximum, we can make this a "static" heap, which is
        // allocated up front and never moved.
        let (offset_guard_size, heap_style, readonly_base) = match self.module.memory_plans[index] {
            MemoryPlan {
                style: MemoryStyle::Dynamic,
                offset_guard_size,
                memory: _,
            } => {
                // Dynamic heap: the bound must be re-loaded on use (not
                // readonly), since the current length can change.
                let heap_bound = func.create_global_value(ir::GlobalValueData::Load {
                    base: ptr,
                    offset: Offset32::new(current_length_offset),
                    global_type: self.offsets.type_of_vmmemory_definition_current_length(),
                    readonly: false,
                });
                (
                    Uimm64::new(offset_guard_size),
                    ir::HeapStyle::Dynamic {
                        bound_gv: heap_bound,
                    },
                    false,
                )
            }
            MemoryPlan {
                style: MemoryStyle::Static { bound },
                offset_guard_size,
                memory: _,
            } => (
                Uimm64::new(offset_guard_size),
                ir::HeapStyle::Static {
                    // `bound` is in wasm pages; the heap bound is in bytes.
                    bound: Uimm64::new(u64::from(bound) * u64::from(WASM_PAGE_SIZE)),
                },
                // Static heap: the base never moves, so it may be treated as
                // readonly and re-used across loads.
                true,
            ),
        };

        let heap_base = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(base_offset),
            global_type: pointer_type,
            readonly: readonly_base,
        });
        Ok(func.create_heap(ir::HeapData {
            base: heap_base,
            min_size: 0.into(),
            offset_guard_size,
            style: heap_style,
            index_type: I32,
        }))
    }
887
make_global( &mut self, func: &mut ir::Function, index: GlobalIndex, ) -> WasmResult<GlobalVariable>888 fn make_global(
889 &mut self,
890 func: &mut ir::Function,
891 index: GlobalIndex,
892 ) -> WasmResult<GlobalVariable> {
893 let pointer_type = self.pointer_type();
894
895 let (ptr, offset) = {
896 let vmctx = self.vmctx(func);
897 if let Some(def_index) = self.module.defined_global_index(index) {
898 let offset =
899 i32::try_from(self.offsets.vmctx_vmglobal_definition(def_index)).unwrap();
900 (vmctx, offset)
901 } else {
902 let from_offset = self.offsets.vmctx_vmglobal_import_from(index);
903 let global = func.create_global_value(ir::GlobalValueData::Load {
904 base: vmctx,
905 offset: Offset32::new(i32::try_from(from_offset).unwrap()),
906 global_type: pointer_type,
907 readonly: true,
908 });
909 (global, 0)
910 }
911 };
912
913 Ok(GlobalVariable::Memory {
914 gv: ptr,
915 offset: offset.into(),
916 ty: self.module.globals[index].ty,
917 })
918 }
919
make_indirect_sig( &mut self, func: &mut ir::Function, index: SignatureIndex, ) -> WasmResult<ir::SigRef>920 fn make_indirect_sig(
921 &mut self,
922 func: &mut ir::Function,
923 index: SignatureIndex,
924 ) -> WasmResult<ir::SigRef> {
925 Ok(func.import_signature(self.module.signatures[index].1.clone()))
926 }
927
make_direct_func( &mut self, func: &mut ir::Function, index: FuncIndex, ) -> WasmResult<ir::FuncRef>928 fn make_direct_func(
929 &mut self,
930 func: &mut ir::Function,
931 index: FuncIndex,
932 ) -> WasmResult<ir::FuncRef> {
933 let sig = self.module.native_func_signature(index);
934 let signature = func.import_signature(sig.clone());
935 let name = get_func_name(index);
936 Ok(func.import_function(ir::ExtFuncData {
937 name,
938 signature,
939 // We currently allocate all code segments independently, so nothing
940 // is colocated.
941 colocated: false,
942 }))
943 }
944
translate_call_indirect( &mut self, mut pos: FuncCursor<'_>, table_index: TableIndex, table: ir::Table, sig_index: SignatureIndex, sig_ref: ir::SigRef, callee: ir::Value, call_args: &[ir::Value], ) -> WasmResult<ir::Inst>945 fn translate_call_indirect(
946 &mut self,
947 mut pos: FuncCursor<'_>,
948 table_index: TableIndex,
949 table: ir::Table,
950 sig_index: SignatureIndex,
951 sig_ref: ir::SigRef,
952 callee: ir::Value,
953 call_args: &[ir::Value],
954 ) -> WasmResult<ir::Inst> {
955 let pointer_type = self.pointer_type();
956
957 let table_entry_addr = pos.ins().table_addr(pointer_type, table, callee, 0);
958
959 // Dereference table_entry_addr to get the function address.
960 let mem_flags = ir::MemFlags::trusted();
961 let func_addr = pos.ins().load(
962 pointer_type,
963 mem_flags,
964 table_entry_addr,
965 i32::from(self.offsets.vmcaller_checked_anyfunc_func_ptr()),
966 );
967
968 // Check whether `func_addr` is null.
969 pos.ins().trapz(func_addr, ir::TrapCode::IndirectCallToNull);
970
971 // If necessary, check the signature.
972 match self.module.table_plans[table_index].style {
973 TableStyle::CallerChecksSignature => {
974 let sig_id_size = self.offsets.size_of_vmshared_signature_index();
975 let sig_id_type = Type::int(u16::from(sig_id_size) * 8).unwrap();
976 let vmctx = self.vmctx(pos.func);
977 let base = pos.ins().global_value(pointer_type, vmctx);
978 let offset =
979 i32::try_from(self.offsets.vmctx_vmshared_signature_id(sig_index)).unwrap();
980
981 // Load the caller ID.
982 let mut mem_flags = ir::MemFlags::trusted();
983 mem_flags.set_readonly();
984 let caller_sig_id = pos.ins().load(sig_id_type, mem_flags, base, offset);
985
986 // Load the callee ID.
987 let mem_flags = ir::MemFlags::trusted();
988 let callee_sig_id = pos.ins().load(
989 sig_id_type,
990 mem_flags,
991 table_entry_addr,
992 i32::from(self.offsets.vmcaller_checked_anyfunc_type_index()),
993 );
994
995 // Check that they match.
996 let cmp = pos.ins().icmp(IntCC::Equal, callee_sig_id, caller_sig_id);
997 pos.ins().trapz(cmp, ir::TrapCode::BadSignature);
998 }
999 }
1000
1001 let mut real_call_args = Vec::with_capacity(call_args.len() + 2);
1002 let caller_vmctx = pos.func.special_param(ArgumentPurpose::VMContext).unwrap();
1003
1004 // First append the callee vmctx address.
1005 let vmctx = pos.ins().load(
1006 pointer_type,
1007 mem_flags,
1008 table_entry_addr,
1009 i32::from(self.offsets.vmcaller_checked_anyfunc_vmctx()),
1010 );
1011 real_call_args.push(vmctx);
1012 real_call_args.push(caller_vmctx);
1013
1014 // Then append the regular call arguments.
1015 real_call_args.extend_from_slice(call_args);
1016
1017 Ok(pos.ins().call_indirect(sig_ref, func_addr, &real_call_args))
1018 }
1019
translate_call( &mut self, mut pos: FuncCursor<'_>, callee_index: FuncIndex, callee: ir::FuncRef, call_args: &[ir::Value], ) -> WasmResult<ir::Inst>1020 fn translate_call(
1021 &mut self,
1022 mut pos: FuncCursor<'_>,
1023 callee_index: FuncIndex,
1024 callee: ir::FuncRef,
1025 call_args: &[ir::Value],
1026 ) -> WasmResult<ir::Inst> {
1027 let mut real_call_args = Vec::with_capacity(call_args.len() + 2);
1028 let caller_vmctx = pos.func.special_param(ArgumentPurpose::VMContext).unwrap();
1029
1030 // Handle direct calls to locally-defined functions.
1031 if !self.module.is_imported_function(callee_index) {
1032 // First append the callee vmctx address, which is the same as the caller vmctx in
1033 // this case.
1034 real_call_args.push(caller_vmctx);
1035
1036 // Then append the caller vmctx address.
1037 real_call_args.push(caller_vmctx);
1038
1039 // Then append the regular call arguments.
1040 real_call_args.extend_from_slice(call_args);
1041
1042 return Ok(pos.ins().call(callee, &real_call_args));
1043 }
1044
1045 // Handle direct calls to imported functions. We use an indirect call
1046 // so that we don't have to patch the code at runtime.
1047 let pointer_type = self.pointer_type();
1048 let sig_ref = pos.func.dfg.ext_funcs[callee].signature;
1049 let vmctx = self.vmctx(&mut pos.func);
1050 let base = pos.ins().global_value(pointer_type, vmctx);
1051
1052 let mem_flags = ir::MemFlags::trusted();
1053
1054 // Load the callee address.
1055 let body_offset =
1056 i32::try_from(self.offsets.vmctx_vmfunction_import_body(callee_index)).unwrap();
1057 let func_addr = pos.ins().load(pointer_type, mem_flags, base, body_offset);
1058
1059 // First append the callee vmctx address.
1060 let vmctx_offset =
1061 i32::try_from(self.offsets.vmctx_vmfunction_import_vmctx(callee_index)).unwrap();
1062 let vmctx = pos.ins().load(pointer_type, mem_flags, base, vmctx_offset);
1063 real_call_args.push(vmctx);
1064 real_call_args.push(caller_vmctx);
1065
1066 // Then append the regular call arguments.
1067 real_call_args.extend_from_slice(call_args);
1068
1069 Ok(pos.ins().call_indirect(sig_ref, func_addr, &real_call_args))
1070 }
1071
translate_memory_grow( &mut self, mut pos: FuncCursor<'_>, index: MemoryIndex, _heap: ir::Heap, val: ir::Value, ) -> WasmResult<ir::Value>1072 fn translate_memory_grow(
1073 &mut self,
1074 mut pos: FuncCursor<'_>,
1075 index: MemoryIndex,
1076 _heap: ir::Heap,
1077 val: ir::Value,
1078 ) -> WasmResult<ir::Value> {
1079 let (func_sig, index_arg, func_idx) = self.get_memory_grow_func(&mut pos.func, index);
1080 let memory_index = pos.ins().iconst(I32, index_arg as i64);
1081 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1082 let call_inst = pos
1083 .ins()
1084 .call_indirect(func_sig, func_addr, &[vmctx, val, memory_index]);
1085 Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
1086 }
1087
translate_memory_size( &mut self, mut pos: FuncCursor<'_>, index: MemoryIndex, _heap: ir::Heap, ) -> WasmResult<ir::Value>1088 fn translate_memory_size(
1089 &mut self,
1090 mut pos: FuncCursor<'_>,
1091 index: MemoryIndex,
1092 _heap: ir::Heap,
1093 ) -> WasmResult<ir::Value> {
1094 let (func_sig, index_arg, func_idx) = self.get_memory_size_func(&mut pos.func, index);
1095 let memory_index = pos.ins().iconst(I32, index_arg as i64);
1096 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1097 let call_inst = pos
1098 .ins()
1099 .call_indirect(func_sig, func_addr, &[vmctx, memory_index]);
1100 Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
1101 }
1102
translate_memory_copy( &mut self, mut pos: FuncCursor, memory_index: MemoryIndex, _heap: ir::Heap, dst: ir::Value, src: ir::Value, len: ir::Value, ) -> WasmResult<()>1103 fn translate_memory_copy(
1104 &mut self,
1105 mut pos: FuncCursor,
1106 memory_index: MemoryIndex,
1107 _heap: ir::Heap,
1108 dst: ir::Value,
1109 src: ir::Value,
1110 len: ir::Value,
1111 ) -> WasmResult<()> {
1112 let (func_sig, memory_index, func_idx) =
1113 self.get_memory_copy_func(&mut pos.func, memory_index);
1114
1115 let memory_index_arg = pos.ins().iconst(I32, memory_index as i64);
1116
1117 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1118
1119 pos.ins().call_indirect(
1120 func_sig,
1121 func_addr,
1122 &[vmctx, memory_index_arg, dst, src, len],
1123 );
1124
1125 Ok(())
1126 }
1127
translate_memory_fill( &mut self, mut pos: FuncCursor, memory_index: MemoryIndex, _heap: ir::Heap, dst: ir::Value, val: ir::Value, len: ir::Value, ) -> WasmResult<()>1128 fn translate_memory_fill(
1129 &mut self,
1130 mut pos: FuncCursor,
1131 memory_index: MemoryIndex,
1132 _heap: ir::Heap,
1133 dst: ir::Value,
1134 val: ir::Value,
1135 len: ir::Value,
1136 ) -> WasmResult<()> {
1137 let (func_sig, memory_index, func_idx) =
1138 self.get_memory_fill_func(&mut pos.func, memory_index);
1139
1140 let memory_index_arg = pos.ins().iconst(I32, memory_index as i64);
1141
1142 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1143
1144 pos.ins().call_indirect(
1145 func_sig,
1146 func_addr,
1147 &[vmctx, memory_index_arg, dst, val, len],
1148 );
1149
1150 Ok(())
1151 }
1152
translate_memory_init( &mut self, mut pos: FuncCursor, memory_index: MemoryIndex, _heap: ir::Heap, seg_index: u32, dst: ir::Value, src: ir::Value, len: ir::Value, ) -> WasmResult<()>1153 fn translate_memory_init(
1154 &mut self,
1155 mut pos: FuncCursor,
1156 memory_index: MemoryIndex,
1157 _heap: ir::Heap,
1158 seg_index: u32,
1159 dst: ir::Value,
1160 src: ir::Value,
1161 len: ir::Value,
1162 ) -> WasmResult<()> {
1163 let (func_sig, func_idx) = self.get_memory_init_func(&mut pos.func);
1164
1165 let memory_index_arg = pos.ins().iconst(I32, memory_index.index() as i64);
1166 let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
1167
1168 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1169
1170 pos.ins().call_indirect(
1171 func_sig,
1172 func_addr,
1173 &[vmctx, memory_index_arg, seg_index_arg, dst, src, len],
1174 );
1175
1176 Ok(())
1177 }
1178
translate_data_drop(&mut self, mut pos: FuncCursor, seg_index: u32) -> WasmResult<()>1179 fn translate_data_drop(&mut self, mut pos: FuncCursor, seg_index: u32) -> WasmResult<()> {
1180 let (func_sig, func_idx) = self.get_data_drop_func(&mut pos.func);
1181 let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
1182 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1183 pos.ins()
1184 .call_indirect(func_sig, func_addr, &[vmctx, seg_index_arg]);
1185 Ok(())
1186 }
1187
translate_table_size( &mut self, _pos: FuncCursor, _index: TableIndex, _table: ir::Table, ) -> WasmResult<ir::Value>1188 fn translate_table_size(
1189 &mut self,
1190 _pos: FuncCursor,
1191 _index: TableIndex,
1192 _table: ir::Table,
1193 ) -> WasmResult<ir::Value> {
1194 Err(WasmError::Unsupported(
1195 "bulk memory: `table.size`".to_string(),
1196 ))
1197 }
1198
translate_table_copy( &mut self, mut pos: FuncCursor, dst_table_index: TableIndex, _dst_table: ir::Table, src_table_index: TableIndex, _src_table: ir::Table, dst: ir::Value, src: ir::Value, len: ir::Value, ) -> WasmResult<()>1199 fn translate_table_copy(
1200 &mut self,
1201 mut pos: FuncCursor,
1202 dst_table_index: TableIndex,
1203 _dst_table: ir::Table,
1204 src_table_index: TableIndex,
1205 _src_table: ir::Table,
1206 dst: ir::Value,
1207 src: ir::Value,
1208 len: ir::Value,
1209 ) -> WasmResult<()> {
1210 let (func_sig, dst_table_index_arg, src_table_index_arg, func_idx) =
1211 self.get_table_copy_func(&mut pos.func, dst_table_index, src_table_index);
1212
1213 let dst_table_index_arg = pos.ins().iconst(I32, dst_table_index_arg as i64);
1214 let src_table_index_arg = pos.ins().iconst(I32, src_table_index_arg as i64);
1215
1216 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1217
1218 pos.ins().call_indirect(
1219 func_sig,
1220 func_addr,
1221 &[
1222 vmctx,
1223 dst_table_index_arg,
1224 src_table_index_arg,
1225 dst,
1226 src,
1227 len,
1228 ],
1229 );
1230
1231 Ok(())
1232 }
1233
translate_table_init( &mut self, mut pos: FuncCursor, seg_index: u32, table_index: TableIndex, _table: ir::Table, dst: ir::Value, src: ir::Value, len: ir::Value, ) -> WasmResult<()>1234 fn translate_table_init(
1235 &mut self,
1236 mut pos: FuncCursor,
1237 seg_index: u32,
1238 table_index: TableIndex,
1239 _table: ir::Table,
1240 dst: ir::Value,
1241 src: ir::Value,
1242 len: ir::Value,
1243 ) -> WasmResult<()> {
1244 let (func_sig, table_index_arg, func_idx) =
1245 self.get_table_init_func(&mut pos.func, table_index);
1246
1247 let table_index_arg = pos.ins().iconst(I32, table_index_arg as i64);
1248 let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
1249
1250 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1251
1252 pos.ins().call_indirect(
1253 func_sig,
1254 func_addr,
1255 &[vmctx, table_index_arg, seg_index_arg, dst, src, len],
1256 );
1257
1258 Ok(())
1259 }
1260
translate_elem_drop(&mut self, mut pos: FuncCursor, elem_index: u32) -> WasmResult<()>1261 fn translate_elem_drop(&mut self, mut pos: FuncCursor, elem_index: u32) -> WasmResult<()> {
1262 let (func_sig, func_idx) = self.get_elem_drop_func(&mut pos.func);
1263
1264 let elem_index_arg = pos.ins().iconst(I32, elem_index as i64);
1265
1266 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1267
1268 pos.ins()
1269 .call_indirect(func_sig, func_addr, &[vmctx, elem_index_arg]);
1270
1271 Ok(())
1272 }
1273
translate_loop_header(&mut self, mut pos: FuncCursor) -> WasmResult<()>1274 fn translate_loop_header(&mut self, mut pos: FuncCursor) -> WasmResult<()> {
1275 if !self.tunables.interruptable {
1276 return Ok(());
1277 }
1278
1279 // Start out each loop with a check to the interupt flag to allow
1280 // interruption of long or infinite loops.
1281 //
1282 // For more information about this see comments in
1283 // `crates/environ/src/cranelift.rs`
1284 let vmctx = self.vmctx(&mut pos.func);
1285 let pointer_type = self.pointer_type();
1286 let base = pos.ins().global_value(pointer_type, vmctx);
1287 let offset = i32::try_from(self.offsets.vmctx_interrupts()).unwrap();
1288 let interrupt_ptr = pos
1289 .ins()
1290 .load(pointer_type, ir::MemFlags::trusted(), base, offset);
1291 let interrupt = pos.ins().load(
1292 pointer_type,
1293 ir::MemFlags::trusted(),
1294 interrupt_ptr,
1295 i32::from(self.offsets.vminterrupts_stack_limit()),
1296 );
1297 // Note that the cast to `isize` happens first to allow sign-extension,
1298 // if necessary, to `i64`.
1299 let interrupted_sentinel = pos.ins().iconst(pointer_type, INTERRUPTED as isize as i64);
1300 let cmp = pos
1301 .ins()
1302 .icmp(IntCC::Equal, interrupt, interrupted_sentinel);
1303 pos.ins().trapnz(cmp, ir::TrapCode::Interrupt);
1304 Ok(())
1305 }
1306 }
1307