1 /* Copyright 2018 Mozilla Foundation
2 *
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 //! This module deals with the translation of WebAssembly binary functions to Cranelift IR.
17 //!
18 //! The code here deals with adapting the `cranelift_wasm` module to the specifics of BaldrMonkey's
19 //! internal data structures.
20
21 use std::collections::HashMap;
22 use std::rc::Rc;
23
24 use cranelift_codegen::cursor::{Cursor, FuncCursor};
25 use cranelift_codegen::entity::{EntityRef, PrimaryMap, SecondaryMap};
26 use cranelift_codegen::ir;
27 use cranelift_codegen::ir::condcodes::IntCC;
28 use cranelift_codegen::ir::immediates::Offset32;
29 use cranelift_codegen::ir::InstBuilder;
30 use cranelift_codegen::isa::{CallConv, TargetFrontendConfig, TargetIsa};
31 use cranelift_codegen::packed_option::PackedOption;
32 use cranelift_wasm::{
33 FuncEnvironment, FuncIndex, FunctionBuilder, GlobalIndex, GlobalVariable, MemoryIndex,
34 ReturnMode, TableIndex, TargetEnvironment, TypeIndex, WasmError, WasmResult,
35 };
36
37 use crate::bindings::{self, GlobalDesc, SymbolicAddress};
38 use crate::compile::{symbolic_function_name, wasm_function_name};
39 use crate::isa::{platform::USES_HEAP_REG, POINTER_SIZE};
40 use bindings::typecode_to_nonvoid_type;
41
42 #[cfg(target_pointer_width = "64")]
43 pub const POINTER_TYPE: ir::Type = ir::types::I64;
44 #[cfg(target_pointer_width = "32")]
45 pub const POINTER_TYPE: ir::Type = ir::types::I32;
46
47 #[cfg(target_pointer_width = "64")]
48 pub const REF_TYPE: ir::Type = ir::types::R64;
49 #[cfg(target_pointer_width = "32")]
50 pub const REF_TYPE: ir::Type = ir::types::R32;
51
52 /// Convert a TlsData offset into a `Offset32` for a global decl.
offset32(offset: usize) -> ir::immediates::Offset3253 fn offset32(offset: usize) -> ir::immediates::Offset32 {
54 assert!(offset <= i32::max_value() as usize);
55 (offset as i32).into()
56 }
57
58 /// Convert a usize offset into a `Imm64` for an iadd_imm.
imm64(offset: usize) -> ir::immediates::Imm6459 fn imm64(offset: usize) -> ir::immediates::Imm64 {
60 (offset as i64).into()
61 }
62
63 /// Initialize a `Signature` from a wasm signature.
64 ///
65 /// These signatures are used by Cranelift both to perform calls (e.g., to other
66 /// Wasm functions, or back to JS or native code) and to generate code that
67 /// accesses its own args and sets its return value(s) properly.
68 ///
69 /// Note that the extension modes are in principle applicable to *both* sides of
70 /// the call. They must be respected when setting up args for a callee, and when
71 /// setting up a return value to a caller; they may be used/relied upon when
72 /// using an arg that came from a caller, or using a return value that came from
73 /// a callee.
init_sig_from_wsig(call_conv: CallConv, wsig: &bindings::FuncType) -> WasmResult<ir::Signature>74 fn init_sig_from_wsig(call_conv: CallConv, wsig: &bindings::FuncType) -> WasmResult<ir::Signature> {
75 let mut sig = ir::Signature::new(call_conv);
76
77 for arg_type in wsig.args() {
78 let ty = typecode_to_nonvoid_type(*arg_type)?;
79 let arg = match ty {
80 // SpiderMonkey requires i32 arguments to callees (e.g., from Wasm
81 // back into JS or native code) to have their high 32 bits zero so
82 // that it can directly box them.
83 ir::types::I32 => ir::AbiParam::new(ty).uext(),
84 _ => ir::AbiParam::new(ty),
85 };
86 sig.params.push(arg);
87 }
88
89 for ret_type in wsig.results() {
90 let ty = typecode_to_nonvoid_type(*ret_type)?;
91 let ret = match ty {
92 // SpiderMonkey requires i32 returns to have their high 32 bits
93 // zero so that it can directly box them.
94 ir::types::I32 => ir::AbiParam::new(ty).uext(),
95 _ => ir::AbiParam::new(ty),
96 };
97 sig.returns.push(ret);
98 }
99
100 // Add a VM context pointer argument.
101 // This corresponds to SpiderMonkey's `WasmTlsReg` hidden argument.
102 sig.params.push(ir::AbiParam::special(
103 POINTER_TYPE,
104 ir::ArgumentPurpose::VMContext,
105 ));
106
107 // Add a callee-TLS and caller-TLS argument.
108 sig.params.push(ir::AbiParam::special(
109 POINTER_TYPE,
110 ir::ArgumentPurpose::CalleeTLS,
111 ));
112 sig.params.push(ir::AbiParam::special(
113 POINTER_TYPE,
114 ir::ArgumentPurpose::CallerTLS,
115 ));
116
117 Ok(sig)
118 }
119
120 /// Initialize the signature `sig` to match the function with `index` in `env`.
init_sig( env: &bindings::ModuleEnvironment, call_conv: CallConv, func_index: FuncIndex, ) -> WasmResult<ir::Signature>121 pub fn init_sig(
122 env: &bindings::ModuleEnvironment,
123 call_conv: CallConv,
124 func_index: FuncIndex,
125 ) -> WasmResult<ir::Signature> {
126 let wsig = env.func_sig(func_index);
127 init_sig_from_wsig(call_conv, &wsig)
128 }
129
130 /// An instance call may return a special value to indicate that the operation
131 /// failed and we need to trap. This indicates what kind of value to check for,
132 /// if any.
enum FailureMode {
    /// The call cannot fail; the returned value (if any) needs no check.
    Infallible,
    /// The value returned by the function must be checked. internal_ret set to true indicates that
    /// the returned value is only used internally, and should not be passed back to wasm.
    NotZero {
        internal_ret: bool,
    },
    /// The value returned by the function must be checked. An error is deemed to have
    /// happened if the value, when viewed as a signed 32-bit int, is negative.
    IsNegativeI32,
    /// The returned reference must be checked; an invalid reference indicates
    /// that the call failed.
    InvalidRef,
}
145
146 /// A description of builtin call to the `wasm::Instance`.
struct InstanceCall {
    /// Symbolic address of the instance function to call.
    address: SymbolicAddress,
    /// Cranelift types of the call's explicit arguments; the hidden instance
    /// pointer and TLS arguments are added separately when the call is built.
    arguments: &'static [ir::Type],
    /// Return type, or `None` if the call produces no value.
    ret: Option<ir::Type>,
    /// How (if at all) the return value encodes failure.
    failure_mode: FailureMode,
}
153
154 // The following are a list of the instance calls used to implement operations.
155
// Memory operations.

const FN_MEMORY_GROW: InstanceCall = InstanceCall {
    address: SymbolicAddress::MemoryGrow,
    arguments: &[ir::types::I32],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::Infallible,
};
const FN_MEMORY_SIZE: InstanceCall = InstanceCall {
    address: SymbolicAddress::MemorySize,
    arguments: &[],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::Infallible,
};
const FN_MEMORY_COPY: InstanceCall = InstanceCall {
    address: SymbolicAddress::MemoryCopy,
    arguments: &[ir::types::I32, ir::types::I32, ir::types::I32, POINTER_TYPE],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::NotZero { internal_ret: true },
};
const FN_MEMORY_COPY_SHARED: InstanceCall = InstanceCall {
    address: SymbolicAddress::MemoryCopyShared,
    arguments: &[ir::types::I32, ir::types::I32, ir::types::I32, POINTER_TYPE],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::NotZero { internal_ret: true },
};
const FN_MEMORY_FILL: InstanceCall = InstanceCall {
    address: SymbolicAddress::MemoryFill,
    arguments: &[ir::types::I32, ir::types::I32, ir::types::I32, POINTER_TYPE],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::NotZero { internal_ret: true },
};
const FN_MEMORY_FILL_SHARED: InstanceCall = InstanceCall {
    address: SymbolicAddress::MemoryFillShared,
    arguments: &[ir::types::I32, ir::types::I32, ir::types::I32, POINTER_TYPE],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::NotZero { internal_ret: true },
};
const FN_MEMORY_INIT: InstanceCall = InstanceCall {
    address: SymbolicAddress::MemoryInit,
    arguments: &[
        ir::types::I32,
        ir::types::I32,
        ir::types::I32,
        ir::types::I32,
    ],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::NotZero { internal_ret: true },
};
const FN_DATA_DROP: InstanceCall = InstanceCall {
    address: SymbolicAddress::DataDrop,
    arguments: &[ir::types::I32],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::NotZero { internal_ret: true },
};
// Table operations.

const FN_TABLE_SIZE: InstanceCall = InstanceCall {
    address: SymbolicAddress::TableSize,
    arguments: &[ir::types::I32],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::Infallible,
};
const FN_TABLE_GROW: InstanceCall = InstanceCall {
    address: SymbolicAddress::TableGrow,
    arguments: &[REF_TYPE, ir::types::I32, ir::types::I32],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::Infallible,
};
const FN_TABLE_GET: InstanceCall = InstanceCall {
    address: SymbolicAddress::TableGet,
    arguments: &[ir::types::I32, ir::types::I32],
    ret: Some(REF_TYPE),
    failure_mode: FailureMode::InvalidRef,
};
const FN_TABLE_SET: InstanceCall = InstanceCall {
    address: SymbolicAddress::TableSet,
    arguments: &[ir::types::I32, REF_TYPE, ir::types::I32],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::NotZero { internal_ret: true },
};
const FN_TABLE_COPY: InstanceCall = InstanceCall {
    address: SymbolicAddress::TableCopy,
    arguments: &[
        ir::types::I32,
        ir::types::I32,
        ir::types::I32,
        ir::types::I32,
        ir::types::I32,
    ],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::NotZero { internal_ret: true },
};
const FN_TABLE_FILL: InstanceCall = InstanceCall {
    address: SymbolicAddress::TableFill,
    arguments: &[ir::types::I32, REF_TYPE, ir::types::I32, ir::types::I32],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::NotZero { internal_ret: true },
};
const FN_TABLE_INIT: InstanceCall = InstanceCall {
    address: SymbolicAddress::TableInit,
    arguments: &[
        ir::types::I32,
        ir::types::I32,
        ir::types::I32,
        ir::types::I32,
        ir::types::I32,
    ],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::NotZero { internal_ret: true },
};
const FN_ELEM_DROP: InstanceCall = InstanceCall {
    address: SymbolicAddress::ElemDrop,
    arguments: &[ir::types::I32],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::NotZero { internal_ret: true },
};
// Reference and GC-barrier operations.

const FN_REF_FUNC: InstanceCall = InstanceCall {
    address: SymbolicAddress::RefFunc,
    arguments: &[ir::types::I32],
    ret: Some(REF_TYPE),
    failure_mode: FailureMode::InvalidRef,
};
const FN_PRE_BARRIER: InstanceCall = InstanceCall {
    address: SymbolicAddress::PreBarrier,
    arguments: &[POINTER_TYPE],
    ret: None,
    failure_mode: FailureMode::Infallible,
};
const FN_POST_BARRIER: InstanceCall = InstanceCall {
    address: SymbolicAddress::PostBarrier,
    arguments: &[POINTER_TYPE],
    ret: None,
    failure_mode: FailureMode::Infallible,
};
// Atomic wait/wake operations.

const FN_WAIT_I32: InstanceCall = InstanceCall {
    address: SymbolicAddress::WaitI32,
    arguments: &[ir::types::I32, ir::types::I32, ir::types::I64],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::IsNegativeI32,
};
const FN_WAIT_I64: InstanceCall = InstanceCall {
    address: SymbolicAddress::WaitI64,
    arguments: &[ir::types::I32, ir::types::I64, ir::types::I64],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::IsNegativeI32,
};
const FN_WAKE: InstanceCall = InstanceCall {
    address: SymbolicAddress::Wake,
    arguments: &[ir::types::I32, ir::types::I32],
    ret: Some(ir::types::I32),
    failure_mode: FailureMode::IsNegativeI32,
};
305
306 // Custom trap codes specific to this embedding
307
308 pub const TRAP_THROW_REPORTED: u16 = 1;
309
310 /// A translation context that implements `FuncEnvironment` for the specific Spidermonkey
311 /// translation bits.
pub struct TransEnv<'static_env, 'module_env> {
    /// Process-wide compilation parameters (TLS field offsets, calling
    /// convention, heap bounds, ...) provided by the embedder.
    static_env: &'static_env bindings::StaticEnvironment,

    /// The module being compiled; source of signatures, globals and tables.
    module_env: Rc<bindings::ModuleEnvironment<'module_env>>,

    /// Frontend configuration (pointer width etc.) captured from the ISA.
    target_frontend_config: TargetFrontendConfig,

    /// Information about the function pointer tables `self.module_env` knowns about. Indexed by
    /// table index.
    tables: PrimaryMap<TableIndex, TableInfo>,

    /// For those signatures whose ID is stored in a global, keep track of the globals we have
    /// created so far.
    ///
    /// Note that most signatures are of the immediate form, and we don't keep any records for
    /// those.
    ///
    /// The key to this table is the TLS offset returned by `sig_idTlsOffset()`.
    signatures: HashMap<i32, ir::GlobalValue>,

    /// Global variables containing `FuncImportTls` information about imported functions.
    /// This vector is indexed by a `FuncIndex`, taking advantage of the fact that WebAssembly
    /// imported functions are numbered starting from 0.
    ///
    /// Any `None` entries in this table are simply global variables that have not yet been created.
    func_gvs: SecondaryMap<FuncIndex, PackedOption<ir::GlobalValue>>,

    /// The `vmctx` global value.
    vmctx_gv: PackedOption<ir::GlobalValue>,

    /// Global variable representing the `TlsData::instance` field which points to the current
    /// instance.
    instance_gv: PackedOption<ir::GlobalValue>,

    /// Global variable representing the `TlsData::interrupt` field which points to the current
    /// interrupt flag.
    interrupt_gv: PackedOption<ir::GlobalValue>,

    /// Allocated `FuncRef` for symbolic addresses.
    /// See the `SymbolicAddress` enum in `baldrapi.h`.
    symbolic: [PackedOption<ir::FuncRef>; bindings::SymbolicAddress::Limit as usize],

    /// The address of the `cx` field in the `wasm::TlsData` struct.
    cx_addr: PackedOption<ir::GlobalValue>,

    /// The address of the `realm` field in the `wasm::TlsData` struct.
    realm_addr: PackedOption<ir::GlobalValue>,
}
359
360 impl<'static_env, 'module_env> TransEnv<'static_env, 'module_env> {
    /// Build a `TransEnv` from the target `isa` and the static/module
    /// environments. All lazily-created caches start out empty.
    pub fn new(
        isa: &dyn TargetIsa,
        module_env: Rc<bindings::ModuleEnvironment<'module_env>>,
        static_env: &'static_env bindings::StaticEnvironment,
    ) -> Self {
        TransEnv {
            static_env,
            module_env,
            // Capture pointer width etc. from the ISA up front.
            target_frontend_config: isa.frontend_config(),
            tables: PrimaryMap::new(),
            signatures: HashMap::new(),
            func_gvs: SecondaryMap::new(),
            vmctx_gv: None.into(),
            instance_gv: None.into(),
            interrupt_gv: None.into(),
            symbolic: [None.into(); bindings::SymbolicAddress::Limit as usize],
            cx_addr: None.into(),
            realm_addr: None.into(),
        }
    }
381
clear(&mut self)382 pub fn clear(&mut self) {
383 self.tables.clear();
384 self.signatures.clear();
385 self.func_gvs.clear();
386 self.vmctx_gv = None.into();
387 self.instance_gv = None.into();
388 self.interrupt_gv = None.into();
389 for entry in self.symbolic.iter_mut() {
390 *entry = None.into();
391 }
392 self.cx_addr = None.into();
393 self.realm_addr = None.into();
394 }
395
396 /// Get the `vmctx` global value.
get_vmctx_gv(&mut self, func: &mut ir::Function) -> ir::GlobalValue397 fn get_vmctx_gv(&mut self, func: &mut ir::Function) -> ir::GlobalValue {
398 match self.vmctx_gv.expand() {
399 Some(gv) => gv,
400 None => {
401 // We need to allocate the global variable.
402 let gv = func.create_global_value(ir::GlobalValueData::VMContext);
403 self.vmctx_gv = Some(gv).into();
404 gv
405 }
406 }
407 }
408
409 /// Get information about `table`.
410 /// Create it if necessary.
get_table(&mut self, func: &mut ir::Function, table: TableIndex) -> TableInfo411 fn get_table(&mut self, func: &mut ir::Function, table: TableIndex) -> TableInfo {
412 // Allocate all tables up to the requested index.
413 let vmctx = self.get_vmctx_gv(func);
414 while self.tables.len() <= table.index() {
415 let wtab = self.module_env.table(TableIndex::new(self.tables.len()));
416 self.tables.push(TableInfo::new(wtab, func, vmctx));
417 }
418 self.tables[table].clone()
419 }
420
421 /// Get the global variable storing the ID of the given signature.
sig_global(&mut self, func: &mut ir::Function, offset: usize) -> ir::GlobalValue422 fn sig_global(&mut self, func: &mut ir::Function, offset: usize) -> ir::GlobalValue {
423 let vmctx = self.get_vmctx_gv(func);
424 *self.signatures.entry(offset as i32).or_insert_with(|| {
425 func.create_global_value(ir::GlobalValueData::IAddImm {
426 base: vmctx,
427 offset: imm64(offset),
428 global_type: POINTER_TYPE,
429 })
430 })
431 }
432
433 /// Get the global variable storing the `FuncImportTls` struct for an imported function.
func_import_global(&mut self, func: &mut ir::Function, index: FuncIndex) -> ir::GlobalValue434 fn func_import_global(&mut self, func: &mut ir::Function, index: FuncIndex) -> ir::GlobalValue {
435 // See if we already allocated a global for this import.
436 if let Some(gv) = self.func_gvs.get(index).and_then(|gv| gv.expand()) {
437 return gv;
438 }
439 // We need to create a global variable for `import_index`.
440 let vmctx = self.get_vmctx_gv(func);
441 let gv = func.create_global_value(ir::GlobalValueData::IAddImm {
442 base: vmctx,
443 offset: imm64(self.module_env.func_import_tls_offset(index)),
444 global_type: POINTER_TYPE,
445 });
446 // Save it for next time.
447 self.func_gvs[index] = gv.into();
448 gv
449 }
450
451 /// Generate code that loads the current instance pointer.
load_instance(&mut self, pos: &mut FuncCursor) -> ir::Value452 fn load_instance(&mut self, pos: &mut FuncCursor) -> ir::Value {
453 let gv = match self.instance_gv.expand() {
454 Some(gv) => gv,
455 None => {
456 // We need to allocate the global variable.
457 let vmctx = self.get_vmctx_gv(pos.func);
458 let gv = pos.func.create_global_value(ir::GlobalValueData::IAddImm {
459 base: vmctx,
460 offset: imm64(self.static_env.instance_tls_offset),
461 global_type: POINTER_TYPE,
462 });
463 self.instance_gv = gv.into();
464 gv
465 }
466 };
467 let ga = pos.ins().global_value(POINTER_TYPE, gv);
468 pos.ins().load(POINTER_TYPE, ir::MemFlags::trusted(), ga, 0)
469 }
470
    /// Generate code that loads the current interrupt flag (a 32-bit value).
    fn load_interrupt_flag(&mut self, pos: &mut FuncCursor) -> ir::Value {
        let gv = match self.interrupt_gv.expand() {
            Some(gv) => gv,
            None => {
                // We need to allocate the global variable:
                // `vmctx + interrupt_tls_offset`.
                let vmctx = self.get_vmctx_gv(pos.func);
                let gv = pos.func.create_global_value(ir::GlobalValueData::IAddImm {
                    base: vmctx,
                    offset: imm64(self.static_env.interrupt_tls_offset),
                    global_type: POINTER_TYPE,
                });
                self.interrupt_gv = gv.into();
                gv
            }
        };
        let ga = pos.ins().global_value(POINTER_TYPE, gv);
        // The slot holds a pointer to the flag; load the flag itself as I32.
        pos.ins()
            .load(ir::types::I32, ir::MemFlags::trusted(), ga, 0)
    }
491
492 /// Get a `FuncRef` for the given symbolic address.
493 /// Uses the closure to create the signature if necessary.
    fn symbolic_funcref<MKSIG: FnOnce() -> ir::Signature>(
        &mut self,
        func: &mut ir::Function,
        sym: bindings::SymbolicAddress,
        make_sig: MKSIG,
    ) -> (ir::FuncRef, ir::SigRef) {
        let symidx = sym as usize;
        // Fast path: reuse a previously imported funcref and its signature.
        if let Some(fnref) = self.symbolic[symidx].expand() {
            return (fnref, func.dfg.ext_funcs[fnref].signature);
        }

        // We need to allocate a signature and func-ref.
        let signature = func.import_signature(make_sig());
        // `colocated: false` because symbolic stubs may be far from the
        // function (see the comment in `instance_call`).
        let fnref = func.import_function(ir::ExtFuncData {
            signature,
            name: symbolic_function_name(sym),
            colocated: false,
        });

        // Cache for subsequent calls to the same symbolic address.
        self.symbolic[symidx] = fnref.into();
        (fnref, signature)
    }
516
517 /// Update the JSContext's realm value. This is called after a call to restore the
518 /// realm value, in case the call has used a different realm.
    fn switch_to_wasm_tls_realm(&mut self, pos: &mut FuncCursor) {
        // Lazily create a global for the address of the `cx` field in TlsData.
        if self.cx_addr.is_none() {
            let vmctx = self.get_vmctx_gv(&mut pos.func);
            self.cx_addr = pos
                .func
                .create_global_value(ir::GlobalValueData::IAddImm {
                    base: vmctx,
                    offset: imm64(self.static_env.cx_tls_offset),
                    global_type: POINTER_TYPE,
                })
                .into();
        }

        // Lazily create a global for the address of the `realm` field in TlsData.
        if self.realm_addr.is_none() {
            let vmctx = self.get_vmctx_gv(&mut pos.func);
            self.realm_addr = pos
                .func
                .create_global_value(ir::GlobalValueData::IAddImm {
                    base: vmctx,
                    offset: imm64(self.static_env.realm_tls_offset),
                    global_type: POINTER_TYPE,
                })
                .into();
        }

        // Load the JSContext pointer and the wasm realm out of TLS, then store
        // the realm into the context: cx->realm = tls->realm.
        let ptr = POINTER_TYPE;
        let flags = ir::MemFlags::trusted();
        let cx_addr_val = pos.ins().global_value(ptr, self.cx_addr.unwrap());
        let cx = pos.ins().load(ptr, flags, cx_addr_val, 0);
        let realm_addr_val = pos.ins().global_value(ptr, self.realm_addr.unwrap());
        let realm = pos.ins().load(ptr, flags, realm_addr_val, 0);
        pos.ins()
            .store(flags, realm, cx, offset32(self.static_env.realm_cx_offset));
    }
553
554 /// Update the JSContext's realm value in preparation for making an indirect call through
555 /// an external table.
switch_to_indirect_callee_realm(&mut self, pos: &mut FuncCursor, vmctx: ir::Value)556 fn switch_to_indirect_callee_realm(&mut self, pos: &mut FuncCursor, vmctx: ir::Value) {
557 let ptr = POINTER_TYPE;
558 let flags = ir::MemFlags::trusted();
559 let cx = pos
560 .ins()
561 .load(ptr, flags, vmctx, offset32(self.static_env.cx_tls_offset));
562 let realm = pos.ins().load(
563 ptr,
564 flags,
565 vmctx,
566 offset32(self.static_env.realm_tls_offset),
567 );
568 pos.ins()
569 .store(flags, realm, cx, offset32(self.static_env.realm_cx_offset));
570 }
571
572 /// Update the JSContext's realm value in preparation for making a call to an imported
573 /// function.
switch_to_import_realm( &mut self, pos: &mut FuncCursor, vmctx: ir::Value, gv_addr: ir::Value, )574 fn switch_to_import_realm(
575 &mut self,
576 pos: &mut FuncCursor,
577 vmctx: ir::Value,
578 gv_addr: ir::Value,
579 ) {
580 let ptr = POINTER_TYPE;
581 let flags = ir::MemFlags::trusted();
582 let cx = pos
583 .ins()
584 .load(ptr, flags, vmctx, offset32(self.static_env.cx_tls_offset));
585 let realm = pos.ins().load(
586 ptr,
587 flags,
588 gv_addr,
589 offset32(self.static_env.realm_func_import_tls_offset),
590 );
591 pos.ins()
592 .store(flags, realm, cx, offset32(self.static_env.realm_cx_offset));
593 }
594
load_pinned_reg(&self, pos: &mut FuncCursor, vmctx: ir::Value)595 fn load_pinned_reg(&self, pos: &mut FuncCursor, vmctx: ir::Value) {
596 if USES_HEAP_REG {
597 let heap_base = pos.ins().load(
598 POINTER_TYPE,
599 ir::MemFlags::trusted(),
600 vmctx,
601 self.static_env.memory_base_tls_offset as i32,
602 );
603 pos.ins().set_pinned_reg(heap_base);
604 }
605 }
606
reload_tls_and_pinned_regs(&mut self, pos: &mut FuncCursor)607 fn reload_tls_and_pinned_regs(&mut self, pos: &mut FuncCursor) {
608 let vmctx_gv = self.get_vmctx_gv(&mut pos.func);
609 let vmctx = pos.ins().global_value(POINTER_TYPE, vmctx_gv);
610 self.load_pinned_reg(pos, vmctx);
611 }
612
    /// Emit a call to the `wasm::Instance` builtin described by `call`, passing
    /// `arguments`, and lower its failure convention to a trap.
    ///
    /// Returns the call's result, or `None` when there is no result or when
    /// the result is only used internally for the failure check.
    fn instance_call(
        &mut self,
        pos: &mut FuncCursor,
        call: &InstanceCall,
        arguments: &[ir::Value],
    ) -> Option<ir::Value> {
        debug_assert!(call.arguments.len() == arguments.len());

        let call_conv = self.static_env.call_conv();
        // Build (or reuse) the funcref/signature for the symbolic address:
        // (instance, explicit args..., vmctx, callee-tls, caller-tls) -> ret?
        let (fnref, sigref) = self.symbolic_funcref(pos.func, call.address, || {
            let mut sig = ir::Signature::new(call_conv);
            // Leading instance pointer argument.
            sig.params.push(ir::AbiParam::new(POINTER_TYPE));
            for argument in call.arguments {
                sig.params.push(ir::AbiParam::new(*argument));
            }
            sig.params.push(ir::AbiParam::special(
                POINTER_TYPE,
                ir::ArgumentPurpose::VMContext,
            ));
            // Add a callee-TLS and caller-TLS argument.
            sig.params.push(ir::AbiParam::special(
                POINTER_TYPE,
                ir::ArgumentPurpose::CalleeTLS,
            ));
            sig.params.push(ir::AbiParam::special(
                POINTER_TYPE,
                ir::ArgumentPurpose::CallerTLS,
            ));
            if let Some(ret) = &call.ret {
                sig.returns.push(ir::AbiParam::new(*ret));
            }
            sig
        });

        let instance = self.load_instance(pos);
        let vmctx = pos
            .func
            .special_param(ir::ArgumentPurpose::VMContext)
            .expect("Missing vmctx arg");

        // We must use `func_addr` for symbolic references since the stubs can be far away, and the
        // C++ `SymbolicAccess` linker expects it.

        let func_addr = pos.ins().func_addr(POINTER_TYPE, fnref);
        // The call is created with an empty argument list, then the list is
        // filled in by hand below to match the signature built above.
        let call_ins = pos.ins().call_indirect(sigref, func_addr, &[]);
        let mut built_arguments = pos.func.dfg[call_ins].take_value_list().unwrap();
        built_arguments.push(instance, &mut pos.func.dfg.value_lists);
        built_arguments.extend(arguments.iter().cloned(), &mut pos.func.dfg.value_lists);
        built_arguments.push(vmctx, &mut pos.func.dfg.value_lists);
        built_arguments.push(vmctx, &mut pos.func.dfg.value_lists); // callee_tls
        built_arguments.push(vmctx, &mut pos.func.dfg.value_lists); // caller_tls
        pos.func.dfg[call_ins].put_value_list(built_arguments);

        // The callee may have run in a different realm and the call may have
        // clobbered pinned registers; restore both after the call.
        self.switch_to_wasm_tls_realm(pos);
        self.reload_tls_and_pinned_regs(pos);

        if call.ret.is_none() {
            return None;
        }

        // Lower the builtin's failure convention to a trap, if fallible.
        let ret = pos.func.dfg.first_result(call_ins);
        match call.failure_mode {
            FailureMode::Infallible => Some(ret),
            FailureMode::NotZero { internal_ret } => {
                // Non-zero return value signals failure.
                pos.ins()
                    .trapnz(ret, ir::TrapCode::User(TRAP_THROW_REPORTED));
                if internal_ret {
                    None
                } else {
                    Some(ret)
                }
            }
            FailureMode::IsNegativeI32 => {
                let ty = pos.func.dfg.value_type(ret);
                assert!(ty == ir::types::I32);
                // Trap when the (signed) 32-bit result is negative.
                let f = pos.ins().ifcmp_imm(ret, i64::from(0));
                pos.ins().trapif(
                    IntCC::SignedLessThan,
                    f,
                    ir::TrapCode::User(TRAP_THROW_REPORTED),
                );
                Some(ret)
            }
            FailureMode::InvalidRef => {
                // An invalid reference signals failure.
                let invalid = pos.ins().is_invalid(ret);
                pos.ins()
                    .trapnz(invalid, ir::TrapCode::User(TRAP_THROW_REPORTED));
                Some(ret)
            }
        }
    }
704
global_address( &mut self, func: &mut ir::Function, global: &GlobalDesc, ) -> (ir::GlobalValue, Offset32)705 fn global_address(
706 &mut self,
707 func: &mut ir::Function,
708 global: &GlobalDesc,
709 ) -> (ir::GlobalValue, Offset32) {
710 assert!(!global.is_constant());
711
712 // This is a global variable. Here we don't care if it is mutable or not.
713 let vmctx_gv = self.get_vmctx_gv(func);
714 let offset = global.tls_offset();
715
716 // Some globals are represented as a pointer to the actual data, in which case we
717 // must do an extra dereference to get to them. Also, in that case, the pointer
718 // itself is immutable, so we mark it `readonly` here to assist Cranelift in commoning
719 // up what would otherwise be multiple adjacent reads of the value.
720 if global.is_indirect() {
721 let gv = func.create_global_value(ir::GlobalValueData::Load {
722 base: vmctx_gv,
723 offset: offset32(offset),
724 global_type: POINTER_TYPE,
725 readonly: true,
726 });
727 (gv, 0.into())
728 } else {
729 (vmctx_gv, offset32(offset))
730 }
731 }
732 }
733
impl<'static_env, 'module_env> TargetEnvironment for TransEnv<'static_env, 'module_env> {
    // Frontend configuration captured from the ISA at construction time.
    fn target_config(&self) -> TargetFrontendConfig {
        self.target_frontend_config
    }
    // Pointer type matching the compilation target's pointer width.
    fn pointer_type(&self) -> ir::Type {
        POINTER_TYPE
    }
}
742
743 impl<'static_env, 'module_env> FuncEnvironment for TransEnv<'static_env, 'module_env> {
make_global( &mut self, func: &mut ir::Function, index: GlobalIndex, ) -> WasmResult<GlobalVariable>744 fn make_global(
745 &mut self,
746 func: &mut ir::Function,
747 index: GlobalIndex,
748 ) -> WasmResult<GlobalVariable> {
749 let global = self.module_env.global(index);
750 if global.is_constant() {
751 // Constant globals have a known value at compile time. We insert an instruction to
752 // materialize the constant at the front of the entry block.
753 let mut pos = FuncCursor::new(func);
754 pos.next_block().expect("empty function");
755 pos.next_inst();
756 return Ok(GlobalVariable::Const(global.emit_constant(&mut pos)?));
757 }
758
759 match global.value_type()? {
760 ir::types::R32 | ir::types::R64 => Ok(GlobalVariable::Custom),
761 _ => {
762 let (base_gv, offset) = self.global_address(func, &global);
763 let mem_ty = global.value_type()?;
764
765 Ok(GlobalVariable::Memory {
766 gv: base_gv,
767 ty: mem_ty,
768 offset,
769 })
770 }
771 }
772 }
773
make_heap(&mut self, func: &mut ir::Function, index: MemoryIndex) -> WasmResult<ir::Heap>774 fn make_heap(&mut self, func: &mut ir::Function, index: MemoryIndex) -> WasmResult<ir::Heap> {
775 // Currently, Baldrdash doesn't support multiple memories.
776 if index.index() != 0 {
777 return Err(WasmError::Unsupported(
778 "only one wasm memory supported".to_string(),
779 ));
780 }
781
782 let vcmtx = self.get_vmctx_gv(func);
783
784 let bound = self.static_env.static_memory_bound as u64;
785 let is_static = bound > 0;
786
787 // Get the `TlsData::memoryBase` field.
788 let base = func.create_global_value(ir::GlobalValueData::Load {
789 base: vcmtx,
790 offset: offset32(0),
791 global_type: POINTER_TYPE,
792 readonly: is_static,
793 });
794
795 let style = if is_static {
796 // We have a static heap.
797 let bound = bound.into();
798 ir::HeapStyle::Static { bound }
799 } else {
800 // Get the `TlsData::boundsCheckLimit` field. An assertion in the C++ code
801 // ensures that the offset of this field is POINTER_SIZE bytes away from the
802 // start of the Tls. The size of the field is the size of a pointer: on
803 // 32-bit systems, heaps are <= 2GB, while on 64-bit systems even 32-bit heaps
804 // can grow to 4GB, and 64-bit heaps can be larger still.
805 //
806 // Note that SpiderMonkey limits 64-bit heaps to 4GB-128K when Cranelift is
807 // present so that Cranelift can continue to treat the boundsCheckLimit as a
808 // 32-bit quantity. This situation will persist until we get around to
809 // updating Cranelift, at which point Cranelift-compiled code can have true
810 // 4GB heaps.
811 let bound_gv = func.create_global_value(ir::GlobalValueData::Load {
812 base: vcmtx,
813 offset: (POINTER_SIZE as i32).into(),
814 global_type: ir::types::I32,
815 readonly: false,
816 });
817 ir::HeapStyle::Dynamic { bound_gv }
818 };
819
820 let min_size = (self.module_env.min_memory_length() as u64).into();
821 let offset_guard_size = (self.static_env.memory_guard_size as u64).into();
822
823 Ok(func.create_heap(ir::HeapData {
824 base,
825 min_size,
826 offset_guard_size,
827 style,
828 index_type: ir::types::I32,
829 }))
830 }
831
make_indirect_sig( &mut self, func: &mut ir::Function, index: TypeIndex, ) -> WasmResult<ir::SigRef>832 fn make_indirect_sig(
833 &mut self,
834 func: &mut ir::Function,
835 index: TypeIndex,
836 ) -> WasmResult<ir::SigRef> {
837 let wsig = self.module_env.signature(index);
838 let wsig_id = self.module_env.signature_id(index);
839 let mut sigdata = init_sig_from_wsig(self.static_env.call_conv(), &wsig)?;
840
841 if wsig_id.id_kind() != bindings::TypeIdDescKind::None {
842 // A signature to be used for an indirect call also takes a signature id.
843 sigdata.params.push(ir::AbiParam::special(
844 POINTER_TYPE,
845 ir::ArgumentPurpose::SignatureId,
846 ));
847 }
848
849 Ok(func.import_signature(sigdata))
850 }
851
make_table(&mut self, func: &mut ir::Function, index: TableIndex) -> WasmResult<ir::Table>852 fn make_table(&mut self, func: &mut ir::Function, index: TableIndex) -> WasmResult<ir::Table> {
853 let table_desc = self.get_table(func, index);
854
855 // TODO we'd need a better way to synchronize the shape of GlobalDataDesc and these
856 // offsets.
857 let bound_gv = func.create_global_value(ir::GlobalValueData::Load {
858 base: table_desc.global,
859 offset: 0.into(),
860 global_type: ir::types::I32,
861 readonly: false,
862 });
863
864 let base_gv = func.create_global_value(ir::GlobalValueData::Load {
865 base: table_desc.global,
866 offset: offset32(POINTER_SIZE as usize),
867 global_type: POINTER_TYPE,
868 readonly: false,
869 });
870
871 Ok(func.create_table(ir::TableData {
872 base_gv,
873 min_size: 0.into(),
874 bound_gv,
875 element_size: (u64::from(self.pointer_bytes()) * 2).into(),
876 index_type: ir::types::I32,
877 }))
878 }
879
make_direct_func( &mut self, func: &mut ir::Function, index: FuncIndex, ) -> WasmResult<ir::FuncRef>880 fn make_direct_func(
881 &mut self,
882 func: &mut ir::Function,
883 index: FuncIndex,
884 ) -> WasmResult<ir::FuncRef> {
885 // Create a signature.
886 let sigdata = init_sig(&*self.module_env, self.static_env.call_conv(), index)?;
887 let signature = func.import_signature(sigdata);
888
889 Ok(func.import_function(ir::ExtFuncData {
890 name: wasm_function_name(index),
891 signature,
892 colocated: true,
893 }))
894 }
895
    /// Translate a `call_indirect` through a function table.
    ///
    /// Mirrors `MacroAssembler::wasmCallIndirect`: materialize the expected
    /// signature id, bounds-check the index against the table length, load the
    /// entry's code and TLS pointers, null-check the callee, switch realm/TLS,
    /// and emit the indirect call.
    fn translate_call_indirect(
        &mut self,
        mut pos: FuncCursor,
        table_index: TableIndex,
        table: ir::Table,
        sig_index: TypeIndex,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        call_args: &[ir::Value],
    ) -> WasmResult<ir::Inst> {
        let wsig_id = self.module_env.signature_id(sig_index);

        let wtable = self.get_table(pos.func, table_index);

        // Follows `MacroAssembler::wasmCallIndirect`:

        // 1. Materialize the signature ID.
        let sigid_value = match wsig_id.id_kind() {
            // No signature check needed; no extra argument is passed.
            bindings::TypeIdDescKind::None => None,
            bindings::TypeIdDescKind::Immediate => {
                // The signature is represented as an immediate pointer-sized value.
                let imm = wsig_id.id_immediate() as i64;
                Some(pos.ins().iconst(POINTER_TYPE, imm))
            }
            bindings::TypeIdDescKind::Global => {
                // The signature id is stored in instance (TLS) data; load it.
                let gv = self.sig_global(pos.func, wsig_id.id_tls_offset());
                let addr = pos.ins().global_value(POINTER_TYPE, gv);
                Some(
                    pos.ins()
                        .load(POINTER_TYPE, ir::MemFlags::trusted(), addr, 0),
                )
            }
        };

        // 2. Bounds check the callee against the table length.
        let (bound_gv, base_gv) = {
            let table_data = &pos.func.tables[table];
            (table_data.bound_gv, table_data.base_gv)
        };

        // The table length is an unsigned 32-bit quantity.
        let tlength = pos.ins().global_value(ir::types::I32, bound_gv);

        let oob = pos
            .ins()
            .icmp(IntCC::UnsignedGreaterThanOrEqual, callee, tlength);
        pos.ins().trapnz(oob, ir::TrapCode::TableOutOfBounds);

        // 3. Load the wtable base pointer from a global.
        let tbase = pos.ins().global_value(POINTER_TYPE, base_gv);

        // 4. Load callee pointer from wtable.
        // Zero-extend the 32-bit table index to pointer width before scaling.
        let callee_x = if POINTER_TYPE != ir::types::I32 {
            pos.ins().uextend(POINTER_TYPE, callee)
        } else {
            callee
        };
        let callee_scaled = pos.ins().imul_imm(callee_x, wtable.entry_size());

        let entry = pos.ins().iadd(tbase, callee_scaled);
        // The entry's first pointer-sized field is the code pointer.
        let callee_func = pos
            .ins()
            .load(POINTER_TYPE, ir::MemFlags::trusted(), entry, 0);

        // Check for a null callee.
        pos.ins()
            .trapz(callee_func, ir::TrapCode::IndirectCallToNull);

        // Get the caller TLS value.
        let vmctx_gv = self.get_vmctx_gv(&mut pos.func);
        let caller_vmctx = pos.ins().global_value(POINTER_TYPE, vmctx_gv);

        // Handle external tables, set up environment.
        // A function table call could redirect execution to another module with a different realm,
        // so switch to this realm just in case.
        // The entry's second pointer-sized field is the callee's vmctx (TLS).
        let callee_vmctx = pos.ins().load(
            POINTER_TYPE,
            ir::MemFlags::trusted(),
            entry,
            POINTER_SIZE as i32,
        );
        self.switch_to_indirect_callee_realm(&mut pos, callee_vmctx);
        self.load_pinned_reg(&mut pos, callee_vmctx);

        // First the wasm args.
        let mut args = ir::ValueList::default();
        args.push(callee_func, &mut pos.func.dfg.value_lists);
        args.extend(call_args.iter().cloned(), &mut pos.func.dfg.value_lists);
        // Then the callee vmctx, the callee-TLS and caller-TLS slots
        // (ABI-2020), and finally the signature id if a check is required.
        args.push(callee_vmctx, &mut pos.func.dfg.value_lists);
        args.push(callee_vmctx, &mut pos.func.dfg.value_lists);
        args.push(caller_vmctx, &mut pos.func.dfg.value_lists);
        if let Some(sigid) = sigid_value {
            args.push(sigid, &mut pos.func.dfg.value_lists);
        }

        let call = pos
            .ins()
            .CallIndirect(ir::Opcode::CallIndirect, ir::types::INVALID, sig_ref, args)
            .0;

        // Restore this module's realm and TLS/pinned registers after the call.
        self.switch_to_wasm_tls_realm(&mut pos);
        self.reload_tls_and_pinned_regs(&mut pos);

        Ok(call)
    }
1000
    /// Translate a direct `call` to `callee_index`.
    ///
    /// A local callee gets a plain colocated call with the current vmctx
    /// passed along. An imported callee is reached indirectly: its code
    /// address and TLS are loaded from the associated `FuncImportTls` struct,
    /// and the call is bracketed by a realm/TLS switch.
    fn translate_call(
        &mut self,
        mut pos: FuncCursor,
        callee_index: FuncIndex,
        callee: ir::FuncRef,
        call_args: &[ir::Value],
    ) -> WasmResult<ir::Inst> {
        // First the wasm args.
        let mut args = ir::ValueList::default();
        args.extend(call_args.iter().cloned(), &mut pos.func.dfg.value_lists);

        // Is this an imported function in a different instance, or a local function?
        if self.module_env.func_is_import(callee_index) {
            // This is a call to an imported function. We need to load the callee address and vmctx
            // from the associated `FuncImportTls` struct in a global.
            let gv = self.func_import_global(pos.func, callee_index);
            let gv_addr = pos.ins().global_value(POINTER_TYPE, gv);

            // We need the first two pointer-sized fields from the `FuncImportTls` struct: `code`
            // and `tls`.
            let fit_code = pos
                .ins()
                .load(POINTER_TYPE, ir::MemFlags::trusted(), gv_addr, 0);
            let fit_tls = pos.ins().load(
                POINTER_TYPE,
                ir::MemFlags::trusted(),
                gv_addr,
                POINTER_SIZE as i32,
            );

            // Save the caller TLS value.
            let vmctx_gv = self.get_vmctx_gv(&mut pos.func);
            let caller_vmctx = pos.ins().global_value(POINTER_TYPE, vmctx_gv);

            // Switch to the callee's realm.
            self.switch_to_import_realm(&mut pos, fit_tls, gv_addr);
            self.load_pinned_reg(&mut pos, fit_tls);

            // The `tls` field is the VM context pointer for the callee.
            args.push(fit_tls, &mut pos.func.dfg.value_lists);

            // callee-TLS slot (ABI-2020).
            args.push(fit_tls, &mut pos.func.dfg.value_lists);
            // caller-TLS slot (ABI-2020).
            args.push(caller_vmctx, &mut pos.func.dfg.value_lists);

            // Now make an indirect call to `fit_code`.
            // TODO: We don't need the `FuncRef` that was allocated for this callee since we're
            // using an indirect call. We would need to change the `FuncTranslator` interface to
            // deal.
            // The code pointer is the first operand of an indirect call.
            args.insert(0, fit_code, &mut pos.func.dfg.value_lists);
            let sig = pos.func.dfg.ext_funcs[callee].signature;
            let call = pos
                .ins()
                .CallIndirect(ir::Opcode::CallIndirect, ir::types::INVALID, sig, args)
                .0;
            // Restore this module's realm and TLS/pinned registers.
            self.switch_to_wasm_tls_realm(&mut pos);
            self.reload_tls_and_pinned_regs(&mut pos);
            Ok(call)
        } else {
            // This is a call to a local function.

            // Then we need to pass on the VM context pointer.
            let vmctx = pos
                .func
                .special_param(ir::ArgumentPurpose::VMContext)
                .expect("Missing vmctx arg");
            args.push(vmctx, &mut pos.func.dfg.value_lists);

            // callee-TLS slot (ABI-2020).
            args.push(vmctx, &mut pos.func.dfg.value_lists);
            // caller-TLS slot (ABI-2020).
            args.push(vmctx, &mut pos.func.dfg.value_lists);

            Ok(pos
                .ins()
                .Call(ir::Opcode::Call, ir::types::INVALID, callee, args)
                .0)
        }
    }
1081
translate_memory_grow( &mut self, mut pos: FuncCursor, _index: MemoryIndex, _heap: ir::Heap, val: ir::Value, ) -> WasmResult<ir::Value>1082 fn translate_memory_grow(
1083 &mut self,
1084 mut pos: FuncCursor,
1085 _index: MemoryIndex,
1086 _heap: ir::Heap,
1087 val: ir::Value,
1088 ) -> WasmResult<ir::Value> {
1089 Ok(self
1090 .instance_call(&mut pos, &FN_MEMORY_GROW, &[val])
1091 .unwrap())
1092 }
1093
translate_memory_size( &mut self, mut pos: FuncCursor, _index: MemoryIndex, _heap: ir::Heap, ) -> WasmResult<ir::Value>1094 fn translate_memory_size(
1095 &mut self,
1096 mut pos: FuncCursor,
1097 _index: MemoryIndex,
1098 _heap: ir::Heap,
1099 ) -> WasmResult<ir::Value> {
1100 Ok(self.instance_call(&mut pos, &FN_MEMORY_SIZE, &[]).unwrap())
1101 }
1102
translate_memory_copy( &mut self, mut pos: FuncCursor, _src_index: MemoryIndex, src_heap: ir::Heap, _dst_index: MemoryIndex, dst_heap: ir::Heap, dst: ir::Value, src: ir::Value, len: ir::Value, ) -> WasmResult<()>1103 fn translate_memory_copy(
1104 &mut self,
1105 mut pos: FuncCursor,
1106 _src_index: MemoryIndex,
1107 src_heap: ir::Heap,
1108 _dst_index: MemoryIndex,
1109 dst_heap: ir::Heap,
1110 dst: ir::Value,
1111 src: ir::Value,
1112 len: ir::Value,
1113 ) -> WasmResult<()> {
1114 if src_heap != dst_heap {
1115 return Err(WasmError::Unsupported(
1116 "memory_copy between different heaps is not supported".to_string(),
1117 ));
1118 }
1119 let heap = src_heap;
1120 let heap_gv = pos.func.heaps[heap].base;
1121 let mem_base = pos.ins().global_value(POINTER_TYPE, heap_gv);
1122
1123 // We have a specialized version of `memory.copy` when we are using
1124 // shared memory or not.
1125 let ret = if self.module_env.uses_shared_memory() {
1126 self.instance_call(&mut pos, &FN_MEMORY_COPY_SHARED, &[dst, src, len, mem_base])
1127 } else {
1128 self.instance_call(&mut pos, &FN_MEMORY_COPY, &[dst, src, len, mem_base])
1129 };
1130 debug_assert!(ret.is_none());
1131 Ok(())
1132 }
1133
translate_memory_fill( &mut self, mut pos: FuncCursor, _index: MemoryIndex, heap: ir::Heap, dst: ir::Value, val: ir::Value, len: ir::Value, ) -> WasmResult<()>1134 fn translate_memory_fill(
1135 &mut self,
1136 mut pos: FuncCursor,
1137 _index: MemoryIndex,
1138 heap: ir::Heap,
1139 dst: ir::Value,
1140 val: ir::Value,
1141 len: ir::Value,
1142 ) -> WasmResult<()> {
1143 let mem_base_gv = pos.func.heaps[heap].base;
1144 let mem_base = pos.ins().global_value(POINTER_TYPE, mem_base_gv);
1145
1146 // We have a specialized version of `memory.fill` when we are using
1147 // shared memory or not.
1148 let ret = if self.module_env.uses_shared_memory() {
1149 self.instance_call(&mut pos, &FN_MEMORY_FILL_SHARED, &[dst, val, len, mem_base])
1150 } else {
1151 self.instance_call(&mut pos, &FN_MEMORY_FILL, &[dst, val, len, mem_base])
1152 };
1153 debug_assert!(ret.is_none());
1154 Ok(())
1155 }
1156
translate_memory_init( &mut self, mut pos: FuncCursor, _index: MemoryIndex, _heap: ir::Heap, seg_index: u32, dst: ir::Value, src: ir::Value, len: ir::Value, ) -> WasmResult<()>1157 fn translate_memory_init(
1158 &mut self,
1159 mut pos: FuncCursor,
1160 _index: MemoryIndex,
1161 _heap: ir::Heap,
1162 seg_index: u32,
1163 dst: ir::Value,
1164 src: ir::Value,
1165 len: ir::Value,
1166 ) -> WasmResult<()> {
1167 let seg_index = pos.ins().iconst(ir::types::I32, seg_index as i64);
1168 let ret = self.instance_call(&mut pos, &FN_MEMORY_INIT, &[dst, src, len, seg_index]);
1169 debug_assert!(ret.is_none());
1170 Ok(())
1171 }
1172
translate_data_drop(&mut self, mut pos: FuncCursor, seg_index: u32) -> WasmResult<()>1173 fn translate_data_drop(&mut self, mut pos: FuncCursor, seg_index: u32) -> WasmResult<()> {
1174 let seg_index = pos.ins().iconst(ir::types::I32, seg_index as i64);
1175 let ret = self.instance_call(&mut pos, &FN_DATA_DROP, &[seg_index]);
1176 debug_assert!(ret.is_none());
1177 Ok(())
1178 }
1179
translate_table_size( &mut self, mut pos: FuncCursor, table_index: TableIndex, _table: ir::Table, ) -> WasmResult<ir::Value>1180 fn translate_table_size(
1181 &mut self,
1182 mut pos: FuncCursor,
1183 table_index: TableIndex,
1184 _table: ir::Table,
1185 ) -> WasmResult<ir::Value> {
1186 let table_index = pos.ins().iconst(ir::types::I32, table_index.index() as i64);
1187 Ok(self
1188 .instance_call(&mut pos, &FN_TABLE_SIZE, &[table_index])
1189 .unwrap())
1190 }
1191
translate_table_grow( &mut self, mut pos: FuncCursor, table_index: TableIndex, _table: ir::Table, delta: ir::Value, init_value: ir::Value, ) -> WasmResult<ir::Value>1192 fn translate_table_grow(
1193 &mut self,
1194 mut pos: FuncCursor,
1195 table_index: TableIndex,
1196 _table: ir::Table,
1197 delta: ir::Value,
1198 init_value: ir::Value,
1199 ) -> WasmResult<ir::Value> {
1200 let table_index = pos.ins().iconst(ir::types::I32, table_index.index() as i64);
1201 Ok(self
1202 .instance_call(&mut pos, &FN_TABLE_GROW, &[init_value, delta, table_index])
1203 .unwrap())
1204 }
1205
translate_table_get( &mut self, builder: &mut FunctionBuilder, table_index: TableIndex, _table: ir::Table, index: ir::Value, ) -> WasmResult<ir::Value>1206 fn translate_table_get(
1207 &mut self,
1208 builder: &mut FunctionBuilder,
1209 table_index: TableIndex,
1210 _table: ir::Table,
1211 index: ir::Value,
1212 ) -> WasmResult<ir::Value> {
1213 // TODO(bug 1650038): make use of the `FunctionBuilder` here and its
1214 // ability to edit the CFG in order to add a fast-path.
1215 let mut pos = builder.cursor();
1216 let table_index = pos.ins().iconst(ir::types::I32, table_index.index() as i64);
1217 Ok(self
1218 .instance_call(&mut pos, &FN_TABLE_GET, &[index, table_index])
1219 .unwrap())
1220 }
1221
translate_table_set( &mut self, builder: &mut FunctionBuilder, table_index: TableIndex, _table: ir::Table, value: ir::Value, index: ir::Value, ) -> WasmResult<()>1222 fn translate_table_set(
1223 &mut self,
1224 builder: &mut FunctionBuilder,
1225 table_index: TableIndex,
1226 _table: ir::Table,
1227 value: ir::Value,
1228 index: ir::Value,
1229 ) -> WasmResult<()> {
1230 // TODO(bug 1650038): make use of the `FunctionBuilder` here and its
1231 // ability to edit the CFG in order to add a fast-path.
1232 let mut pos = builder.cursor();
1233 let table_index = pos.ins().iconst(ir::types::I32, table_index.index() as i64);
1234 self.instance_call(&mut pos, &FN_TABLE_SET, &[index, value, table_index]);
1235 Ok(())
1236 }
1237
translate_table_copy( &mut self, mut pos: FuncCursor, dst_table_index: TableIndex, _dst_table: ir::Table, src_table_index: TableIndex, _src_table: ir::Table, dst: ir::Value, src: ir::Value, len: ir::Value, ) -> WasmResult<()>1238 fn translate_table_copy(
1239 &mut self,
1240 mut pos: FuncCursor,
1241 dst_table_index: TableIndex,
1242 _dst_table: ir::Table,
1243 src_table_index: TableIndex,
1244 _src_table: ir::Table,
1245 dst: ir::Value,
1246 src: ir::Value,
1247 len: ir::Value,
1248 ) -> WasmResult<()> {
1249 let dst_index = pos
1250 .ins()
1251 .iconst(ir::types::I32, dst_table_index.index() as i64);
1252 let src_index = pos
1253 .ins()
1254 .iconst(ir::types::I32, src_table_index.index() as i64);
1255 self.instance_call(
1256 &mut pos,
1257 &FN_TABLE_COPY,
1258 &[dst, src, len, dst_index, src_index],
1259 );
1260 Ok(())
1261 }
1262
translate_table_fill( &mut self, mut pos: FuncCursor, table_index: TableIndex, dst: ir::Value, val: ir::Value, len: ir::Value, ) -> WasmResult<()>1263 fn translate_table_fill(
1264 &mut self,
1265 mut pos: FuncCursor,
1266 table_index: TableIndex,
1267 dst: ir::Value,
1268 val: ir::Value,
1269 len: ir::Value,
1270 ) -> WasmResult<()> {
1271 let table_index = pos.ins().iconst(ir::types::I32, table_index.index() as i64);
1272 self.instance_call(&mut pos, &FN_TABLE_FILL, &[dst, val, len, table_index]);
1273 Ok(())
1274 }
1275
translate_table_init( &mut self, mut pos: FuncCursor, seg_index: u32, table_index: TableIndex, _table: ir::Table, dst: ir::Value, src: ir::Value, len: ir::Value, ) -> WasmResult<()>1276 fn translate_table_init(
1277 &mut self,
1278 mut pos: FuncCursor,
1279 seg_index: u32,
1280 table_index: TableIndex,
1281 _table: ir::Table,
1282 dst: ir::Value,
1283 src: ir::Value,
1284 len: ir::Value,
1285 ) -> WasmResult<()> {
1286 let seg_index = pos.ins().iconst(ir::types::I32, seg_index as i64);
1287 let table_index = pos.ins().iconst(ir::types::I32, table_index.index() as i64);
1288 let ret = self.instance_call(
1289 &mut pos,
1290 &FN_TABLE_INIT,
1291 &[dst, src, len, seg_index, table_index],
1292 );
1293 debug_assert!(ret.is_none());
1294 Ok(())
1295 }
1296
translate_elem_drop(&mut self, mut pos: FuncCursor, seg_index: u32) -> WasmResult<()>1297 fn translate_elem_drop(&mut self, mut pos: FuncCursor, seg_index: u32) -> WasmResult<()> {
1298 let seg_index = pos.ins().iconst(ir::types::I32, seg_index as i64);
1299 let ret = self.instance_call(&mut pos, &FN_ELEM_DROP, &[seg_index]);
1300 debug_assert!(ret.is_none());
1301 Ok(())
1302 }
1303
translate_ref_func( &mut self, mut pos: FuncCursor, func_index: FuncIndex, ) -> WasmResult<ir::Value>1304 fn translate_ref_func(
1305 &mut self,
1306 mut pos: FuncCursor,
1307 func_index: FuncIndex,
1308 ) -> WasmResult<ir::Value> {
1309 let func_index = pos.ins().iconst(ir::types::I32, func_index.index() as i64);
1310 Ok(self
1311 .instance_call(&mut pos, &FN_REF_FUNC, &[func_index])
1312 .unwrap())
1313 }
1314
translate_custom_global_get( &mut self, mut pos: FuncCursor, global_index: GlobalIndex, ) -> WasmResult<ir::Value>1315 fn translate_custom_global_get(
1316 &mut self,
1317 mut pos: FuncCursor,
1318 global_index: GlobalIndex,
1319 ) -> WasmResult<ir::Value> {
1320 let global = self.module_env.global(global_index);
1321 let ty = global.value_type()?;
1322 debug_assert!(ty == ir::types::R32 || ty == ir::types::R64);
1323
1324 let (base_gv, offset) = self.global_address(pos.func, &global);
1325 let addr = pos.ins().global_value(POINTER_TYPE, base_gv);
1326 let flags = ir::MemFlags::trusted();
1327 Ok(pos.ins().load(ty, flags, addr, offset))
1328 }
1329
    /// Translate a `global.set` of a reference-typed global.
    ///
    /// The store is bracketed by instance calls to the pre- and post-barrier
    /// routines, each given the absolute address of the global's slot.
    fn translate_custom_global_set(
        &mut self,
        mut pos: FuncCursor,
        global_index: GlobalIndex,
        val: ir::Value,
    ) -> WasmResult<()> {
        let global = self.module_env.global(global_index);
        let ty = global.value_type()?;
        // Only reference types (R32/R64) take this path.
        debug_assert!(ty == ir::types::R32 || ty == ir::types::R64);

        // Compute the absolute address of the global's storage: base from a
        // global value, constant offset folded in with `iadd_imm`.
        let (global_addr_gv, global_addr_offset) = self.global_address(pos.func, &global);
        let global_addr = pos.ins().global_value(POINTER_TYPE, global_addr_gv);
        let abs_global_addr = pos.ins().iadd_imm(
            global_addr,
            ir::immediates::Imm64::new(global_addr_offset.into()),
        );

        // Pre-barrier must run before the store; it produces no value.
        let res = self.instance_call(&mut pos, &FN_PRE_BARRIER, &[abs_global_addr]);
        debug_assert!(res.is_none());

        let flags = ir::MemFlags::trusted();
        pos.ins().store(flags, val, abs_global_addr, offset32(0));

        // Post-barrier must run after the store; it produces no value.
        let res = self.instance_call(&mut pos, &FN_POST_BARRIER, &[abs_global_addr]);
        debug_assert!(res.is_none());

        Ok(())
    }
1358
translate_atomic_wait( &mut self, mut pos: FuncCursor, _index: MemoryIndex, _heap: ir::Heap, addr: ir::Value, expected: ir::Value, timeout: ir::Value, ) -> WasmResult<ir::Value>1359 fn translate_atomic_wait(
1360 &mut self,
1361 mut pos: FuncCursor,
1362 _index: MemoryIndex,
1363 _heap: ir::Heap,
1364 addr: ir::Value,
1365 expected: ir::Value,
1366 timeout: ir::Value,
1367 ) -> WasmResult<ir::Value> {
1368 let callee = match pos.func.dfg.value_type(expected) {
1369 ir::types::I64 => &FN_WAIT_I64,
1370 ir::types::I32 => &FN_WAIT_I32,
1371 _ => {
1372 return Err(WasmError::Unsupported(
1373 "atomic_wait is only supported for I32 and I64".to_string(),
1374 ))
1375 }
1376 };
1377 let ret = self.instance_call(&mut pos, callee, &[addr, expected, timeout]);
1378 Ok(ret.unwrap())
1379 }
1380
translate_atomic_notify( &mut self, mut pos: FuncCursor, _index: MemoryIndex, _heap: ir::Heap, addr: ir::Value, count: ir::Value, ) -> WasmResult<ir::Value>1381 fn translate_atomic_notify(
1382 &mut self,
1383 mut pos: FuncCursor,
1384 _index: MemoryIndex,
1385 _heap: ir::Heap,
1386 addr: ir::Value,
1387 count: ir::Value,
1388 ) -> WasmResult<ir::Value> {
1389 let ret = self.instance_call(&mut pos, &FN_WAKE, &[addr, count]);
1390 Ok(ret.unwrap())
1391 }
1392
translate_loop_header(&mut self, builder: &mut FunctionBuilder) -> WasmResult<()>1393 fn translate_loop_header(&mut self, builder: &mut FunctionBuilder) -> WasmResult<()> {
1394 let mut pos = builder.cursor();
1395 let interrupt = self.load_interrupt_flag(&mut pos);
1396 pos.ins()
1397 .resumable_trapnz(interrupt, ir::TrapCode::Interrupt);
1398 Ok(())
1399 }
1400
return_mode(&self) -> ReturnMode1401 fn return_mode(&self) -> ReturnMode {
1402 // Since we're using SM's epilogue insertion code, we can only handle a single return
1403 // instruction at the end of the function.
1404 ReturnMode::FallthroughReturn
1405 }
1406 }
1407
/// Information about a function table.
///
/// Wraps the global value through which the table's per-instance (TLS) data is
/// addressed; see `TableInfo::new` for how it is created.
#[derive(Clone)]
struct TableInfo {
    /// Global variable containing a `wasm::TableTls` struct with two fields:
    ///
    /// 0: Unsigned 32-bit table length.
    /// n: Pointer to table (n = sizeof(void*))
    pub global: ir::GlobalValue,
}
1417
1418 impl TableInfo {
1419 /// Create a TableInfo and its global variable in `func`.
new( wtab: bindings::TableDesc, func: &mut ir::Function, vmctx: ir::GlobalValue, ) -> TableInfo1420 pub fn new(
1421 wtab: bindings::TableDesc,
1422 func: &mut ir::Function,
1423 vmctx: ir::GlobalValue,
1424 ) -> TableInfo {
1425 // Create the global variable.
1426 let offset = wtab.tls_offset();
1427 assert!(offset < i32::max_value() as usize);
1428 let offset = imm64(offset);
1429 let global = func.create_global_value(ir::GlobalValueData::IAddImm {
1430 base: vmctx,
1431 offset,
1432 global_type: POINTER_TYPE,
1433 });
1434
1435 TableInfo { global }
1436 }
1437
1438 /// Get the size in bytes of each table entry.
entry_size(&self) -> i641439 pub fn entry_size(&self) -> i64 {
1440 // Each entry is an `wasm::FunctionTableElem` which consists of the code pointer and a new
1441 // VM context pointer.
1442 (POINTER_SIZE * 2) as i64
1443 }
1444 }
1445