1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 *
4 * Copyright 2017 Mozilla Foundation
5 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 */
18
19 #include "wasm/WasmBuiltins.h"
20
21 #include "mozilla/Atomics.h"
22
23 #include "fdlibm.h"
24 #include "jslibmath.h"
25 #include "jsmath.h"
26
27 #include "gc/Allocator.h"
28 #include "jit/AtomicOperations.h"
29 #include "jit/InlinableNatives.h"
30 #include "jit/MacroAssembler.h"
31 #include "jit/Simulator.h"
32 #include "js/experimental/JitInfo.h" // JSJitInfo
33 #include "js/friend/ErrorMessages.h" // js::GetErrorMessage, JSMSG_*
34 #include "js/friend/StackLimits.h" // js::AutoCheckRecursionLimit
35 #include "threading/Mutex.h"
36 #include "util/Memory.h"
37 #include "util/Poison.h"
38 #include "vm/BigIntType.h"
39 #include "vm/ErrorObject.h"
40 #include "wasm/TypedObject.h"
41 #include "wasm/WasmCodegenTypes.h"
42 #include "wasm/WasmDebugFrame.h"
43 #include "wasm/WasmInstance.h"
44 #include "wasm/WasmStubs.h"
45
46 #include "debugger/DebugAPI-inl.h"
47 #include "vm/ErrorObject-inl.h"
48 #include "vm/Stack-inl.h"
49
50 using namespace js;
51 using namespace jit;
52 using namespace wasm;
53
54 using mozilla::HashGeneric;
55 using mozilla::IsNaN;
56 using mozilla::MakeEnumeratedRange;
57
// Chunk size (bytes) for the LIFO allocator that presumably backs lazily
// generated builtin-thunk code; the allocation site is below this excerpt.
static const unsigned BUILTIN_THUNK_LIFO_SIZE = 64 * 1024;
59
60 // ============================================================================
61 // WebAssembly builtin C++ functions called from wasm code to implement internal
62 // wasm operations: type descriptions.
63
// Some abbreviations, for the sake of conciseness. These expand to the
// MIRType/FailureMode enumerators used in the SymbolicAddressSignature
// tables below, and are #undef'd again right after the tables.
#define _F64 MIRType::Double
#define _F32 MIRType::Float32
#define _I32 MIRType::Int32
#define _I64 MIRType::Int64
#define _PTR MIRType::Pointer
#define _RoN MIRType::RefOrNull
#define _VOID MIRType::None
#define _END MIRType::None
#define _Infallible FailureMode::Infallible
#define _FailOnNegI32 FailureMode::FailOnNegI32
#define _FailOnNullPtr FailureMode::FailOnNullPtr
#define _FailOnInvalidRef FailureMode::FailOnInvalidRef
77
namespace js {
namespace wasm {

// Scalar math builtins. All are infallible: they take and return raw
// floating point values and cannot throw or trap.
const SymbolicAddressSignature SASigSinD = {
    SymbolicAddress::SinD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigCosD = {
    SymbolicAddress::CosD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigTanD = {
    SymbolicAddress::TanD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigASinD = {
    SymbolicAddress::ASinD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigACosD = {
    SymbolicAddress::ACosD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigATanD = {
    SymbolicAddress::ATanD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigCeilD = {
    SymbolicAddress::CeilD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigCeilF = {
    SymbolicAddress::CeilF, _F32, _Infallible, 1, {_F32, _END}};
const SymbolicAddressSignature SASigFloorD = {
    SymbolicAddress::FloorD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigFloorF = {
    SymbolicAddress::FloorF, _F32, _Infallible, 1, {_F32, _END}};
const SymbolicAddressSignature SASigTruncD = {
    SymbolicAddress::TruncD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigTruncF = {
    SymbolicAddress::TruncF, _F32, _Infallible, 1, {_F32, _END}};
const SymbolicAddressSignature SASigNearbyIntD = {
    SymbolicAddress::NearbyIntD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigNearbyIntF = {
    SymbolicAddress::NearbyIntF, _F32, _Infallible, 1, {_F32, _END}};
const SymbolicAddressSignature SASigExpD = {
    SymbolicAddress::ExpD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigLogD = {
    SymbolicAddress::LogD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigPowD = {
    SymbolicAddress::PowD, _F64, _Infallible, 2, {_F64, _F64, _END}};
const SymbolicAddressSignature SASigATan2D = {
    SymbolicAddress::ATan2D, _F64, _Infallible, 2, {_F64, _F64, _END}};

// Linear-memory management. The M32/M64 suffixes select the index type of
// the memory (32-bit vs 64-bit addresses). The leading _PTR argument in
// these and all following signatures is the instance/TLS pointer.
const SymbolicAddressSignature SASigMemoryGrowM32 = {
    SymbolicAddress::MemoryGrowM32, _I32, _Infallible, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigMemoryGrowM64 = {
    SymbolicAddress::MemoryGrowM64, _I64, _Infallible, 2, {_PTR, _I64, _END}};
const SymbolicAddressSignature SASigMemorySizeM32 = {
    SymbolicAddress::MemorySizeM32, _I32, _Infallible, 1, {_PTR, _END}};
const SymbolicAddressSignature SASigMemorySizeM64 = {
    SymbolicAddress::MemorySizeM64, _I64, _Infallible, 1, {_PTR, _END}};

// Atomics: wait/wake. These fail by returning a negative i32.
const SymbolicAddressSignature SASigWaitI32M32 = {
    SymbolicAddress::WaitI32M32,
    _I32,
    _FailOnNegI32,
    4,
    {_PTR, _I32, _I32, _I64, _END}};
const SymbolicAddressSignature SASigWaitI32M64 = {
    SymbolicAddress::WaitI32M64,
    _I32,
    _FailOnNegI32,
    4,
    {_PTR, _I64, _I32, _I64, _END}};
const SymbolicAddressSignature SASigWaitI64M32 = {
    SymbolicAddress::WaitI64M32,
    _I32,
    _FailOnNegI32,
    4,
    {_PTR, _I32, _I64, _I64, _END}};
const SymbolicAddressSignature SASigWaitI64M64 = {
    SymbolicAddress::WaitI64M64,
    _I32,
    _FailOnNegI32,
    4,
    {_PTR, _I64, _I64, _I64, _END}};
const SymbolicAddressSignature SASigWakeM32 = {
    SymbolicAddress::WakeM32, _I32, _FailOnNegI32, 3, {_PTR, _I32, _I32, _END}};
const SymbolicAddressSignature SASigWakeM64 = {
    SymbolicAddress::WakeM64, _I32, _FailOnNegI32, 3, {_PTR, _I64, _I32, _END}};

// Bulk-memory operations (memory.copy/fill/init, data.drop). Separate
// entry points exist for shared memories.
const SymbolicAddressSignature SASigMemCopyM32 = {
    SymbolicAddress::MemCopyM32,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I32, _I32, _I32, _PTR, _END}};
const SymbolicAddressSignature SASigMemCopySharedM32 = {
    SymbolicAddress::MemCopySharedM32,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I32, _I32, _I32, _PTR, _END}};
const SymbolicAddressSignature SASigMemCopyM64 = {
    SymbolicAddress::MemCopyM64,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I64, _I64, _I64, _PTR, _END}};
const SymbolicAddressSignature SASigMemCopySharedM64 = {
    SymbolicAddress::MemCopySharedM64,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I64, _I64, _I64, _PTR, _END}};
const SymbolicAddressSignature SASigDataDrop = {
    SymbolicAddress::DataDrop, _VOID, _FailOnNegI32, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigMemFillM32 = {
    SymbolicAddress::MemFillM32,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I32, _I32, _I32, _PTR, _END}};
const SymbolicAddressSignature SASigMemFillSharedM32 = {
    SymbolicAddress::MemFillSharedM32,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I32, _I32, _I32, _PTR, _END}};
const SymbolicAddressSignature SASigMemFillM64 = {
    SymbolicAddress::MemFillM64,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I64, _I32, _I64, _PTR, _END}};
const SymbolicAddressSignature SASigMemFillSharedM64 = {
    SymbolicAddress::MemFillSharedM64,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I64, _I32, _I64, _PTR, _END}};
const SymbolicAddressSignature SASigMemInitM32 = {
    SymbolicAddress::MemInitM32,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I32, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigMemInitM64 = {
    SymbolicAddress::MemInitM64,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I64, _I32, _I32, _I32, _END}};

// Table operations (table.copy/fill/init/get/set/grow/size, elem.drop,
// ref.func).
const SymbolicAddressSignature SASigTableCopy = {
    SymbolicAddress::TableCopy,
    _VOID,
    _FailOnNegI32,
    6,
    {_PTR, _I32, _I32, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigElemDrop = {
    SymbolicAddress::ElemDrop, _VOID, _FailOnNegI32, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigTableFill = {
    SymbolicAddress::TableFill,
    _VOID,
    _FailOnNegI32,
    5,
    {_PTR, _I32, _RoN, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableGet = {SymbolicAddress::TableGet,
                                                _RoN,
                                                _FailOnInvalidRef,
                                                3,
                                                {_PTR, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableGrow = {
    SymbolicAddress::TableGrow,
    _I32,
    _Infallible,
    4,
    {_PTR, _RoN, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableInit = {
    SymbolicAddress::TableInit,
    _VOID,
    _FailOnNegI32,
    6,
    {_PTR, _I32, _I32, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableSet = {SymbolicAddress::TableSet,
                                                _VOID,
                                                _FailOnNegI32,
                                                4,
                                                {_PTR, _I32, _RoN, _I32, _END}};
const SymbolicAddressSignature SASigTableSize = {
    SymbolicAddress::TableSize, _I32, _Infallible, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigRefFunc = {
    SymbolicAddress::RefFunc, _RoN, _FailOnInvalidRef, 2, {_PTR, _I32, _END}};

// GC write-barrier entry points.
const SymbolicAddressSignature SASigPreBarrierFiltering = {
    SymbolicAddress::PreBarrierFiltering,
    _VOID,
    _Infallible,
    2,
    {_PTR, _PTR, _END}};
const SymbolicAddressSignature SASigPostBarrier = {
    SymbolicAddress::PostBarrier, _VOID, _Infallible, 2, {_PTR, _PTR, _END}};
const SymbolicAddressSignature SASigPostBarrierFiltering = {
    SymbolicAddress::PostBarrierFiltering,
    _VOID,
    _Infallible,
    2,
    {_PTR, _PTR, _END}};

// GC-proposal allocation and type-test helpers; allocation failure is
// signalled by a null pointer result.
const SymbolicAddressSignature SASigStructNew = {
    SymbolicAddress::StructNew, _RoN, _FailOnNullPtr, 2, {_PTR, _RoN, _END}};
#ifdef ENABLE_WASM_EXCEPTIONS
const SymbolicAddressSignature SASigExceptionNew = {
    SymbolicAddress::ExceptionNew, _RoN, _FailOnNullPtr, 2, {_PTR, _RoN, _END}};
const SymbolicAddressSignature SASigThrowException = {
    SymbolicAddress::ThrowException,
    _VOID,
    _FailOnNegI32,
    2,
    {_PTR, _RoN, _END}};
#endif
const SymbolicAddressSignature SASigArrayNew = {SymbolicAddress::ArrayNew,
                                                _RoN,
                                                _FailOnNullPtr,
                                                3,
                                                {_PTR, _I32, _RoN, _END}};
const SymbolicAddressSignature SASigRefTest = {
    SymbolicAddress::RefTest, _I32, _Infallible, 3, {_PTR, _RoN, _RoN, _END}};
const SymbolicAddressSignature SASigRttSub = {
    SymbolicAddress::RttSub, _RoN, _FailOnNullPtr, 3, {_PTR, _RoN, _RoN, _END}};

// Generate one SASig per declared wasm intrinsic; the parameter-type list
// comes from the intrinsic's DECLARE_INTRINSIC_PARAM_TYPES_* macro.
#define DECL_SAS_FOR_INTRINSIC(op, export, sa_name, abitype, entry, idx) \
  const SymbolicAddressSignature SASig##sa_name = {                      \
      SymbolicAddress::sa_name, _VOID, _FailOnNegI32,                    \
      DECLARE_INTRINSIC_PARAM_TYPES_##op};

FOR_EACH_INTRINSIC(DECL_SAS_FOR_INTRINSIC)
#undef DECL_SAS_FOR_INTRINSIC

}  // namespace wasm
}  // namespace js
300
// Undefine every abbreviation introduced above so none of them leaks into
// the rest of this file. (Previously _FailOnInvalidRef was missing here,
// leaving that macro defined past the signature-table section.)
#undef _F64
#undef _F32
#undef _I32
#undef _I64
#undef _PTR
#undef _RoN
#undef _VOID
#undef _END
#undef _Infallible
#undef _FailOnNegI32
#undef _FailOnNullPtr
#undef _FailOnInvalidRef
312
313 #ifdef DEBUG
ToABIType(FailureMode mode)314 ABIArgType ToABIType(FailureMode mode) {
315 switch (mode) {
316 case FailureMode::FailOnNegI32:
317 return ArgType_Int32;
318 case FailureMode::FailOnNullPtr:
319 case FailureMode::FailOnInvalidRef:
320 return ArgType_General;
321 default:
322 MOZ_CRASH("unexpected failure mode");
323 }
324 }
325
ToABIType(MIRType type)326 ABIArgType ToABIType(MIRType type) {
327 switch (type) {
328 case MIRType::None:
329 case MIRType::Int32:
330 return ArgType_Int32;
331 case MIRType::Int64:
332 return ArgType_Int64;
333 case MIRType::Pointer:
334 case MIRType::RefOrNull:
335 return ArgType_General;
336 case MIRType::Float32:
337 return ArgType_Float32;
338 case MIRType::Double:
339 return ArgType_Float64;
340 default:
341 MOZ_CRASH("unexpected type");
342 }
343 }
344
ToABIType(const SymbolicAddressSignature & sig)345 ABIFunctionType ToABIType(const SymbolicAddressSignature& sig) {
346 MOZ_ASSERT_IF(sig.failureMode != FailureMode::Infallible,
347 ToABIType(sig.failureMode) == ToABIType(sig.retType));
348 int abiType = ToABIType(sig.retType) << RetType_Shift;
349 for (int i = 0; i < sig.numArgs; i++) {
350 abiType |= (ToABIType(sig.argTypes[i]) << (ArgType_Shift * (i + 1)));
351 }
352 return ABIFunctionType(abiType);
353 }
354 #endif
355
356 // ============================================================================
357 // WebAssembly builtin C++ functions called from wasm code to implement internal
358 // wasm operations: implementations.
359
#if defined(JS_CODEGEN_ARM)
extern "C" {

// 32-bit ARM compiler-runtime helpers for integer divide/modulo. Per the
// ARM run-time ABI these return quotient and remainder together in a
// register pair, which is why they are declared as returning int64_t here.
extern MOZ_EXPORT int64_t __aeabi_idivmod(int, int);

extern MOZ_EXPORT int64_t __aeabi_uidivmod(int, int);
}
#endif
368
369 // This utility function can only be called for builtins that are called
370 // directly from wasm code.
CallingActivation(JSContext * cx)371 static JitActivation* CallingActivation(JSContext* cx) {
372 Activation* act = cx->activation();
373 MOZ_ASSERT(act->asJit()->hasWasmExitFP());
374 return act->asJit();
375 }
376
// Callout from the debug trap stub. Dispatches to the debugger for frame
// entry/exit and for breakpoint / single-step traps. Returns true to resume
// wasm execution; returns false to unwind, with an error or pending
// exception set on the context.
static bool WasmHandleDebugTrap() {
  JSContext* cx = TlsContext.get();  // Cold code
  JitActivation* activation = CallingActivation(cx);
  Frame* fp = activation->wasmExitFP();
  Instance* instance = GetNearestEffectiveTls(fp)->instance;
  const Code& code = instance->code();
  MOZ_ASSERT(code.metadata().debugEnabled);

  // The debug trap stub is the innermost frame. Its return address is the
  // actual trap site.
  const CallSite* site = code.lookupCallSite(fp->returnAddress());
  MOZ_ASSERT(site);

  // Advance to the actual trapping frame.
  fp = fp->wasmCaller();
  DebugFrame* debugFrame = DebugFrame::from(fp);

  if (site->kind() == CallSite::EnterFrame) {
    if (!instance->debug().enterFrameTrapsEnabled()) {
      return true;
    }
    debugFrame->setIsDebuggee();
    debugFrame->observe(cx);
    if (!DebugAPI::onEnterFrame(cx, debugFrame)) {
      if (cx->isPropagatingForcedReturn()) {
        cx->clearPropagatingForcedReturn();
        // Ignoring forced return because changing code execution order is
        // not yet implemented in the wasm baseline.
        // TODO properly handle forced return and resume wasm execution.
        JS_ReportErrorASCII(cx,
                            "Unexpected resumption value from onEnterFrame");
      }
      return false;
    }
    return true;
  }
  if (site->kind() == CallSite::LeaveFrame) {
    // Capture the return value for the debugger before notifying it that
    // the frame is being popped.
    if (!debugFrame->updateReturnJSValue(cx)) {
      return false;
    }
    bool ok = DebugAPI::onLeaveFrame(cx, debugFrame, nullptr, true);
    debugFrame->leave(cx);
    return ok;
  }

  // Not an enter/leave trap, so this must be a breakpoint site; single-step
  // and breakpoint handlers may both need to run here.
  DebugState& debug = instance->debug();
  MOZ_ASSERT(debug.hasBreakpointTrapAtOffset(site->lineOrBytecode()));
  if (debug.stepModeEnabled(debugFrame->funcIndex())) {
    if (!DebugAPI::onSingleStep(cx)) {
      if (cx->isPropagatingForcedReturn()) {
        cx->clearPropagatingForcedReturn();
        // TODO properly handle forced return.
        JS_ReportErrorASCII(cx,
                            "Unexpected resumption value from onSingleStep");
      }
      return false;
    }
  }
  if (debug.hasBreakpointSite(site->lineOrBytecode())) {
    if (!DebugAPI::onTrap(cx)) {
      if (cx->isPropagatingForcedReturn()) {
        cx->clearPropagatingForcedReturn();
        // TODO properly handle forced return.
        JS_ReportErrorASCII(
            cx, "Unexpected resumption value from breakpoint handler");
      }
      return false;
    }
  }
  return true;
}
448
449 // Check if the pending exception, if any, is catchable by wasm.
450 #ifdef ENABLE_WASM_EXCEPTIONS
// Returns true iff the pending exception on |cx| may be caught by a wasm
// try/catch, storing the exception value in |exn| in that case. Traps
// (other than ThrowReported), OOM, and over-recursion are never catchable.
static bool HasCatchableException(JitActivation* activation, JSContext* cx,
                                  MutableHandleValue exn) {
  if (!cx->isExceptionPending()) {
    return false;
  }

  // Traps are generally not catchable as wasm exceptions. The only case in
  // which they are catchable is for Trap::ThrowReported, which the wasm
  // compiler uses to throw exceptions and is the source of exceptions from C++.
  if (activation->isWasmTrapping() &&
      activation->wasmTrapData().trap != Trap::ThrowReported) {
    return false;
  }

  if (cx->isThrowingOverRecursed() || cx->isThrowingOutOfMemory()) {
    return false;
  }

  // Write the exception out here to exn to avoid having to get the pending
  // exception and checking for OOM multiple times.
  if (cx->getPendingException(exn)) {
    // Check if a JS exception originated from a wasm trap.
    if (exn.isObject() && exn.toObject().is<ErrorObject>()) {
      ErrorObject& err = exn.toObject().as<ErrorObject>();
      if (err.fromWasmTrap()) {
        return false;
      }
    }
    return true;
  }

  // getPendingException can only fail on OOM while boxing/wrapping.
  MOZ_ASSERT(cx->isThrowingOutOfMemory());
  return false;
}
485 #endif
486
487 // Unwind the entire activation in response to a thrown exception. This function
488 // is responsible for notifying the debugger of each unwound frame. The return
489 // value is the new stack address which the calling stub will set to the sp
490 // register before executing a return instruction.
491 //
492 // This function will also look for try-catch handlers and, if not trapping or
493 // throwing an uncatchable exception, will write the handler info in the return
494 // argument and return true.
495 //
496 // Returns false if a handler isn't found or shouldn't be used (e.g., traps).
497
bool wasm::HandleThrow(JSContext* cx, WasmFrameIter& iter,
                       jit::ResumeFromException* rfe) {
  // WasmFrameIter iterates down wasm frames in the activation starting at
  // JitActivation::wasmExitFP(). Calling WasmFrameIter::startUnwinding pops
  // JitActivation::wasmExitFP() once each time WasmFrameIter is incremented,
  // ultimately leaving exit FP null when the WasmFrameIter is done(). This
  // is necessary to prevent a DebugFrame from being observed again after we
  // just called onLeaveFrame (which would lead to the frame being re-added
  // to the map of live frames, right as it becomes trash).

  MOZ_ASSERT(CallingActivation(cx) == iter.activation());
  MOZ_ASSERT(!iter.done());
  iter.setUnwind(WasmFrameIter::Unwind::True);

  // Live wasm code on the stack is kept alive (in TraceJitActivation) by
  // marking the instance of every wasm::Frame found by WasmFrameIter.
  // However, as explained above, we're popping frames while iterating which
  // means that a GC during this loop could collect the code of frames whose
  // code is still on the stack. This is actually mostly fine: as soon as we
  // return to the throw stub, the entire stack will be popped as a whole,
  // returning to the C++ caller. However, we must keep the throw stub alive
  // itself which is owned by the innermost instance.
  RootedWasmInstanceObject keepAlive(cx, iter.instance()->object());

#ifdef ENABLE_WASM_EXCEPTIONS
  JitActivation* activation = CallingActivation(cx);
  RootedValue exn(cx);
  bool hasCatchableException = HasCatchableException(activation, cx, &exn);
#endif

  for (; !iter.done(); ++iter) {
    // Wasm code can enter same-compartment realms, so reset cx->realm to
    // this frame's realm.
    cx->setRealmForJitExceptionHandler(iter.instance()->realm());

#ifdef ENABLE_WASM_EXCEPTIONS
    // Only look for an exception handler if there's a catchable exception.
    if (hasCatchableException) {
      const wasm::Code& code = iter.instance()->code();
      const uint8_t* pc = iter.resumePCinCurrentFrame();
      Tier tier;
      const wasm::WasmTryNote* tryNote =
          code.lookupWasmTryNote((void*)pc, &tier);

      if (tryNote) {
        // A try note covers this frame's resume point: deliver the
        // exception to the wasm catch handler instead of unwinding further.
        cx->clearPendingException();
        RootedAnyRef ref(cx, AnyRef::null());
        if (!BoxAnyRef(cx, exn, &ref)) {
          // OOM while boxing: the exception is no longer catchable, but we
          // must still keep unwinding (and notifying the debugger) below.
          MOZ_ASSERT(cx->isThrowingOutOfMemory());
          hasCatchableException = false;
          continue;
        }

        MOZ_ASSERT(iter.tls() == iter.instance()->tlsData());
        iter.instance()->setPendingException(ref);

        // Fill in the resume state for the throw stub: jump to the catch
        // handler's entry point with the frame/stack pointers restored.
        rfe->kind = ResumeFromException::RESUME_WASM_CATCH;
        rfe->framePointer = (uint8_t*)iter.frame();
        rfe->tlsData = iter.instance()->tlsData();

        rfe->stackPointer =
            (uint8_t*)(rfe->framePointer - tryNote->framePushed);
        rfe->target = iter.instance()->codeBase(tier) + tryNote->entryPoint;

        // Make sure to clear trapping state if we got here due to a trap.
        if (activation->isWasmTrapping()) {
          activation->finishWasmTrap();
        }

        return true;
      }
    }
#endif

    if (!iter.debugEnabled()) {
      continue;
    }

    DebugFrame* frame = iter.debugFrame();
    frame->clearReturnJSValue();

    // Assume ResumeMode::Terminate if no exception is pending --
    // no onExceptionUnwind handlers must be fired.
    if (cx->isExceptionPending()) {
      if (!DebugAPI::onExceptionUnwind(cx, frame)) {
        if (cx->isPropagatingForcedReturn()) {
          cx->clearPropagatingForcedReturn();
          // Unexpected trap return -- raising error since throw recovery
          // is not yet implemented in the wasm baseline.
          // TODO properly handle forced return and resume wasm execution.
          JS_ReportErrorASCII(
              cx, "Unexpected resumption value from onExceptionUnwind");
        }
      }
    }

    bool ok = DebugAPI::onLeaveFrame(cx, frame, nullptr, false);
    if (ok) {
      // Unexpected success from the handler onLeaveFrame -- raising error
      // since throw recovery is not yet implemented in the wasm baseline.
      // TODO properly handle success and resume wasm execution.
      JS_ReportErrorASCII(cx, "Unexpected success from onLeaveFrame");
    }
    frame->leave(cx);
  }

  MOZ_ASSERT(!cx->activation()->asJit()->isWasmTrapping(),
             "unwinding clears the trapping state");

  // In case of no handler, exit wasm via ret().
  // FailFP signals to wasm stub to do a failure return.
  rfe->kind = ResumeFromException::RESUME_WASM;
  rfe->framePointer = (uint8_t*)wasm::FailFP;
  rfe->stackPointer = (uint8_t*)iter.unwoundAddressOfReturnAddress();
  rfe->target = nullptr;
  return false;
}
615
WasmHandleThrow(jit::ResumeFromException * rfe)616 static void* WasmHandleThrow(jit::ResumeFromException* rfe) {
617 JSContext* cx = TlsContext.get(); // Cold code
618 JitActivation* activation = CallingActivation(cx);
619 WasmFrameIter iter(activation);
620 // We can ignore the return result here because the throw stub code
621 // can just check the resume kind to see if a handler was found or not.
622 HandleThrow(cx, iter, rfe);
623 return rfe;
624 }
625
626 // Has the same return-value convention as HandleTrap().
CheckInterrupt(JSContext * cx,JitActivation * activation)627 static void* CheckInterrupt(JSContext* cx, JitActivation* activation) {
628 ResetInterruptState(cx);
629
630 if (!CheckForInterrupt(cx)) {
631 return nullptr;
632 }
633
634 void* resumePC = activation->wasmTrapData().resumePC;
635 activation->finishWasmTrap();
636 return resumePC;
637 }
638
639 // The calling convention between this function and its caller in the stub
640 // generated by GenerateTrapExit() is:
641 // - return nullptr if the stub should jump to the throw stub to unwind
642 // the activation;
643 // - return the (non-null) resumePC that should be jumped if execution should
644 // resume after the trap.
static void* WasmHandleTrap() {
  JSContext* cx = TlsContext.get();  // Cold code
  JitActivation* activation = CallingActivation(cx);

  // For every trap kind except CheckInterrupt and (sometimes)
  // StackOverflow, report the corresponding error and return nullptr so the
  // stub unwinds the activation.
  switch (activation->wasmTrapData().trap) {
    case Trap::Unreachable: {
      ReportTrapError(cx, JSMSG_WASM_UNREACHABLE);
      return nullptr;
    }
    case Trap::IntegerOverflow: {
      ReportTrapError(cx, JSMSG_WASM_INTEGER_OVERFLOW);
      return nullptr;
    }
    case Trap::InvalidConversionToInteger: {
      ReportTrapError(cx, JSMSG_WASM_INVALID_CONVERSION);
      return nullptr;
    }
    case Trap::IntegerDivideByZero: {
      ReportTrapError(cx, JSMSG_WASM_INT_DIVIDE_BY_ZERO);
      return nullptr;
    }
    case Trap::IndirectCallToNull: {
      ReportTrapError(cx, JSMSG_WASM_IND_CALL_TO_NULL);
      return nullptr;
    }
    case Trap::IndirectCallBadSig: {
      ReportTrapError(cx, JSMSG_WASM_IND_CALL_BAD_SIG);
      return nullptr;
    }
    case Trap::NullPointerDereference: {
      ReportTrapError(cx, JSMSG_WASM_DEREF_NULL);
      return nullptr;
    }
    case Trap::BadCast: {
      ReportTrapError(cx, JSMSG_WASM_BAD_CAST);
      return nullptr;
    }
    case Trap::OutOfBounds: {
      ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
      return nullptr;
    }
    case Trap::UnalignedAccess: {
      ReportTrapError(cx, JSMSG_WASM_UNALIGNED_ACCESS);
      return nullptr;
    }
    case Trap::CheckInterrupt:
      return CheckInterrupt(cx, activation);
    case Trap::StackOverflow: {
      // TlsData::setInterrupt() causes a fake stack overflow. Since
      // TlsData::setInterrupt() is called racily, it's possible for a real
      // stack overflow to trap, followed by a racy call to setInterrupt().
      // Thus, we must check for a real stack overflow first before we
      // CheckInterrupt() and possibly resume execution.
      AutoCheckRecursionLimit recursion(cx);
      if (!recursion.check(cx)) {
        return nullptr;
      }
      if (activation->wasmExitTls()->isInterrupted()) {
        return CheckInterrupt(cx, activation);
      }
      ReportTrapError(cx, JSMSG_OVER_RECURSED);
      return nullptr;
    }
    case Trap::ThrowReported:
      // Error was already reported under another name.
      return nullptr;
    case Trap::Limit:
      break;
  }

  MOZ_CRASH("unexpected trap");
}
717
WasmReportV128JSCall()718 static void WasmReportV128JSCall() {
719 JSContext* cx = TlsContext.get(); // Cold code
720 JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
721 JSMSG_WASM_BAD_VAL_TYPE);
722 }
723
CoerceInPlace_ToInt32(Value * rawVal)724 static int32_t CoerceInPlace_ToInt32(Value* rawVal) {
725 JSContext* cx = TlsContext.get(); // Cold code
726
727 int32_t i32;
728 RootedValue val(cx, *rawVal);
729 if (!ToInt32(cx, val, &i32)) {
730 *rawVal = PoisonedObjectValue(0x42);
731 return false;
732 }
733
734 *rawVal = Int32Value(i32);
735 return true;
736 }
737
CoerceInPlace_ToBigInt(Value * rawVal)738 static int32_t CoerceInPlace_ToBigInt(Value* rawVal) {
739 JSContext* cx = TlsContext.get(); // Cold code
740
741 RootedValue val(cx, *rawVal);
742 BigInt* bi = ToBigInt(cx, val);
743 if (!bi) {
744 *rawVal = PoisonedObjectValue(0x43);
745 return false;
746 }
747
748 *rawVal = BigIntValue(bi);
749 return true;
750 }
751
CoerceInPlace_ToNumber(Value * rawVal)752 static int32_t CoerceInPlace_ToNumber(Value* rawVal) {
753 JSContext* cx = TlsContext.get(); // Cold code
754
755 double dbl;
756 RootedValue val(cx, *rawVal);
757 if (!ToNumber(cx, val, &dbl)) {
758 *rawVal = PoisonedObjectValue(0x42);
759 return false;
760 }
761
762 *rawVal = DoubleValue(dbl);
763 return true;
764 }
765
BoxValue_Anyref(Value * rawVal)766 static void* BoxValue_Anyref(Value* rawVal) {
767 JSContext* cx = TlsContext.get(); // Cold code
768 RootedValue val(cx, *rawVal);
769 RootedAnyRef result(cx, AnyRef::null());
770 if (!BoxAnyRef(cx, val, &result)) {
771 return nullptr;
772 }
773 return result.get().forCompiledCode();
774 }
775
// Coerce the JS values in |argv| in place to the representations expected
// by the jit-entry stub for func export |funcExportIndex| of the instance
// in |tlsData|. Returns false (with a pending exception on cx) on failure.
static int32_t CoerceInPlace_JitEntry(int funcExportIndex, TlsData* tlsData,
                                      Value* argv) {
  JSContext* cx = TlsContext.get();  // Cold code

  const Code& code = tlsData->instance->code();
  const FuncExport& fe =
      code.metadata(code.stableTier()).funcExports[funcExportIndex];

  for (size_t i = 0; i < fe.funcType().args().length(); i++) {
    HandleValue arg = HandleValue::fromMarkedLocation(&argv[i]);
    switch (fe.funcType().args()[i].kind()) {
      case ValType::I32: {
        int32_t i32;
        if (!ToInt32(cx, arg, &i32)) {
          return false;
        }
        argv[i] = Int32Value(i32);
        break;
      }
      case ValType::I64: {
        // In this case we store a BigInt value as there is no value type
        // corresponding directly to an I64. The conversion to I64 happens
        // in the JIT entry stub.
        BigInt* bigint = ToBigInt(cx, arg);
        if (!bigint) {
          return false;
        }
        argv[i] = BigIntValue(bigint);
        break;
      }
      case ValType::F32:
      case ValType::F64: {
        double dbl;
        if (!ToNumber(cx, arg, &dbl)) {
          return false;
        }
        // No need to convert double-to-float for f32, it's done inline
        // in the wasm stub later.
        argv[i] = DoubleValue(dbl);
        break;
      }
      case ValType::Ref: {
        switch (fe.funcType().args()[i].refTypeKind()) {
          case RefType::Extern:
            // Leave Object and Null alone, we will unbox inline. All we need
            // to do is convert other values to an Object representation.
            if (!arg.isObjectOrNull()) {
              RootedAnyRef result(cx, AnyRef::null());
              if (!BoxAnyRef(cx, arg, &result)) {
                return false;
              }
              argv[i].setObject(*result.get().asJSObject());
            }
            break;
          case RefType::Func:
          case RefType::Eq:
          case RefType::TypeIndex:
            // Guarded against by temporarilyUnsupportedReftypeForEntry()
            MOZ_CRASH("unexpected input argument in CoerceInPlace_JitEntry");
        }
        break;
      }
      case ValType::V128: {
        // Guarded against by hasV128ArgOrRet()
        MOZ_CRASH("unexpected input argument in CoerceInPlace_JitEntry");
      }
      default: {
        MOZ_CRASH("unexpected input argument in CoerceInPlace_JitEntry");
      }
    }
  }

  return true;
}
850
851 // Allocate a BigInt without GC, corresponds to the similar VMFunction.
AllocateBigIntTenuredNoGC()852 static BigInt* AllocateBigIntTenuredNoGC() {
853 JSContext* cx = TlsContext.get(); // Cold code (the caller is elaborate)
854
855 return js::AllocateBigInt<NoGC>(cx, gc::TenuredHeap);
856 }
857
DivI64(uint32_t x_hi,uint32_t x_lo,uint32_t y_hi,uint32_t y_lo)858 static int64_t DivI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi,
859 uint32_t y_lo) {
860 int64_t x = ((uint64_t)x_hi << 32) + x_lo;
861 int64_t y = ((uint64_t)y_hi << 32) + y_lo;
862 MOZ_ASSERT(x != INT64_MIN || y != -1);
863 MOZ_ASSERT(y != 0);
864 return x / y;
865 }
866
UDivI64(uint32_t x_hi,uint32_t x_lo,uint32_t y_hi,uint32_t y_lo)867 static int64_t UDivI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi,
868 uint32_t y_lo) {
869 uint64_t x = ((uint64_t)x_hi << 32) + x_lo;
870 uint64_t y = ((uint64_t)y_hi << 32) + y_lo;
871 MOZ_ASSERT(y != 0);
872 return x / y;
873 }
874
ModI64(uint32_t x_hi,uint32_t x_lo,uint32_t y_hi,uint32_t y_lo)875 static int64_t ModI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi,
876 uint32_t y_lo) {
877 int64_t x = ((uint64_t)x_hi << 32) + x_lo;
878 int64_t y = ((uint64_t)y_hi << 32) + y_lo;
879 MOZ_ASSERT(x != INT64_MIN || y != -1);
880 MOZ_ASSERT(y != 0);
881 return x % y;
882 }
883
UModI64(uint32_t x_hi,uint32_t x_lo,uint32_t y_hi,uint32_t y_lo)884 static int64_t UModI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi,
885 uint32_t y_lo) {
886 uint64_t x = ((uint64_t)x_hi << 32) + x_lo;
887 uint64_t y = ((uint64_t)y_hi << 32) + y_lo;
888 MOZ_ASSERT(y != 0);
889 return x % y;
890 }
891
// Trapping f64 -> i64 truncation. Returns the sentinel 0x8000000000000000
// for out-of-range inputs and NaN; the caller re-checks and traps.
static int64_t TruncateDoubleToInt64(double input) {
  // Note: INT64_MAX is not representable in double; double(INT64_MAX) is
  // actually INT64_MAX + 1 (2^63), so values >= it must fail. NaN also
  // fails the first comparison, since any ordered comparison with NaN is
  // false.
  if (!(input < double(INT64_MAX)) || input < double(INT64_MIN)) {
    return 0x8000000000000000;
  }
  return int64_t(input);
}
900
// Trapping f64 -> u64 truncation. Returns the sentinel 0x8000000000000000
// for out-of-range inputs and NaN; the caller re-checks and traps.
static uint64_t TruncateDoubleToUint64(double input) {
  // Note: UINT64_MAX is not representable in double; double(UINT64_MAX) is
  // actually UINT64_MAX + 1 (2^64), so values >= it must fail. NaN also
  // fails the first comparison, since any ordered comparison with NaN is
  // false.
  if (!(input < double(UINT64_MAX)) || input <= -1.0) {
    return 0x8000000000000000;
  }
  return uint64_t(input);
}
909
SaturatingTruncateDoubleToInt64(double input)910 static int64_t SaturatingTruncateDoubleToInt64(double input) {
911 // Handle in-range values (except INT64_MIN).
912 if (fabs(input) < -double(INT64_MIN)) {
913 return int64_t(input);
914 }
915 // Handle NaN.
916 if (IsNaN(input)) {
917 return 0;
918 }
919 // Handle positive overflow.
920 if (input > 0) {
921 return INT64_MAX;
922 }
923 // Handle negative overflow.
924 return INT64_MIN;
925 }
926
// Saturating double -> u64 conversion: values at or above 2^64 clamp to
// UINT64_MAX, NaN and values <= -1 map to 0, and everything else truncates
// toward zero (so (-1, 0) yields 0).
static uint64_t SaturatingTruncateDoubleToUint64(double input) {
  // 2^64, exactly representable as a double.
  const double kLimit = -double(INT64_MIN) * 2.0;
  if (input > -1.0 && input < kLimit) {
    return uint64_t(input);
  }
  if (input >= kLimit) {
    return UINT64_MAX;
  }
  // NaN or negative overflow.
  return 0;
}
939
// Reassembles a signed 64-bit integer from the (hi, lo) register pair used
// to pass i64 arguments on 32-bit platforms, then converts it to double.
static double Int64ToDouble(int32_t x_hi, uint32_t x_lo) {
  int64_t value = int64_t((uint64_t(x_hi) << 32) | uint64_t(x_lo));
  return double(value);
}
944
// Reassembles a signed 64-bit integer from the (hi, lo) register pair used
// to pass i64 arguments on 32-bit platforms, then converts it to float.
static float Int64ToFloat32(int32_t x_hi, uint32_t x_lo) {
  int64_t value = int64_t((uint64_t(x_hi) << 32) | uint64_t(x_lo));
  return float(value);
}
949
// Reassembles an unsigned 64-bit integer from the (hi, lo) register pair
// used to pass i64 arguments on 32-bit platforms, then converts it to
// double. The high half arrives typed as int32_t but carries unsigned bits.
static double Uint64ToDouble(int32_t x_hi, uint32_t x_lo) {
  uint64_t value = (uint64_t(uint32_t(x_hi)) << 32) | uint64_t(x_lo);
  return double(value);
}
954
// Reassembles an unsigned 64-bit integer from the (hi, lo) register pair
// used to pass i64 arguments on 32-bit platforms, then converts it to
// float. The high half arrives typed as int32_t but carries unsigned bits.
static float Uint64ToFloat32(int32_t x_hi, uint32_t x_lo) {
  uint64_t value = (uint64_t(uint32_t(x_hi)) << 32) | uint64_t(x_lo);
  return float(value);
}
959
// Converts a typed C++ function pointer into the void* form stored in the
// builtin-thunk tables. Under the simulator the pointer is additionally
// redirected so simulated code can call the native function with the given
// ABI signature; otherwise `abiType` is unused here.
template <class F>
static inline void* FuncCast(F* funcPtr, ABIFunctionType abiType) {
  void* pf = JS_FUNC_TO_DATA_PTR(void*, funcPtr);
#ifdef JS_SIMULATOR
  pf = Simulator::RedirectNativeFunction(pf, abiType);
#endif
  return pf;
}
968
#ifdef WASM_CODEGEN_DEBUG
// Debug-only printers callable from generated wasm code. Each writes a
// tagged value to stderr with a trailing space (no newline), so several
// values printed by one compiled instruction appear on the same line.
void wasm::PrintI32(int32_t val) { fprintf(stderr, "i32(%d) ", val); }

void wasm::PrintPtr(uint8_t* val) { fprintf(stderr, "ptr(%p) ", val); }

void wasm::PrintF32(float val) { fprintf(stderr, "f32(%f) ", val); }

void wasm::PrintF64(double val) { fprintf(stderr, "f64(%lf) ", val); }

void wasm::PrintText(const char* out) { fprintf(stderr, "%s", out); }
#endif
980
// Returns the raw C++ entry point for the given symbolic address and reports,
// via *abiType, the ABI signature the thunk generator must use to call it.
// On simulator builds FuncCast also redirects the returned pointer through
// the simulator. The MOZ_ASSERTs cross-check the hard-coded ABI enumerator
// against the one derived from the instance-call signature (SASig*).
void* wasm::AddressOf(SymbolicAddress imm, ABIFunctionType* abiType) {
  switch (imm) {
    // Stubs that run within the wasm activation (no thunk; see
    // NeedsBuiltinThunk below).
    case SymbolicAddress::HandleDebugTrap:
      *abiType = Args_General0;
      return FuncCast(WasmHandleDebugTrap, *abiType);
    case SymbolicAddress::HandleThrow:
      *abiType = Args_General1;
      return FuncCast(WasmHandleThrow, *abiType);
    case SymbolicAddress::HandleTrap:
      *abiType = Args_General0;
      return FuncCast(WasmHandleTrap, *abiType);
    case SymbolicAddress::ReportV128JSCall:
      *abiType = Args_General0;
      return FuncCast(WasmReportV128JSCall, *abiType);
    case SymbolicAddress::CallImport_General:
      *abiType = Args_Int32_GeneralInt32Int32General;
      return FuncCast(Instance::callImport_general, *abiType);
    case SymbolicAddress::CoerceInPlace_ToInt32:
      *abiType = Args_General1;
      return FuncCast(CoerceInPlace_ToInt32, *abiType);
    case SymbolicAddress::CoerceInPlace_ToBigInt:
      *abiType = Args_General1;
      return FuncCast(CoerceInPlace_ToBigInt, *abiType);
    case SymbolicAddress::CoerceInPlace_ToNumber:
      *abiType = Args_General1;
      return FuncCast(CoerceInPlace_ToNumber, *abiType);
    case SymbolicAddress::CoerceInPlace_JitEntry:
      *abiType = Args_General3;
      return FuncCast(CoerceInPlace_JitEntry, *abiType);
    case SymbolicAddress::ToInt32:
      *abiType = Args_Int_Double;
      return FuncCast<int32_t(double)>(JS::ToInt32, *abiType);
    case SymbolicAddress::BoxValue_Anyref:
      *abiType = Args_General1;
      return FuncCast(BoxValue_Anyref, *abiType);
    case SymbolicAddress::AllocateBigInt:
      *abiType = Args_General0;
      return FuncCast(AllocateBigIntTenuredNoGC, *abiType);
    // 64-bit arithmetic helpers for 32-bit platforms (i64 passed as two
    // 32-bit halves; see DivI64 et al. above).
    case SymbolicAddress::DivI64:
      *abiType = Args_General4;
      return FuncCast(DivI64, *abiType);
    case SymbolicAddress::UDivI64:
      *abiType = Args_General4;
      return FuncCast(UDivI64, *abiType);
    case SymbolicAddress::ModI64:
      *abiType = Args_General4;
      return FuncCast(ModI64, *abiType);
    case SymbolicAddress::UModI64:
      *abiType = Args_General4;
      return FuncCast(UModI64, *abiType);
    case SymbolicAddress::TruncateDoubleToUint64:
      *abiType = Args_Int64_Double;
      return FuncCast(TruncateDoubleToUint64, *abiType);
    case SymbolicAddress::TruncateDoubleToInt64:
      *abiType = Args_Int64_Double;
      return FuncCast(TruncateDoubleToInt64, *abiType);
    case SymbolicAddress::SaturatingTruncateDoubleToUint64:
      *abiType = Args_Int64_Double;
      return FuncCast(SaturatingTruncateDoubleToUint64, *abiType);
    case SymbolicAddress::SaturatingTruncateDoubleToInt64:
      *abiType = Args_Int64_Double;
      return FuncCast(SaturatingTruncateDoubleToInt64, *abiType);
    case SymbolicAddress::Uint64ToDouble:
      *abiType = Args_Double_IntInt;
      return FuncCast(Uint64ToDouble, *abiType);
    case SymbolicAddress::Uint64ToFloat32:
      *abiType = Args_Float32_IntInt;
      return FuncCast(Uint64ToFloat32, *abiType);
    case SymbolicAddress::Int64ToDouble:
      *abiType = Args_Double_IntInt;
      return FuncCast(Int64ToDouble, *abiType);
    case SymbolicAddress::Int64ToFloat32:
      *abiType = Args_Float32_IntInt;
      return FuncCast(Int64ToFloat32, *abiType);
#if defined(JS_CODEGEN_ARM)
    // ARM EABI runtime helpers for 32-bit integer div/mod.
    case SymbolicAddress::aeabi_idivmod:
      *abiType = Args_General2;
      return FuncCast(__aeabi_idivmod, *abiType);
    case SymbolicAddress::aeabi_uidivmod:
      *abiType = Args_General2;
      return FuncCast(__aeabi_uidivmod, *abiType);
#endif
    // Math builtins. fdlibm is used where bit-reproducible results are
    // required across platforms.
    case SymbolicAddress::ModD:
      *abiType = Args_Double_DoubleDouble;
      return FuncCast(NumberMod, *abiType);
    case SymbolicAddress::SinD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(sin, *abiType);
    case SymbolicAddress::CosD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(cos, *abiType);
    case SymbolicAddress::TanD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(tan, *abiType);
    case SymbolicAddress::ASinD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::asin, *abiType);
    case SymbolicAddress::ACosD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::acos, *abiType);
    case SymbolicAddress::ATanD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::atan, *abiType);
    case SymbolicAddress::CeilD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::ceil, *abiType);
    case SymbolicAddress::CeilF:
      *abiType = Args_Float32_Float32;
      return FuncCast<float(float)>(fdlibm::ceilf, *abiType);
    case SymbolicAddress::FloorD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::floor, *abiType);
    case SymbolicAddress::FloorF:
      *abiType = Args_Float32_Float32;
      return FuncCast<float(float)>(fdlibm::floorf, *abiType);
    case SymbolicAddress::TruncD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::trunc, *abiType);
    case SymbolicAddress::TruncF:
      *abiType = Args_Float32_Float32;
      return FuncCast<float(float)>(fdlibm::truncf, *abiType);
    case SymbolicAddress::NearbyIntD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::nearbyint, *abiType);
    case SymbolicAddress::NearbyIntF:
      *abiType = Args_Float32_Float32;
      return FuncCast<float(float)>(fdlibm::nearbyintf, *abiType);
    case SymbolicAddress::ExpD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::exp, *abiType);
    case SymbolicAddress::LogD:
      *abiType = Args_Double_Double;
      return FuncCast<double(double)>(fdlibm::log, *abiType);
    case SymbolicAddress::PowD:
      *abiType = Args_Double_DoubleDouble;
      return FuncCast(ecmaPow, *abiType);
    case SymbolicAddress::ATan2D:
      *abiType = Args_Double_DoubleDouble;
      return FuncCast(ecmaAtan2, *abiType);

    // Instance-call builtins (memory, table, GC, etc.). Each asserts that
    // the hard-coded ABI enumerator matches the one derived from the
    // corresponding SASig* signature.
    case SymbolicAddress::MemoryGrowM32:
      *abiType = Args_Int32_GeneralInt32;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemoryGrowM32));
      return FuncCast(Instance::memoryGrow_m32, *abiType);
    case SymbolicAddress::MemoryGrowM64:
      *abiType = Args_Int64_GeneralInt64;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemoryGrowM64));
      return FuncCast(Instance::memoryGrow_m64, *abiType);
    case SymbolicAddress::MemorySizeM32:
      *abiType = Args_Int32_General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemorySizeM32));
      return FuncCast(Instance::memorySize_m32, *abiType);
    case SymbolicAddress::MemorySizeM64:
      *abiType = Args_Int64_General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemorySizeM64));
      return FuncCast(Instance::memorySize_m64, *abiType);
    case SymbolicAddress::WaitI32M32:
      *abiType = Args_Int32_GeneralInt32Int32Int64;
      MOZ_ASSERT(*abiType == ToABIType(SASigWaitI32M32));
      return FuncCast(Instance::wait_i32_m32, *abiType);
    case SymbolicAddress::WaitI32M64:
      *abiType = Args_Int32_GeneralInt64Int32Int64;
      MOZ_ASSERT(*abiType == ToABIType(SASigWaitI32M64));
      return FuncCast(Instance::wait_i32_m64, *abiType);
    case SymbolicAddress::WaitI64M32:
      *abiType = Args_Int32_GeneralInt32Int64Int64;
      MOZ_ASSERT(*abiType == ToABIType(SASigWaitI64M32));
      return FuncCast(Instance::wait_i64_m32, *abiType);
    case SymbolicAddress::WaitI64M64:
      *abiType = Args_Int32_GeneralInt64Int64Int64;
      MOZ_ASSERT(*abiType == ToABIType(SASigWaitI64M64));
      return FuncCast(Instance::wait_i64_m64, *abiType);
    case SymbolicAddress::WakeM32:
      *abiType = Args_Int32_GeneralInt32Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigWakeM32));
      return FuncCast(Instance::wake_m32, *abiType);
    case SymbolicAddress::WakeM64:
      *abiType = Args_Int32_GeneralInt64Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigWakeM64));
      return FuncCast(Instance::wake_m64, *abiType);
    case SymbolicAddress::MemCopyM32:
      *abiType = Args_Int32_GeneralInt32Int32Int32General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemCopyM32));
      return FuncCast(Instance::memCopy_m32, *abiType);
    case SymbolicAddress::MemCopySharedM32:
      *abiType = Args_Int32_GeneralInt32Int32Int32General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemCopySharedM32));
      return FuncCast(Instance::memCopyShared_m32, *abiType);
    case SymbolicAddress::MemCopyM64:
      *abiType = Args_Int32_GeneralInt64Int64Int64General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemCopyM64));
      return FuncCast(Instance::memCopy_m64, *abiType);
    case SymbolicAddress::MemCopySharedM64:
      *abiType = Args_Int32_GeneralInt64Int64Int64General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemCopySharedM64));
      return FuncCast(Instance::memCopyShared_m64, *abiType);
    case SymbolicAddress::DataDrop:
      *abiType = Args_Int32_GeneralInt32;
      MOZ_ASSERT(*abiType == ToABIType(SASigDataDrop));
      return FuncCast(Instance::dataDrop, *abiType);
    case SymbolicAddress::MemFillM32:
      *abiType = Args_Int32_GeneralInt32Int32Int32General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemFillM32));
      return FuncCast(Instance::memFill_m32, *abiType);
    case SymbolicAddress::MemFillSharedM32:
      *abiType = Args_Int32_GeneralInt32Int32Int32General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemFillSharedM32));
      return FuncCast(Instance::memFillShared_m32, *abiType);
    case SymbolicAddress::MemFillM64:
      *abiType = Args_Int32_GeneralInt64Int32Int64General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemFillM64));
      return FuncCast(Instance::memFill_m64, *abiType);
    case SymbolicAddress::MemFillSharedM64:
      *abiType = Args_Int32_GeneralInt64Int32Int64General;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemFillSharedM64));
      return FuncCast(Instance::memFillShared_m64, *abiType);
    case SymbolicAddress::MemInitM32:
      *abiType = Args_Int32_GeneralInt32Int32Int32Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemInitM32));
      return FuncCast(Instance::memInit_m32, *abiType);
    case SymbolicAddress::MemInitM64:
      *abiType = Args_Int32_GeneralInt64Int32Int32Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigMemInitM64));
      return FuncCast(Instance::memInit_m64, *abiType);
    case SymbolicAddress::TableCopy:
      *abiType = Args_Int32_GeneralInt32Int32Int32Int32Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigTableCopy));
      return FuncCast(Instance::tableCopy, *abiType);
    case SymbolicAddress::ElemDrop:
      *abiType = Args_Int32_GeneralInt32;
      MOZ_ASSERT(*abiType == ToABIType(SASigElemDrop));
      return FuncCast(Instance::elemDrop, *abiType);
    case SymbolicAddress::TableFill:
      *abiType = Args_Int32_GeneralInt32GeneralInt32Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigTableFill));
      return FuncCast(Instance::tableFill, *abiType);
    case SymbolicAddress::TableInit:
      *abiType = Args_Int32_GeneralInt32Int32Int32Int32Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigTableInit));
      return FuncCast(Instance::tableInit, *abiType);
    case SymbolicAddress::TableGet:
      *abiType = Args_General_GeneralInt32Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigTableGet));
      return FuncCast(Instance::tableGet, *abiType);
    case SymbolicAddress::TableGrow:
      *abiType = Args_Int32_GeneralGeneralInt32Int32;
      MOZ_ASSERT(*abiType == ToABIType(SASigTableGrow));
      return FuncCast(Instance::tableGrow, *abiType);
    case SymbolicAddress::TableSet:
      *abiType = Args_Int32_GeneralInt32GeneralInt32;
      MOZ_ASSERT(*abiType == ToABIType(SASigTableSet));
      return FuncCast(Instance::tableSet, *abiType);
    case SymbolicAddress::TableSize:
      *abiType = Args_Int32_GeneralInt32;
      MOZ_ASSERT(*abiType == ToABIType(SASigTableSize));
      return FuncCast(Instance::tableSize, *abiType);
    case SymbolicAddress::RefFunc:
      *abiType = Args_General_GeneralInt32;
      MOZ_ASSERT(*abiType == ToABIType(SASigRefFunc));
      return FuncCast(Instance::refFunc, *abiType);
    case SymbolicAddress::PostBarrier:
      *abiType = Args_Int32_GeneralGeneral;
      MOZ_ASSERT(*abiType == ToABIType(SASigPostBarrier));
      return FuncCast(Instance::postBarrier, *abiType);
    case SymbolicAddress::PreBarrierFiltering:
      *abiType = Args_Int32_GeneralGeneral;
      MOZ_ASSERT(*abiType == ToABIType(SASigPreBarrierFiltering));
      return FuncCast(Instance::preBarrierFiltering, *abiType);
    case SymbolicAddress::PostBarrierFiltering:
      *abiType = Args_Int32_GeneralGeneral;
      MOZ_ASSERT(*abiType == ToABIType(SASigPostBarrierFiltering));
      return FuncCast(Instance::postBarrierFiltering, *abiType);
    case SymbolicAddress::StructNew:
      *abiType = Args_General2;
      MOZ_ASSERT(*abiType == ToABIType(SASigStructNew));
      return FuncCast(Instance::structNew, *abiType);
    case SymbolicAddress::ArrayNew:
      *abiType = Args_General_GeneralInt32General;
      MOZ_ASSERT(*abiType == ToABIType(SASigArrayNew));
      return FuncCast(Instance::arrayNew, *abiType);
    case SymbolicAddress::RefTest:
      *abiType = Args_Int32_GeneralGeneralGeneral;
      MOZ_ASSERT(*abiType == ToABIType(SASigRefTest));
      return FuncCast(Instance::refTest, *abiType);
    case SymbolicAddress::RttSub:
      *abiType = Args_General3;
      MOZ_ASSERT(*abiType == ToABIType(SASigRttSub));
      return FuncCast(Instance::rttSub, *abiType);
    case SymbolicAddress::InlineTypedObjectClass:
      // This is a data address, not a function; no FuncCast.
      // The ABI type is not used here, but assigning one to avoid garbage.
      *abiType = Args_General1;
      return (void*)&js::InlineTypedObject::class_;

#if defined(ENABLE_WASM_EXCEPTIONS)
    case SymbolicAddress::ExceptionNew:
      *abiType = Args_General2;
      MOZ_ASSERT(*abiType == ToABIType(SASigExceptionNew));
      return FuncCast(Instance::exceptionNew, *abiType);
    case SymbolicAddress::ThrowException:
      *abiType = Args_Int32_GeneralGeneral;
      MOZ_ASSERT(*abiType == ToABIType(SASigThrowException));
      return FuncCast(Instance::throwException, *abiType);
#endif

#ifdef WASM_CODEGEN_DEBUG
    case SymbolicAddress::PrintI32:
      *abiType = Args_General1;
      return FuncCast(PrintI32, *abiType);
    case SymbolicAddress::PrintPtr:
      *abiType = Args_General1;
      return FuncCast(PrintPtr, *abiType);
    case SymbolicAddress::PrintF32:
      *abiType = Args_Int_Float32;
      return FuncCast(PrintF32, *abiType);
    case SymbolicAddress::PrintF64:
      *abiType = Args_Int_Double;
      return FuncCast(PrintF64, *abiType);
    case SymbolicAddress::PrintText:
      *abiType = Args_General1;
      return FuncCast(PrintText, *abiType);
#endif
// Expand one case per intrinsic from the FOR_EACH_INTRINSIC list.
#define DECL_SAS_TYPE_AND_FN(op, export, sa_name, abitype, entry, idx) \
  case SymbolicAddress::sa_name:                                       \
    *abiType = abitype;                                                \
    return FuncCast(entry, *abiType);
      FOR_EACH_INTRINSIC(DECL_SAS_TYPE_AND_FN)
#undef DECL_SAS_TYPE_AND_FN
    case SymbolicAddress::Limit:
      break;
  }

  MOZ_CRASH("Bad SymbolicAddress");
}
1314
IsRoundingFunction(SymbolicAddress callee,jit::RoundingMode * mode)1315 bool wasm::IsRoundingFunction(SymbolicAddress callee, jit::RoundingMode* mode) {
1316 switch (callee) {
1317 case SymbolicAddress::FloorD:
1318 case SymbolicAddress::FloorF:
1319 *mode = jit::RoundingMode::Down;
1320 return true;
1321 case SymbolicAddress::CeilD:
1322 case SymbolicAddress::CeilF:
1323 *mode = jit::RoundingMode::Up;
1324 return true;
1325 case SymbolicAddress::TruncD:
1326 case SymbolicAddress::TruncF:
1327 *mode = jit::RoundingMode::TowardsZero;
1328 return true;
1329 case SymbolicAddress::NearbyIntD:
1330 case SymbolicAddress::NearbyIntF:
1331 *mode = jit::RoundingMode::NearestTiesToEven;
1332 return true;
1333 default:
1334 return false;
1335 }
1336 }
1337
// Returns whether calls to `sym` must go through a builtin thunk, i.e. a
// small stub that executes the activation exit prologue/epilogue so a
// profiling stack walker can see the full stack up to the wasm caller.
// Symbols that are data addresses, that run inside the activation, or whose
// callers manage the activation exit themselves do not need one.
bool wasm::NeedsBuiltinThunk(SymbolicAddress sym) {
  switch (sym) {
    // No thunk, because these are data addresses
    case SymbolicAddress::InlineTypedObjectClass:
      return false;

    // No thunk, because they do their work within the activation
    case SymbolicAddress::HandleDebugTrap:  // GenerateDebugTrapStub
    case SymbolicAddress::HandleThrow:      // GenerateThrowStub
    case SymbolicAddress::HandleTrap:       // GenerateTrapExit
      return false;

    // No thunk, because their caller manages the activation exit explicitly
    case SymbolicAddress::CallImport_General:     // GenerateImportInterpExit
    case SymbolicAddress::CoerceInPlace_ToInt32:  // GenerateImportJitExit
    case SymbolicAddress::CoerceInPlace_ToNumber: // GenerateImportJitExit
    case SymbolicAddress::CoerceInPlace_ToBigInt: // GenerateImportJitExit
    case SymbolicAddress::BoxValue_Anyref:        // GenerateImportJitExit
      return false;

#ifdef WASM_CODEGEN_DEBUG
    // No thunk, because they call directly into C++ code that does not
    // interact with the rest of the VM at all.
    case SymbolicAddress::PrintI32:  // Debug stub printers
    case SymbolicAddress::PrintPtr:
    case SymbolicAddress::PrintF32:
    case SymbolicAddress::PrintF64:
    case SymbolicAddress::PrintText:
      return false;
#endif

    // Everyone else gets a thunk to handle the exit from the activation
    case SymbolicAddress::ToInt32:
    case SymbolicAddress::DivI64:
    case SymbolicAddress::UDivI64:
    case SymbolicAddress::ModI64:
    case SymbolicAddress::UModI64:
    case SymbolicAddress::TruncateDoubleToUint64:
    case SymbolicAddress::TruncateDoubleToInt64:
    case SymbolicAddress::SaturatingTruncateDoubleToUint64:
    case SymbolicAddress::SaturatingTruncateDoubleToInt64:
    case SymbolicAddress::Uint64ToDouble:
    case SymbolicAddress::Uint64ToFloat32:
    case SymbolicAddress::Int64ToDouble:
    case SymbolicAddress::Int64ToFloat32:
#if defined(JS_CODEGEN_ARM)
    case SymbolicAddress::aeabi_idivmod:
    case SymbolicAddress::aeabi_uidivmod:
#endif
    case SymbolicAddress::AllocateBigInt:
    case SymbolicAddress::ModD:
    case SymbolicAddress::SinD:
    case SymbolicAddress::CosD:
    case SymbolicAddress::TanD:
    case SymbolicAddress::ASinD:
    case SymbolicAddress::ACosD:
    case SymbolicAddress::ATanD:
    case SymbolicAddress::CeilD:
    case SymbolicAddress::CeilF:
    case SymbolicAddress::FloorD:
    case SymbolicAddress::FloorF:
    case SymbolicAddress::TruncD:
    case SymbolicAddress::TruncF:
    case SymbolicAddress::NearbyIntD:
    case SymbolicAddress::NearbyIntF:
    case SymbolicAddress::ExpD:
    case SymbolicAddress::LogD:
    case SymbolicAddress::PowD:
    case SymbolicAddress::ATan2D:
    case SymbolicAddress::MemoryGrowM32:
    case SymbolicAddress::MemoryGrowM64:
    case SymbolicAddress::MemorySizeM32:
    case SymbolicAddress::MemorySizeM64:
    case SymbolicAddress::WaitI32M32:
    case SymbolicAddress::WaitI32M64:
    case SymbolicAddress::WaitI64M32:
    case SymbolicAddress::WaitI64M64:
    case SymbolicAddress::WakeM32:
    case SymbolicAddress::WakeM64:
    case SymbolicAddress::CoerceInPlace_JitEntry:
    case SymbolicAddress::ReportV128JSCall:
    case SymbolicAddress::MemCopyM32:
    case SymbolicAddress::MemCopySharedM32:
    case SymbolicAddress::MemCopyM64:
    case SymbolicAddress::MemCopySharedM64:
    case SymbolicAddress::DataDrop:
    case SymbolicAddress::MemFillM32:
    case SymbolicAddress::MemFillSharedM32:
    case SymbolicAddress::MemFillM64:
    case SymbolicAddress::MemFillSharedM64:
    case SymbolicAddress::MemInitM32:
    case SymbolicAddress::MemInitM64:
    case SymbolicAddress::TableCopy:
    case SymbolicAddress::ElemDrop:
    case SymbolicAddress::TableFill:
    case SymbolicAddress::TableGet:
    case SymbolicAddress::TableGrow:
    case SymbolicAddress::TableInit:
    case SymbolicAddress::TableSet:
    case SymbolicAddress::TableSize:
    case SymbolicAddress::RefFunc:
    case SymbolicAddress::PreBarrierFiltering:
    case SymbolicAddress::PostBarrier:
    case SymbolicAddress::PostBarrierFiltering:
    case SymbolicAddress::StructNew:
#ifdef ENABLE_WASM_EXCEPTIONS
    case SymbolicAddress::ExceptionNew:
    case SymbolicAddress::ThrowException:
#endif
    case SymbolicAddress::ArrayNew:
    case SymbolicAddress::RefTest:
    case SymbolicAddress::RttSub:
// Intrinsics always exit the activation, so they all take a thunk.
#define OP(op, export, sa_name, abitype, entry, idx) \
  case SymbolicAddress::sa_name:
      FOR_EACH_INTRINSIC(OP)
#undef OP
      return true;

    case SymbolicAddress::Limit:
      break;
  }

  MOZ_CRASH("unexpected symbolic address");
}
1462
1463 // ============================================================================
1464 // JS builtins that can be imported by wasm modules and called efficiently
1465 // through thunks. These thunks conform to the internal wasm ABI and thus can be
1466 // patched in for import calls. Calling a JS builtin through a thunk is much
1467 // faster than calling out through the generic import call trampoline which will
1468 // end up in the slowest C++ Instance::callImport path.
1469 //
1470 // Each JS builtin can have several overloads. These must all be enumerated in
1471 // PopulateTypedNatives() so they can be included in the process-wide thunk set.
1472
// sin/cos/tan get special treatment: they can be backed by either fdlibm or
// the native libm implementation, chosen at runtime by
// math_use_fdlibm_for_sin_cos_tan() (see the wrappers below).
#define FOR_EACH_SIN_COS_TAN_NATIVE(_) \
  _(math_sin, MathSin)                 \
  _(math_tan, MathTan)                 \
  _(math_cos, MathCos)

// Unary Math builtins with a single double -> double implementation.
#define FOR_EACH_UNARY_NATIVE(_) \
  _(math_exp, MathExp)           \
  _(math_log, MathLog)           \
  _(math_asin, MathASin)         \
  _(math_atan, MathATan)         \
  _(math_acos, MathACos)         \
  _(math_log10, MathLog10)       \
  _(math_log2, MathLog2)         \
  _(math_log1p, MathLog1P)       \
  _(math_expm1, MathExpM1)       \
  _(math_sinh, MathSinH)         \
  _(math_tanh, MathTanH)         \
  _(math_cosh, MathCosH)         \
  _(math_asinh, MathASinH)       \
  _(math_atanh, MathATanH)       \
  _(math_acosh, MathACosH)       \
  _(math_sign, MathSign)         \
  _(math_trunc, MathTrunc)       \
  _(math_cbrt, MathCbrt)

// Binary Math builtins, (double, double) -> double.
#define FOR_EACH_BINARY_NATIVE(_) \
  _(ecmaAtan2, MathATan2)         \
  _(ecmaHypot, MathHypot)         \
  _(ecmaPow, MathPow)

// float32 overloads: compute in double precision and narrow the result.
// The sin/cos/tan wrapper also picks the fdlibm vs. native backend.
#define DEFINE_SIN_COS_TAN_FLOAT_WRAPPER(func, _)  \
  static float func##_impl_f32(float x) {          \
    if (math_use_fdlibm_for_sin_cos_tan()) {       \
      return float(func##_fdlibm_impl(double(x))); \
    }                                              \
    return float(func##_native_impl(double(x)));   \
  }

#define DEFINE_UNARY_FLOAT_WRAPPER(func, _) \
  static float func##_impl_f32(float x) {   \
    return float(func##_impl(double(x)));   \
  }

#define DEFINE_BINARY_FLOAT_WRAPPER(func, _)  \
  static float func##_f32(float x, float y) { \
    return float(func(double(x), double(y))); \
  }

// Instantiate all of the float32 wrapper functions.
FOR_EACH_SIN_COS_TAN_NATIVE(DEFINE_SIN_COS_TAN_FLOAT_WRAPPER)
FOR_EACH_UNARY_NATIVE(DEFINE_UNARY_FLOAT_WRAPPER)
FOR_EACH_BINARY_NATIVE(DEFINE_BINARY_FLOAT_WRAPPER)

#undef DEFINE_UNARY_FLOAT_WRAPPER
#undef DEFINE_BINARY_FLOAT_WRAPPER
1527
// Key identifying one overload of an inlinable JS builtin: the native plus
// the ABI signature of the overload. Doubles as its own HashPolicy for the
// maps below (typedNative -> function pointer / code-range index).
struct TypedNative {
  InlinableNative native;
  ABIFunctionType abiType;

  TypedNative(InlinableNative native, ABIFunctionType abiType)
      : native(native), abiType(abiType) {}

  // HashPolicy implementation: keys are hashed and compared by value.
  using Lookup = TypedNative;
  static HashNumber hash(const Lookup& l) {
    return HashGeneric(uint32_t(l.native), uint32_t(l.abiType));
  }
  static bool match(const TypedNative& lhs, const Lookup& rhs) {
    return lhs.native == rhs.native && lhs.abiType == rhs.abiType;
  }
};
1543
1544 using TypedNativeToFuncPtrMap =
1545 HashMap<TypedNative, void*, TypedNative, SystemAllocPolicy>;
1546
// Enumerates every thunkable overload of every inlinable JS Math builtin and
// records its C++ entry point in *typedNatives. Returns false on OOM.
static bool PopulateTypedNatives(TypedNativeToFuncPtrMap* typedNatives) {
// Registers one (native, abiType) -> function-pointer entry.
#define ADD_OVERLOAD(funcName, native, abiType)                            \
  if (!typedNatives->putNew(TypedNative(InlinableNative::native, abiType), \
                            FuncCast(funcName, abiType)))                  \
    return false;

// sin/cos/tan: the double overload picks the fdlibm or native backend once,
// here; the f32 wrapper re-checks the pref on each call (see above).
#define ADD_SIN_COS_TAN_OVERLOADS(funcName, native)                \
  if (math_use_fdlibm_for_sin_cos_tan()) {                         \
    ADD_OVERLOAD(funcName##_fdlibm_impl, native, Args_Double_Double) \
  } else {                                                         \
    ADD_OVERLOAD(funcName##_native_impl, native, Args_Double_Double) \
  }                                                                \
  ADD_OVERLOAD(funcName##_impl_f32, native, Args_Float32_Float32)

// Unary builtins: one double and one float32 overload each.
#define ADD_UNARY_OVERLOADS(funcName, native)               \
  ADD_OVERLOAD(funcName##_impl, native, Args_Double_Double) \
  ADD_OVERLOAD(funcName##_impl_f32, native, Args_Float32_Float32)

// Binary builtins: one (double, double) and one (float, float) overload.
#define ADD_BINARY_OVERLOADS(funcName, native)              \
  ADD_OVERLOAD(funcName, native, Args_Double_DoubleDouble)  \
  ADD_OVERLOAD(funcName##_f32, native, Args_Float32_Float32Float32)

  FOR_EACH_SIN_COS_TAN_NATIVE(ADD_SIN_COS_TAN_OVERLOADS)
  FOR_EACH_UNARY_NATIVE(ADD_UNARY_OVERLOADS)
  FOR_EACH_BINARY_NATIVE(ADD_BINARY_OVERLOADS)

#undef ADD_UNARY_OVERLOADS
#undef ADD_BINARY_OVERLOADS

  return true;
}
1578
1579 #undef FOR_EACH_UNARY_NATIVE
1580 #undef FOR_EACH_BINARY_NATIVE
1581
1582 // ============================================================================
1583 // Process-wide builtin thunk set
1584 //
1585 // Thunks are inserted between wasm calls and the C++ callee and achieve two
1586 // things:
1587 // - bridging the few differences between the internal wasm ABI and the
1588 // external native ABI (viz. float returns on x86 and soft-fp ARM)
1589 // - executing an exit prologue/epilogue which in turn allows any profiling
1590 // iterator to see the full stack up to the wasm operation that called out
1591 //
1592 // Thunks are created for two kinds of C++ callees, enumerated above:
1593 // - SymbolicAddress: for statically compiled calls in the wasm module
1594 // - Imported JS builtins: optimized calls to imports
1595 //
1596 // All thunks are created up front, lazily, when the first wasm module is
1597 // compiled in the process. Thunks are kept alive until the JS engine shuts down
1598 // in the process. No thunks are created at runtime after initialization. This
1599 // simple scheme allows several simplifications:
1600 // - no reference counting to keep thunks alive
1601 // - no problems toggling W^X permissions which, because of multiple executing
1602 // threads, would require each thunk allocation to be on its own page
1603 // The cost for creating all thunks at once is relatively low since all thunks
1604 // fit within the smallest executable quanta (64k).
1605
1606 using TypedNativeToCodeRangeMap =
1607 HashMap<TypedNative, uint32_t, TypedNative, SystemAllocPolicy>;
1608
1609 using SymbolicAddressToCodeRangeArray =
1610 EnumeratedArray<SymbolicAddress, SymbolicAddress::Limit, uint32_t>;
1611
// Process-wide, write-once store for all builtin thunk machine code plus the
// lookup tables mapping callees to their code ranges. Owns the executable
// memory and releases it on destruction (engine shutdown).
struct BuiltinThunks {
  uint8_t* codeBase;  // start of the executable allocation, or nullptr
  size_t codeSize;    // size of that allocation, in bytes
  CodeRangeVector codeRanges;
  // Maps a (native, abiType) pair to an index into codeRanges.
  TypedNativeToCodeRangeMap typedNativeToCodeRange;
  // Per-SymbolicAddress index into codeRanges; UINT32_MAX when no thunk.
  SymbolicAddressToCodeRangeArray symbolicAddressToCodeRange;
  // Offset of the shared provisional lazy jit-entry stub within codeBase.
  uint32_t provisionalLazyJitEntryOffset;

  BuiltinThunks() : codeBase(nullptr), codeSize(0) {}

  ~BuiltinThunks() {
    if (codeBase) {
      DeallocateExecutableMemory(codeBase, codeSize);
    }
  }
};
1628
// Serializes the one-time creation of the thunk set in
// EnsureBuiltinThunksInitialized().
Mutex initBuiltinThunks(mutexid::WasmInitBuiltinThunks);
// Published once fully initialized; subsequently read without the lock.
Atomic<const BuiltinThunks*> builtinThunks;
1631
// Generates, once per process, the executable thunk code for: every
// SymbolicAddress that needs a wasm ABI wrapper, every typed native that wasm
// may call directly, and the shared provisional lazy jit-entry stub.  Callers
// may invoke this repeatedly; the work is done under initBuiltinThunks and the
// result is published via the builtinThunks atomic.  Returns false on OOM.
bool wasm::EnsureBuiltinThunksInitialized() {
  LockGuard<Mutex> guard(initBuiltinThunks);
  if (builtinThunks) {
    // An earlier call already published the thunks.
    return true;
  }

  auto thunks = MakeUnique<BuiltinThunks>();
  if (!thunks) {
    return false;
  }

  LifoAlloc lifo(BUILTIN_THUNK_LIFO_SIZE);
  TempAllocator tempAlloc(&lifo);
  WasmMacroAssembler masm(tempAlloc);
  AutoCreatedBy acb(masm, "wasm::EnsureBuiltinThunksInitialized");

  // Emit one thunk per symbolic address that needs one, recording its
  // CodeRange index for SymbolicAddressTarget; UINT32_MAX marks "no thunk".
  for (auto sym : MakeEnumeratedRange(SymbolicAddress::Limit)) {
    if (!NeedsBuiltinThunk(sym)) {
      thunks->symbolicAddressToCodeRange[sym] = UINT32_MAX;
      continue;
    }

    uint32_t codeRangeIndex = thunks->codeRanges.length();
    thunks->symbolicAddressToCodeRange[sym] = codeRangeIndex;

    ABIFunctionType abiType;
    void* funcPtr = AddressOf(sym, &abiType);

    ExitReason exitReason(sym);

    CallableOffsets offsets;
    if (!GenerateBuiltinThunk(masm, abiType, exitReason, funcPtr, &offsets)) {
      return false;
    }
    if (!thunks->codeRanges.emplaceBack(CodeRange::BuiltinThunk, offsets)) {
      return false;
    }
  }

  // Emit a thunk for each (inlinable native, float signature) pair that wasm
  // code can call directly (consumed by MaybeGetBuiltinThunk).
  TypedNativeToFuncPtrMap typedNatives;
  if (!PopulateTypedNatives(&typedNatives)) {
    return false;
  }

  for (TypedNativeToFuncPtrMap::Range r = typedNatives.all(); !r.empty();
       r.popFront()) {
    TypedNative typedNative = r.front().key();

    uint32_t codeRangeIndex = thunks->codeRanges.length();
    if (!thunks->typedNativeToCodeRange.putNew(typedNative, codeRangeIndex)) {
      return false;
    }

    ABIFunctionType abiType = typedNative.abiType;
    void* funcPtr = r.front().value();

    ExitReason exitReason = ExitReason::Fixed::BuiltinNative;

    CallableOffsets offsets;
    if (!GenerateBuiltinThunk(masm, abiType, exitReason, funcPtr, &offsets)) {
      return false;
    }
    if (!thunks->codeRanges.emplaceBack(CodeRange::BuiltinThunk, offsets)) {
      return false;
    }
  }

  // Provisional lazy JitEntry stub: This is a shared stub that can be installed
  // in the jit-entry jump table. It uses the JIT ABI and when invoked will
  // retrieve (via TlsContext()) and invoke the context-appropriate
  // invoke-from-interpreter jit stub, thus serving as the initial, unoptimized
  // jit-entry stub for any exported wasm function that has a jit-entry.

#ifdef DEBUG
  // We need to allow this machine code to bake in a C++ code pointer, so we
  // disable the wasm restrictions while generating this stub.
  JitContext jitContext(&tempAlloc);
  bool oldFlag = jitContext.setIsCompilingWasm(false);
#endif

  Offsets provisionalLazyJitEntryOffsets;
  if (!GenerateProvisionalLazyJitEntryStub(masm,
                                           &provisionalLazyJitEntryOffsets)) {
    return false;
  }
  thunks->provisionalLazyJitEntryOffset = provisionalLazyJitEntryOffsets.begin;

#ifdef DEBUG
  // Restore the wasm restrictions saved above.
  jitContext.setIsCompilingWasm(oldFlag);
#endif

  masm.finish();
  if (masm.oom()) {
    return false;
  }

  // Round up to a whole number of executable pages so the allocation below
  // can be re-protected as executable.
  size_t allocSize = AlignBytes(masm.bytesNeeded(), ExecutableCodePageSize);

  thunks->codeSize = allocSize;
  thunks->codeBase = (uint8_t*)AllocateExecutableMemory(
      allocSize, ProtectionSetting::Writable, MemCheckKind::MakeUndefined);
  if (!thunks->codeBase) {
    return false;
  }

  // Copy the generated code and zero the page-alignment padding after it.
  masm.executableCopy(thunks->codeBase);
  memset(thunks->codeBase + masm.bytesNeeded(), 0,
         allocSize - masm.bytesNeeded());

  masm.processCodeLabels(thunks->codeBase);
  PatchDebugSymbolicAccesses(thunks->codeBase, masm);

  // The thunk generators above are expected to emit no call/trap/try
  // metadata; nothing here registers such metadata anywhere.
  MOZ_ASSERT(masm.callSites().empty());
  MOZ_ASSERT(masm.callSiteTargets().empty());
  MOZ_ASSERT(masm.trapSites().empty());
#ifdef ENABLE_WASM_EXCEPTIONS
  MOZ_ASSERT(masm.tryNotes().empty());
#endif

  if (!ExecutableAllocator::makeExecutableAndFlushICache(
          FlushICacheSpec::LocalThreadOnly, thunks->codeBase,
          thunks->codeSize)) {
    return false;
  }

  // Publish; readers access builtinThunks without the lock.
  builtinThunks = thunks.release();
  return true;
}
1760
ReleaseBuiltinThunks()1761 void wasm::ReleaseBuiltinThunks() {
1762 if (builtinThunks) {
1763 const BuiltinThunks* ptr = builtinThunks;
1764 js_delete(const_cast<BuiltinThunks*>(ptr));
1765 builtinThunks = nullptr;
1766 }
1767 }
1768
SymbolicAddressTarget(SymbolicAddress sym)1769 void* wasm::SymbolicAddressTarget(SymbolicAddress sym) {
1770 MOZ_ASSERT(builtinThunks);
1771
1772 ABIFunctionType abiType;
1773 void* funcPtr = AddressOf(sym, &abiType);
1774
1775 if (!NeedsBuiltinThunk(sym)) {
1776 return funcPtr;
1777 }
1778
1779 const BuiltinThunks& thunks = *builtinThunks;
1780 uint32_t codeRangeIndex = thunks.symbolicAddressToCodeRange[sym];
1781 return thunks.codeBase + thunks.codeRanges[codeRangeIndex].begin();
1782 }
1783
ProvisionalLazyJitEntryStub()1784 void* wasm::ProvisionalLazyJitEntryStub() {
1785 MOZ_ASSERT(builtinThunks);
1786
1787 const BuiltinThunks& thunks = *builtinThunks;
1788 return thunks.codeBase + thunks.provisionalLazyJitEntryOffset;
1789 }
1790
ToBuiltinABIFunctionType(const FuncType & funcType)1791 static Maybe<ABIFunctionType> ToBuiltinABIFunctionType(
1792 const FuncType& funcType) {
1793 const ValTypeVector& args = funcType.args();
1794 const ValTypeVector& results = funcType.results();
1795
1796 if (results.length() != 1) {
1797 return Nothing();
1798 }
1799
1800 uint32_t abiType;
1801 switch (results[0].kind()) {
1802 case ValType::F32:
1803 abiType = ArgType_Float32 << RetType_Shift;
1804 break;
1805 case ValType::F64:
1806 abiType = ArgType_Float64 << RetType_Shift;
1807 break;
1808 default:
1809 return Nothing();
1810 }
1811
1812 if ((args.length() + 1) > (sizeof(uint32_t) * 8 / ArgType_Shift)) {
1813 return Nothing();
1814 }
1815
1816 for (size_t i = 0; i < args.length(); i++) {
1817 switch (args[i].kind()) {
1818 case ValType::F32:
1819 abiType |= (ArgType_Float32 << (ArgType_Shift * (i + 1)));
1820 break;
1821 case ValType::F64:
1822 abiType |= (ArgType_Float64 << (ArgType_Shift * (i + 1)));
1823 break;
1824 default:
1825 return Nothing();
1826 }
1827 }
1828
1829 return Some(ABIFunctionType(abiType));
1830 }
1831
MaybeGetBuiltinThunk(JSFunction * f,const FuncType & funcType)1832 void* wasm::MaybeGetBuiltinThunk(JSFunction* f, const FuncType& funcType) {
1833 MOZ_ASSERT(builtinThunks);
1834
1835 if (!f->isNativeFun() || !f->hasJitInfo() ||
1836 f->jitInfo()->type() != JSJitInfo::InlinableNative) {
1837 return nullptr;
1838 }
1839
1840 Maybe<ABIFunctionType> abiType = ToBuiltinABIFunctionType(funcType);
1841 if (!abiType) {
1842 return nullptr;
1843 }
1844
1845 TypedNative typedNative(f->jitInfo()->inlinableNative, *abiType);
1846
1847 const BuiltinThunks& thunks = *builtinThunks;
1848 auto p = thunks.typedNativeToCodeRange.readonlyThreadsafeLookup(typedNative);
1849 if (!p) {
1850 return nullptr;
1851 }
1852
1853 return thunks.codeBase + thunks.codeRanges[p->value()].begin();
1854 }
1855
LookupBuiltinThunk(void * pc,const CodeRange ** codeRange,uint8_t ** codeBase)1856 bool wasm::LookupBuiltinThunk(void* pc, const CodeRange** codeRange,
1857 uint8_t** codeBase) {
1858 if (!builtinThunks) {
1859 return false;
1860 }
1861
1862 const BuiltinThunks& thunks = *builtinThunks;
1863 if (pc < thunks.codeBase || pc >= thunks.codeBase + thunks.codeSize) {
1864 return false;
1865 }
1866
1867 *codeBase = thunks.codeBase;
1868
1869 CodeRange::OffsetInCode target((uint8_t*)pc - thunks.codeBase);
1870 *codeRange = LookupInSorted(thunks.codeRanges, target);
1871
1872 return !!*codeRange;
1873 }
1874