/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 *
 * Copyright 2015 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asmjs/WasmStubs.h"

#include "mozilla/ArrayUtils.h"
#include "mozilla/EnumeratedRange.h"

#include "asmjs/AsmJSModule.h"

#include "jit/MacroAssembler-inl.h"

using namespace js;
using namespace js::jit;
using namespace js::wasm;

using mozilla::ArrayLength;
using mozilla::MakeEnumeratedRange;

typedef Vector<MIRType, 8, SystemAllocPolicy> MIRTypeVector;
typedef ABIArgIter<MIRTypeVector> ABIArgMIRTypeIter;
typedef ABIArgIter<MallocSig::ArgVector> ABIArgValTypeIter;
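
// An ABIArgIter walks a vector of argument types and yields each argument's
// location in the system ABI: a GPR, an FPU register, or an offset from the
// stack-argument base. Illustrative use only (this exact pattern recurs
// throughout this file):
//
//   for (ABIArgValTypeIter i(sig.args()); !i.done(); i++) {
//       if (i->kind() == ABIArg::Stack)
//           ... the argument lives at sp + i->offsetFromArgBase() ...
//       else
//           ... the argument lives in i->gpr() or i->fpu() ...
//   }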

static void
AssertStackAlignment(MacroAssembler& masm, uint32_t alignment, uint32_t addBeforeAssert = 0)
{
    MOZ_ASSERT((sizeof(AsmJSFrame) + masm.framePushed() + addBeforeAssert) % alignment == 0);
    masm.assertStackAlignment(alignment, addBeforeAssert);
}

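// Compute how much the stack pointer must be decremented so that, after
// 'bytesToPush' further bytes are pushed, sp remains 'alignment'-aligned.
// The AsmJSFrame and everything already pushed on 'masm' count toward the
// current offset.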
static unsigned
StackDecrementForCall(MacroAssembler& masm, uint32_t alignment, unsigned bytesToPush)
{
    return StackDecrementForCall(alignment, sizeof(AsmJSFrame) + masm.framePushed(), bytesToPush);
}

template <class VectorT>
static unsigned
StackArgBytes(const VectorT& args)
{
    ABIArgIter<VectorT> iter(args);
    while (!iter.done())
        iter++;
    return iter.stackBytesConsumedSoFar();
}

template <class VectorT>
static unsigned
StackDecrementForCall(MacroAssembler& masm, uint32_t alignment, const VectorT& args,
                      unsigned extraBytes = 0)
{
    return StackDecrementForCall(masm, alignment, StackArgBytes(args) + extraBytes);
}

#if defined(JS_CODEGEN_ARM)
// The ARM system ABI also includes d15 & s31 in the non-volatile float
// registers. Also exclude lr (a.k.a. r14) since we preserve it manually.
static const LiveRegisterSet NonVolatileRegs =
    LiveRegisterSet(GeneralRegisterSet(Registers::NonVolatileMask&
                                       ~(uint32_t(1) << Registers::lr)),
                    FloatRegisterSet(FloatRegisters::NonVolatileMask
                                     | (1ULL << FloatRegisters::d15)
                                     | (1ULL << FloatRegisters::s31)));
#else
static const LiveRegisterSet NonVolatileRegs =
    LiveRegisterSet(GeneralRegisterSet(Registers::NonVolatileMask),
                    FloatRegisterSet(FloatRegisters::NonVolatileMask));
#endif

#if defined(JS_CODEGEN_MIPS32)
// MIPS uses one extra double slot due to stack alignment for double values.
// See MacroAssembler::PushRegsInMask(RegisterSet set).
static const unsigned FramePushedAfterSave = NonVolatileRegs.gprs().size() * sizeof(intptr_t) +
                                             NonVolatileRegs.fpus().getPushSizeInBytes() +
                                             sizeof(double);
#elif defined(JS_CODEGEN_NONE)
static const unsigned FramePushedAfterSave = 0;
#else
static const unsigned FramePushedAfterSave = NonVolatileRegs.gprs().size() * sizeof(intptr_t)
                                           + NonVolatileRegs.fpus().getPushSizeInBytes();
#endif
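// FramePushedForEntrySP additionally accounts for the 'argv' pointer that
// GenerateEntry pushes right after the non-volatile registers; the throw stub
// below assumes this same layout when unwinding back to the entry frame.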
static const unsigned FramePushedForEntrySP = FramePushedAfterSave + sizeof(void*);

// Generate a stub that enters wasm from a C++ caller via the native ABI.
// The signature of the entry point is AsmJSModule::CodePtr. The exported wasm
// function has an ABI derived from its specific signature, so this function
// must map from the ABI of CodePtr to the export's signature's ABI.
static bool
GenerateEntry(MacroAssembler& masm, AsmJSModule& module, unsigned exportIndex,
              const FuncOffsetVector& funcOffsets)
{
    AsmJSModule::ExportedFunction& exp = module.exportedFunction(exportIndex);
    if (exp.isChangeHeap())
        return true;

    masm.haltingAlign(CodeAlignment);

    AsmJSOffsets offsets;
    offsets.begin = masm.currentOffset();

    // Save the return address if it wasn't already saved by the call insn.
#if defined(JS_CODEGEN_ARM)
    masm.push(lr);
#elif defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
    masm.push(ra);
#elif defined(JS_CODEGEN_X86)
    static const unsigned EntryFrameSize = sizeof(void*);
#endif

    // Save all caller non-volatile registers before we clobber them here and in
    // the asm.js callee (which does not preserve non-volatile registers).
    masm.setFramePushed(0);
    masm.PushRegsInMask(NonVolatileRegs);
    MOZ_ASSERT(masm.framePushed() == FramePushedAfterSave);

    // ARM and MIPS/MIPS64 have a globally-pinned GlobalReg (x64 uses RIP-relative
    // addressing, x86 uses immediates in effective addresses). For the
    // AsmJSGlobalRegBias addition, see Assembler-(mips,arm).h.
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
    masm.movePtr(IntArgReg1, GlobalReg);
    masm.addPtr(Imm32(AsmJSGlobalRegBias), GlobalReg);
#endif

    // ARM, MIPS/MIPS64 and x64 have a globally-pinned HeapReg (x86 uses immediates in
    // effective addresses). Loading the heap register depends on the global
    // register already having been loaded.
    masm.loadAsmJSHeapRegisterFromGlobalData();

    // Put the 'argv' argument into a non-argument/return register so that we
    // can use 'argv' while we fill in the arguments for the asm.js callee.
    // Also, save 'argv' on the stack so that we can recover it after the call.
    // Use a second non-argument/return register as temporary scratch.
    Register argv = ABIArgGenerator::NonArgReturnReg0;
    Register scratch = ABIArgGenerator::NonArgReturnReg1;
#if defined(JS_CODEGEN_X86)
    masm.loadPtr(Address(masm.getStackPointer(), EntryFrameSize + masm.framePushed()), argv);
#else
    masm.movePtr(IntArgReg0, argv);
#endif
    masm.Push(argv);

    // Save the stack pointer, now that the non-volatile registers have been
    // saved, in the activation. We will use it on two paths: normal return and
    // exceptional return. Since loadAsmJSActivation uses GlobalReg, we must do
    // this after loading GlobalReg.
    MOZ_ASSERT(masm.framePushed() == FramePushedForEntrySP);
    masm.loadAsmJSActivation(scratch);
    masm.storeStackPtr(Address(scratch, AsmJSActivation::offsetOfEntrySP()));

    // Dynamically align the stack since ABIStackAlignment is not necessarily
    // AsmJSStackAlignment. We'll use entrySP to recover the original stack
    // pointer on return.
    masm.andToStackPtr(Imm32(~(AsmJSStackAlignment - 1)));
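
    // A sketch of the stack at this point (sp grows down; on x86 the return
    // address comes from the C++ caller's call, on ARM/MIPS it is the pushed
    // lr/ra):
    //   | retaddr | non-volatile regs | argv | alignment padding | <- sp
    //                                        ^-- entrySP
    // with sp now AsmJSStackAlignment-aligned.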

    // Bump the stack for the call.
    masm.reserveStack(AlignBytes(StackArgBytes(exp.sig().args()), AsmJSStackAlignment));

    // Copy parameters out of argv and into the registers/stack-slots specified by
    // the system ABI.
    for (ABIArgValTypeIter iter(exp.sig().args()); !iter.done(); iter++) {
        unsigned argOffset = iter.index() * sizeof(AsmJSModule::EntryArg);
        Address src(argv, argOffset);
        MIRType type = iter.mirType();
        switch (iter->kind()) {
          case ABIArg::GPR:
            masm.load32(src, iter->gpr());
            break;
#ifdef JS_CODEGEN_REGISTER_PAIR
          case ABIArg::GPR_PAIR:
            MOZ_CRASH("AsmJS uses hardfp for function calls.");
            break;
#endif
          case ABIArg::FPU: {
            static_assert(sizeof(AsmJSModule::EntryArg) >= jit::Simd128DataSize,
                          "EntryArg must be big enough to store SIMD values");
            switch (type) {
              case MIRType_Int32x4:
                masm.loadUnalignedInt32x4(src, iter->fpu());
                break;
              case MIRType_Float32x4:
                masm.loadUnalignedFloat32x4(src, iter->fpu());
                break;
              case MIRType_Double:
                masm.loadDouble(src, iter->fpu());
                break;
              case MIRType_Float32:
                masm.loadFloat32(src, iter->fpu());
                break;
              default:
                MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("unexpected FPU type");
                break;
            }
            break;
          }
          case ABIArg::Stack:
            switch (type) {
              case MIRType_Int32:
                masm.load32(src, scratch);
                masm.storePtr(scratch, Address(masm.getStackPointer(), iter->offsetFromArgBase()));
                break;
              case MIRType_Double:
                masm.loadDouble(src, ScratchDoubleReg);
                masm.storeDouble(ScratchDoubleReg, Address(masm.getStackPointer(), iter->offsetFromArgBase()));
                break;
              case MIRType_Float32:
                masm.loadFloat32(src, ScratchFloat32Reg);
                masm.storeFloat32(ScratchFloat32Reg, Address(masm.getStackPointer(), iter->offsetFromArgBase()));
                break;
              case MIRType_Int32x4:
                masm.loadUnalignedInt32x4(src, ScratchSimd128Reg);
                masm.storeAlignedInt32x4(ScratchSimd128Reg,
                                         Address(masm.getStackPointer(), iter->offsetFromArgBase()));
                break;
              case MIRType_Float32x4:
                masm.loadUnalignedFloat32x4(src, ScratchSimd128Reg);
                masm.storeAlignedFloat32x4(ScratchSimd128Reg,
                                           Address(masm.getStackPointer(), iter->offsetFromArgBase()));
                break;
              default:
                MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("unexpected stack arg type");
            }
            break;
        }
    }

    // Call into the real function.
    masm.assertStackAlignment(AsmJSStackAlignment);
    Label target;
    target.bind(funcOffsets[exp.funcIndex()]);
    masm.call(CallSiteDesc(CallSiteDesc::Relative), &target);

    // Recover the stack pointer value before dynamic alignment.
    masm.loadAsmJSActivation(scratch);
    masm.loadStackPtr(Address(scratch, AsmJSActivation::offsetOfEntrySP()));
    masm.setFramePushed(FramePushedForEntrySP);

    // Recover the 'argv' pointer which was saved before aligning the stack.
    masm.Pop(argv);

    // Store the return value in argv[0].
    switch (exp.sig().ret()) {
      case ExprType::Void:
        break;
      case ExprType::I32:
        masm.storeValue(JSVAL_TYPE_INT32, ReturnReg, Address(argv, 0));
        break;
      case ExprType::I64:
        MOZ_CRASH("no int64 in asm.js");
      case ExprType::F32:
        masm.convertFloat32ToDouble(ReturnFloat32Reg, ReturnDoubleReg);
        // Fall through as ReturnDoubleReg now contains a Double.
      case ExprType::F64:
        masm.canonicalizeDouble(ReturnDoubleReg);
        masm.storeDouble(ReturnDoubleReg, Address(argv, 0));
        break;
      case ExprType::I32x4:
        // We don't have control over argv's alignment, so use an unaligned access.
        masm.storeUnalignedInt32x4(ReturnSimd128Reg, Address(argv, 0));
        break;
      case ExprType::F32x4:
        // We don't have control over argv's alignment, so use an unaligned access.
        masm.storeUnalignedFloat32x4(ReturnSimd128Reg, Address(argv, 0));
        break;
    }

    // Restore clobbered non-volatile registers of the caller.
    masm.PopRegsInMask(NonVolatileRegs);
    MOZ_ASSERT(masm.framePushed() == 0);

    masm.move32(Imm32(true), ReturnReg);
    masm.ret();

    if (masm.oom())
        return false;

    exp.initCodeOffset(offsets.begin);
    offsets.end = masm.currentOffset();
    return module.addCodeRange(AsmJSModule::CodeRange::Entry, offsets);
}

// Generate a thunk that updates fp before calling the given builtin so that
// both the builtin and the calling function show up in profiler stacks. (This
// thunk is dynamically patched in when profiling is enabled.) Since the thunk
// pushes an AsmJSFrame on the stack, that means we must rebuild the stack
// frame. Fortunately, these are low-arity functions and arguments are passed
// in registers on everything but x86 anyhow.
//
// NB: Since this thunk is being injected at system ABI callsites, it must
//     preserve the argument registers (going in) and the return register
//     (coming out) and preserve non-volatile registers.
static bool
GenerateBuiltinThunk(MacroAssembler& masm, AsmJSModule& module, Builtin builtin)
{
    MIRTypeVector args;
    switch (builtin) {
      case Builtin::ToInt32:
        MOZ_ALWAYS_TRUE(args.append(MIRType_Int32));
        break;
#if defined(JS_CODEGEN_ARM)
      case Builtin::aeabi_idivmod:
      case Builtin::aeabi_uidivmod:
        MOZ_ALWAYS_TRUE(args.append(MIRType_Int32));
        MOZ_ALWAYS_TRUE(args.append(MIRType_Int32));
        break;
      case Builtin::AtomicCmpXchg:
        MOZ_ALWAYS_TRUE(args.append(MIRType_Int32));
        MOZ_ALWAYS_TRUE(args.append(MIRType_Int32));
        MOZ_ALWAYS_TRUE(args.append(MIRType_Int32));
        MOZ_ALWAYS_TRUE(args.append(MIRType_Int32));
        break;
      case Builtin::AtomicXchg:
      case Builtin::AtomicFetchAdd:
      case Builtin::AtomicFetchSub:
      case Builtin::AtomicFetchAnd:
      case Builtin::AtomicFetchOr:
      case Builtin::AtomicFetchXor:
        MOZ_ALWAYS_TRUE(args.append(MIRType_Int32));
        MOZ_ALWAYS_TRUE(args.append(MIRType_Int32));
        MOZ_ALWAYS_TRUE(args.append(MIRType_Int32));
        break;
#endif
      case Builtin::SinD:
      case Builtin::CosD:
      case Builtin::TanD:
      case Builtin::ASinD:
      case Builtin::ACosD:
      case Builtin::ATanD:
      case Builtin::CeilD:
      case Builtin::FloorD:
      case Builtin::ExpD:
      case Builtin::LogD:
        MOZ_ALWAYS_TRUE(args.append(MIRType_Double));
        break;
      case Builtin::ModD:
      case Builtin::PowD:
      case Builtin::ATan2D:
        MOZ_ALWAYS_TRUE(args.append(MIRType_Double));
        MOZ_ALWAYS_TRUE(args.append(MIRType_Double));
        break;
      case Builtin::CeilF:
      case Builtin::FloorF:
        MOZ_ALWAYS_TRUE(args.append(MIRType_Float32));
        break;
      case Builtin::Limit:
        MOZ_CRASH("Bad builtin");
    }

    MOZ_ASSERT(args.length() <= 4);
    static_assert(MIRTypeVector::InlineLength >= 4, "infallibility of append");

    MOZ_ASSERT(masm.framePushed() == 0);
    uint32_t framePushed = StackDecrementForCall(masm, ABIStackAlignment, args);

    AsmJSProfilingOffsets offsets;
    GenerateAsmJSExitPrologue(masm, framePushed, ExitReason(builtin), &offsets);

    for (ABIArgMIRTypeIter i(args); !i.done(); i++) {
        if (i->kind() != ABIArg::Stack)
            continue;
#if !defined(JS_CODEGEN_ARM)
        unsigned offsetToCallerStackArgs = sizeof(AsmJSFrame) + masm.framePushed();
        Address srcAddr(masm.getStackPointer(), offsetToCallerStackArgs + i->offsetFromArgBase());
        Address dstAddr(masm.getStackPointer(), i->offsetFromArgBase());
        if (i.mirType() == MIRType_Int32 || i.mirType() == MIRType_Float32) {
            masm.load32(srcAddr, ABIArgGenerator::NonArg_VolatileReg);
            masm.store32(ABIArgGenerator::NonArg_VolatileReg, dstAddr);
        } else {
            MOZ_ASSERT(i.mirType() == MIRType_Double);
            masm.loadDouble(srcAddr, ScratchDoubleReg);
            masm.storeDouble(ScratchDoubleReg, dstAddr);
        }
#else
        MOZ_CRASH("Architecture should have enough registers for all builtin calls");
#endif
    }

    AssertStackAlignment(masm, ABIStackAlignment);
    masm.call(BuiltinToImmediate(builtin));

    GenerateAsmJSExitEpilogue(masm, framePushed, ExitReason(builtin), &offsets);

    if (masm.oom())
        return false;

    offsets.end = masm.currentOffset();
    return module.addBuiltinThunkCodeRange(builtin, offsets);
}

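// Box the ABI arguments of an exit call into a contiguous array of Values
// starting at sp + argOffset. Doubles are canonicalized so that an arbitrary
// NaN bit pattern is never stored into a Value; int32 arguments are tagged
// as JSVAL_TYPE_INT32 Values.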
static void
FillArgumentArray(MacroAssembler& masm, const MallocSig::ArgVector& args, unsigned argOffset,
                  unsigned offsetToCallerStackArgs, Register scratch)
{
    for (ABIArgValTypeIter i(args); !i.done(); i++) {
        Address dstAddr(masm.getStackPointer(), argOffset + i.index() * sizeof(Value));
        switch (i->kind()) {
          case ABIArg::GPR:
            masm.storeValue(JSVAL_TYPE_INT32, i->gpr(), dstAddr);
            break;
#ifdef JS_CODEGEN_REGISTER_PAIR
          case ABIArg::GPR_PAIR:
            MOZ_CRASH("AsmJS uses hardfp for function calls.");
            break;
#endif
          case ABIArg::FPU:
            masm.canonicalizeDouble(i->fpu());
            masm.storeDouble(i->fpu(), dstAddr);
            break;
          case ABIArg::Stack:
            if (i.mirType() == MIRType_Int32) {
                Address src(masm.getStackPointer(), offsetToCallerStackArgs + i->offsetFromArgBase());
#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
                masm.load32(src, scratch);
                masm.storeValue(JSVAL_TYPE_INT32, scratch, dstAddr);
#else
                masm.memIntToValue(src, dstAddr);
#endif
            } else {
                MOZ_ASSERT(i.mirType() == MIRType_Double);
                Address src(masm.getStackPointer(), offsetToCallerStackArgs + i->offsetFromArgBase());
                masm.loadDouble(src, ScratchDoubleReg);
                masm.canonicalizeDouble(ScratchDoubleReg);
                masm.storeDouble(ScratchDoubleReg, dstAddr);
            }
            break;
        }
    }
}

// If an FFI detaches its heap (viz., via ArrayBuffer.transfer), it must
// call change-heap to another heap (viz., the new heap returned by transfer)
// before returning to asm.js code. If the application fails to do this (if the
// heap pointer is null), jump to a stub.
static void
CheckForHeapDetachment(MacroAssembler& masm, const AsmJSModule& module, Register scratch,
                       Label* onDetached)
{
    if (!module.hasArrayView())
        return;

    MOZ_ASSERT(int(masm.framePushed()) >= int(ShadowStackSpace));
    AssertStackAlignment(masm, ABIStackAlignment);
#if defined(JS_CODEGEN_X86)
    CodeOffset offset = masm.movlWithPatch(PatchedAbsoluteAddress(), scratch);
    masm.append(AsmJSGlobalAccess(offset, HeapGlobalDataOffset));
    masm.branchTestPtr(Assembler::Zero, scratch, scratch, onDetached);
#else
    masm.branchTestPtr(Assembler::Zero, HeapReg, HeapReg, onDetached);
#endif
}

// Generate a stub that is called via the internal ABI derived from the
// signature of the exit and calls into an appropriate InvokeFromAsmJS_* C++
// function, having boxed all the ABI arguments into a homogeneous Value array.
static bool
GenerateInterpExit(MacroAssembler& masm, AsmJSModule& module, unsigned exitIndex,
                   Label* throwLabel, Label* onDetached)
{
    AsmJSModule::Exit& exit = module.exit(exitIndex);

    masm.setFramePushed(0);

    // Argument types for InvokeFromAsmJS_*:
    static const MIRType typeArray[] = { MIRType_Pointer,   // exitDatum
                                         MIRType_Int32,     // argc
                                         MIRType_Pointer }; // argv
    MIRTypeVector invokeArgTypes;
    MOZ_ALWAYS_TRUE(invokeArgTypes.append(typeArray, ArrayLength(typeArray)));

    // At the point of the call, the stack layout shall be (sp grows to the left):
    //   | stack args | padding | Value argv[] | padding | retaddr | caller stack args |
    // The padding between stack args and argv ensures that argv is aligned. The
    // padding between argv and retaddr ensures that sp is aligned.
    unsigned argOffset = AlignBytes(StackArgBytes(invokeArgTypes), sizeof(double));
    unsigned argBytes = Max<size_t>(1, exit.sig().args().length()) * sizeof(Value);
    unsigned framePushed = StackDecrementForCall(masm, ABIStackAlignment, argOffset + argBytes);

    AsmJSProfilingOffsets offsets;
    GenerateAsmJSExitPrologue(masm, framePushed, ExitReason::Slow, &offsets);

    // Fill the argument array.
    unsigned offsetToCallerStackArgs = sizeof(AsmJSFrame) + masm.framePushed();
    Register scratch = ABIArgGenerator::NonArgReturnReg0;
    FillArgumentArray(masm, exit.sig().args(), argOffset, offsetToCallerStackArgs, scratch);

    // Prepare the arguments for the call to InvokeFromAsmJS_*.
    ABIArgMIRTypeIter i(invokeArgTypes);

    // argument 0: exitIndex
    if (i->kind() == ABIArg::GPR)
        masm.mov(ImmWord(exitIndex), i->gpr());
    else
        masm.store32(Imm32(exitIndex), Address(masm.getStackPointer(), i->offsetFromArgBase()));
    i++;

    // argument 1: argc
    unsigned argc = exit.sig().args().length();
    if (i->kind() == ABIArg::GPR)
        masm.mov(ImmWord(argc), i->gpr());
    else
        masm.store32(Imm32(argc), Address(masm.getStackPointer(), i->offsetFromArgBase()));
    i++;

    // argument 2: argv
    Address argv(masm.getStackPointer(), argOffset);
    if (i->kind() == ABIArg::GPR) {
        masm.computeEffectiveAddress(argv, i->gpr());
    } else {
        masm.computeEffectiveAddress(argv, scratch);
        masm.storePtr(scratch, Address(masm.getStackPointer(), i->offsetFromArgBase()));
    }
    i++;
    MOZ_ASSERT(i.done());

    // Make the call, test whether it succeeded, and extract the return value.
    AssertStackAlignment(masm, ABIStackAlignment);
    switch (exit.sig().ret()) {
      case ExprType::Void:
        masm.call(SymbolicAddress::InvokeFromAsmJS_Ignore);
        masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
        break;
      case ExprType::I32:
        masm.call(SymbolicAddress::InvokeFromAsmJS_ToInt32);
        masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
        masm.unboxInt32(argv, ReturnReg);
        break;
      case ExprType::I64:
        MOZ_CRASH("no int64 in asm.js");
      case ExprType::F32:
        MOZ_CRASH("Float32 shouldn't be returned from an FFI");
      case ExprType::F64:
        masm.call(SymbolicAddress::InvokeFromAsmJS_ToNumber);
        masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
        masm.loadDouble(argv, ReturnDoubleReg);
        break;
      case ExprType::I32x4:
      case ExprType::F32x4:
        MOZ_CRASH("SIMD types shouldn't be returned from an FFI");
    }

    // The heap pointer may have changed during the FFI, so reload it and test
    // for detachment.
    masm.loadAsmJSHeapRegisterFromGlobalData();
    CheckForHeapDetachment(masm, module, ABIArgGenerator::NonReturn_VolatileReg0, onDetached);

    GenerateAsmJSExitEpilogue(masm, framePushed, ExitReason::Slow, &offsets);

    if (masm.oom())
        return false;

    offsets.end = masm.currentOffset();
    exit.initInterpOffset(offsets.begin);
    return module.addCodeRange(AsmJSModule::CodeRange::SlowFFI, offsets);
}

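// Extra stack space reserved beyond the Ion frame on platforms that pin
// GlobalReg, so the Ion exit below can save and restore it across the call
// (Ion code does not preserve non-volatile registers; see step 6 in
// GenerateIonExit).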
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
static const unsigned MaybeSavedGlobalReg = sizeof(void*);
#else
static const unsigned MaybeSavedGlobalReg = 0;
#endif

// Generate a stub that is called via the internal ABI derived from the
// signature of the exit and calls into a compatible Ion-compiled JIT function,
// having boxed all the ABI arguments into the Ion stack frame layout.
static bool
GenerateIonExit(MacroAssembler& masm, AsmJSModule& module, unsigned exitIndex,
                Label* throwLabel, Label* onDetached)
{
    AsmJSModule::Exit& exit = module.exit(exitIndex);

    masm.setFramePushed(0);

    // Ion calls use the following stack layout (sp grows to the left):
    //   | retaddr | descriptor | callee | argc | this | arg1..N |
    // After the Ion frame, the global register (if present) is saved since Ion
    // does not preserve non-volatile regs. Also, unlike most ABIs, Ion requires
    // that sp be JitStackAlignment-aligned *after* pushing the return address.
    static_assert(AsmJSStackAlignment >= JitStackAlignment, "subsumes");
    unsigned sizeOfRetAddr = sizeof(void*);
    unsigned ionFrameBytes = 3 * sizeof(void*) + (1 + exit.sig().args().length()) * sizeof(Value);
    unsigned totalIonBytes = sizeOfRetAddr + ionFrameBytes + MaybeSavedGlobalReg;
    unsigned ionFramePushed = StackDecrementForCall(masm, JitStackAlignment, totalIonBytes) -
                              sizeOfRetAddr;
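
    // Illustrative arithmetic (assuming an x64-like target where
    // sizeof(void*) == sizeof(Value) == 8, sizeof(AsmJSFrame) == 16 and
    // JitStackAlignment == 16): for a 2-argument exit, ionFrameBytes is
    // 3*8 + 3*8 == 48 and totalIonBytes is 8 + 48 + 0 == 56, so the decrement
    // rounds 16 + 56 up to 80 and ionFramePushed is 64 - 8 == 56. Thus
    // sp + sizeOfRetAddr is 16-aligned at the call, as Ion requires.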

    AsmJSProfilingOffsets offsets;
    GenerateAsmJSExitPrologue(masm, ionFramePushed, ExitReason::Jit, &offsets);

    // 1. Descriptor
    size_t argOffset = 0;
    uint32_t descriptor = MakeFrameDescriptor(ionFramePushed, JitFrame_Entry);
    masm.storePtr(ImmWord(uintptr_t(descriptor)), Address(masm.getStackPointer(), argOffset));
    argOffset += sizeof(size_t);

    // 2. Callee
    Register callee = ABIArgGenerator::NonArgReturnReg0;   // live until call
    Register scratch = ABIArgGenerator::NonArgReturnReg1;  // repeatedly clobbered

    // 2.1. Get ExitDatum
    unsigned globalDataOffset = module.exit(exitIndex).globalDataOffset();
#if defined(JS_CODEGEN_X64)
    masm.append(AsmJSGlobalAccess(masm.leaRipRelative(callee), globalDataOffset));
#elif defined(JS_CODEGEN_X86)
    masm.append(AsmJSGlobalAccess(masm.movlWithPatch(Imm32(0), callee), globalDataOffset));
#elif defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
    masm.computeEffectiveAddress(Address(GlobalReg, globalDataOffset - AsmJSGlobalRegBias), callee);
#endif

    // 2.2. Get callee
    masm.loadPtr(Address(callee, offsetof(AsmJSModule::ExitDatum, fun)), callee);

    // 2.3. Save callee
    masm.storePtr(callee, Address(masm.getStackPointer(), argOffset));
    argOffset += sizeof(size_t);

    // 2.4. Load callee executable entry point
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
    masm.loadBaselineOrIonNoArgCheck(callee, callee, nullptr);

    // 3. Argc
    unsigned argc = exit.sig().args().length();
    masm.storePtr(ImmWord(uintptr_t(argc)), Address(masm.getStackPointer(), argOffset));
    argOffset += sizeof(size_t);

    // 4. |this| value
    masm.storeValue(UndefinedValue(), Address(masm.getStackPointer(), argOffset));
    argOffset += sizeof(Value);

    // 5. Fill the arguments
    unsigned offsetToCallerStackArgs = ionFramePushed + sizeof(AsmJSFrame);
    FillArgumentArray(masm, exit.sig().args(), argOffset, offsetToCallerStackArgs, scratch);
    argOffset += exit.sig().args().length() * sizeof(Value);
    MOZ_ASSERT(argOffset == ionFrameBytes);

    // 6. Jit code will clobber all registers, even non-volatiles. GlobalReg and
    //    HeapReg are removed from the general register set for asm.js code, so
    //    these will not have been saved by the caller like all other registers,
    //    so they must be explicitly preserved. Only save GlobalReg since
    //    HeapReg must be reloaded (from global data) after the call since the
    //    heap may change during the FFI call.
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
    static_assert(MaybeSavedGlobalReg == sizeof(void*), "stack frame accounting");
    masm.storePtr(GlobalReg, Address(masm.getStackPointer(), ionFrameBytes));
#endif

    {
        // Enable Activation.
        //
        // This sequence requires four registers, and needs to preserve the 'callee'
        // register, so there are five live registers.
        MOZ_ASSERT(callee == AsmJSIonExitRegCallee);
        Register reg0 = AsmJSIonExitRegE0;
        Register reg1 = AsmJSIonExitRegE1;
        Register reg2 = AsmJSIonExitRegE2;
        Register reg3 = AsmJSIonExitRegE3;

        // The following is inlined:
        //   JSContext* cx = activation->cx();
        //   Activation* act = cx->runtime()->activation();
        //   act.active_ = true;
        //   act.prevJitTop_ = cx->runtime()->jitTop;
        //   act.prevJitJSContext_ = cx->runtime()->jitJSContext;
        //   cx->runtime()->jitJSContext = cx;
        //   act.prevJitActivation_ = cx->runtime()->jitActivation;
        //   cx->runtime()->jitActivation = act;
        //   act.prevProfilingActivation_ = cx->runtime()->profilingActivation;
        //   cx->runtime()->profilingActivation_ = act;
        // On ARM, store8() uses the secondScratchReg (lr) as a temp.
        size_t offsetOfActivation = JSRuntime::offsetOfActivation();
        size_t offsetOfJitTop = offsetof(JSRuntime, jitTop);
        size_t offsetOfJitJSContext = offsetof(JSRuntime, jitJSContext);
        size_t offsetOfJitActivation = offsetof(JSRuntime, jitActivation);
        size_t offsetOfProfilingActivation = JSRuntime::offsetOfProfilingActivation();
        masm.loadAsmJSActivation(reg0);
        masm.loadPtr(Address(reg0, AsmJSActivation::offsetOfContext()), reg3);
        masm.loadPtr(Address(reg3, JSContext::offsetOfRuntime()), reg0);
        masm.loadPtr(Address(reg0, offsetOfActivation), reg1);

        //   act.active_ = true;
        masm.store8(Imm32(1), Address(reg1, JitActivation::offsetOfActiveUint8()));

        //   act.prevJitTop_ = cx->runtime()->jitTop;
        masm.loadPtr(Address(reg0, offsetOfJitTop), reg2);
        masm.storePtr(reg2, Address(reg1, JitActivation::offsetOfPrevJitTop()));

        //   act.prevJitJSContext_ = cx->runtime()->jitJSContext;
        masm.loadPtr(Address(reg0, offsetOfJitJSContext), reg2);
        masm.storePtr(reg2, Address(reg1, JitActivation::offsetOfPrevJitJSContext()));
        //   cx->runtime()->jitJSContext = cx;
        masm.storePtr(reg3, Address(reg0, offsetOfJitJSContext));

        //   act.prevJitActivation_ = cx->runtime()->jitActivation;
        masm.loadPtr(Address(reg0, offsetOfJitActivation), reg2);
        masm.storePtr(reg2, Address(reg1, JitActivation::offsetOfPrevJitActivation()));
        //   cx->runtime()->jitActivation = act;
        masm.storePtr(reg1, Address(reg0, offsetOfJitActivation));

        //   act.prevProfilingActivation_ = cx->runtime()->profilingActivation;
        masm.loadPtr(Address(reg0, offsetOfProfilingActivation), reg2);
        masm.storePtr(reg2, Address(reg1, Activation::offsetOfPrevProfiling()));
        //   cx->runtime()->profilingActivation_ = act;
        masm.storePtr(reg1, Address(reg0, offsetOfProfilingActivation));
    }

    AssertStackAlignment(masm, JitStackAlignment, sizeOfRetAddr);
    masm.callJitNoProfiler(callee);
    AssertStackAlignment(masm, JitStackAlignment, sizeOfRetAddr);

    {
        // Disable Activation.
        //
        // This sequence needs three registers, and must preserve JSReturnReg_Data
        // and JSReturnReg_Type, so there are five live registers.
        MOZ_ASSERT(JSReturnReg_Data == AsmJSIonExitRegReturnData);
        MOZ_ASSERT(JSReturnReg_Type == AsmJSIonExitRegReturnType);
        Register reg0 = AsmJSIonExitRegD0;
        Register reg1 = AsmJSIonExitRegD1;
        Register reg2 = AsmJSIonExitRegD2;

        // The following is inlined:
        //   rt->profilingActivation = prevProfilingActivation_;
        //   rt->activation()->active_ = false;
        //   rt->jitTop = prevJitTop_;
        //   rt->jitJSContext = prevJitJSContext_;
        //   rt->jitActivation = prevJitActivation_;
        // On ARM, store8() uses the secondScratchReg (lr) as a temp.
        size_t offsetOfActivation = JSRuntime::offsetOfActivation();
        size_t offsetOfJitTop = offsetof(JSRuntime, jitTop);
        size_t offsetOfJitJSContext = offsetof(JSRuntime, jitJSContext);
        size_t offsetOfJitActivation = offsetof(JSRuntime, jitActivation);
        size_t offsetOfProfilingActivation = JSRuntime::offsetOfProfilingActivation();

        masm.movePtr(SymbolicAddress::Runtime, reg0);
        masm.loadPtr(Address(reg0, offsetOfActivation), reg1);

        //   rt->jitTop = prevJitTop_;
        masm.loadPtr(Address(reg1, JitActivation::offsetOfPrevJitTop()), reg2);
        masm.storePtr(reg2, Address(reg0, offsetOfJitTop));

        //   rt->profilingActivation = rt->activation()->prevProfiling_;
        masm.loadPtr(Address(reg1, Activation::offsetOfPrevProfiling()), reg2);
        masm.storePtr(reg2, Address(reg0, offsetOfProfilingActivation));

        //   rt->activation()->active_ = false;
        masm.store8(Imm32(0), Address(reg1, JitActivation::offsetOfActiveUint8()));

        //   rt->jitJSContext = prevJitJSContext_;
        masm.loadPtr(Address(reg1, JitActivation::offsetOfPrevJitJSContext()), reg2);
        masm.storePtr(reg2, Address(reg0, offsetOfJitJSContext));

        //   rt->jitActivation = prevJitActivation_;
        masm.loadPtr(Address(reg1, JitActivation::offsetOfPrevJitActivation()), reg2);
        masm.storePtr(reg2, Address(reg0, offsetOfJitActivation));
    }

    // Reload the global register since Ion code can clobber any register.
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
    static_assert(MaybeSavedGlobalReg == sizeof(void*), "stack frame accounting");
    masm.loadPtr(Address(masm.getStackPointer(), ionFrameBytes), GlobalReg);
#endif

    // As explained above, the frame was aligned for Ion such that
    //   (sp + sizeof(void*)) % JitStackAlignment == 0
    // But now we possibly want to call one of several different C++ functions,
    // so subtract the sizeof(void*) so that sp is aligned for an ABI call.
    static_assert(ABIStackAlignment <= JitStackAlignment, "subsumes");
    masm.reserveStack(sizeOfRetAddr);
    unsigned nativeFramePushed = masm.framePushed();
    AssertStackAlignment(masm, ABIStackAlignment);

    masm.branchTestMagic(Assembler::Equal, JSReturnOperand, throwLabel);

    Label oolConvert;
    switch (exit.sig().ret()) {
      case ExprType::Void:
        break;
      case ExprType::I32:
        masm.convertValueToInt32(JSReturnOperand, ReturnDoubleReg, ReturnReg, &oolConvert,
                                 /* -0 check */ false);
        break;
      case ExprType::I64:
        MOZ_CRASH("no int64 in asm.js");
      case ExprType::F32:
        MOZ_CRASH("Float32 shouldn't be returned from an FFI");
      case ExprType::F64:
        masm.convertValueToDouble(JSReturnOperand, ReturnDoubleReg, &oolConvert);
        break;
      case ExprType::I32x4:
      case ExprType::F32x4:
        MOZ_CRASH("SIMD types shouldn't be returned from an FFI");
    }

    Label done;
    masm.bind(&done);

    // The heap pointer has to be reloaded anyway since Ion could have clobbered
    // it. Additionally, the FFI may have detached the heap buffer.
    masm.loadAsmJSHeapRegisterFromGlobalData();
    CheckForHeapDetachment(masm, module, ABIArgGenerator::NonReturn_VolatileReg0, onDetached);

    GenerateAsmJSExitEpilogue(masm, masm.framePushed(), ExitReason::Jit, &offsets);

    if (oolConvert.used()) {
        masm.bind(&oolConvert);
        masm.setFramePushed(nativeFramePushed);

        // Coercion calls use the following stack layout (sp grows to the left):
        //   | args | padding | Value argv[1] | padding | exit AsmJSFrame |
        MIRTypeVector coerceArgTypes;
        JS_ALWAYS_TRUE(coerceArgTypes.append(MIRType_Pointer));
        unsigned offsetToCoerceArgv = AlignBytes(StackArgBytes(coerceArgTypes), sizeof(Value));
        MOZ_ASSERT(nativeFramePushed >= offsetToCoerceArgv + sizeof(Value));
        AssertStackAlignment(masm, ABIStackAlignment);

        // Store the return value into argv[0].
        masm.storeValue(JSReturnOperand, Address(masm.getStackPointer(), offsetToCoerceArgv));

        // argument 0: argv
        ABIArgMIRTypeIter i(coerceArgTypes);
        Address argv(masm.getStackPointer(), offsetToCoerceArgv);
        if (i->kind() == ABIArg::GPR) {
            masm.computeEffectiveAddress(argv, i->gpr());
        } else {
            masm.computeEffectiveAddress(argv, scratch);
            masm.storePtr(scratch, Address(masm.getStackPointer(), i->offsetFromArgBase()));
        }
        i++;
        MOZ_ASSERT(i.done());

        // Call the coercion function.
        AssertStackAlignment(masm, ABIStackAlignment);
        switch (exit.sig().ret()) {
          case ExprType::I32:
            masm.call(SymbolicAddress::CoerceInPlace_ToInt32);
            masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
            masm.unboxInt32(Address(masm.getStackPointer(), offsetToCoerceArgv), ReturnReg);
            break;
          case ExprType::F64:
            masm.call(SymbolicAddress::CoerceInPlace_ToNumber);
            masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
            masm.loadDouble(Address(masm.getStackPointer(), offsetToCoerceArgv), ReturnDoubleReg);
            break;
          default:
            MOZ_CRASH("Unsupported convert type");
        }

        masm.jump(&done);
        masm.setFramePushed(0);
    }

    MOZ_ASSERT(masm.framePushed() == 0);

    if (masm.oom())
        return false;

    offsets.end = masm.currentOffset();
    exit.initJitOffset(offsets.begin);
    return module.addCodeRange(AsmJSModule::CodeRange::JitFFI, offsets);
}

// Generate a stub that is called when returning from an exit where the module's
// buffer has been detached. This stub first calls a C++ function to report an
// exception and then jumps to the generic throw stub to pop everything off the
// stack.
static bool
GenerateOnDetachedExit(MacroAssembler& masm, AsmJSModule& module, Label* onDetached,
                       Label* throwLabel)
{
    masm.haltingAlign(CodeAlignment);
    AsmJSOffsets offsets;
    offsets.begin = masm.currentOffset();
    masm.bind(onDetached);

    // For now, OnDetached always throws (see OnDetached comment).
    masm.assertStackAlignment(ABIStackAlignment);
    masm.call(SymbolicAddress::OnDetached);
    masm.jump(throwLabel);

    if (masm.oom())
        return false;

    offsets.end = masm.currentOffset();
    return module.addCodeRange(AsmJSModule::CodeRange::Inline, offsets);
}

// Generate a stub that is called immediately after the prologue when there is a
// stack overflow. This stub calls a C++ function to report the error and then
// jumps to the throw stub to pop the activation.
static bool
GenerateStackOverflowExit(MacroAssembler& masm, AsmJSModule& module, Label* throwLabel)
{
    masm.haltingAlign(CodeAlignment);
    AsmJSOffsets offsets;
    offsets.begin = masm.currentOffset();
    masm.bind(masm.asmStackOverflowLabel());

    // If we reach here via the non-profiling prologue, AsmJSActivation::fp has
    // not been updated. To enable stack unwinding from C++, store to it now. If
    // we reached here via the profiling prologue, we'll just store the same
    // value again. Do not update AsmJSFrame::callerFP as it is not necessary in
    // the non-profiling case (there is no return path from this point) and, in
    // the profiling case, it is already correct.
    Register activation = ABIArgGenerator::NonArgReturnReg0;
    masm.loadAsmJSActivation(activation);
    masm.storePtr(masm.getStackPointer(), Address(activation, AsmJSActivation::offsetOfFP()));

    // Prepare the stack for calling C++.
    if (uint32_t d = StackDecrementForCall(ABIStackAlignment, sizeof(AsmJSFrame), ShadowStackSpace))
        masm.subFromStackPtr(Imm32(d));

    // No need to restore the stack; the throw stub pops everything.
    masm.assertStackAlignment(ABIStackAlignment);
    masm.call(SymbolicAddress::ReportOverRecursed);
    masm.jump(throwLabel);

    if (masm.oom())
        return false;

    offsets.end = masm.currentOffset();
    return module.addCodeRange(AsmJSModule::CodeRange::Inline, offsets);
}

// Generate a stub that is called from the synchronous, inline interrupt checks
// when the interrupt flag is set. This stub calls a C++ function that handles
// the interrupt and returns whether execution may continue.
static bool
GenerateSyncInterruptExit(MacroAssembler& masm, AsmJSModule& module, Label* throwLabel)
{
    masm.setFramePushed(0);
    unsigned framePushed = StackDecrementForCall(masm, ABIStackAlignment, ShadowStackSpace);

    AsmJSProfilingOffsets offsets;
    GenerateAsmJSExitPrologue(masm, framePushed, ExitReason::Interrupt, &offsets,
                              masm.asmSyncInterruptLabel());

    AssertStackAlignment(masm, ABIStackAlignment);
    masm.call(SymbolicAddress::HandleExecutionInterrupt);
    masm.branchIfFalseBool(ReturnReg, throwLabel);

    GenerateAsmJSExitEpilogue(masm, framePushed, ExitReason::Interrupt, &offsets);

    if (masm.oom())
        return false;

    offsets.end = masm.currentOffset();
    return module.addCodeRange(AsmJSModule::CodeRange::Interrupt, offsets);
}

// Generate a stub that is jumped to when an imprecise conversion error occurs.
// This stub calls a C++ function to report the error and then jumps to the
// throw stub to pop the activation.
static bool
GenerateConversionErrorExit(MacroAssembler& masm, AsmJSModule& module, Label* throwLabel)
{
    masm.haltingAlign(CodeAlignment);
    AsmJSOffsets offsets;
    offsets.begin = masm.currentOffset();
    masm.bind(masm.asmOnConversionErrorLabel());

    // sp can be anything at this point, so ensure it is aligned when calling
    // into C++. We unconditionally jump to throw so don't worry about restoring sp.
    masm.andToStackPtr(Imm32(~(ABIStackAlignment - 1)));

    // OnImpreciseConversion always throws.
    masm.assertStackAlignment(ABIStackAlignment);
    masm.call(SymbolicAddress::OnImpreciseConversion);
    masm.jump(throwLabel);

    if (masm.oom())
        return false;

    offsets.end = masm.currentOffset();
    return module.addCodeRange(AsmJSModule::CodeRange::Inline, offsets);
}

// Generate a stub that is jumped to from an out-of-bounds heap access when
// there are throwing semantics. This stub calls a C++ function to report an
// error and then jumps to the throw stub to pop the activation.
static bool
GenerateOutOfBoundsExit(MacroAssembler& masm, AsmJSModule& module, Label* throwLabel)
{
    masm.haltingAlign(CodeAlignment);
    AsmJSOffsets offsets;
    offsets.begin = masm.currentOffset();
    masm.bind(masm.asmOnOutOfBoundsLabel());

    // sp can be anything at this point, so ensure it is aligned when calling
    // into C++. We unconditionally jump to throw so don't worry about restoring sp.
    masm.andToStackPtr(Imm32(~(ABIStackAlignment - 1)));

    // OnOutOfBounds always throws.
    masm.assertStackAlignment(ABIStackAlignment);
    masm.call(SymbolicAddress::OnOutOfBounds);
    masm.jump(throwLabel);

    if (masm.oom())
        return false;

    offsets.end = masm.currentOffset();
    module.setOnOutOfBoundsExitOffset(offsets.begin);
    return module.addCodeRange(AsmJSModule::CodeRange::Inline, offsets);
}

static const LiveRegisterSet AllRegsExceptSP(
    GeneralRegisterSet(Registers::AllMask&
                       ~(uint32_t(1) << Registers::StackPointer)),
    FloatRegisterSet(FloatRegisters::AllMask));

// The async interrupt-callback exit is called from arbitrarily-interrupted asm.js
// code. That means we must first save *all* registers and restore *all*
// registers (except the stack pointer) when we resume. The address to resume to
// (assuming that js::HandleExecutionInterrupt doesn't indicate that the
// execution should be aborted) is stored in AsmJSActivation::resumePC_.
// Unfortunately, loading this requires a scratch register which we don't have
// after restoring all registers. To hack around this, push the resumePC on the
// stack so that it can be popped directly into PC.
static bool
GenerateAsyncInterruptExit(MacroAssembler& masm, AsmJSModule& module, Label* throwLabel)
{
    masm.haltingAlign(CodeAlignment);
    AsmJSOffsets offsets;
    offsets.begin = masm.currentOffset();

#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
    // Be very careful here not to perturb the machine state before saving it
    // to the stack. In particular, add/sub instructions may set conditions in
    // the flags register.
    masm.push(Imm32(0));            // space for resumePC
    masm.pushFlags();               // after this we are safe to use sub
    masm.setFramePushed(0);         // set to zero so we can use masm.framePushed() below
    masm.PushRegsInMask(AllRegsExceptSP); // save all GP/FP registers (except SP)

    Register scratch = ABIArgGenerator::NonArgReturnReg0;

    // Store resumePC into the reserved space.
    masm.loadAsmJSActivation(scratch);
    masm.loadPtr(Address(scratch, AsmJSActivation::offsetOfResumePC()), scratch);
    masm.storePtr(scratch, Address(masm.getStackPointer(), masm.framePushed() + sizeof(void*)));

    // We know that StackPointer is word-aligned, but not necessarily
    // stack-aligned, so we need to align it dynamically.
    masm.moveStackPtrTo(ABIArgGenerator::NonVolatileReg);
    masm.andToStackPtr(Imm32(~(ABIStackAlignment - 1)));
    if (ShadowStackSpace)
        masm.subFromStackPtr(Imm32(ShadowStackSpace));

    masm.assertStackAlignment(ABIStackAlignment);
    masm.call(SymbolicAddress::HandleExecutionInterrupt);

    masm.branchIfFalseBool(ReturnReg, throwLabel);

    // Restore the StackPointer to its position before the call.
    masm.moveToStackPtr(ABIArgGenerator::NonVolatileReg);

    // Restore the machine state to before the interrupt.
    masm.PopRegsInMask(AllRegsExceptSP); // restore all GP/FP registers (except SP)
    masm.popFlags();              // after this, nothing that sets conditions
    masm.ret();                   // pop resumePC into PC
#elif defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
    // Reserve space to store resumePC.
    masm.subFromStackPtr(Imm32(sizeof(intptr_t)));
    // Set to zero so we can use masm.framePushed() below.
    masm.setFramePushed(0);
    // When this platform supports SIMD extensions, we'll need to push high lanes
    // of SIMD registers as well.
    JS_STATIC_ASSERT(!SupportsSimd);
    // Save all registers except sp. After this, the stack is aligned.
    masm.PushRegsInMask(AllRegsExceptSP);

    // Save the stack pointer in a non-volatile register.
    masm.moveStackPtrTo(s0);
    // Align the stack.
    masm.ma_and(StackPointer, StackPointer, Imm32(~(ABIStackAlignment - 1)));

    // Store resumePC into the reserved space.
    masm.loadAsmJSActivation(IntArgReg0);
    masm.loadPtr(Address(IntArgReg0, AsmJSActivation::offsetOfResumePC()), IntArgReg1);
    masm.storePtr(IntArgReg1, Address(s0, masm.framePushed()));

    // The MIPS ABI requires reserving stack space for registers $a0 to $a3.
    masm.subFromStackPtr(Imm32(4 * sizeof(intptr_t)));

    masm.assertStackAlignment(ABIStackAlignment);
    masm.call(SymbolicAddress::HandleExecutionInterrupt);

    masm.addToStackPtr(Imm32(4 * sizeof(intptr_t)));

    masm.branchIfFalseBool(ReturnReg, throwLabel);

    // Restore the stack pointer to its position before the call.
    masm.moveToStackPtr(s0);
    masm.PopRegsInMask(AllRegsExceptSP);

    // Pop resumePC into PC. Clobber HeapReg to make the jump and restore it
    // in the jump delay slot.
    masm.pop(HeapReg);
    masm.as_jr(HeapReg);
    masm.loadAsmJSHeapRegisterFromGlobalData();
#elif defined(JS_CODEGEN_ARM)
    masm.setFramePushed(0);         // set to zero so we can use masm.framePushed() below

    // Save all GPRs, except the stack pointer.
    masm.PushRegsInMask(LiveRegisterSet(
                            GeneralRegisterSet(Registers::AllMask & ~(1<<Registers::sp)),
                            FloatRegisterSet(uint32_t(0))));

    // Save both the APSR and FPSCR in non-volatile registers.
    masm.as_mrs(r4);
    masm.as_vmrs(r5);
    // Save the stack pointer in a non-volatile register.
    masm.mov(sp, r6);
    // Align the stack.
    masm.ma_and(Imm32(~7), sp, sp);

    // Store resumePC into the return PC stack slot.
    masm.loadAsmJSActivation(IntArgReg0);
    masm.loadPtr(Address(IntArgReg0, AsmJSActivation::offsetOfResumePC()), IntArgReg1);
    masm.storePtr(IntArgReg1, Address(r6, 14 * sizeof(uint32_t*)));

    // When this platform supports SIMD extensions, we'll need to push and pop
    // high lanes of SIMD registers as well.

    // Save all FP registers.
    JS_STATIC_ASSERT(!SupportsSimd);
    masm.PushRegsInMask(LiveRegisterSet(GeneralRegisterSet(0),
                                        FloatRegisterSet(FloatRegisters::AllDoubleMask)));

    masm.assertStackAlignment(ABIStackAlignment);
    masm.call(SymbolicAddress::HandleExecutionInterrupt);

    masm.branchIfFalseBool(ReturnReg, throwLabel);

    // Restore the machine state to before the interrupt. This will set the pc!

    // Restore all FP registers.
    masm.PopRegsInMask(LiveRegisterSet(GeneralRegisterSet(0),
                                       FloatRegisterSet(FloatRegisters::AllDoubleMask)));
    masm.mov(r6, sp);
    masm.as_vmsr(r5);
    masm.as_msr(r4);
    // Restore all GP registers.
    masm.startDataTransferM(IsLoad, sp, IA, WriteBack);
    masm.transferReg(r0);
    masm.transferReg(r1);
    masm.transferReg(r2);
    masm.transferReg(r3);
    masm.transferReg(r4);
    masm.transferReg(r5);
    masm.transferReg(r6);
    masm.transferReg(r7);
    masm.transferReg(r8);
    masm.transferReg(r9);
    masm.transferReg(r10);
    masm.transferReg(r11);
    masm.transferReg(r12);
    masm.transferReg(lr);
    masm.finishDataTransfer();
    masm.ret();
#elif defined(JS_CODEGEN_ARM64)
    MOZ_CRASH();
#elif defined (JS_CODEGEN_NONE)
    MOZ_CRASH();
#else
# error "Unknown architecture!"
#endif

    if (masm.oom())
        return false;

    offsets.end = masm.currentOffset();
    module.setAsyncInterruptOffset(offsets.begin);
    return module.addCodeRange(AsmJSModule::CodeRange::Inline, offsets);
}

// If an exception is thrown, simply pop all frames (since asm.js does not
// contain try/catch). To do this:
//  1. Restore 'sp' to its value right after the PushRegsInMask in GenerateEntry.
//  2. PopRegsInMask to restore the caller's non-volatile registers.
//  3. Return (to CallAsmJS).
static bool
GenerateThrowStub(MacroAssembler& masm, AsmJSModule& module, Label* throwLabel)
{
    masm.haltingAlign(CodeAlignment);
    AsmJSOffsets offsets;
    offsets.begin = masm.currentOffset();
    masm.bind(throwLabel);

    // We are about to pop all frames in this AsmJSActivation. Set fp to null to
    // maintain the invariant that fp is either null or pointing to a valid
    // frame.
    Register scratch = ABIArgGenerator::NonArgReturnReg0;
    masm.loadAsmJSActivation(scratch);
    masm.storePtr(ImmWord(0), Address(scratch, AsmJSActivation::offsetOfFP()));

    masm.setFramePushed(FramePushedForEntrySP);
    masm.loadStackPtr(Address(scratch, AsmJSActivation::offsetOfEntrySP()));
    masm.Pop(scratch);
    masm.PopRegsInMask(NonVolatileRegs);
    MOZ_ASSERT(masm.framePushed() == 0);

    masm.mov(ImmWord(0), ReturnReg);
    masm.ret();

    if (masm.oom())
        return false;

    offsets.end = masm.currentOffset();
    return module.addCodeRange(AsmJSModule::CodeRange::Inline, offsets);
}

bool
wasm::GenerateStubs(MacroAssembler& masm, AsmJSModule& module, const FuncOffsetVector& funcOffsets)
{
    for (unsigned i = 0; i < module.numExportedFunctions(); i++) {
        if (!GenerateEntry(masm, module, i, funcOffsets))
            return false;
    }

    for (auto builtin : MakeEnumeratedRange(Builtin::Limit)) {
        if (!GenerateBuiltinThunk(masm, module, builtin))
            return false;
    }

    Label onThrow;

    {
        Label onDetached;

        for (size_t i = 0; i < module.numExits(); i++) {
            if (!GenerateInterpExit(masm, module, i, &onThrow, &onDetached))
                return false;
            if (!GenerateIonExit(masm, module, i, &onThrow, &onDetached))
                return false;
        }

        if (onDetached.used()) {
            if (!GenerateOnDetachedExit(masm, module, &onDetached, &onThrow))
                return false;
        }
    }

    if (masm.asmStackOverflowLabel()->used()) {
        if (!GenerateStackOverflowExit(masm, module, &onThrow))
            return false;
    }

    if (masm.asmSyncInterruptLabel()->used()) {
        if (!GenerateSyncInterruptExit(masm, module, &onThrow))
            return false;
    }

    if (masm.asmOnConversionErrorLabel()->used()) {
        if (!GenerateConversionErrorExit(masm, module, &onThrow))
            return false;
    }

    // Generate unconditionally: the out-of-bounds exit may be used later even
    // if signal handling isn't used for out-of-bounds at the moment.
    if (!GenerateOutOfBoundsExit(masm, module, &onThrow))
        return false;

    // Generate unconditionally: the async interrupt may be taken at any time.
    if (!GenerateAsyncInterruptExit(masm, module, &onThrow))
        return false;

    if (onThrow.used()) {
        if (!GenerateThrowStub(masm, module, &onThrow))
            return false;
    }

    return true;
}