1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2  * vim: set ts=8 sts=4 et sw=4 tw=99:
3  * This Source Code Form is subject to the terms of the Mozilla Public
4  * License, v. 2.0. If a copy of the MPL was not distributed with this
5  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 
7 #include "jit/Bailouts.h"
8 #include "jit/JitCompartment.h"
9 #include "jit/JitFrames.h"
10 #include "jit/Linker.h"
11 #ifdef JS_ION_PERF
12 # include "jit/PerfSpewer.h"
13 #endif
14 #include "jit/arm64/SharedICHelpers-arm64.h"
15 #include "jit/VMFunctions.h"
16 
17 #include "jit/MacroAssembler-inl.h"
18 
19 using namespace js;
20 using namespace js::jit;
21 
// All registers to save and restore. This includes the stack pointer, since we
// use the ability to reference register values on the stack by index.
// NOTE(review): bits 28-31 are masked out of the general-register set; these
// presumably correspond to sp/xzr (31), lr (30), fp (29), and the pseudo
// stack pointer (28) — confirm against the platform's Registers definition.
static const LiveRegisterSet AllRegs =
    LiveRegisterSet(GeneralRegisterSet(Registers::AllMask & ~(1 << 31 | 1 << 30 | 1 << 29| 1 << 28)),
                FloatRegisterSet(FloatRegisters::AllMask));
27 
28 /* This method generates a trampoline on ARM64 for a c++ function with
29  * the following signature:
30  *   bool blah(void* code, int argc, Value* argv, JSObject* scopeChain, Value* vp)
31  *   ...using standard AArch64 calling convention
32  */
33 JitCode*
generateEnterJIT(JSContext * cx,EnterJitType type)34 JitRuntime::generateEnterJIT(JSContext* cx, EnterJitType type)
35 {
36     MacroAssembler masm(cx);
37 
38     const Register reg_code      = IntArgReg0; // EnterJitData::jitcode.
39     const Register reg_argc      = IntArgReg1; // EnterJitData::maxArgc.
40     const Register reg_argv      = IntArgReg2; // EnterJitData::maxArgv.
41     const Register reg_osrFrame  = IntArgReg3; // EnterJitData::osrFrame.
42     const Register reg_callee    = IntArgReg4; // EnterJitData::calleeToken.
43     const Register reg_scope     = IntArgReg5; // EnterJitData::scopeChain.
44     const Register reg_osrNStack = IntArgReg6; // EnterJitData::osrNumStackValues.
45     const Register reg_vp        = IntArgReg7; // Address of EnterJitData::result.
46 
47     MOZ_ASSERT(OsrFrameReg == IntArgReg3);
48 
49     // During the pushes below, use the normal stack pointer.
50     masm.SetStackPointer64(sp);
51 
52     // Save old frame pointer and return address; set new frame pointer.
53     masm.push(r29, r30);
54     masm.moveStackPtrTo(r29);
55 
56     // Save callee-save integer registers.
57     // Also save x7 (reg_vp) and x30 (lr), for use later.
58     masm.push(r19, r20, r21, r22);
59     masm.push(r23, r24, r25, r26);
60     masm.push(r27, r28, r7,  r30);
61 
62     // Save callee-save floating-point registers.
63     // AArch64 ABI specifies that only the lower 64 bits must be saved.
64     masm.push(d8,  d9,  d10, d11);
65     masm.push(d12, d13, d14, d15);
66 
67 #ifdef DEBUG
68     // Emit stack canaries.
69     masm.movePtr(ImmWord(0xdeadd00d), r23);
70     masm.movePtr(ImmWord(0xdeadd11d), r24);
71     masm.push(r23, r24);
72 #endif
73 
74     // Common code below attempts to push single registers at a time,
75     // which breaks the stack pointer's 16-byte alignment requirement.
76     // Note that movePtr() is invalid because StackPointer is treated as xzr.
77     //
78     // FIXME: After testing, this entire function should be rewritten to not
79     // use the PseudoStackPointer: since the amount of data pushed is precalculated,
80     // we can just allocate the whole frame header at once and index off sp.
81     // This will save a significant number of instructions where Push() updates sp.
82     masm.Mov(PseudoStackPointer64, sp);
83     masm.SetStackPointer64(PseudoStackPointer64);
84 
85     // Save the stack pointer at this point for Baseline OSR.
86     masm.moveStackPtrTo(BaselineFrameReg);
87     // Remember stack depth without padding and arguments.
88     masm.moveStackPtrTo(r19);
89 
90     // If constructing, include newTarget in argument vector.
91     {
92         Label noNewTarget;
93         Imm32 constructingToken(CalleeToken_FunctionConstructing);
94         masm.branchTest32(Assembler::Zero, reg_callee, constructingToken, &noNewTarget);
95         masm.add32(Imm32(1), reg_argc);
96         masm.bind(&noNewTarget);
97     }
98 
99     // JitFrameLayout is as follows (higher is higher in memory):
100     //  N*8  - [ JS argument vector ] (base 16-byte aligned)
101     //  8    - numActualArgs
102     //  8    - calleeToken (16-byte aligned)
103     //  8    - frameDescriptor
104     //  8    - returnAddress (16-byte aligned, pushed by callee)
105 
106     // Push the argument vector onto the stack.
107     // WARNING: destructively modifies reg_argv
108     {
109         vixl::UseScratchRegisterScope temps(&masm.asVIXL());
110 
111         const ARMRegister tmp_argc = temps.AcquireX();
112         const ARMRegister tmp_sp = temps.AcquireX();
113 
114         Label noArguments;
115         Label loopHead;
116 
117         masm.movePtr(reg_argc, tmp_argc.asUnsized());
118 
119         // sp -= 8
120         // Since we're using PostIndex Str below, this is necessary to avoid overwriting
121         // the SPS mark pushed above.
122         masm.subFromStackPtr(Imm32(8));
123 
124         // sp -= 8 * argc
125         masm.Sub(PseudoStackPointer64, PseudoStackPointer64, Operand(tmp_argc, vixl::SXTX, 3));
126 
127         // Give sp 16-byte alignment and sync stack pointers.
128         masm.andToStackPtr(Imm32(~0xff));
129         masm.moveStackPtrTo(tmp_sp.asUnsized());
130 
131         masm.branchTestPtr(Assembler::Zero, reg_argc, reg_argc, &noArguments);
132 
133         // Begin argument-pushing loop.
134         // This could be optimized using Ldp and Stp.
135         {
136             masm.bind(&loopHead);
137 
138             // Load an argument from argv, then increment argv by 8.
139             masm.Ldr(x24, MemOperand(ARMRegister(reg_argv, 64), Operand(8), vixl::PostIndex));
140 
141             // Store the argument to tmp_sp, then increment tmp_sp by 8.
142             masm.Str(x24, MemOperand(tmp_sp, Operand(8), vixl::PostIndex));
143 
144             // Set the condition codes for |cmp tmp_argc, 2| (using the old value).
145             masm.Subs(tmp_argc, tmp_argc, Operand(1));
146 
147             // Branch if arguments remain.
148             masm.B(&loopHead, vixl::Condition::ge);
149         }
150 
151         masm.bind(&noArguments);
152     }
153     masm.checkStackAlignment();
154 
155     // Push the number of actual arguments and the calleeToken.
156     // The result address is used to store the actual number of arguments
157     // without adding an argument to EnterJIT.
158     masm.unboxInt32(Address(reg_vp, 0x0), ip0);
159     masm.push(ip0, reg_callee);
160     masm.checkStackAlignment();
161 
162     // Calculate the number of bytes pushed so far.
163     masm.subStackPtrFrom(r19);
164 
165     // Push the frameDescriptor.
166     masm.makeFrameDescriptor(r19, JitFrame_Entry);
167     masm.Push(r19);
168 
169     Label osrReturnPoint;
170     if (type == EnterJitBaseline) {
171         // Check for OSR.
172         Label notOsr;
173         masm.branchTestPtr(Assembler::Zero, OsrFrameReg, OsrFrameReg, &notOsr);
174 
175         // Push return address and previous frame pointer.
176         masm.Adr(ScratchReg2_64, &osrReturnPoint);
177         masm.push(ScratchReg2, BaselineFrameReg);
178 
179         // Reserve frame.
180         masm.subFromStackPtr(Imm32(BaselineFrame::Size()));
181         masm.moveStackPtrTo(BaselineFrameReg);
182 
183         // Reserve space for locals and stack values.
184         masm.Lsl(w19, ARMRegister(reg_osrNStack, 32), 3); // w19 = num_stack_values * sizeof(Value).
185         masm.subFromStackPtr(r19);
186 
187         // Enter exit frame.
188         masm.addPtr(Imm32(BaselineFrame::Size() + BaselineFrame::FramePointerOffset), r19);
189         masm.makeFrameDescriptor(r19, JitFrame_BaselineJS);
190         masm.asVIXL().Push(x19, xzr); // Push xzr for a fake return address.
191         // No GC things to mark: push a bare token.
192         masm.enterFakeExitFrame(ExitFrameLayoutBareToken);
193 
194         masm.push(BaselineFrameReg, reg_code);
195 
196         // Initialize the frame, including filling in the slots.
197         masm.setupUnalignedABICall(r19);
198         masm.passABIArg(BaselineFrameReg); // BaselineFrame.
199         masm.passABIArg(reg_osrFrame); // InterpreterFrame.
200         masm.passABIArg(reg_osrNStack);
201         masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, jit::InitBaselineFrameForOsr));
202 
203         masm.pop(r19, BaselineFrameReg);
204         MOZ_ASSERT(r19 != ReturnReg);
205 
206         masm.addToStackPtr(Imm32(ExitFrameLayout::SizeWithFooter()));
207         masm.addPtr(Imm32(BaselineFrame::Size()), BaselineFrameReg);
208 
209         Label error;
210         masm.branchIfFalseBool(ReturnReg, &error);
211 
212         masm.jump(r19);
213 
214         // OOM: load error value, discard return address and previous frame
215         // pointer, and return.
216         masm.bind(&error);
217         masm.Add(masm.GetStackPointer64(), BaselineFrameReg64, Operand(2 * sizeof(uintptr_t)));
218         masm.syncStackPtr();
219         masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
220         masm.B(&osrReturnPoint);
221 
222         masm.bind(&notOsr);
223         masm.movePtr(reg_scope, R1_);
224     }
225 
226     // Call function.
227     // Since AArch64 doesn't have the pc register available, the callee must push lr.
228     masm.callJitNoProfiler(reg_code);
229 
230     // Baseline OSR will return here.
231     if (type == EnterJitBaseline)
232         masm.bind(&osrReturnPoint);
233 
234     // Return back to SP.
235     masm.Pop(r19);
236     masm.Add(masm.GetStackPointer64(), masm.GetStackPointer64(),
237              Operand(x19, vixl::LSR, FRAMESIZE_SHIFT));
238     masm.syncStackPtr();
239     masm.SetStackPointer64(sp);
240 
241 #ifdef DEBUG
242     // Check that canaries placed on function entry are still present.
243     masm.pop(r24, r23);
244     Label x23OK, x24OK;
245 
246     masm.branchPtr(Assembler::Equal, r23, ImmWord(0xdeadd00d), &x23OK);
247     masm.breakpoint();
248     masm.bind(&x23OK);
249 
250     masm.branchPtr(Assembler::Equal, r24, ImmWord(0xdeadd11d), &x24OK);
251     masm.breakpoint();
252     masm.bind(&x24OK);
253 #endif
254 
255     // Restore callee-save floating-point registers.
256     masm.pop(d15, d14, d13, d12);
257     masm.pop(d11, d10,  d9,  d8);
258 
259     // Restore callee-save integer registers.
260     // Also restore x7 (reg_vp) and x30 (lr).
261     masm.pop(r30, r7,  r28, r27);
262     masm.pop(r26, r25, r24, r23);
263     masm.pop(r22, r21, r20, r19);
264 
265     // Store return value (in JSReturnReg = x2 to just-popped reg_vp).
266     masm.storeValue(JSReturnOperand, Address(reg_vp, 0));
267 
268     // Restore old frame pointer.
269     masm.pop(r30, r29);
270 
271     // Return using the value popped into x30.
272     masm.abiret();
273 
274     Linker linker(masm);
275     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
276 
277 #ifdef JS_ION_PERF
278     writePerfSpewerJitCodeProfile(code, "EnterJIT");
279 #endif
280 
281     return code;
282 }
283 
// Generates the invalidation thunk: spills the full machine state so that
// InvalidationBailout() can inspect register values by index, calls it with
// two stack-allocated outparams, then unwinds the invalidated frame and
// jumps to the shared bailout tail.
JitCode*
JitRuntime::generateInvalidator(JSContext* cx)
{
    MacroAssembler masm;

    masm.push(r0, r1, r2, r3);

    masm.PushRegsInMask(AllRegs);
    masm.moveStackPtrTo(r0);

    // Reserve two slots below the register snapshot: x1 points at the upper
    // one, x2 at the lower; sp is moved down to cover both.
    // NOTE(review): argument roles assumed to match
    // InvalidationBailout(stack, frameSizeOut, bailoutInfoOut) — confirm in
    // Bailouts.h.
    masm.Sub(x1, masm.GetStackPointer64(), Operand(sizeof(size_t)));
    masm.Sub(x2, masm.GetStackPointer64(), Operand(sizeof(size_t) + sizeof(void*)));
    masm.moveToStackPtr(r2);

    masm.setupUnalignedABICall(r10);
    masm.passABIArg(r0);
    masm.passABIArg(r1);
    masm.passABIArg(r2);

    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, InvalidationBailout));

    // Retrieve the two outparam slots reserved above.
    masm.pop(r2, r1);

    // Discard the invalidated frame: x1 holds its size (written by the VM
    // call), plus the fixed-size InvalidationBailoutStack snapshot.
    masm.Add(masm.GetStackPointer64(), masm.GetStackPointer64(), x1);
    masm.Add(masm.GetStackPointer64(), masm.GetStackPointer64(),
             Operand(sizeof(InvalidationBailoutStack)));
    masm.syncStackPtr();

    // Jump to the shared bailout tail with the BailoutInfo in r2.
    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
    masm.branch(bailoutTail);

    Linker linker(masm);
    return linker.newCode<NoGC>(cx, OTHER_CODE);
}
318 
// Generates the arguments rectifier: the trampoline used when a function is
// called with fewer actual arguments than its formals. It pads the missing
// formals with |undefined|, copies the actuals (and |this|, and new.target
// when constructing), builds a rectifier frame, and re-enters the callee.
// On success, |*returnAddrOut| (if non-null) receives the address following
// the generated call instruction.
JitCode*
JitRuntime::generateArgumentsRectifier(JSContext* cx, void** returnAddrOut)
{
    MacroAssembler masm;

    // Save the return address for later.
    masm.push(lr);

    // Load the information that the rectifier needs from the stack.
    masm.Ldr(w0, MemOperand(masm.GetStackPointer64(), RectifierFrameLayout::offsetOfNumActualArgs()));
    masm.Ldr(x1, MemOperand(masm.GetStackPointer64(), RectifierFrameLayout::offsetOfCalleeToken()));

    // Extract a JSFunction pointer from the callee token and keep the
    // intermediary to avoid later recalculation.
    masm.And(x5, x1, Operand(CalleeTokenMask));

    // Get the arguments from the function object.
    masm.Ldrh(x6, MemOperand(x5, JSFunction::offsetOfNargs()));

    static_assert(CalleeToken_FunctionConstructing == 0x1, "Constructing must be low-order bit");
    masm.And(x4, x1, Operand(CalleeToken_FunctionConstructing));
    // x7 = nargs, plus 1 if constructing (for the new.target slot).
    masm.Add(x7, x6, x4);

    // Calculate the position that our arguments are at before sp gets modified.
    MOZ_ASSERT(ArgumentsRectifierReg == r8, "x8 used for argc in Arguments Rectifier");
    masm.Add(x3, masm.GetStackPointer64(), Operand(x8, vixl::LSL, 3));
    masm.Add(x3, x3, Operand(sizeof(RectifierFrameLayout)));

    // Pad to a multiple of 16 bytes. This neglects the |this| value,
    // which will also be pushed, because the rest of the frame will
    // round off that value. See pushes of |argc|, |callee| and |desc| below.
    Label noPadding;
    masm.Tbnz(x7, 0, &noPadding);
    masm.asVIXL().Push(xzr);
    masm.Add(x7, x7, Operand(1));
    masm.bind(&noPadding);

    {
        Label notConstructing;
        masm.Cbz(x4, &notConstructing);

        // new.target lives at the end of the pushed args
        // NB: The arg vector holder starts at the beginning of the last arg,
        //     add a value to get to argv[argc]
        masm.loadPtr(Address(r3, sizeof(Value)), r4);
        masm.Push(r4);

        masm.bind(&notConstructing);
    }

    // Calculate the number of undefineds that need to be pushed.
    masm.Sub(w2, w6, w8);

    // Put an undefined in a register so it can be pushed.
    masm.moveValue(UndefinedValue(), r4);

    // Push undefined N times.
    {
        Label undefLoopTop;
        masm.bind(&undefLoopTop);
        masm.Push(r4);
        masm.Subs(w2, w2, Operand(1));
        masm.B(&undefLoopTop, Assembler::NonZero);
    }

    // Arguments copy loop. Copy for x8 >= 0 to include |this|.
    {
        Label copyLoopTop;
        masm.bind(&copyLoopTop);
        // Walk x3 downward through the original argument vector.
        masm.Ldr(x4, MemOperand(x3, -sizeof(Value), vixl::PostIndex));
        masm.Push(r4);
        masm.Subs(x8, x8, Operand(1));
        masm.B(&copyLoopTop, Assembler::NotSigned);
    }

    // Fix up the size of the stack frame. +1 accounts for |this|.
    masm.Add(x6, x7, Operand(1));
    masm.Lsl(x6, x6, 3);

    // Make that into a frame descriptor.
    masm.makeFrameDescriptor(r6, JitFrame_Rectifier);

    masm.push(r0,  // Number of actual arguments.
              r1,  // Callee token.
              r6); // Frame descriptor.

    // Load the address of the code that is getting called.
    masm.Ldr(x3, MemOperand(x5, JSFunction::offsetOfNativeOrScript()));
    masm.loadBaselineOrIonRaw(r3, r3, nullptr);
    uint32_t returnOffset = masm.callJitNoProfiler(r3);

    // Clean up!
    // Get the size of the stack frame, and clean up the later fixed frame.
    masm.Ldr(x4, MemOperand(masm.GetStackPointer64(), 24, vixl::PostIndex));

    // Now that the size of the stack frame sans the fixed frame has been loaded,
    // add that onto the stack pointer.
    masm.Add(masm.GetStackPointer64(), masm.GetStackPointer64(),
             Operand(x4, vixl::LSR, FRAMESIZE_SHIFT));

    // Pop the return address from earlier and branch.
    masm.ret();

    Linker linker(masm);
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

    if (returnAddrOut)
        *returnAddrOut = (void*) (code->raw() + returnOffset);

    return code;
}
430 
// Spills the entire machine state — all physical GPRs, all FP registers,
// then the frame class and return address — so the bailout machinery can
// read register values off the stack by index. On exit, |spArg| holds the
// stack pointer, i.e. a pointer to the spilled bailout frame.
static void
PushBailoutFrame(MacroAssembler& masm, uint32_t frameClass, Register spArg)
{
    // the stack should look like:
    // [IonFrame]
    // bailoutFrame.registersnapshot
    // bailoutFrame.fpsnapshot
    // bailoutFrame.snapshotOffset
    // bailoutFrame.frameSize

    // STEP 1a: Save our register sets to the stack so Bailout() can read
    // everything.
    // sp % 8 == 0

    // We don't have to push everything, but this is likely easier.
    // Setting regs_.
    masm.subFromStackPtr(Imm32(Registers::TotalPhys * sizeof(void*)));
    for (uint32_t i = 0; i < Registers::TotalPhys; i += 2) {
        // Store pairs directly at their index-computed offsets.
        masm.Stp(ARMRegister::XRegFromCode(i),
                 ARMRegister::XRegFromCode(i + 1),
                 MemOperand(masm.GetStackPointer64(), i * sizeof(void*)));
    }

    // Since our datastructures for stack inspection are compile-time fixed,
    // if there are only 16 double registers, then we need to reserve
    // space on the stack for the missing 16.
    masm.subFromStackPtr(Imm32(FloatRegisters::TotalPhys * sizeof(double)));
    for (uint32_t i = 0; i < FloatRegisters::TotalPhys; i += 2) {
        masm.Stp(ARMFPRegister::DRegFromCode(i),
                 ARMFPRegister::DRegFromCode(i + 1),
                 MemOperand(masm.GetStackPointer64(), i * sizeof(void*)));
    }

    // STEP 1b: Push the "return address" of the function call (the address
    //          of the instruction after the call that we used to get here)
    //          together with the frame class onto the stack.
    // NOTE(review): the original comment here referenced r14 and a "callee
    // token"; r14 is the ARM32 link register — on AArch64 the return address
    // is in x30 (lr), and what is actually pushed below is lr + frameClass.
    // The comment appears copied from the ARM port; confirm.

    // Now place the frameClass onto the stack, via a register.
    masm.Mov(x9, frameClass);

    // And onto the stack. Since the stack is full, we need to put this one past
    // the end of the current stack. Sadly, the ABI says that we need to always
    // point to the lowest place that has been written. The OS is free to do
    // whatever it wants below sp.
    masm.push(r30, r9);
    masm.moveStackPtrTo(spArg);
}
481 
// Emits the shared bailout path: spills registers via PushBailoutFrame,
// calls Bailout() with a stack-allocated BaselineBailoutInfo* outparam,
// unwinds the bailed-out frame, and jumps to the bailout tail stub.
static void
GenerateBailoutThunk(JSContext* cx, MacroAssembler& masm, uint32_t frameClass)
{
    PushBailoutFrame(masm, frameClass, r0);

    // SP % 8 == 4
    // STEP 1c: Call the bailout function, giving a pointer to the
    //          structure we just blitted onto the stack.
    // Make space for the BaselineBailoutInfo* outparam.
    // (Two pointer-sized slots are reserved — presumably to keep sp 16-byte
    // aligned; only the first is read back below.)
    const int sizeOfBailoutInfo = sizeof(void*) * 2;
    masm.reserveStack(sizeOfBailoutInfo);
    masm.moveStackPtrTo(r1);

    masm.setupUnalignedABICall(r2);
    masm.passABIArg(r0); // Spilled bailout frame.
    masm.passABIArg(r1); // BaselineBailoutInfo* outparam slot.
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, Bailout));

    // Fetch the outparam, then release its stack space.
    masm.Ldr(x2, MemOperand(masm.GetStackPointer64(), 0));
    masm.addToStackPtr(Imm32(sizeOfBailoutInfo));

    // Size of the register snapshot spilled by PushBailoutFrame.
    static const uint32_t BailoutDataSize = sizeof(void*) * Registers::Total +
                                            sizeof(double) * FloatRegisters::TotalPhys;

    if (frameClass == NO_FRAME_SIZE_CLASS_ID) {
        // Variable-sized frame: read the frame size from the bailout stack.
        // NOTE(review): the magic +32 is undocumented — presumably it covers
        // the lr/frameClass pushes plus the frameSize/snapshotOffset words;
        // confirm against the BailoutStack layout.
        vixl::UseScratchRegisterScope temps(&masm.asVIXL());
        const ARMRegister scratch64 = temps.AcquireX();

        masm.Ldr(scratch64, MemOperand(masm.GetStackPointer64(), sizeof(uintptr_t)));
        masm.addToStackPtr(Imm32(BailoutDataSize + 32));
        masm.addToStackPtr(scratch64.asUnsized());
    } else {
        // Fixed frame-size class: the frame size is known statically.
        uint32_t frameSize = FrameSizeClass::FromClass(frameClass).frameSize();
        masm.addToStackPtr(Imm32(frameSize + BailoutDataSize + sizeof(void*)));
    }

    // Jump to shared bailout tail. The BailoutInfo pointer has to be in r9.
    // NOTE(review): the r9 claim looks stale — the info was loaded into x2
    // above, and generateBailoutTailStub() passes r2; confirm.
    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
    masm.branch(bailoutTail);
}
522 
523 JitCode*
generateBailoutTable(JSContext * cx,uint32_t frameClass)524 JitRuntime::generateBailoutTable(JSContext* cx, uint32_t frameClass)
525 {
526     // FIXME: Implement.
527     MacroAssembler masm;
528     masm.breakpoint();
529     Linker linker(masm);
530     return linker.newCode<NoGC>(cx, OTHER_CODE);
531 }
532 
533 JitCode*
generateBailoutHandler(JSContext * cx)534 JitRuntime::generateBailoutHandler(JSContext* cx)
535 {
536     MacroAssembler masm(cx);
537     GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);
538 
539 #ifdef JS_ION_PERF
540     writePerfSpewerJitCodeProfile(code, "BailoutHandler");
541 #endif
542 
543     Linker linker(masm);
544     return linker.newCode<NoGC>(cx, OTHER_CODE);
545 }
546 
// Generates (and memoizes, keyed on |f|) the trampoline that calls the C++
// VM function |f| from JIT code: links an exit frame, copies the JS-side
// arguments into ABI positions, reserves stack for any outparam, performs
// the ABI call, checks the failure condition, and unpacks the result.
// Returns nullptr on OOM.
JitCode*
JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
{
    MOZ_ASSERT(functionWrappers_);
    MOZ_ASSERT(functionWrappers_->initialized());
    VMWrapperMap::AddPtr p = functionWrappers_->lookupForAdd(&f);
    if (p)
        return p->value(); // Wrapper already generated for this VMFunction.

    MacroAssembler masm(cx);

    // Avoid conflicts with argument registers while discarding the result after
    // the function call.
    AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);

    // Wrapper register set is a superset of the Volatile register set.
    JS_STATIC_ASSERT((Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0);

    // Unlike on other platforms, it is the responsibility of the VM *callee* to
    // push the return address, while the caller must ensure that the address
    // is stored in lr on entry. This allows the VM wrapper to work with both direct
    // calls and tail calls.
    masm.push(lr);

    // First argument is the JSContext.
    Register reg_cx = IntArgReg0;
    regs.take(reg_cx);

    // Stack is:
    //    ... frame ...
    //  +12 [args]
    //  +8  descriptor
    //  +0  returnAddress (pushed by this function, caller sets as lr)
    //
    //  We're aligned to an exit frame, so link it up.
    masm.enterExitFrame(&f);
    masm.loadJSContext(reg_cx);

    // Save the current stack pointer as the base for copying arguments.
    Register argsBase = InvalidReg;
    if (f.explicitArgs) {
        // argsBase can't be an argument register. Bad things would happen if
        // the MoveResolver didn't throw an assertion failure first.
        argsBase = r8;
        regs.take(argsBase);
        masm.Add(ARMRegister(argsBase, 64), masm.GetStackPointer64(),
                 Operand(ExitFrameLayout::SizeWithFooter()));
    }

    // Reserve space for any outparameter.
    Register outReg = InvalidReg;
    switch (f.outParam) {
      case Type_Value:
        outReg = regs.takeAny();
        masm.reserveStack(sizeof(Value));
        masm.moveStackPtrTo(outReg);
        break;

      case Type_Handle:
        outReg = regs.takeAny();
        masm.PushEmptyRooted(f.outParamRootType);
        masm.moveStackPtrTo(outReg);
        break;

      // Int32/Bool reserve a full 64-bit slot (keeps sp aligned).
      case Type_Int32:
      case Type_Bool:
        outReg = regs.takeAny();
        masm.reserveStack(sizeof(int64_t));
        masm.moveStackPtrTo(outReg);
        break;

      case Type_Double:
        outReg = regs.takeAny();
        masm.reserveStack(sizeof(double));
        masm.moveStackPtrTo(outReg);
        break;

      case Type_Pointer:
        outReg = regs.takeAny();
        masm.reserveStack(sizeof(uintptr_t));
        masm.moveStackPtrTo(outReg);
        break;

      default:
        MOZ_ASSERT(f.outParam == Type_Void);
        break;
    }

    masm.setupUnalignedABICall(regs.getAny());
    masm.passABIArg(reg_cx);

    size_t argDisp = 0;

    // Copy arguments.
    for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
        MoveOperand from;
        switch (f.argProperties(explicitArg)) {
          case VMFunction::WordByValue:
            masm.passABIArg(MoveOperand(argsBase, argDisp),
                            (f.argPassedInFloatReg(explicitArg) ? MoveOp::DOUBLE : MoveOp::GENERAL));
            argDisp += sizeof(void*);
            break;

          case VMFunction::WordByRef:
            // Pass the address of the stack slot rather than its contents.
            masm.passABIArg(MoveOperand(argsBase, argDisp, MoveOperand::EFFECTIVE_ADDRESS),
                            MoveOp::GENERAL);
            argDisp += sizeof(void*);
            break;

          case VMFunction::DoubleByValue:
          case VMFunction::DoubleByRef:
            MOZ_CRASH("NYI: AArch64 callVM should not be used with 128bit values.");
        }
    }

    // Copy the semi-implicit outparam, if any.
    // It is not a C++-abi outparam, which would get passed in the
    // outparam register, but a real parameter to the function, which
    // was stack-allocated above.
    if (outReg != InvalidReg)
        masm.passABIArg(outReg);

    masm.callWithABI(f.wrapped);

    // SP is used to transfer stack across call boundaries.
    if (!masm.GetStackPointer64().Is(vixl::sp))
        masm.Mov(masm.GetStackPointer64(), vixl::sp);

    // Test for failure.
    switch (f.failType()) {
      case Type_Object:
        masm.branchTestPtr(Assembler::Zero, r0, r0, masm.failureLabel());
        break;
      case Type_Bool:
        masm.branchIfFalseBool(r0, masm.failureLabel());
        break;
      default:
        MOZ_CRASH("unknown failure kind");
    }

    // Load the outparam and free any allocated stack.
    switch (f.outParam) {
      case Type_Value:
        masm.Ldr(ARMRegister(JSReturnReg, 64), MemOperand(masm.GetStackPointer64()));
        masm.freeStack(sizeof(Value));
        break;

      case Type_Handle:
        masm.popRooted(f.outParamRootType, ReturnReg, JSReturnOperand);
        break;

      case Type_Int32:
        masm.Ldr(ARMRegister(ReturnReg, 32), MemOperand(masm.GetStackPointer64()));
        masm.freeStack(sizeof(int64_t));
        break;

      case Type_Bool:
        masm.Ldrb(ARMRegister(ReturnReg, 32), MemOperand(masm.GetStackPointer64()));
        masm.freeStack(sizeof(int64_t));
        break;

      case Type_Double:
        MOZ_ASSERT(cx->runtime()->jitSupportsFloatingPoint);
        masm.Ldr(ARMFPRegister(ReturnDoubleReg, 64), MemOperand(masm.GetStackPointer64()));
        masm.freeStack(sizeof(double));
        break;

      case Type_Pointer:
        masm.Ldr(ARMRegister(ReturnReg, 64), MemOperand(masm.GetStackPointer64()));
        masm.freeStack(sizeof(uintptr_t));
        break;

      default:
        MOZ_ASSERT(f.outParam == Type_Void);
        break;
    }

    masm.leaveExitFrame();
    // Pop the exit frame, the explicit args, and any extra Values on return.
    masm.retn(Imm32(sizeof(ExitFrameLayout) +
              f.explicitStackSlots() * sizeof(void*) +
              f.extraValuesToPop * sizeof(Value)));

    Linker linker(masm);
    JitCode* wrapper = linker.newCode<NoGC>(cx, OTHER_CODE);
    if (!wrapper)
        return nullptr;

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(wrapper, "VMWrapper");
#endif

    // linker.newCode may trigger a GC and sweep functionWrappers_ so we have to
    // use relookupOrAdd instead of add.
    if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
        return nullptr;

    return wrapper;
}
745 
// Generates the pre-write barrier stub for |type|: preserves all volatile
// registers plus lr, calls the C++ marking function for |type| with the
// runtime and the address already held in PreBarrierReg, then restores the
// registers and returns.
JitCode*
JitRuntime::generatePreBarrier(JSContext* cx, MIRType type)
{
    MacroAssembler masm(cx);

    LiveRegisterSet regs = LiveRegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                                           FloatRegisterSet(FloatRegisters::VolatileMask));

    // Also preserve the return address.
    regs.add(lr);

    masm.PushRegsInMask(regs);

    MOZ_ASSERT(PreBarrierReg == r1);
    masm.movePtr(ImmPtr(cx->runtime()), r3);

    masm.setupUnalignedABICall(r0);
    masm.passABIArg(r3);            // JSRuntime*.
    masm.passABIArg(PreBarrierReg); // Address being written to (barrier target).
    masm.callWithABI(IonMarkFunction(type));

    // Pop the volatile regs and restore LR.
    masm.PopRegsInMask(regs);

    masm.abiret();

    Linker linker(masm);
    return linker.newCode<NoGC>(cx, OTHER_CODE);
}
775 
// VMFunction wrapper descriptor for HandleDebugTrap; used by
// generateDebugTrapHandler() below to call into C++ when a debug trap fires.
typedef bool (*HandleDebugTrapFn)(JSContext*, BaselineFrame*, uint8_t*, bool*);
static const VMFunction HandleDebugTrapInfo = FunctionInfo<HandleDebugTrapFn>(HandleDebugTrap);
778 
// Generates the debug trap handler, entered from baseline code when a
// breakpoint or single-step trap fires. Calls the HandleDebugTrap VM
// function inside a stub frame; a |true| result forces a return from the
// JS frame with the frame's return value, |false| resumes at the current pc.
JitCode*
JitRuntime::generateDebugTrapHandler(JSContext* cx)
{
    MacroAssembler masm(cx);
#ifndef JS_USE_LINK_REGISTER
    // The first value contains the return address,
    // which we pull into ICTailCallReg for tail calls.
    masm.setFramePushed(sizeof(intptr_t));
#endif

    Register scratch1 = r0;
    Register scratch2 = r1;

    // Load BaselineFrame pointer into scratch1.
    masm.Sub(ARMRegister(scratch1, 64), BaselineFrameReg64, Operand(BaselineFrame::Size()));

    // Enter a stub frame and call the HandleDebugTrap VM function. Ensure the
    // stub frame has a nullptr ICStub pointer, since this pointer is marked
    // during GC.
    masm.movePtr(ImmPtr(nullptr), ICStubReg);
    EmitBaselineEnterStubFrame(masm, scratch2);

    JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
    if (!code)
        return nullptr;

    // Push lr and the BaselineFrame pointer before calling into the VM.
    masm.asVIXL().Push(vixl::lr, ARMRegister(scratch1, 64));
    EmitBaselineCallVM(code, masm);

    EmitBaselineLeaveStubFrame(masm);

    // If the stub returns |true|, we have to perform a forced return (return
    // from the JS frame). If the stub returns |false|, just return from the
    // trap stub so that execution continues at the current pc.
    Label forcedReturn;
    masm.branchTest32(Assembler::NonZero, ReturnReg, ReturnReg, &forcedReturn);
    masm.abiret();

    // Forced return: load the frame's return value and unwind the frame.
    masm.bind(&forcedReturn);
    masm.loadValue(Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfReturnValue()),
                   JSReturnOperand);
    masm.Mov(masm.GetStackPointer64(), BaselineFrameReg64);

    masm.pop(BaselineFrameReg, lr);
    masm.syncStackPtr();
    masm.abiret();

    Linker linker(masm);
    JitCode* codeDbg = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler");
#endif

    return codeDbg;
}
835 
836 JitCode*
generateExceptionTailStub(JSContext * cx,void * handler)837 JitRuntime::generateExceptionTailStub(JSContext* cx, void* handler)
838 {
839     MacroAssembler masm(cx);
840 
841     masm.handleFailureWithHandlerTail(handler);
842 
843     Linker linker(masm);
844     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
845 
846 #ifdef JS_ION_PERF
847     writePerfSpewerJitCodeProfile(code, "ExceptionTailStub");
848 #endif
849 
850     return code;
851 }
852 
853 JitCode*
generateBailoutTailStub(JSContext * cx)854 JitRuntime::generateBailoutTailStub(JSContext* cx)
855 {
856     MacroAssembler masm(cx);
857 
858     masm.generateBailoutTail(r1, r2);
859 
860     Linker linker(masm);
861     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
862 
863 #ifdef JS_ION_PERF
864     writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
865 #endif
866 
867     return code;
868 }
// Generate the stub run when a profiled JS frame returns: it walks one frame
// up the jit stack and updates jitActivation->lastProfilingFrame and
// ->lastProfilingCallSite before performing the actual return.
JitCode*
JitRuntime::generateProfilerExitFrameTailStub(JSContext* cx)
{
    MacroAssembler masm;

    Register scratch1 = r8;
    Register scratch2 = r9;
    Register scratch3 = r10;
    Register scratch4 = r11;

    //
    // The code generated below expects that the current stack pointer points
    // to an Ion or Baseline frame, at the state it would be immediately
    // before a ret().  Thus, after this stub's business is done, it executes
    // a ret() and returns directly to the caller script, on behalf of the
    // callee script that jumped to this code.
    //
    // Thus the expected stack is:
    //
    //                                   StackPointer ----+
    //                                                    v
    // ..., ActualArgc, CalleeToken, Descriptor, ReturnAddr
    // MEM-HI                                       MEM-LOW
    //
    //
    // The generated jitcode is responsible for overwriting the
    // jitActivation->lastProfilingFrame field with a pointer to the previous
    // Ion or Baseline jit-frame that was pushed before this one. It is also
    // responsible for overwriting jitActivation->lastProfilingCallSite with
    // the return address into that frame.  The frame could either be an
    // immediate "caller" frame, or it could be a frame in a previous
    // JitActivation (if the current frame was entered from C++, and the C++
    // was entered by some caller jit-frame further down the stack).
    //
    // So this jitcode is responsible for "walking up" the jit stack, finding
    // the previous Ion or Baseline JS frame, and storing its address and the
    // return address into the appropriate fields on the current jitActivation.
    //
    // There are a fixed number of different path types that can lead to the
    // current frame, which is either a baseline or ion frame:
    //
    // <Baseline-Or-Ion>
    // ^
    // |
    // ^--- Ion
    // |
    // ^--- Baseline Stub <---- Baseline
    // |
    // ^--- Argument Rectifier
    // |    ^
    // |    |
    // |    ^--- Ion
    // |    |
    // |    ^--- Baseline Stub <---- Baseline
    // |
    // ^--- Entry Frame (From C++)
    //
    // Load the profiling JitActivation into |actReg| and form addresses of
    // its two profiler bookkeeping fields.
    Register actReg = scratch4;
    AbsoluteAddress activationAddr(GetJitContext()->runtime->addressOfProfilingActivation());
    masm.loadPtr(activationAddr, actReg);

    Address lastProfilingFrame(actReg, JitActivation::offsetOfLastProfilingFrame());
    Address lastProfilingCallSite(actReg, JitActivation::offsetOfLastProfilingCallSite());

#ifdef DEBUG
    // Ensure that frame we are exiting is current lastProfilingFrame
    {
        masm.loadPtr(lastProfilingFrame, scratch1);
        Label checkOk;
        masm.branchPtr(Assembler::Equal, scratch1, ImmWord(0), &checkOk);
        masm.branchStackPtr(Assembler::Equal, scratch1, &checkOk);
        masm.assumeUnreachable("Mismatch between stored lastProfilingFrame and current stack pointer.");
        masm.bind(&checkOk);
    }
#endif

    // Load the frame descriptor into |scratch1|, figure out what to do depending on its type.
    masm.loadPtr(Address(masm.getStackPointer(), JitFrameLayout::offsetOfDescriptor()), scratch1);

    // Going into the conditionals, we will have:
    //      FrameDescriptor.size in scratch1
    //      FrameDescriptor.type in scratch2
    // NOTE(review): the type mask here is (1 << FRAMESIZE_SHIFT) - 1, while
    // the Rectifier path below uses (1 << FRAMETYPE_BITS) - 1 — confirm both
    // constants denote the same low-bit field of the descriptor.
    masm.and32(Imm32((1 << FRAMESIZE_SHIFT) - 1), scratch1, scratch2);
    masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch1);

    // Handling of each case is dependent on FrameDescriptor.type
    Label handle_IonJS;
    Label handle_BaselineStub;
    Label handle_Rectifier;
    Label handle_IonAccessorIC;
    Label handle_Entry;
    Label end;

    // BaselineJS frames are handled identically to IonJS frames here.
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_IonJS), &handle_IonJS);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineJS), &handle_IonJS);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineStub), &handle_BaselineStub);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Rectifier), &handle_Rectifier);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_IonAccessorIC), &handle_IonAccessorIC);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Entry), &handle_Entry);

    masm.assumeUnreachable("Invalid caller frame type when exiting from Ion frame.");

    //
    // JitFrame_IonJS
    //
    // Stack layout:
    //                  ...
    //                  Ion-Descriptor
    //     Prev-FP ---> Ion-ReturnAddr
    //                  ... previous frame data ... |- Descriptor.Size
    //                  ... arguments ...           |
    //                  ActualArgc          |
    //                  CalleeToken         |- JitFrameLayout::Size()
    //                  Descriptor          |
    //        FP -----> ReturnAddr          |
    //
    masm.bind(&handle_IonJS);
    {
        // |scratch1| contains Descriptor.size

        // returning directly to an IonJS frame.  Store return addr to frame
        // in lastProfilingCallSite.
        masm.loadPtr(Address(masm.getStackPointer(), JitFrameLayout::offsetOfReturnAddress()),
                     scratch2);
        masm.storePtr(scratch2, lastProfilingCallSite);

        // Store return frame in lastProfilingFrame.
        // scratch2 := masm.getStackPointer() + Descriptor.size*1 + JitFrameLayout::Size();
        masm.addPtr(masm.getStackPointer(), scratch1, scratch2);
        masm.syncStackPtr();
        masm.addPtr(Imm32(JitFrameLayout::Size()), scratch2, scratch2);
        masm.storePtr(scratch2, lastProfilingFrame);
        masm.ret();
    }

    //
    // JitFrame_BaselineStub
    //
    // Look past the stub and store the frame pointer to
    // the baselineJS frame prior to it.
    //
    // Stack layout:
    //              ...
    //              BL-Descriptor
    // Prev-FP ---> BL-ReturnAddr
    //      +-----> BL-PrevFramePointer
    //      |       ... BL-FrameData ...
    //      |       BLStub-Descriptor
    //      |       BLStub-ReturnAddr
    //      |       BLStub-StubPointer          |
    //      +------ BLStub-SavedFramePointer    |- Descriptor.Size
    //              ... arguments ...           |
    //              ActualArgc          |
    //              CalleeToken         |- JitFrameLayout::Size()
    //              Descriptor          |
    //    FP -----> ReturnAddr          |
    //
    // We take advantage of the fact that the stub frame saves the frame
    // pointer pointing to the baseline frame, so a bunch of calculation can
    // be avoided.
    //
    masm.bind(&handle_BaselineStub);
    {
        // scratch3 := start of the BaselineStub frame.
        masm.addPtr(masm.getStackPointer(), scratch1, scratch3);
        masm.syncStackPtr();
        Address stubFrameReturnAddr(scratch3,
                                    JitFrameLayout::Size() +
                                    BaselineStubFrameLayout::offsetOfReturnAddress());
        masm.loadPtr(stubFrameReturnAddr, scratch2);
        masm.storePtr(scratch2, lastProfilingCallSite);

        Address stubFrameSavedFramePtr(scratch3,
                                       JitFrameLayout::Size() - (2 * sizeof(void*)));
        masm.loadPtr(stubFrameSavedFramePtr, scratch2);
        masm.addPtr(Imm32(sizeof(void*)), scratch2); // Skip past BL-PrevFramePtr.
        masm.storePtr(scratch2, lastProfilingFrame);
        masm.ret();
    }


    //
    // JitFrame_Rectifier
    //
    // The rectifier frame can be preceded by either an IonJS or a
    // BaselineStub frame.
    //
    // Stack layout if caller of rectifier was Ion:
    //
    //              Ion-Descriptor
    //              Ion-ReturnAddr
    //              ... ion frame data ... |- Rect-Descriptor.Size
    //              < COMMON LAYOUT >
    //
    // Stack layout if caller of rectifier was Baseline:
    //
    //              BL-Descriptor
    // Prev-FP ---> BL-ReturnAddr
    //      +-----> BL-SavedFramePointer
    //      |       ... baseline frame data ...
    //      |       BLStub-Descriptor
    //      |       BLStub-ReturnAddr
    //      |       BLStub-StubPointer          |
    //      +------ BLStub-SavedFramePointer    |- Rect-Descriptor.Size
    //              ... args to rectifier ...   |
    //              < COMMON LAYOUT >
    //
    // Common stack layout:
    //
    //              ActualArgc          |
    //              CalleeToken         |- RectifierFrameLayout::Size()
    //              Rect-Descriptor     |
    //              Rect-ReturnAddr     |
    //              ... rectifier data & args ... |- Descriptor.Size
    //              ActualArgc      |
    //              CalleeToken     |- JitFrameLayout::Size()
    //              Descriptor      |
    //    FP -----> ReturnAddr      |
    //
    masm.bind(&handle_Rectifier);
    {
        // scratch2 := StackPointer + Descriptor.size*1 + JitFrameLayout::Size();
        masm.addPtr(masm.getStackPointer(), scratch1, scratch2);
        masm.syncStackPtr();
        masm.add32(Imm32(JitFrameLayout::Size()), scratch2);
        // Split the rectifier frame's descriptor into size and type parts.
        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfDescriptor()), scratch3);
        masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch3, scratch1);
        masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch3);

        // Now |scratch1| contains Rect-Descriptor.Size
        // and |scratch2| points to Rectifier frame
        // and |scratch3| contains Rect-Descriptor.Type

        // Check for either Ion or BaselineStub frame.
        Label handle_Rectifier_BaselineStub;
        masm.branch32(Assembler::NotEqual, scratch3, Imm32(JitFrame_IonJS),
                      &handle_Rectifier_BaselineStub);

        // Handle Rectifier <- IonJS
        // scratch3 := RectFrame[ReturnAddr]
        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfReturnAddress()), scratch3);
        masm.storePtr(scratch3, lastProfilingCallSite);

        // scratch3 := RectFrame + Rect-Descriptor.Size + RectifierFrameLayout::Size()
        masm.addPtr(scratch2, scratch1, scratch3);
        masm.add32(Imm32(RectifierFrameLayout::Size()), scratch3);
        masm.storePtr(scratch3, lastProfilingFrame);
        masm.ret();

        // Handle Rectifier <- BaselineStub <- BaselineJS
        masm.bind(&handle_Rectifier_BaselineStub);
#ifdef DEBUG
        {
            Label checkOk;
            masm.branch32(Assembler::Equal, scratch3, Imm32(JitFrame_BaselineStub), &checkOk);
            masm.assumeUnreachable("Unrecognized frame preceding baselineStub.");
            masm.bind(&checkOk);
        }
#endif
        // scratch3 := start of the BaselineStub frame preceding the rectifier.
        masm.addPtr(scratch2, scratch1, scratch3);
        Address stubFrameReturnAddr(scratch3, RectifierFrameLayout::Size() +
                                              BaselineStubFrameLayout::offsetOfReturnAddress());
        masm.loadPtr(stubFrameReturnAddr, scratch2);
        masm.storePtr(scratch2, lastProfilingCallSite);

        Address stubFrameSavedFramePtr(scratch3,
                                       RectifierFrameLayout::Size() - (2 * sizeof(void*)));
        masm.loadPtr(stubFrameSavedFramePtr, scratch2);
        masm.addPtr(Imm32(sizeof(void*)), scratch2); // Skip past BL-PrevFramePtr.
        masm.storePtr(scratch2, lastProfilingFrame);
        masm.ret();
    }

    // JitFrame_IonAccessorIC
    //
    // The caller is always an IonJS frame.
    //
    //              Ion-Descriptor
    //              Ion-ReturnAddr
    //              ... ion frame data ... |- AccFrame-Descriptor.Size
    //              StubCode             |
    //              AccFrame-Descriptor  |- IonAccessorICFrameLayout::Size()
    //              AccFrame-ReturnAddr  |
    //              ... accessor frame data & args ... |- Descriptor.Size
    //              ActualArgc      |
    //              CalleeToken     |- JitFrameLayout::Size()
    //              Descriptor      |
    //    FP -----> ReturnAddr      |
    masm.bind(&handle_IonAccessorIC);
    {
        // scratch2 := StackPointer + Descriptor.size + JitFrameLayout::Size()
        masm.addPtr(masm.getStackPointer(), scratch1, scratch2);
        masm.syncStackPtr();
        masm.addPtr(Imm32(JitFrameLayout::Size()), scratch2);

        // scratch3 := AccFrame-Descriptor.Size
        masm.loadPtr(Address(scratch2, IonAccessorICFrameLayout::offsetOfDescriptor()), scratch3);
#ifdef DEBUG
        // Assert previous frame is an IonJS frame.
        masm.movePtr(scratch3, scratch1);
        masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch1);
        {
            Label checkOk;
            masm.branch32(Assembler::Equal, scratch1, Imm32(JitFrame_IonJS), &checkOk);
            masm.assumeUnreachable("IonAccessorIC frame must be preceded by IonJS frame");
            masm.bind(&checkOk);
        }
#endif
        masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch3);

        // lastProfilingCallSite := AccFrame-ReturnAddr
        masm.loadPtr(Address(scratch2, IonAccessorICFrameLayout::offsetOfReturnAddress()), scratch1);
        masm.storePtr(scratch1, lastProfilingCallSite);

        // lastProfilingFrame := AccessorFrame + AccFrame-Descriptor.Size +
        //                       IonAccessorICFrameLayout::Size()
        masm.addPtr(scratch2, scratch3, scratch1);
        masm.addPtr(Imm32(IonAccessorICFrameLayout::Size()), scratch1);
        masm.storePtr(scratch1, lastProfilingFrame);
        masm.ret();
    }

    //
    // JitFrame_Entry
    //
    // If at an entry frame, store null into both fields.
    //
    masm.bind(&handle_Entry);
    {
        masm.movePtr(ImmPtr(nullptr), scratch1);
        masm.storePtr(scratch1, lastProfilingCallSite);
        masm.storePtr(scratch1, lastProfilingFrame);
        masm.ret();
    }

    Linker linker(masm);
    AutoFlushICache afc("ProfilerExitFrameTailStub");
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "ProfilerExitFrameStub");
#endif

    return code;
}
1213