1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2  * vim: set ts=8 sts=4 et sw=4 tw=99:
3  * This Source Code Form is subject to the terms of the Mozilla Public
4  * License, v. 2.0. If a copy of the MPL was not distributed with this
5  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 
7 #include "jit/Bailouts.h"
8 #include "jit/JitCompartment.h"
9 #include "jit/JitFrames.h"
10 #include "jit/Linker.h"
11 #ifdef JS_ION_PERF
12 # include "jit/PerfSpewer.h"
13 #endif
14 #include "jit/VMFunctions.h"
15 #include "jit/x64/SharedICHelpers-x64.h"
16 
17 #include "jit/MacroAssembler-inl.h"
18 
19 using namespace js;
20 using namespace js::jit;
21 
22 // All registers to save and restore. This includes the stack pointer, since we
23 // use the ability to reference register values on the stack by index.
24 static const LiveRegisterSet AllRegs =
25     LiveRegisterSet(GeneralRegisterSet(Registers::AllMask),
26                          FloatRegisterSet(FloatRegisters::AllMask));
27 
28 // Generates a trampoline for calling Jit compiled code from a C++ function.
29 // The trampoline uses the EnterJitCode signature, with the standard x64 fastcall
30 // calling convention.
31 JitCode*
32 JitRuntime::generateEnterJIT(JSContext* cx, EnterJitType type)
33 {
34     MacroAssembler masm(cx);
35     masm.assertStackAlignment(ABIStackAlignment, -int32_t(sizeof(uintptr_t)) /* return address */);
36 
37     const Register reg_code  = IntArgReg0;
38     const Register reg_argc  = IntArgReg1;
39     const Register reg_argv  = IntArgReg2;
40     MOZ_ASSERT(OsrFrameReg == IntArgReg3);
41 
42 #if defined(_WIN64)
43     const Operand token  = Operand(rbp, 16 + ShadowStackSpace);
44     const Operand scopeChain = Operand(rbp, 24 + ShadowStackSpace);
45     const Operand numStackValuesAddr = Operand(rbp, 32 + ShadowStackSpace);
46     const Operand result = Operand(rbp, 40 + ShadowStackSpace);
47 #else
48     const Register token = IntArgReg4;
49     const Register scopeChain = IntArgReg5;
50     const Operand numStackValuesAddr = Operand(rbp, 16 + ShadowStackSpace);
51     const Operand result = Operand(rbp, 24 + ShadowStackSpace);
52 #endif
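    // Win64 passes only the first four integer arguments in registers, so the
    // remaining EnterJitCode arguments are read from the caller's stack beyond the
    // shadow space; System V passes six in registers, leaving only the last two
    // (numStackValues and the result slot) on the stack.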
53 
54     // Save old stack frame pointer, set new stack frame pointer.
55     masm.push(rbp);
56     masm.mov(rsp, rbp);
57 
58     // Save non-volatile registers. These must be saved by the trampoline, rather
59     // than by the JIT'd code, because they are scanned by the conservative scanner.
60     masm.push(rbx);
61     masm.push(r12);
62     masm.push(r13);
63     masm.push(r14);
64     masm.push(r15);
65 #if defined(_WIN64)
66     masm.push(rdi);
67     masm.push(rsi);
68 
69     // 16-byte alignment for vmovdqa
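    // The eight pushes above (rbp, rbx, r12-r15, rdi, rsi) plus the return address
    // leave rsp at 8 mod 16, so the extra 8 bytes restore the 16-byte alignment that
    // the aligned vmovdqa stores below require.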
70     masm.subq(Imm32(16 * 10 + 8), rsp);
71 
72     masm.vmovdqa(xmm6, Operand(rsp, 16 * 0));
73     masm.vmovdqa(xmm7, Operand(rsp, 16 * 1));
74     masm.vmovdqa(xmm8, Operand(rsp, 16 * 2));
75     masm.vmovdqa(xmm9, Operand(rsp, 16 * 3));
76     masm.vmovdqa(xmm10, Operand(rsp, 16 * 4));
77     masm.vmovdqa(xmm11, Operand(rsp, 16 * 5));
78     masm.vmovdqa(xmm12, Operand(rsp, 16 * 6));
79     masm.vmovdqa(xmm13, Operand(rsp, 16 * 7));
80     masm.vmovdqa(xmm14, Operand(rsp, 16 * 8));
81     masm.vmovdqa(xmm15, Operand(rsp, 16 * 9));
82 #endif
83 
84     // Save the |result| (vp) argument; it is needed after the function call.
85     masm.push(result);
86 
87     // Remember stack depth without padding and arguments.
88     masm.mov(rsp, r14);
89 
90     // Remember the number of bytes occupied by the argument vector.
91     masm.mov(reg_argc, r13);
92 
93     // If we are constructing, the count also needs to include |newTarget|.
94     {
95         Label noNewTarget;
96         masm.branchTest32(Assembler::Zero, token, Imm32(CalleeToken_FunctionConstructing),
97                           &noNewTarget);
98 
99         masm.addq(Imm32(1), r13);
100 
101         masm.bind(&noNewTarget);
102     }
103 
104     masm.shll(Imm32(3), r13);   // r13 = argc * sizeof(Value)
105     static_assert(sizeof(Value) == 1 << 3, "Constant is baked in assembly code");
106 
107     // Guarantee stack alignment of Jit frames.
108     //
109     // This code compensates for the offset created by the copy of the vector of
110     // arguments, such that the jit frame will be aligned once the return
111     // address is pushed on the stack.
112     //
113     // In the computation of the offset, we omit the size of the JitFrameLayout
114     // which is pushed on the stack, as the JitFrameLayout size is a multiple of
115     // the JitStackAlignment.
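    // Concretely: r12 ends up holding (rsp - r13) mod JitStackAlignment, so once it
    // is subtracted from rsp, pushing the r13 bytes of arguments leaves rsp aligned.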
116     masm.mov(rsp, r12);
117     masm.subq(r13, r12);
118     static_assert(sizeof(JitFrameLayout) % JitStackAlignment == 0,
119       "No need to consider the JitFrameLayout for aligning the stack");
120     masm.andl(Imm32(JitStackAlignment - 1), r12);
121     masm.subq(r12, rsp);
122 
123     /***************************************************************
124     Loop over argv vector, push arguments onto stack in reverse order
125     ***************************************************************/
126 
127     // r13 still stores the number of bytes in the argument vector.
128     masm.addq(reg_argv, r13); // r13 points above last argument or newTarget
129 
130     // While r13 > reg_argv, push arguments.
131     {
132         Label header, footer;
133         masm.bind(&header);
134 
135         masm.cmpPtr(r13, reg_argv);
136         masm.j(AssemblerX86Shared::BelowOrEqual, &footer);
137 
138         masm.subq(Imm32(8), r13);
139         masm.push(Operand(r13, 0));
140         masm.jmp(&header);
141 
142         masm.bind(&footer);
143     }
144 
145     // Push the number of actual arguments.  |result| is used to store the
146     // actual number of arguments without adding an extra argument to the enter
147     // JIT.
148     masm.movq(result, reg_argc);
149     masm.unboxInt32(Operand(reg_argc, 0), reg_argc);
150     masm.push(reg_argc);
151 
152     // Push the callee token.
153     masm.push(token);
154 
155     /*****************************************************************
156     Push the number of bytes we've pushed so far on the stack and call
157     *****************************************************************/
158     masm.subq(rsp, r14);
159 
160     // Create a frame descriptor.
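    // makeFrameDescriptor shifts the frame size (r14) up by FRAMESIZE_SHIFT and tags
    // the low bits with the JitFrame_Entry frame type.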
161     masm.makeFrameDescriptor(r14, JitFrame_Entry);
162     masm.push(r14);
163 
164     CodeLabel returnLabel;
165     CodeLabel oomReturnLabel;
166     if (type == EnterJitBaseline) {
167         // Handle OSR.
168         AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
169         regs.takeUnchecked(OsrFrameReg);
170         regs.take(rbp);
171         regs.take(reg_code);
172 
173         // Ensure that |scratch| does not end up being JSReturnOperand.
174         // Do takeUnchecked because on Win64/x64, reg_code (IntArgReg0) and JSReturnOperand are
175         // the same (rcx).  See bug 849398.
176         regs.takeUnchecked(JSReturnOperand);
177         Register scratch = regs.takeAny();
178 
179         Label notOsr;
180         masm.branchTestPtr(Assembler::Zero, OsrFrameReg, OsrFrameReg, &notOsr);
181 
182         Register numStackValues = regs.takeAny();
183         masm.movq(numStackValuesAddr, numStackValues);
184 
185         // Push return address
186         masm.mov(returnLabel.patchAt(), scratch);
187         masm.push(scratch);
188 
189         // Push previous frame pointer.
190         masm.push(rbp);
191 
192         // Reserve frame.
193         Register framePtr = rbp;
194         masm.subPtr(Imm32(BaselineFrame::Size()), rsp);
195         masm.mov(rsp, framePtr);
196 
197 #ifdef XP_WIN
198         // Can't push large frames blindly on Windows; touch frame memory incrementally.
199         masm.mov(numStackValues, scratch);
200         masm.lshiftPtr(Imm32(3), scratch);
201         masm.subPtr(scratch, framePtr);
202         {
203             masm.movePtr(rsp, scratch);
204             masm.subPtr(Imm32(WINDOWS_BIG_FRAME_TOUCH_INCREMENT), scratch);
205 
206             Label touchFrameLoop;
207             Label touchFrameLoopEnd;
208             masm.bind(&touchFrameLoop);
209             masm.branchPtr(Assembler::Below, scratch, framePtr, &touchFrameLoopEnd);
210             masm.store32(Imm32(0), Address(scratch, 0));
211             masm.subPtr(Imm32(WINDOWS_BIG_FRAME_TOUCH_INCREMENT), scratch);
212             masm.jump(&touchFrameLoop);
213             masm.bind(&touchFrameLoopEnd);
214         }
215         masm.mov(rsp, framePtr);
216 #endif
217 
218         // Reserve space for locals and stack values.
219         Register valuesSize = regs.takeAny();
220         masm.mov(numStackValues, valuesSize);
221         masm.shll(Imm32(3), valuesSize);
222         masm.subPtr(valuesSize, rsp);
223 
224         // Enter exit frame.
225         masm.addPtr(Imm32(BaselineFrame::Size() + BaselineFrame::FramePointerOffset), valuesSize);
226         masm.makeFrameDescriptor(valuesSize, JitFrame_BaselineJS);
227         masm.push(valuesSize);
228         masm.push(Imm32(0)); // Fake return address.
229         // No GC things to mark, push a bare token.
230         masm.enterFakeExitFrame(ExitFrameLayoutBareToken);
231 
232         regs.add(valuesSize);
233 
234         masm.push(framePtr);
235         masm.push(reg_code);
236 
237         masm.setupUnalignedABICall(scratch);
238         masm.passABIArg(framePtr); // BaselineFrame
239         masm.passABIArg(OsrFrameReg); // InterpreterFrame
240         masm.passABIArg(numStackValues);
241         masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, jit::InitBaselineFrameForOsr));
242 
243         masm.pop(reg_code);
244         masm.pop(framePtr);
245 
246         MOZ_ASSERT(reg_code != ReturnReg);
247 
248         Label error;
249         masm.addPtr(Imm32(ExitFrameLayout::SizeWithFooter()), rsp);
250         masm.addPtr(Imm32(BaselineFrame::Size()), framePtr);
251         masm.branchIfFalseBool(ReturnReg, &error);
252 
253         // If OSR-ing, then emit instrumentation for setting lastProfilerFrame
254         // if profiler instrumentation is enabled.
255         {
256             Label skipProfilingInstrumentation;
257             Register realFramePtr = numStackValues;
258             AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
259             masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
260                           &skipProfilingInstrumentation);
261             masm.lea(Operand(framePtr, sizeof(void*)), realFramePtr);
262             masm.profilerEnterFrame(realFramePtr, scratch);
263             masm.bind(&skipProfilingInstrumentation);
264         }
265 
266         masm.jump(reg_code);
267 
268         // OOM: load error value, discard return address and previous frame
269         // pointer and return.
270         masm.bind(&error);
271         masm.mov(framePtr, rsp);
272         masm.addPtr(Imm32(2 * sizeof(uintptr_t)), rsp);
273         masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
274         masm.mov(oomReturnLabel.patchAt(), scratch);
275         masm.jump(scratch);
276 
277         masm.bind(&notOsr);
278         masm.movq(scopeChain, R1.scratchReg());
279     }
280 
281     // The call will push the return address on the stack, thus we check that
282     // the stack would be aligned once the call is complete.
283     masm.assertStackAlignment(JitStackAlignment, sizeof(uintptr_t));
284 
285     // Call function.
286     masm.callJitNoProfiler(reg_code);
287 
288     if (type == EnterJitBaseline) {
289         // Baseline OSR will return here.
290         masm.use(returnLabel.target());
291         masm.addCodeLabel(returnLabel);
292         masm.use(oomReturnLabel.target());
293         masm.addCodeLabel(oomReturnLabel);
294     }
295 
296     // Pop arguments and padding from stack.
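    // The descriptor pushed before the call stores the frame size above
    // FRAMESIZE_SHIFT, so shifting it back down yields the number of bytes to remove.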
297     masm.pop(r14);              // Pop and decode descriptor.
298     masm.shrq(Imm32(FRAMESIZE_SHIFT), r14);
299     masm.addq(r14, rsp);        // Remove arguments.
300 
301     /*****************************************************************
302     Place return value where it belongs, pop all saved registers
303     *****************************************************************/
304     masm.pop(r12); // vp
305     masm.storeValue(JSReturnOperand, Operand(r12, 0));
306 
307     // Restore non-volatile registers.
308 #if defined(_WIN64)
309     masm.vmovdqa(Operand(rsp, 16 * 0), xmm6);
310     masm.vmovdqa(Operand(rsp, 16 * 1), xmm7);
311     masm.vmovdqa(Operand(rsp, 16 * 2), xmm8);
312     masm.vmovdqa(Operand(rsp, 16 * 3), xmm9);
313     masm.vmovdqa(Operand(rsp, 16 * 4), xmm10);
314     masm.vmovdqa(Operand(rsp, 16 * 5), xmm11);
315     masm.vmovdqa(Operand(rsp, 16 * 6), xmm12);
316     masm.vmovdqa(Operand(rsp, 16 * 7), xmm13);
317     masm.vmovdqa(Operand(rsp, 16 * 8), xmm14);
318     masm.vmovdqa(Operand(rsp, 16 * 9), xmm15);
319 
320     masm.addq(Imm32(16 * 10 + 8), rsp);
321 
322     masm.pop(rsi);
323     masm.pop(rdi);
324 #endif
325     masm.pop(r15);
326     masm.pop(r14);
327     masm.pop(r13);
328     masm.pop(r12);
329     masm.pop(rbx);
330 
331     // Restore frame pointer and return.
332     masm.pop(rbp);
333     masm.ret();
334 
335     Linker linker(masm);
336     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
337 
338 #ifdef JS_ION_PERF
339     writePerfSpewerJitCodeProfile(code, "EnterJIT");
340 #endif
341 
342     return code;
343 }
344 
345 JitCode*
346 JitRuntime::generateInvalidator(JSContext* cx)
347 {
348     AutoJitContextAlloc ajca(cx);
349     MacroAssembler masm(cx);
350 
351     // See explanatory comment in x86's JitRuntime::generateInvalidator.
352 
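    // Discard the return address pushed by the call that entered this thunk, so the
    // register dump pushed below lines up with the InvalidationBailoutStack layout
    // expected by InvalidationBailout.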
353     masm.addq(Imm32(sizeof(uintptr_t)), rsp);
354 
355     // Push registers such that we can access them from [base + code].
356     masm.PushRegsInMask(AllRegs);
357 
358     masm.movq(rsp, rax); // Argument to jit::InvalidationBailout.
359 
360     // Make space for InvalidationBailout's frameSize outparam.
361     masm.reserveStack(sizeof(size_t));
362     masm.movq(rsp, rbx);
363 
364     // Make space for InvalidationBailout's bailoutInfo outparam.
365     masm.reserveStack(sizeof(void*));
366     masm.movq(rsp, r9);
367 
368     masm.setupUnalignedABICall(rdx);
369     masm.passABIArg(rax);
370     masm.passABIArg(rbx);
371     masm.passABIArg(r9);
372     masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, InvalidationBailout));
373 
374     masm.pop(r9); // Get the bailoutInfo outparam.
375     masm.pop(rbx); // Get the frameSize outparam.
376 
377     // Pop the machine state and the dead frame.
378     masm.lea(Operand(rsp, rbx, TimesOne, sizeof(InvalidationBailoutStack)), rsp);
379 
380     // Jump to shared bailout tail. The BailoutInfo pointer has to be in r9.
381     JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
382     masm.jmp(bailoutTail);
383 
384     Linker linker(masm);
385     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
386 
387 #ifdef JS_ION_PERF
388     writePerfSpewerJitCodeProfile(code, "Invalidator");
389 #endif
390 
391     return code;
392 }
393 
394 JitCode*
395 JitRuntime::generateArgumentsRectifier(JSContext* cx, void** returnAddrOut)
396 {
397     // Do not erase the frame pointer in this function.
398 
399     MacroAssembler masm(cx);
400     // Caller:
401     // [arg2] [arg1] [this] [[argc] [callee] [descr] [raddr]] <- rsp
402     // '--- #r8 ---'
403 
404     // ArgumentsRectifierReg contains the |nargs| pushed onto the current frame.
405     // Including |this|, there are (|nargs| + 1) arguments to copy.
406     MOZ_ASSERT(ArgumentsRectifierReg == r8);
407 
408     // Count |this| among the known arguments.
409     masm.addl(Imm32(1), r8);
410 
411     // Load |nformals| into %rcx.
412     masm.loadPtr(Address(rsp, RectifierFrameLayout::offsetOfCalleeToken()), rax);
413     masm.mov(rax, rcx);
414     masm.andq(Imm32(uint32_t(CalleeTokenMask)), rcx);
415     masm.movzwl(Operand(rcx, JSFunction::offsetOfNargs()), rcx);
416 
417     // Stash another copy in r11, since we are going to do destructive operations
418     // on rcx
419     masm.mov(rcx, r11);
420 
421     static_assert(CalleeToken_FunctionConstructing == 1,
422       "Ensure that we can use the constructing bit to count the value");
423     masm.mov(rax, rdx);
424     masm.andq(Imm32(uint32_t(CalleeToken_FunctionConstructing)), rdx);
425 
426     // Including |this| and |new.target|, there are (|nformals| + 1 + isConstructing)
427     // arguments to push to the stack.  Then we push a JitFrameLayout.  We
428     // compute the padding expressed in the number of extra |undefined| values
429     // to push on the stack.
430     static_assert(sizeof(JitFrameLayout) % JitStackAlignment == 0,
431       "No need to consider the JitFrameLayout for aligning the stack");
432     static_assert(JitStackAlignment % sizeof(Value) == 0,
433       "Ensure that we can pad the stack by pushing extra UndefinedValue");
434 
435     MOZ_ASSERT(IsPowerOfTwo(JitStackValueAlignment));
436     masm.addl(Imm32(JitStackValueAlignment - 1 /* for padding */ + 1 /* for |this| */), rcx);
437     masm.addl(rdx, rcx);
438     masm.andl(Imm32(~(JitStackValueAlignment - 1)), rcx);
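    // rcx now holds |nformals| + 1 (for |this|) + isConstructing, rounded up to a
    // multiple of JitStackValueAlignment.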
439 
440     // Load the number of |undefined|s to push into %rcx.
441     masm.subq(r8, rcx);
442 
443     // Caller:
444     // [arg2] [arg1] [this] [[argc] [callee] [descr] [raddr]] <- rsp <- r9
445     // '------ #r8 -------'
446     //
447     // Rectifier frame:
448     // [undef] [undef] [undef] [arg2] [arg1] [this] [[argc] [callee] [descr] [raddr]]
449     // '------- #rcx --------' '------ #r8 -------'
450 
451     // Copy the number of actual arguments
452     masm.loadPtr(Address(rsp, RectifierFrameLayout::offsetOfNumActualArgs()), rdx);
453 
454     masm.moveValue(UndefinedValue(), r10);
455 
456     masm.movq(rsp, r9); // Save %rsp.
457 
458     // Push undefined values (including the padding).
459     {
460         Label undefLoopTop;
461         masm.bind(&undefLoopTop);
462 
463         masm.push(r10);
464         masm.subl(Imm32(1), rcx);
465         masm.j(Assembler::NonZero, &undefLoopTop);
466     }
467 
468     // Get the topmost argument.
469     static_assert(sizeof(Value) == 8, "TimesEight is used to skip arguments");
470 
471     // The |- sizeof(Value)| offset puts rcx on the last argument rather than on
472     // the value just past it.
473     BaseIndex b = BaseIndex(r9, r8, TimesEight, sizeof(RectifierFrameLayout) - sizeof(Value));
474     masm.lea(Operand(b), rcx);
475 
476     // Copy & Push arguments, |nargs| + 1 times (to include |this|).
477     {
478         Label copyLoopTop;
479 
480         masm.bind(&copyLoopTop);
481         masm.push(Operand(rcx, 0x0));
482         masm.subq(Imm32(sizeof(Value)), rcx);
483         masm.subl(Imm32(1), r8);
484         masm.j(Assembler::NonZero, &copyLoopTop);
485     }
486 
487     // If constructing, copy |newTarget|.
488     {
489         Label notConstructing;
490 
491         masm.branchTest32(Assembler::Zero, rax, Imm32(CalleeToken_FunctionConstructing),
492                           &notConstructing);
493 
494         // thisFrame[numFormals] = prevFrame[argc]
495         ValueOperand newTarget(r10);
496 
497         // +1 for |this|. We want vp[argc], so don't subtract 1
498         BaseIndex newTargetSrc(r9, rdx, TimesEight, sizeof(RectifierFrameLayout) + sizeof(Value));
499         masm.loadValue(newTargetSrc, newTarget);
500 
501         // Again, 1 for |this|
502         BaseIndex newTargetDest(rsp, r11, TimesEight, sizeof(Value));
503         masm.storeValue(newTarget, newTargetDest);
504 
505         masm.bind(&notConstructing);
506     }
507 
508 
509     // Caller:
510     // [arg2] [arg1] [this] [[argc] [callee] [descr] [raddr]] <- r9
511     //
512     //
513     // Rectifier frame:
514     // [undef] [undef] [undef] [arg2] [arg1] [this] <- rsp [[argc] [callee] [descr] [raddr]]
515     //
516 
517     // Construct descriptor.
518     masm.subq(rsp, r9);
519     masm.makeFrameDescriptor(r9, JitFrame_Rectifier);
520 
521     // Construct JitFrameLayout.
522     masm.push(rdx); // numActualArgs
523     masm.push(rax); // callee token
524     masm.push(r9); // descriptor
525 
526     // Call the target function.
527     // Note that this code assumes the function is JITted.
528     masm.andq(Imm32(uint32_t(CalleeTokenMask)), rax);
529     masm.loadPtr(Address(rax, JSFunction::offsetOfNativeOrScript()), rax);
530     masm.loadBaselineOrIonRaw(rax, rax, nullptr);
531     uint32_t returnOffset = masm.callJitNoProfiler(rax);
532 
533     // Remove the rectifier frame.
534     masm.pop(r9);             // r9 <- descriptor with FrameType.
535     masm.shrq(Imm32(FRAMESIZE_SHIFT), r9);
536     masm.pop(r11);            // Discard calleeToken.
537     masm.pop(r11);            // Discard numActualArgs.
538     masm.addq(r9, rsp);       // Discard pushed arguments.
539 
540     masm.ret();
541 
542     Linker linker(masm);
543     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
544 
545 #ifdef JS_ION_PERF
546     writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
547 #endif
548 
549     if (returnAddrOut)
550         *returnAddrOut = (void*)(code->raw() + returnOffset);
551     return code;
552 }
553 
554 static void
555 PushBailoutFrame(MacroAssembler& masm, Register spArg)
556 {
557     // Push registers such that we can access them from [base + code].
558     if (JitSupportsSimd()) {
559         masm.PushRegsInMask(AllRegs);
560     } else {
561         // When SIMD isn't supported, PushRegsInMask reduces the set of float
562         // registers to be double-sized, while the RegisterDump expects each of
563         // the float registers to have the maximal possible size
564         // (Simd128DataSize). To work around this, we just spill the double
565         // registers by hand here, using the register dump offset directly.
566         for (GeneralRegisterBackwardIterator iter(AllRegs.gprs()); iter.more(); iter++)
567             masm.Push(*iter);
568 
569         masm.reserveStack(sizeof(RegisterDump::FPUArray));
570         for (FloatRegisterBackwardIterator iter(AllRegs.fpus()); iter.more(); iter++) {
571             FloatRegister reg = *iter;
572             Address spillAddress(StackPointer, reg.getRegisterDumpOffsetInBytes());
573             masm.storeDouble(reg, spillAddress);
574         }
575     }
576 
577     // Get the stack pointer into a register, pre-alignment.
578     masm.movq(rsp, spArg);
579 }
580 
581 static void
582 GenerateBailoutThunk(JSContext* cx, MacroAssembler& masm, uint32_t frameClass)
583 {
584     PushBailoutFrame(masm, r8);
585 
586     // Make space for Bailout's bailoutInfo outparam.
587     masm.reserveStack(sizeof(void*));
588     masm.movq(rsp, r9);
589 
590     // Call the bailout function.
591     masm.setupUnalignedABICall(rax);
592     masm.passABIArg(r8);
593     masm.passABIArg(r9);
594     masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, Bailout));
595 
596     masm.pop(r9); // Get the bailoutInfo outparam.
597 
598     // Stack is:
599     //     [frame]
600     //     snapshotOffset
601     //     frameSize
602     //     [bailoutFrame]
603     //
604     // Remove both the bailout frame and the topmost Ion frame's stack.
605     static const uint32_t BailoutDataSize = sizeof(RegisterDump);
606     masm.addq(Imm32(BailoutDataSize), rsp);
607     masm.pop(rcx);
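    // Step over the snapshotOffset word plus the frameSize bytes (now in rcx) of the
    // topmost Ion frame's stack.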
608     masm.lea(Operand(rsp, rcx, TimesOne, sizeof(void*)), rsp);
609 
610     // Jump to shared bailout tail. The BailoutInfo pointer has to be in r9.
611     JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
612     masm.jmp(bailoutTail);
613 }
614 
615 JitCode*
616 JitRuntime::generateBailoutTable(JSContext* cx, uint32_t frameClass)
617 {
618     MOZ_CRASH("x64 does not use bailout tables");
619 }
620 
621 JitCode*
622 JitRuntime::generateBailoutHandler(JSContext* cx)
623 {
624     MacroAssembler masm;
625     GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);
626 
627     Linker linker(masm);
628     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
629 
630 #ifdef JS_ION_PERF
631     writePerfSpewerJitCodeProfile(code, "BailoutHandler");
632 #endif
633 
634     return code;
635 }
636 
637 JitCode*
638 JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
639 {
640     MOZ_ASSERT(functionWrappers_);
641     MOZ_ASSERT(functionWrappers_->initialized());
642     VMWrapperMap::AddPtr p = functionWrappers_->lookupForAdd(&f);
643     if (p)
644         return p->value();
645 
646     // Generate separate code for the wrapper.
647     MacroAssembler masm;
648 
649     // Avoid conflicts with argument registers while discarding the result after
650     // the function call.
651     AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);
652 
653     // Wrapper register set is a superset of Volatile register set.
654     JS_STATIC_ASSERT((Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0);
655 
656     // The context is the first argument.
657     Register cxreg = IntArgReg0;
658     regs.take(cxreg);
659 
660     // Stack is:
661     //    ... frame ...
662     //  +16 [args]
663     //  +8  descriptor
664     //  +0  returnAddress
665     //
666     // We're aligned to an exit frame, so link it up.
667     masm.enterExitFrame(&f);
668     masm.loadJSContext(cxreg);
669 
670     // Save the current stack pointer as the base for copying arguments.
671     Register argsBase = InvalidReg;
672     if (f.explicitArgs) {
673         argsBase = r10;
674         regs.take(argsBase);
675         masm.lea(Operand(rsp, ExitFrameLayout::SizeWithFooter()), argsBase);
676     }
677 
678     // Reserve space for the outparameter.
679     Register outReg = InvalidReg;
680     switch (f.outParam) {
681       case Type_Value:
682         outReg = regs.takeAny();
683         masm.reserveStack(sizeof(Value));
684         masm.movq(esp, outReg);
685         break;
686 
687       case Type_Handle:
688         outReg = regs.takeAny();
689         masm.PushEmptyRooted(f.outParamRootType);
690         masm.movq(esp, outReg);
691         break;
692 
693       case Type_Int32:
694       case Type_Bool:
695         outReg = regs.takeAny();
696         masm.reserveStack(sizeof(int32_t));
697         masm.movq(esp, outReg);
698         break;
699 
700       case Type_Double:
701         outReg = regs.takeAny();
702         masm.reserveStack(sizeof(double));
703         masm.movq(esp, outReg);
704         break;
705 
706       case Type_Pointer:
707         outReg = regs.takeAny();
708         masm.reserveStack(sizeof(uintptr_t));
709         masm.movq(esp, outReg);
710         break;
711 
712       default:
713         MOZ_ASSERT(f.outParam == Type_Void);
714         break;
715     }
716 
717     masm.setupUnalignedABICall(regs.getAny());
718     masm.passABIArg(cxreg);
719 
720     size_t argDisp = 0;
721 
722     // Copy arguments.
723     for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
724         MoveOperand from;
725         switch (f.argProperties(explicitArg)) {
726           case VMFunction::WordByValue:
727             if (f.argPassedInFloatReg(explicitArg))
728                 masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::DOUBLE);
729             else
730                 masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::GENERAL);
731             argDisp += sizeof(void*);
732             break;
733           case VMFunction::WordByRef:
734             masm.passABIArg(MoveOperand(argsBase, argDisp, MoveOperand::EFFECTIVE_ADDRESS),
735                             MoveOp::GENERAL);
736             argDisp += sizeof(void*);
737             break;
738           case VMFunction::DoubleByValue:
739           case VMFunction::DoubleByRef:
740             MOZ_CRASH("NYI: x64 callVM should not be used with 128bits values.");
741         }
742     }
743 
744     // Copy the implicit outparam, if any.
745     if (outReg != InvalidReg)
746         masm.passABIArg(outReg);
747 
748     masm.callWithABI(f.wrapped);
749 
750     // Test for failure.
751     switch (f.failType()) {
752       case Type_Object:
753         masm.branchTestPtr(Assembler::Zero, rax, rax, masm.failureLabel());
754         break;
755       case Type_Bool:
756         masm.testb(rax, rax);
757         masm.j(Assembler::Zero, masm.failureLabel());
758         break;
759       default:
760         MOZ_CRASH("unknown failure kind");
761     }
762 
763     // Load the outparam and free any allocated stack.
764     switch (f.outParam) {
765       case Type_Handle:
766         masm.popRooted(f.outParamRootType, ReturnReg, JSReturnOperand);
767         break;
768 
769       case Type_Value:
770         masm.loadValue(Address(esp, 0), JSReturnOperand);
771         masm.freeStack(sizeof(Value));
772         break;
773 
774       case Type_Int32:
775         masm.load32(Address(esp, 0), ReturnReg);
776         masm.freeStack(sizeof(int32_t));
777         break;
778 
779       case Type_Bool:
780         masm.load8ZeroExtend(Address(esp, 0), ReturnReg);
781         masm.freeStack(sizeof(int32_t));
782         break;
783 
784       case Type_Double:
785         MOZ_ASSERT(cx->runtime()->jitSupportsFloatingPoint);
786         masm.loadDouble(Address(esp, 0), ReturnDoubleReg);
787         masm.freeStack(sizeof(double));
788         break;
789 
790       case Type_Pointer:
791         masm.loadPtr(Address(esp, 0), ReturnReg);
792         masm.freeStack(sizeof(uintptr_t));
793         break;
794 
795       default:
796         MOZ_ASSERT(f.outParam == Type_Void);
797         break;
798     }
799     masm.leaveExitFrame();
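    // retn pops the return address and then discards the immediate number of bytes:
    // the exit frame plus the explicit argument slots and extra Values pushed by the
    // jitcode caller.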
800     masm.retn(Imm32(sizeof(ExitFrameLayout) +
801                     f.explicitStackSlots() * sizeof(void*) +
802                     f.extraValuesToPop * sizeof(Value)));
803 
804     Linker linker(masm);
805     JitCode* wrapper = linker.newCode<NoGC>(cx, OTHER_CODE);
806     if (!wrapper)
807         return nullptr;
808 
809 #ifdef JS_ION_PERF
810     writePerfSpewerJitCodeProfile(wrapper, "VMWrapper");
811 #endif
812 
813     // linker.newCode may trigger a GC and sweep functionWrappers_ so we have to
814     // use relookupOrAdd instead of add.
815     if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
816         return nullptr;
817 
818     return wrapper;
819 }
820 
821 JitCode*
822 JitRuntime::generatePreBarrier(JSContext* cx, MIRType type)
823 {
824     MacroAssembler masm;
825 
826     LiveRegisterSet regs =
827         LiveRegisterSet(GeneralRegisterSet(Registers::VolatileMask),
828                              FloatRegisterSet(FloatRegisters::VolatileMask));
829     masm.PushRegsInMask(regs);
830 
831     MOZ_ASSERT(PreBarrierReg == rdx);
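    // The mark function invoked below takes the runtime as its first argument; the
    // address of the thing to mark arrives in PreBarrierReg (rdx) and is passed
    // through as the second argument.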
832     masm.mov(ImmPtr(cx->runtime()), rcx);
833 
834     masm.setupUnalignedABICall(rax);
835     masm.passABIArg(rcx);
836     masm.passABIArg(rdx);
837     masm.callWithABI(IonMarkFunction(type));
838 
839     masm.PopRegsInMask(regs);
840     masm.ret();
841 
842     Linker linker(masm);
843     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
844 
845 #ifdef JS_ION_PERF
846     writePerfSpewerJitCodeProfile(code, "PreBarrier");
847 #endif
848 
849     return code;
850 }
851 
852 typedef bool (*HandleDebugTrapFn)(JSContext*, BaselineFrame*, uint8_t*, bool*);
853 static const VMFunction HandleDebugTrapInfo = FunctionInfo<HandleDebugTrapFn>(HandleDebugTrap);
854 
855 JitCode*
856 JitRuntime::generateDebugTrapHandler(JSContext* cx)
857 {
858     MacroAssembler masm;
859 #ifndef JS_USE_LINK_REGISTER
860     // The first value contains the return address,
861     // which we pull into ICTailCallReg for tail calls.
862     masm.setFramePushed(sizeof(intptr_t));
863 #endif
864 
865     Register scratch1 = rax;
866     Register scratch2 = rcx;
867     Register scratch3 = rdx;
868 
869     // Load the return address in scratch1.
870     masm.loadPtr(Address(rsp, 0), scratch1);
871 
872     // Load BaselineFrame pointer in scratch2.
873     masm.mov(rbp, scratch2);
874     masm.subPtr(Imm32(BaselineFrame::Size()), scratch2);
875 
876     // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
877     // the stub frame has a nullptr ICStub pointer, since this pointer is marked
878     // during GC.
879     masm.movePtr(ImmPtr(nullptr), ICStubReg);
880     EmitBaselineEnterStubFrame(masm, scratch3);
881 
882     JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
883     if (!code)
884         return nullptr;
885 
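    // Push the VM call's explicit arguments: the return pc (scratch1) and the
    // BaselineFrame pointer (scratch2).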
886     masm.push(scratch1);
887     masm.push(scratch2);
888     EmitBaselineCallVM(code, masm);
889 
890     EmitBaselineLeaveStubFrame(masm);
891 
892     // If the stub returns |true|, we have to perform a forced return
893     // (return from the JS frame). If the stub returns |false|, just return
894     // from the trap stub so that execution continues at the current pc.
895     Label forcedReturn;
896     masm.branchTest32(Assembler::NonZero, ReturnReg, ReturnReg, &forcedReturn);
897     masm.ret();
898 
899     masm.bind(&forcedReturn);
900     masm.loadValue(Address(ebp, BaselineFrame::reverseOffsetOfReturnValue()),
901                    JSReturnOperand);
902     masm.mov(rbp, rsp);
903     masm.pop(rbp);
904 
905     // Before returning, if profiling is turned on, make sure that lastProfilingFrame
906     // is set to the correct caller frame.
907     {
908         Label skipProfilingInstrumentation;
909         AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
910         masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skipProfilingInstrumentation);
911         masm.profilerExitFrame();
912         masm.bind(&skipProfilingInstrumentation);
913     }
914 
915     masm.ret();
916 
917     Linker linker(masm);
918     JitCode* codeDbg = linker.newCode<NoGC>(cx, OTHER_CODE);
919 
920 #ifdef JS_ION_PERF
921     writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler");
922 #endif
923 
924     return codeDbg;
925 }
926 
927 JitCode*
928 JitRuntime::generateExceptionTailStub(JSContext* cx, void* handler)
929 {
930     MacroAssembler masm;
931 
932     masm.handleFailureWithHandlerTail(handler);
933 
934     Linker linker(masm);
935     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
936 
937 #ifdef JS_ION_PERF
938     writePerfSpewerJitCodeProfile(code, "ExceptionTailStub");
939 #endif
940 
941     return code;
942 }
943 
944 JitCode*
945 JitRuntime::generateBailoutTailStub(JSContext* cx)
946 {
947     MacroAssembler masm;
948 
949     masm.generateBailoutTail(rdx, r9);
950 
951     Linker linker(masm);
952     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
953 
954 #ifdef JS_ION_PERF
955     writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
956 #endif
957 
958     return code;
959 }
960 
961 JitCode*
962 JitRuntime::generateProfilerExitFrameTailStub(JSContext* cx)
963 {
964     MacroAssembler masm;
965 
966     Register scratch1 = r8;
967     Register scratch2 = r9;
968     Register scratch3 = r10;
969     Register scratch4 = r11;
970 
971     //
972     // The code generated below expects that the current stack pointer points
973     // to an Ion or Baseline frame, at the state it would be immediately
974     // before a ret().  Thus, after this stub's business is done, it executes
975     // a ret() and returns directly to the caller script, on behalf of the
976     // callee script that jumped to this code.
977     //
978     // Thus the expected stack is:
979     //
980     //                                   StackPointer ----+
981     //                                                    v
982     // ..., ActualArgc, CalleeToken, Descriptor, ReturnAddr
983     // MEM-HI                                       MEM-LOW
984     //
985     //
986     // The generated jitcode is responsible for overwriting the
987     // jitActivation->lastProfilingFrame field with a pointer to the previous
988     // Ion or Baseline jit-frame that was pushed before this one. It is also
989     // responsible for overwriting jitActivation->lastProfilingCallSite with
990     // the return address into that frame.  The frame could either be an
991     // immediate "caller" frame, or it could be a frame in a previous
992     // JitActivation (if the current frame was entered from C++, and the C++
993     // was entered by some caller jit-frame further down the stack).
994     //
995     // So this jitcode is responsible for "walking up" the jit stack, finding
996     // the previous Ion or Baseline JS frame, and storing its address and the
997     // return address into the appropriate fields on the current jitActivation.
998     //
999     // There are a fixed number of different path types that can lead to the
1000     // current frame, which is either a baseline or ion frame:
1001     //
1002     // <Baseline-Or-Ion>
1003     // ^
1004     // |
1005     // ^--- Ion
1006     // |
1007     // ^--- Baseline Stub <---- Baseline
1008     // |
1009     // ^--- Argument Rectifier
1010     // |    ^
1011     // |    |
1012     // |    ^--- Ion
1013     // |    |
1014     // |    ^--- Baseline Stub <---- Baseline
1015     // |
1016     // ^--- Entry Frame (From C++)
1017     //
1018     Register actReg = scratch4;
1019     AbsoluteAddress activationAddr(GetJitContext()->runtime->addressOfProfilingActivation());
1020     masm.loadPtr(activationAddr, actReg);
1021 
1022     Address lastProfilingFrame(actReg, JitActivation::offsetOfLastProfilingFrame());
1023     Address lastProfilingCallSite(actReg, JitActivation::offsetOfLastProfilingCallSite());
1024 
1025 #ifdef DEBUG
1026     // Ensure that the frame we are exiting is the current lastProfilingFrame.
1027     {
1028         masm.loadPtr(lastProfilingFrame, scratch1);
1029         Label checkOk;
1030         masm.branchPtr(Assembler::Equal, scratch1, ImmWord(0), &checkOk);
1031         masm.branchPtr(Assembler::Equal, StackPointer, scratch1, &checkOk);
1032         masm.assumeUnreachable(
1033             "Mismatch between stored lastProfilingFrame and current stack pointer.");
1034         masm.bind(&checkOk);
1035     }
1036 #endif
1037 
1038     // Load the frame descriptor into |scratch1|, figure out what to do depending on its type.
1039     masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfDescriptor()), scratch1);
1040 
1041     // Going into the conditionals, we will have:
1042     //      FrameDescriptor.size in scratch1
1043     //      FrameDescriptor.type in scratch2
1044     masm.movePtr(scratch1, scratch2);
1045     masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch1);
1046     masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch2);
1047 
1048     // Handling of each case is dependent on FrameDescriptor.type
1049     Label handle_IonJS;
1050     Label handle_BaselineStub;
1051     Label handle_Rectifier;
1052     Label handle_IonAccessorIC;
1053     Label handle_Entry;
1054     Label end;
1055 
1056     masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_IonJS), &handle_IonJS);
1057     masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineJS), &handle_IonJS);
1058     masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineStub), &handle_BaselineStub);
1059     masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Rectifier), &handle_Rectifier);
1060     masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_IonAccessorIC), &handle_IonAccessorIC);
1061     masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Entry), &handle_Entry);
1062 
1063     masm.assumeUnreachable("Invalid caller frame type when exiting from Ion frame.");
1064 
1065     //
1066     // JitFrame_IonJS
1067     //
1068     // Stack layout:
1069     //                  ...
1070     //                  Ion-Descriptor
1071     //     Prev-FP ---> Ion-ReturnAddr
1072     //                  ... previous frame data ... |- Descriptor.Size
1073     //                  ... arguments ...           |
1074     //                  ActualArgc          |
1075     //                  CalleeToken         |- JitFrameLayout::Size()
1076     //                  Descriptor          |
1077     //        FP -----> ReturnAddr          |
1078     //
1079     masm.bind(&handle_IonJS);
1080     {
1081         // returning directly to an IonJS frame.  Store return addr to frame
1082         // in lastProfilingCallSite.
1083         masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfReturnAddress()), scratch2);
1084         masm.storePtr(scratch2, lastProfilingCallSite);
1085 
1086         // Store return frame in lastProfilingFrame.
1087         // scratch2 := StackPointer + Descriptor.size*1 + JitFrameLayout::Size();
1088         masm.lea(Operand(StackPointer, scratch1, TimesOne, JitFrameLayout::Size()), scratch2);
1089         masm.storePtr(scratch2, lastProfilingFrame);
1090         masm.ret();
1091     }
1092 
1093     //
1094     // JitFrame_BaselineStub
1095     //
1096     // Look past the stub and store the frame pointer to
1097     // the baselineJS frame prior to it.
1098     //
1099     // Stack layout:
1100     //              ...
1101     //              BL-Descriptor
1102     // Prev-FP ---> BL-ReturnAddr
1103     //      +-----> BL-PrevFramePointer
1104     //      |       ... BL-FrameData ...
1105     //      |       BLStub-Descriptor
1106     //      |       BLStub-ReturnAddr
1107     //      |       BLStub-StubPointer          |
1108     //      +------ BLStub-SavedFramePointer    |- Descriptor.Size
1109     //              ... arguments ...           |
1110     //              ActualArgc          |
1111     //              CalleeToken         |- JitFrameLayout::Size()
1112     //              Descriptor          |
1113     //    FP -----> ReturnAddr          |
1114     //
1115     // We take advantage of the fact that the stub frame saves the frame
1116     // pointer pointing to the baseline frame, so much of the calculation can be
1117     // avoided.
1118     //
1119     masm.bind(&handle_BaselineStub);
1120     {
1121         BaseIndex stubFrameReturnAddr(StackPointer, scratch1, TimesOne,
1122                                       JitFrameLayout::Size() +
1123                                       BaselineStubFrameLayout::offsetOfReturnAddress());
1124         masm.loadPtr(stubFrameReturnAddr, scratch2);
1125         masm.storePtr(scratch2, lastProfilingCallSite);
1126 
1127         BaseIndex stubFrameSavedFramePtr(StackPointer, scratch1, TimesOne,
1128                                          JitFrameLayout::Size() - (2 * sizeof(void*)));
1129         masm.loadPtr(stubFrameSavedFramePtr, scratch2);
1130         masm.addPtr(Imm32(sizeof(void*)), scratch2); // Skip past BL-PrevFramePtr
1131         masm.storePtr(scratch2, lastProfilingFrame);
1132         masm.ret();
1133     }
1134 
1135 
1136     //
1137     // JitFrame_Rectifier
1138     //
1139     // The rectifier frame can be preceded by either an IonJS or a
1140     // BaselineStub frame.
1141     //
1142     // Stack layout if caller of rectifier was Ion:
1143     //
1144     //              Ion-Descriptor
1145     //              Ion-ReturnAddr
1146     //              ... ion frame data ... |- Rect-Descriptor.Size
1147     //              < COMMON LAYOUT >
1148     //
1149     // Stack layout if caller of rectifier was Baseline:
1150     //
1151     //              BL-Descriptor
1152     // Prev-FP ---> BL-ReturnAddr
1153     //      +-----> BL-SavedFramePointer
1154     //      |       ... baseline frame data ...
1155     //      |       BLStub-Descriptor
1156     //      |       BLStub-ReturnAddr
1157     //      |       BLStub-StubPointer          |
1158     //      +------ BLStub-SavedFramePointer    |- Rect-Descriptor.Size
1159     //              ... args to rectifier ...   |
1160     //              < COMMON LAYOUT >
1161     //
1162     // Common stack layout:
1163     //
1164     //              ActualArgc          |
1165     //              CalleeToken         |- RectifierFrameLayout::Size()
1166     //              Rect-Descriptor     |
1167     //              Rect-ReturnAddr     |
1168     //              ... rectifier data & args ... |- Descriptor.Size
1169     //              ActualArgc      |
1170     //              CalleeToken     |- JitFrameLayout::Size()
1171     //              Descriptor      |
1172     //    FP -----> ReturnAddr      |
1173     //
1174     masm.bind(&handle_Rectifier);
1175     {
1176         // scratch2 := StackPointer + Descriptor.size + JitFrameLayout::Size()
1177         masm.lea(Operand(StackPointer, scratch1, TimesOne, JitFrameLayout::Size()), scratch2);
1178         masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfDescriptor()), scratch3);
1179         masm.movePtr(scratch3, scratch1);
1180         masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch3);
1181         masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch1);
1182 
1183         // Now |scratch1| contains Rect-Descriptor.Size
1184         // and |scratch2| points to Rectifier frame
1185         // and |scratch3| contains Rect-Descriptor.Type
1186 
1187         // Check for either Ion or BaselineStub frame.
1188         Label handle_Rectifier_BaselineStub;
1189         masm.branch32(Assembler::NotEqual, scratch3, Imm32(JitFrame_IonJS),
1190                       &handle_Rectifier_BaselineStub);
1191 
1192         // Handle Rectifier <- IonJS
1193         // scratch3 := RectFrame[ReturnAddr]
1194         masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfReturnAddress()), scratch3);
1195         masm.storePtr(scratch3, lastProfilingCallSite);
1196 
1197         // scratch3 := RectFrame + Rect-Descriptor.Size + RectifierFrameLayout::Size()
1198         masm.lea(Operand(scratch2, scratch1, TimesOne, RectifierFrameLayout::Size()), scratch3);
1199         masm.storePtr(scratch3, lastProfilingFrame);
1200         masm.ret();
1201 
1202         // Handle Rectifier <- BaselineStub <- BaselineJS
1203         masm.bind(&handle_Rectifier_BaselineStub);
1204 #ifdef DEBUG
1205         {
1206             Label checkOk;
1207             masm.branch32(Assembler::Equal, scratch3, Imm32(JitFrame_BaselineStub), &checkOk);
1208             masm.assumeUnreachable("Unrecognized frame preceding baselineStub.");
1209             masm.bind(&checkOk);
1210         }
1211 #endif
1212         BaseIndex stubFrameReturnAddr(scratch2, scratch1, TimesOne,
1213                                          RectifierFrameLayout::Size() +
1214                                          BaselineStubFrameLayout::offsetOfReturnAddress());
1215         masm.loadPtr(stubFrameReturnAddr, scratch3);
1216         masm.storePtr(scratch3, lastProfilingCallSite);
1217 
1218         BaseIndex stubFrameSavedFramePtr(scratch2, scratch1, TimesOne,
1219                                          RectifierFrameLayout::Size() - (2 * sizeof(void*)));
1220         masm.loadPtr(stubFrameSavedFramePtr, scratch3);
1221         masm.addPtr(Imm32(sizeof(void*)), scratch3);
1222         masm.storePtr(scratch3, lastProfilingFrame);
1223         masm.ret();
1224     }
1225 
1226     // JitFrame_IonAccessorIC
1227     //
1228     // The caller is always an IonJS frame.
1229     //
1230     //              Ion-Descriptor
1231     //              Ion-ReturnAddr
1232     //              ... ion frame data ... |- AccFrame-Descriptor.Size
1233     //              StubCode             |
1234     //              AccFrame-Descriptor  |- IonAccessorICFrameLayout::Size()
1235     //              AccFrame-ReturnAddr  |
1236     //              ... accessor frame data & args ... |- Descriptor.Size
1237     //              ActualArgc      |
1238     //              CalleeToken     |- JitFrameLayout::Size()
1239     //              Descriptor      |
1240     //    FP -----> ReturnAddr      |
1241     masm.bind(&handle_IonAccessorIC);
1242     {
1243         // scratch2 := StackPointer + Descriptor.size + JitFrameLayout::Size()
1244         masm.lea(Operand(StackPointer, scratch1, TimesOne, JitFrameLayout::Size()), scratch2);
1245 
1246         // scratch3 := AccFrame-Descriptor.Size
1247         masm.loadPtr(Address(scratch2, IonAccessorICFrameLayout::offsetOfDescriptor()), scratch3);
1248 #ifdef DEBUG
1249         // Assert previous frame is an IonJS frame.
1250         masm.movePtr(scratch3, scratch1);
1251         masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch1);
1252         {
1253             Label checkOk;
1254             masm.branch32(Assembler::Equal, scratch1, Imm32(JitFrame_IonJS), &checkOk);
1255             masm.assumeUnreachable("IonAccessorIC frame must be preceded by IonJS frame");
1256             masm.bind(&checkOk);
1257         }
1258 #endif
1259         masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch3);
1260 
1261         // lastProfilingCallSite := AccFrame-ReturnAddr
1262         masm.loadPtr(Address(scratch2, IonAccessorICFrameLayout::offsetOfReturnAddress()), scratch1);
1263         masm.storePtr(scratch1, lastProfilingCallSite);
1264 
1265         // lastProfilingFrame := AccessorFrame + AccFrame-Descriptor.Size +
1266         //                       IonAccessorICFrameLayout::Size()
1267         masm.lea(Operand(scratch2, scratch3, TimesOne, IonAccessorICFrameLayout::Size()), scratch1);
1268         masm.storePtr(scratch1, lastProfilingFrame);
1269         masm.ret();
1270     }
1271 
1272     //
1273     // JitFrame_Entry
1274     //
1275     // If at an entry frame, store null into both fields.
1276     //
1277     masm.bind(&handle_Entry);
1278     {
1279         masm.movePtr(ImmPtr(nullptr), scratch1);
1280         masm.storePtr(scratch1, lastProfilingCallSite);
1281         masm.storePtr(scratch1, lastProfilingFrame);
1282         masm.ret();
1283     }
1284 
1285     Linker linker(masm);
1286     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
1287 
1288 #ifdef JS_ION_PERF
1289     writePerfSpewerJitCodeProfile(code, "ProfilerExitFrameStub");
1290 #endif
1291 
1292     return code;
1293 }
1294