/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "mozilla/DebugOnly.h"

#include "jscompartment.h"

#include "jit/Bailouts.h"
#include "jit/JitCompartment.h"
#include "jit/JitFrames.h"
#include "jit/JitSpewer.h"
#include "jit/Linker.h"
#include "jit/mips32/Bailouts-mips32.h"
#include "jit/mips32/SharedICHelpers-mips32.h"
#ifdef JS_ION_PERF
# include "jit/PerfSpewer.h"
#endif
#include "jit/VMFunctions.h"

#include "jit/MacroAssembler-inl.h"

using namespace js;
using namespace js::jit;

static_assert(sizeof(uintptr_t) == sizeof(uint32_t), "Not 64-bit clean.");

struct EnterJITRegs
{
    double f30;
    double f28;
    double f26;
    double f24;
    double f22;
    double f20;

    // empty slot for alignment
    uintptr_t align;

    // non-volatile registers.
    uintptr_t ra;
    uintptr_t s7;
    uintptr_t s6;
    uintptr_t s5;
    uintptr_t s4;
    uintptr_t s3;
    uintptr_t s2;
    uintptr_t s1;
    uintptr_t s0;
};

struct EnterJITArgs
{
    // First 4 argument placeholders
    void* jitcode; // <- sp points here when function is entered.
    int maxArgc;
    Value* maxArgv;
    InterpreterFrame* fp;

    // Arguments on stack
    CalleeToken calleeToken;
    JSObject* scopeChain;
    size_t numStackValues;
    Value* vp;
};
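
// Note (descriptive, based on the assumed O32 calling convention): EnterJITArgs
// mirrors the stack exactly as the caller leaves it on entry to the trampoline.
// The first four words are the caller-reserved home slots for the register
// arguments (a0-a3), and the remaining arguments follow on the stack, which is
// why the slotToken/slotVp addresses below can be computed as
// sizeof(EnterJITRegs) + offsetof(EnterJITArgs, field).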

static void
GenerateReturn(MacroAssembler& masm, int returnCode)
{
    MOZ_ASSERT(masm.framePushed() == sizeof(EnterJITRegs));

    // Restore non-volatile registers
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s0)), s0);
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s1)), s1);
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s2)), s2);
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s3)), s3);
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s4)), s4);
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s5)), s5);
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s6)), s6);
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s7)), s7);
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, ra)), ra);

    // Restore non-volatile floating point registers
    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f20)), f20);
    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f22)), f22);
    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f24)), f24);
    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f26)), f26);
    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f28)), f28);
    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f30)), f30);

    masm.freeStack(sizeof(EnterJITRegs));

    masm.branch(ra);
}

static void
GeneratePrologue(MacroAssembler& masm)
{
    // Save non-volatile registers. These must be saved by the trampoline,
    // rather than the JIT'd code, because they are scanned by the conservative
    // scanner.
    masm.reserveStack(sizeof(EnterJITRegs));
    masm.storePtr(s0, Address(StackPointer, offsetof(EnterJITRegs, s0)));
    masm.storePtr(s1, Address(StackPointer, offsetof(EnterJITRegs, s1)));
    masm.storePtr(s2, Address(StackPointer, offsetof(EnterJITRegs, s2)));
    masm.storePtr(s3, Address(StackPointer, offsetof(EnterJITRegs, s3)));
    masm.storePtr(s4, Address(StackPointer, offsetof(EnterJITRegs, s4)));
    masm.storePtr(s5, Address(StackPointer, offsetof(EnterJITRegs, s5)));
    masm.storePtr(s6, Address(StackPointer, offsetof(EnterJITRegs, s6)));
    masm.storePtr(s7, Address(StackPointer, offsetof(EnterJITRegs, s7)));
    masm.storePtr(ra, Address(StackPointer, offsetof(EnterJITRegs, ra)));

    masm.as_sd(f20, StackPointer, offsetof(EnterJITRegs, f20));
    masm.as_sd(f22, StackPointer, offsetof(EnterJITRegs, f22));
    masm.as_sd(f24, StackPointer, offsetof(EnterJITRegs, f24));
    masm.as_sd(f26, StackPointer, offsetof(EnterJITRegs, f26));
    masm.as_sd(f28, StackPointer, offsetof(EnterJITRegs, f28));
    masm.as_sd(f30, StackPointer, offsetof(EnterJITRegs, f30));
}


/*
 * This method generates a trampoline for a C++ function with the following
 * signature:
 *   void enter(void* code, int argc, Value* argv, InterpreterFrame* fp,
 *              CalleeToken calleeToken, JSObject* scopeChain,
 *              size_t numStackValues, Value* vp)
 *   ...using standard EABI calling convention
 */
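/*
 * For reference (an assumption about the C++ caller, not something generated
 * here): the resulting JitCode is expected to be invoked through a function
 * pointer roughly of the form
 *
 *   typedef void (*EnterJitCode)(void* code, unsigned argc, Value* argv,
 *                                InterpreterFrame* fp, CalleeToken calleeToken,
 *                                JSObject* scopeChain, size_t numStackValues,
 *                                Value* vp);
 *
 * so EnterJITArgs above simply names the stack slots of that call.
 */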
JitCode*
JitRuntime::generateEnterJIT(JSContext* cx, EnterJitType type)
{
    const Register reg_code = a0;
    const Register reg_argc = a1;
    const Register reg_argv = a2;
    const mozilla::DebugOnly<Register> reg_frame = a3;

    MOZ_ASSERT(OsrFrameReg == reg_frame);

    MacroAssembler masm(cx);
    GeneratePrologue(masm);

    const Address slotToken(sp, sizeof(EnterJITRegs) + offsetof(EnterJITArgs, calleeToken));
    const Address slotVp(sp, sizeof(EnterJITRegs) + offsetof(EnterJITArgs, vp));

    // Save stack pointer into s4
    masm.movePtr(StackPointer, s4);

    // Load calleeToken into s2.
    masm.loadPtr(slotToken, s2);

    // Save stack pointer as baseline frame.
    if (type == EnterJitBaseline)
        masm.movePtr(StackPointer, BaselineFrameReg);

    // Load the number of actual arguments into s3.
    masm.loadPtr(slotVp, s3);
    masm.unboxInt32(Address(s3, 0), s3);

    /***************************************************************
    Loop over argv vector, push arguments onto stack in reverse order
    ***************************************************************/

    // If we are constructing, the argument count also needs to include newTarget.
    {
        Label noNewTarget;
        masm.branchTest32(Assembler::Zero, s2, Imm32(CalleeToken_FunctionConstructing),
                          &noNewTarget);

        masm.add32(Imm32(1), reg_argc);

        masm.bind(&noNewTarget);
    }

    masm.as_sll(s0, reg_argc, 3); // s0 = argc * 8
    masm.addPtr(reg_argv, s0); // s0 = argv + argc * 8

    // Loop over arguments, copying them from an unknown buffer onto the Ion
    // stack so they can be accessed from JIT'ed code.
    Label header, footer;
    // If there aren't any arguments, don't do anything
    masm.ma_b(s0, reg_argv, &footer, Assembler::BelowOrEqual, ShortJump);
    {
        masm.bind(&header);

        masm.subPtr(Imm32(2 * sizeof(uintptr_t)), s0);
        masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);

        ValueOperand value = ValueOperand(s6, s7);
        masm.loadValue(Address(s0, 0), value);
        masm.storeValue(value, Address(StackPointer, 0));

        masm.ma_b(s0, reg_argv, &header, Assembler::Above, ShortJump);
    }
    masm.bind(&footer);

    masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
    masm.storePtr(s3, Address(StackPointer, sizeof(uintptr_t))); // actual arguments
    masm.storePtr(s2, Address(StackPointer, 0)); // callee token

    masm.subPtr(StackPointer, s4);
    masm.makeFrameDescriptor(s4, JitFrame_Entry);
    masm.push(s4); // descriptor
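    // (Assumed descriptor encoding, see JitFrames.h: makeFrameDescriptor()
    // packs the byte size computed above into the upper bits, shifted by
    // FRAMESIZE_SHIFT, with the frame type in the low bits. The pop and
    // rshiftPtr(FRAMESIZE_SHIFT) after the call below recover that size to
    // discard the pushed arguments.)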

    CodeLabel returnLabel;
    CodeLabel oomReturnLabel;
    if (type == EnterJitBaseline) {
        // Handle OSR.
        AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
        regs.take(OsrFrameReg);
        regs.take(BaselineFrameReg);
        regs.take(reg_code);
        regs.take(ReturnReg);

        const Address slotNumStackValues(BaselineFrameReg, sizeof(EnterJITRegs) +
                                         offsetof(EnterJITArgs, numStackValues));
        const Address slotScopeChain(BaselineFrameReg, sizeof(EnterJITRegs) +
                                     offsetof(EnterJITArgs, scopeChain));

        Label notOsr;
        masm.ma_b(OsrFrameReg, OsrFrameReg, &notOsr, Assembler::Zero, ShortJump);

        Register scratch = regs.takeAny();

        Register numStackValues = regs.takeAny();
        masm.load32(slotNumStackValues, numStackValues);

        // Push return address.
        masm.subPtr(Imm32(sizeof(uintptr_t)), StackPointer);
        masm.ma_li(scratch, returnLabel.patchAt());
        masm.storePtr(scratch, Address(StackPointer, 0));

        // Push previous frame pointer.
        masm.subPtr(Imm32(sizeof(uintptr_t)), StackPointer);
        masm.storePtr(BaselineFrameReg, Address(StackPointer, 0));

        // Reserve frame.
        Register framePtr = BaselineFrameReg;
        masm.subPtr(Imm32(BaselineFrame::Size()), StackPointer);
        masm.movePtr(StackPointer, framePtr);

        // Reserve space for locals and stack values.
        masm.ma_sll(scratch, numStackValues, Imm32(3));
        masm.subPtr(scratch, StackPointer);

        // Enter exit frame.
        masm.addPtr(Imm32(BaselineFrame::Size() + BaselineFrame::FramePointerOffset), scratch);
        masm.makeFrameDescriptor(scratch, JitFrame_BaselineJS);

        // Push frame descriptor and fake return address.
        masm.reserveStack(2 * sizeof(uintptr_t));
        masm.storePtr(scratch, Address(StackPointer, sizeof(uintptr_t))); // Frame descriptor
        masm.storePtr(zero, Address(StackPointer, 0)); // fake return address

        // No GC things to mark, push a bare token.
        masm.enterFakeExitFrame(ExitFrameLayoutBareToken);

        masm.reserveStack(2 * sizeof(uintptr_t));
        masm.storePtr(framePtr, Address(StackPointer, sizeof(uintptr_t))); // BaselineFrame
        masm.storePtr(reg_code, Address(StackPointer, 0)); // jitcode

        masm.setupUnalignedABICall(scratch);
        masm.passABIArg(BaselineFrameReg); // BaselineFrame
        masm.passABIArg(OsrFrameReg); // InterpreterFrame
        masm.passABIArg(numStackValues);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, jit::InitBaselineFrameForOsr));

        regs.add(OsrFrameReg);
        regs.take(JSReturnOperand);
        Register jitcode = regs.takeAny();
        masm.loadPtr(Address(StackPointer, 0), jitcode);
        masm.loadPtr(Address(StackPointer, sizeof(uintptr_t)), framePtr);
        masm.freeStack(2 * sizeof(uintptr_t));

        Label error;
        masm.freeStack(ExitFrameLayout::SizeWithFooter());
        masm.addPtr(Imm32(BaselineFrame::Size()), framePtr);
        masm.branchIfFalseBool(ReturnReg, &error);

        // If OSR-ing, then emit instrumentation for setting lastProfilerFrame
        // if profiler instrumentation is enabled.
        {
            Label skipProfilingInstrumentation;
            Register realFramePtr = numStackValues;
            AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
            masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
                          &skipProfilingInstrumentation);
            masm.ma_addu(realFramePtr, framePtr, Imm32(sizeof(void*)));
            masm.profilerEnterFrame(realFramePtr, scratch);
            masm.bind(&skipProfilingInstrumentation);
        }

        masm.jump(jitcode);

        // OOM: load error value, discard return address and previous frame
        // pointer and return.
        masm.bind(&error);
        masm.movePtr(framePtr, StackPointer);
        masm.addPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
        masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
        masm.ma_li(scratch, oomReturnLabel.patchAt());
        masm.jump(scratch);

        masm.bind(&notOsr);
        // Load the scope chain in R1.
        MOZ_ASSERT(R1.scratchReg() != reg_code);
        masm.loadPtr(slotScopeChain, R1.scratchReg());
    }

    // The call will push the return address on the stack, thus we check that
    // the stack would be aligned once the call is complete.
    masm.assertStackAlignment(JitStackAlignment, sizeof(uintptr_t));

    // Call the function, pushing the return address onto the stack.
    masm.callJitNoProfiler(reg_code);

    if (type == EnterJitBaseline) {
        // Baseline OSR will return here.
        masm.bind(returnLabel.target());
        masm.addCodeLabel(returnLabel);
        masm.bind(oomReturnLabel.target());
        masm.addCodeLabel(oomReturnLabel);
    }

    // Pop arguments off the stack.
    // s0 <- 8*argc (size of all arguments we pushed on the stack)
    masm.pop(s0);
    masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), s0);
    masm.addPtr(s0, StackPointer);

    // Store the returned value into slotVp.
    masm.loadPtr(slotVp, s1);
    masm.storeValue(JSReturnOperand, Address(s1, 0));

    // Restore non-volatile registers and return.
    GenerateReturn(masm, ShortJump);

    Linker linker(masm);
    AutoFlushICache afc("GenerateEnterJIT");
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "EnterJIT");
#endif

    return code;
}

JitCode*
JitRuntime::generateInvalidator(JSContext* cx)
{
    MacroAssembler masm(cx);

    // NOTE: Members ionScript_ and osiPointReturnAddress_ of
    // InvalidationBailoutStack are already on the stack.
    static const uint32_t STACK_DATA_SIZE = sizeof(InvalidationBailoutStack) -
                                            2 * sizeof(uintptr_t);

    // The stack has to be aligned here. If not, we will have to fix it.
    masm.checkStackAlignment();

    // Make room for data on stack.
    masm.subPtr(Imm32(STACK_DATA_SIZE), StackPointer);

    // Save general purpose registers
    for (uint32_t i = 0; i < Registers::Total; i++) {
        Address address = Address(StackPointer, InvalidationBailoutStack::offsetOfRegs() +
                                                i * sizeof(uintptr_t));
        masm.storePtr(Register::FromCode(i), address);
    }

    // Save floating point registers
    // We can use as_sd because the stack is aligned.
    for (uint32_t i = 0; i < FloatRegisters::TotalDouble; i++)
        masm.as_sd(FloatRegister::FromIndex(i, FloatRegister::Double), StackPointer,
                   InvalidationBailoutStack::offsetOfFpRegs() + i * sizeof(double));

    // Pass pointer to InvalidationBailoutStack structure.
    masm.movePtr(StackPointer, a0);

    // Reserve place for return value and BailoutInfo pointer
    masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
    // Pass pointer to return value.
    masm.ma_addu(a1, StackPointer, Imm32(sizeof(uintptr_t)));
    // Pass pointer to BailoutInfo
    masm.movePtr(StackPointer, a2);

    masm.setupAlignedABICall();
    masm.passABIArg(a0);
    masm.passABIArg(a1);
    masm.passABIArg(a2);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, InvalidationBailout));

    masm.loadPtr(Address(StackPointer, 0), a2);
    masm.loadPtr(Address(StackPointer, sizeof(uintptr_t)), a1);
    // Remove the return address, the IonScript, the register state
    // (InvalidationBailoutStack) and the space that was allocated for the
    // return value.
    masm.addPtr(Imm32(sizeof(InvalidationBailoutStack) + 2 * sizeof(uintptr_t)), StackPointer);
    // Remove the space that this frame was using before the bailout
    // (computed by InvalidationBailout).
    masm.addPtr(a1, StackPointer);

    // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2.
    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
    masm.branch(bailoutTail);

    Linker linker(masm);
    AutoFlushICache afc("Invalidator");
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
    JitSpew(JitSpew_IonInvalidate, "   invalidation thunk created at %p", (void*) code->raw());

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "Invalidator");
#endif

    return code;
}

JitCode*
JitRuntime::generateArgumentsRectifier(JSContext* cx, void** returnAddrOut)
{
    MacroAssembler masm(cx);

    // ArgumentsRectifierReg contains the |nargs| pushed onto the current
    // frame. Including |this|, there are (|nargs| + 1) arguments to copy.
    MOZ_ASSERT(ArgumentsRectifierReg == s3);

    Register numActArgsReg = t6;
    Register calleeTokenReg = t7;
    Register numArgsReg = t5;

    // Copy number of actual arguments into numActArgsReg.
    masm.loadPtr(Address(StackPointer, RectifierFrameLayout::offsetOfNumActualArgs()),
                 numActArgsReg);

    // Load the number of |undefined|s to push into t1.
    masm.loadPtr(Address(StackPointer, RectifierFrameLayout::offsetOfCalleeToken()),
                 calleeTokenReg);
    masm.mov(calleeTokenReg, numArgsReg);
    masm.andPtr(Imm32(CalleeTokenMask), numArgsReg);
    masm.load16ZeroExtend(Address(numArgsReg, JSFunction::offsetOfNargs()), numArgsReg);

    masm.as_subu(t1, numArgsReg, s3);

    // Get the topmost argument.
    masm.ma_sll(t0, s3, Imm32(3)); // t0 <- nargs * 8
    masm.as_addu(t2, sp, t0); // t2 <- sp + nargs * 8
    masm.addPtr(Imm32(sizeof(RectifierFrameLayout)), t2);

    {
        Label notConstructing;

        masm.branchTest32(Assembler::Zero, calleeTokenReg, Imm32(CalleeToken_FunctionConstructing),
                          &notConstructing);

        // |newTarget| sits one Value above the topmost argument; copy it
        // onto the new frame.
        masm.subPtr(Imm32(sizeof(Value)), StackPointer);
        masm.load32(Address(t2, NUNBOX32_TYPE_OFFSET + sizeof(Value)), t0);
        masm.store32(t0, Address(StackPointer, NUNBOX32_TYPE_OFFSET));
        masm.load32(Address(t2, NUNBOX32_PAYLOAD_OFFSET + sizeof(Value)), t0);
        masm.store32(t0, Address(StackPointer, NUNBOX32_PAYLOAD_OFFSET));

        // Include the newly pushed newTarget value in the frame size
        // calculated below.
        masm.add32(Imm32(1), numArgsReg);

        masm.bind(&notConstructing);
    }

    // Push undefined.
    masm.moveValue(UndefinedValue(), ValueOperand(t3, t4));
    {
        Label undefLoopTop;
        masm.bind(&undefLoopTop);

        masm.subPtr(Imm32(sizeof(Value)), StackPointer);
        masm.storeValue(ValueOperand(t3, t4), Address(StackPointer, 0));
        masm.sub32(Imm32(1), t1);

        masm.ma_b(t1, t1, &undefLoopTop, Assembler::NonZero, ShortJump);
    }

    // Push arguments, |nargs| + 1 times (to include |this|).
    {
        Label copyLoopTop, initialSkip;

        masm.ma_b(&initialSkip, ShortJump);

        masm.bind(&copyLoopTop);
        masm.subPtr(Imm32(sizeof(Value)), t2);
        masm.sub32(Imm32(1), s3);

        masm.bind(&initialSkip);

        MOZ_ASSERT(sizeof(Value) == 2 * sizeof(uint32_t));
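        // (Descriptive note: NUNBOX32_TYPE_OFFSET and NUNBOX32_PAYLOAD_OFFSET
        // are assumed to abstract over the endianness of the 32-bit nunbox
        // Value layout, so each half is stored back at the same offset it was
        // read from.)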
        // Read argument and push to stack.
        masm.subPtr(Imm32(sizeof(Value)), StackPointer);
        masm.load32(Address(t2, NUNBOX32_TYPE_OFFSET), t0);
        masm.store32(t0, Address(StackPointer, NUNBOX32_TYPE_OFFSET));
        masm.load32(Address(t2, NUNBOX32_PAYLOAD_OFFSET), t0);
        masm.store32(t0, Address(StackPointer, NUNBOX32_PAYLOAD_OFFSET));

        masm.ma_b(s3, s3, &copyLoopTop, Assembler::NonZero, ShortJump);
    }

    // Translate the frame size from values into bytes.
    masm.ma_addu(t0, numArgsReg, Imm32(1));
    masm.lshiftPtr(Imm32(3), t0);

    // Construct sizeDescriptor.
    masm.makeFrameDescriptor(t0, JitFrame_Rectifier);

    // Construct JitFrameLayout.
    masm.subPtr(Imm32(3 * sizeof(uintptr_t)), StackPointer);
    // Push actual arguments.
    masm.storePtr(numActArgsReg, Address(StackPointer, 2 * sizeof(uintptr_t)));
    // Push callee token.
    masm.storePtr(calleeTokenReg, Address(StackPointer, sizeof(uintptr_t)));
    // Push frame descriptor.
    masm.storePtr(t0, Address(StackPointer, 0));

    // Call the target function.
    // Note that this code assumes the function is JITted.
    masm.andPtr(Imm32(CalleeTokenMask), calleeTokenReg);
    masm.loadPtr(Address(calleeTokenReg, JSFunction::offsetOfNativeOrScript()), t1);
    masm.loadBaselineOrIonRaw(t1, t1, nullptr);
    uint32_t returnOffset = masm.callJitNoProfiler(t1);

    // arg1
    //  ...
    // argN
    // num actual args
    // callee token
    // sizeDescriptor     <- sp now
    // return address

    // Remove the rectifier frame.
    // t0 <- descriptor with FrameType.
    masm.loadPtr(Address(StackPointer, 0), t0);
    masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), t0); // t0 <- frame size.

    // Discard descriptor, calleeToken and number of actual arguments.
    masm.addPtr(Imm32(3 * sizeof(uintptr_t)), StackPointer);

    // arg1
    //  ...
    // argN               <- sp now; t0 <- frame size
    // num actual args
    // callee token
    // sizeDescriptor
    // return address

    // Discard pushed arguments.
    masm.addPtr(t0, StackPointer);

    masm.ret();
    Linker linker(masm);
    AutoFlushICache afc("ArgumentsRectifier");
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

    if (returnAddrOut)
        *returnAddrOut = (void*) (code->raw() + returnOffset);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
#endif

    return code;
}

// NOTE: Members snapshotOffset_ and padding_ of BailoutStack
// are not stored in PushBailoutFrame().
static const uint32_t bailoutDataSize = sizeof(BailoutStack) - 2 * sizeof(uintptr_t);
static const uint32_t bailoutInfoOutParamSize = 2 * sizeof(uintptr_t);

/* There are two different stack layouts when doing a bailout. They are
 * represented via class BailoutStack.
 *
 * - The first case is when the bailout is done through the bailout table. In
 * this case the table offset is stored in $ra (look at
 * JitRuntime::generateBailoutTable()) and the thunk code should save it on
 * the stack. In this case frameClassId_ cannot be NO_FRAME_SIZE_CLASS_ID.
 * Members snapshotOffset_ and padding_ are not on the stack.
 *
 * - The other case is when the bailout is done via out-of-line code (lazy
 * bailout). In this case the frame size is stored in $ra (look at
 * CodeGeneratorMIPS::generateOutOfLineCode()) and the thunk code should save
 * it on the stack. The other difference is that members snapshotOffset_ and
 * padding_ are pushed to the stack by CodeGeneratorMIPS::visitOutOfLineBailout().
 * Field frameClassId_ is forced to be NO_FRAME_SIZE_CLASS_ID
 * (See: JitRuntime::generateBailoutHandler).
 */
static void
PushBailoutFrame(MacroAssembler& masm, uint32_t frameClass, Register spArg)
{
    // Make sure that alignment is proper.
    masm.checkStackAlignment();

    // Make room for data.
    masm.subPtr(Imm32(bailoutDataSize), StackPointer);

    // Save general purpose registers.
    for (uint32_t i = 0; i < Registers::Total; i++) {
        uint32_t off = BailoutStack::offsetOfRegs() + i * sizeof(uintptr_t);
        masm.storePtr(Register::FromCode(i), Address(StackPointer, off));
    }

    // Save floating point registers
    // We can use as_sd because the stack is aligned.
    for (uint32_t i = 0; i < FloatRegisters::TotalDouble; i++)
        masm.as_sd(FloatRegister::FromIndex(i, FloatRegister::Double), StackPointer,
                   BailoutStack::offsetOfFpRegs() + i * sizeof(double));

    // Store the frameSize_ or tableOffset_ stored in ra
    // See: JitRuntime::generateBailoutTable()
    // See: CodeGeneratorMIPS::generateOutOfLineCode()
    masm.storePtr(ra, Address(StackPointer, BailoutStack::offsetOfFrameSize()));

    // Put frame class to stack
    masm.storePtr(ImmWord(frameClass), Address(StackPointer, BailoutStack::offsetOfFrameClass()));

    // Put pointer to BailoutStack as first argument to the Bailout()
    masm.movePtr(StackPointer, spArg);
}

static void
GenerateBailoutThunk(JSContext* cx, MacroAssembler& masm, uint32_t frameClass)
{
    PushBailoutFrame(masm, frameClass, a0);

    // Put pointer to BailoutInfo
    masm.subPtr(Imm32(bailoutInfoOutParamSize), StackPointer);
    masm.storePtr(ImmPtr(nullptr), Address(StackPointer, 0));
    masm.movePtr(StackPointer, a1);

    masm.setupAlignedABICall();
    masm.passABIArg(a0);
    masm.passABIArg(a1);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, Bailout));

    // Get the BailoutInfo pointer.
    masm.loadPtr(Address(StackPointer, 0), a2);

    // Remove both the bailout frame and the topmost Ion frame's stack.
    if (frameClass == NO_FRAME_SIZE_CLASS_ID) {
        // Load frameSize from stack
        masm.loadPtr(Address(StackPointer,
                             bailoutInfoOutParamSize + BailoutStack::offsetOfFrameSize()), a1);

        // Remove complete BailoutStack class and data after it
        masm.addPtr(Imm32(sizeof(BailoutStack) + bailoutInfoOutParamSize), StackPointer);
        // Remove frame size from stack
        masm.addPtr(a1, StackPointer);
    } else {
        uint32_t frameSize = FrameSizeClass::FromClass(frameClass).frameSize();
        // Remove the data this function added and the frame size.
        masm.addPtr(Imm32(bailoutDataSize + bailoutInfoOutParamSize + frameSize), StackPointer);
    }

    // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2.
    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
    masm.branch(bailoutTail);
}

JitCode*
JitRuntime::generateBailoutTable(JSContext* cx, uint32_t frameClass)
{
    MacroAssembler masm(cx);

    Label bailout;
    for (size_t i = 0; i < BAILOUT_TABLE_SIZE; i++) {
        // Calculate the offset to the end of the table.
        int32_t offset = (BAILOUT_TABLE_SIZE - i) * BAILOUT_TABLE_ENTRY_SIZE;

        // We use 'ra' as the table offset later in GenerateBailoutThunk.
        masm.as_bal(BOffImm16(offset));
        masm.nop();
    }
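    // (Descriptive note: each table entry is a bal plus its delay-slot nop,
    // all branching to the common thunk below, so the return address the bal
    // leaves in 'ra' identifies which entry fired; the bailout machinery is
    // assumed to map that offset back to the corresponding snapshot.)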
    masm.bind(&bailout);

    GenerateBailoutThunk(cx, masm, frameClass);

    Linker linker(masm);
    AutoFlushICache afc("BailoutTable");
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "BailoutTable");
#endif

    return code;
}

JitCode*
JitRuntime::generateBailoutHandler(JSContext* cx)
{
    MacroAssembler masm(cx);
    GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);

    Linker linker(masm);
    AutoFlushICache afc("BailoutHandler");
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "BailoutHandler");
#endif

    return code;
}

JitCode*
JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
{
    MOZ_ASSERT(functionWrappers_);
    MOZ_ASSERT(functionWrappers_->initialized());
    VMWrapperMap::AddPtr p = functionWrappers_->lookupForAdd(&f);
    if (p)
        return p->value();

    MacroAssembler masm(cx);

    AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);

    static_assert((Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
                  "Wrapper register set should be a superset of Volatile register set.");

    // The context is the first argument; a0 is the first argument register.
    Register cxreg = a0;
    regs.take(cxreg);

    // We're aligned to an exit frame, so link it up.
    masm.enterExitFrame(&f);
    masm.loadJSContext(cxreg);

    // Save the base of the argument set stored on the stack.
    Register argsBase = InvalidReg;
    if (f.explicitArgs) {
        argsBase = t1; // Use temporary register.
        regs.take(argsBase);
        masm.ma_addu(argsBase, StackPointer, Imm32(ExitFrameLayout::SizeWithFooter()));
    }

    masm.alignStackPointer();

    // Reserve space for the outparameter. Reserve sizeof(Value) for every
    // case so that the stack stays aligned.
    uint32_t outParamSize = 0;
    switch (f.outParam) {
      case Type_Value:
        outParamSize = sizeof(Value);
        masm.reserveStack(outParamSize);
        break;

      case Type_Handle:
        {
            uint32_t pushed = masm.framePushed();
            masm.PushEmptyRooted(f.outParamRootType);
            outParamSize = masm.framePushed() - pushed;
        }
        break;

      case Type_Bool:
      case Type_Int32:
        MOZ_ASSERT(sizeof(uintptr_t) == sizeof(uint32_t));
      case Type_Pointer:
        outParamSize = sizeof(uintptr_t);
        masm.reserveStack(outParamSize);
        break;

      case Type_Double:
        outParamSize = sizeof(double);
        masm.reserveStack(outParamSize);
        break;
      default:
        MOZ_ASSERT(f.outParam == Type_Void);
        break;
    }

    uint32_t outParamOffset = 0;
    if (f.outParam != Type_Void) {
        // Make sure that the stack is double-aligned after the outParam.
        MOZ_ASSERT(outParamSize <= sizeof(double));
        outParamOffset += sizeof(double) - outParamSize;
    }
    // Reserve stack for double sized args that are copied to be aligned.
    outParamOffset += f.doubleByRefArgs() * sizeof(double);

    Register doubleArgs = t0;
    masm.reserveStack(outParamOffset);
    masm.movePtr(StackPointer, doubleArgs);

    masm.setupAlignedABICall();
    masm.passABIArg(cxreg);

    size_t argDisp = 0;
    size_t doubleArgDisp = 0;

    // Copy any arguments.
    for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
        MoveOperand from;
        switch (f.argProperties(explicitArg)) {
          case VMFunction::WordByValue:
            masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::GENERAL);
            argDisp += sizeof(uint32_t);
            break;
          case VMFunction::DoubleByValue:
            // Values should be passed by reference, not by value, so we
            // assert that the argument is a double-precision float.
            MOZ_ASSERT(f.argPassedInFloatReg(explicitArg));
            masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::DOUBLE);
            argDisp += sizeof(double);
            break;
          case VMFunction::WordByRef:
            masm.passABIArg(MoveOperand(argsBase, argDisp, MoveOperand::EFFECTIVE_ADDRESS),
                            MoveOp::GENERAL);
            argDisp += sizeof(uint32_t);
            break;
          case VMFunction::DoubleByRef:
            // Copy double sized argument to aligned place.
            masm.ma_ld(ScratchDoubleReg, Address(argsBase, argDisp));
            masm.as_sd(ScratchDoubleReg, doubleArgs, doubleArgDisp);
            masm.passABIArg(MoveOperand(doubleArgs, doubleArgDisp, MoveOperand::EFFECTIVE_ADDRESS),
                            MoveOp::GENERAL);
            doubleArgDisp += sizeof(double);
            argDisp += sizeof(double);
            break;
        }
    }

    MOZ_ASSERT_IF(f.outParam != Type_Void,
                  doubleArgDisp + sizeof(double) == outParamOffset + outParamSize);

    // Copy the implicit outparam, if any.
    if (f.outParam != Type_Void) {
        masm.passABIArg(MoveOperand(doubleArgs, outParamOffset, MoveOperand::EFFECTIVE_ADDRESS),
                        MoveOp::GENERAL);
    }

    masm.callWithABI(f.wrapped);

    // Test for failure.
    switch (f.failType()) {
      case Type_Object:
        masm.branchTestPtr(Assembler::Zero, v0, v0, masm.failureLabel());
        break;
      case Type_Bool:
        // Called functions return bools, which are 0/false and non-zero/true
        masm.branchIfFalseBool(v0, masm.failureLabel());
        break;
      default:
        MOZ_CRASH("unknown failure kind");
    }

    masm.freeStack(outParamOffset);

    // Load the outparam and free any allocated stack.
    switch (f.outParam) {
      case Type_Handle:
        masm.popRooted(f.outParamRootType, ReturnReg, JSReturnOperand);
        break;

      case Type_Value:
        masm.loadValue(Address(StackPointer, 0), JSReturnOperand);
        masm.freeStack(sizeof(Value));
        break;

      case Type_Int32:
        MOZ_ASSERT(sizeof(uintptr_t) == sizeof(uint32_t));
      case Type_Pointer:
        masm.load32(Address(StackPointer, 0), ReturnReg);
        masm.freeStack(sizeof(uintptr_t));
        break;

      case Type_Bool:
        masm.load8ZeroExtend(Address(StackPointer, 0), ReturnReg);
        masm.freeStack(sizeof(uintptr_t));
        break;

      case Type_Double:
        if (cx->runtime()->jitSupportsFloatingPoint) {
            masm.as_ld(ReturnDoubleReg, StackPointer, 0);
        } else {
            masm.assumeUnreachable("Unable to load into float reg, with no FP support.");
        }
        masm.freeStack(sizeof(double));
        break;

      default:
        MOZ_ASSERT(f.outParam == Type_Void);
        break;
    }

    masm.restoreStackPointer();

    masm.leaveExitFrame();
    masm.retn(Imm32(sizeof(ExitFrameLayout) +
                    f.explicitStackSlots() * sizeof(uintptr_t) +
                    f.extraValuesToPop * sizeof(Value)));

    Linker linker(masm);
    AutoFlushICache afc("VMWrapper");
    JitCode* wrapper = linker.newCode<NoGC>(cx, OTHER_CODE);
    if (!wrapper)
        return nullptr;

    // linker.newCode may trigger a GC and sweep functionWrappers_ so we have
    // to use relookupOrAdd instead of add.
    if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
        return nullptr;

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(wrapper, "VMWrapper");
#endif

    return wrapper;
}

JitCode*
JitRuntime::generatePreBarrier(JSContext* cx, MIRType type)
{
    MacroAssembler masm(cx);

    LiveRegisterSet save;
    if (cx->runtime()->jitSupportsFloatingPoint) {
        save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                                 FloatRegisterSet(FloatRegisters::VolatileMask));
    } else {
        save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                                 FloatRegisterSet());
    }
    masm.PushRegsInMask(save);

    MOZ_ASSERT(PreBarrierReg == a1);
    masm.movePtr(ImmPtr(cx->runtime()), a0);

    masm.setupUnalignedABICall(a2);
    masm.passABIArg(a0);
    masm.passABIArg(a1);
    masm.callWithABI(IonMarkFunction(type));

    masm.PopRegsInMask(save);
    masm.ret();

    Linker linker(masm);
    AutoFlushICache afc("PreBarrier");
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "PreBarrier");
#endif

    return code;
}

typedef bool (*HandleDebugTrapFn)(JSContext*, BaselineFrame*, uint8_t*, bool*);
static const VMFunction HandleDebugTrapInfo = FunctionInfo<HandleDebugTrapFn>(HandleDebugTrap);

JitCode*
JitRuntime::generateDebugTrapHandler(JSContext* cx)
{
    MacroAssembler masm(cx);

    Register scratch1 = t0;
    Register scratch2 = t1;

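    // (Descriptive note: on MIPS32 the baseline frame pointer is assumed to
    // live in s5, i.e. BaselineFrameReg == s5, which is why s5 is used
    // directly here and in the forced-return path below.)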
    // Load BaselineFrame pointer in scratch1.
    masm.movePtr(s5, scratch1);
    masm.subPtr(Imm32(BaselineFrame::Size()), scratch1);

    // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
    // the stub frame has a nullptr ICStub pointer, since this pointer is
    // marked during GC.
    masm.movePtr(ImmPtr(nullptr), ICStubReg);
    EmitBaselineEnterStubFrame(masm, scratch2);

    JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
    if (!code)
        return nullptr;

    masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
    masm.storePtr(ra, Address(StackPointer, sizeof(uintptr_t)));
    masm.storePtr(scratch1, Address(StackPointer, 0));

    EmitBaselineCallVM(code, masm);

    EmitBaselineLeaveStubFrame(masm);

    // If the stub returns |true|, we have to perform a forced return
    // (return from the JS frame). If the stub returns |false|, just return
    // from the trap stub so that execution continues at the current pc.
    Label forcedReturn;
    masm.branchTest32(Assembler::NonZero, ReturnReg, ReturnReg, &forcedReturn);

    // ra was restored by EmitBaselineLeaveStubFrame.
    masm.branch(ra);

    masm.bind(&forcedReturn);
    masm.loadValue(Address(s5, BaselineFrame::reverseOffsetOfReturnValue()),
                   JSReturnOperand);
    masm.movePtr(s5, StackPointer);
    masm.pop(s5);

    // Before returning, if profiling is turned on, make sure that lastProfilingFrame
    // is set to the correct caller frame.
    {
        Label skipProfilingInstrumentation;
        AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
        masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skipProfilingInstrumentation);
        masm.profilerExitFrame();
        masm.bind(&skipProfilingInstrumentation);
    }

    masm.ret();

    Linker linker(masm);
    AutoFlushICache afc("DebugTrapHandler");
    JitCode* codeDbg = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler");
#endif

    return codeDbg;
}


JitCode*
JitRuntime::generateExceptionTailStub(JSContext* cx, void* handler)
{
    MacroAssembler masm;

    masm.handleFailureWithHandlerTail(handler);

    Linker linker(masm);
    AutoFlushICache afc("ExceptionTailStub");
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "ExceptionTailStub");
#endif

    return code;
}

JitCode*
JitRuntime::generateBailoutTailStub(JSContext* cx)
{
    MacroAssembler masm;

    masm.generateBailoutTail(a1, a2);

    Linker linker(masm);
    AutoFlushICache afc("BailoutTailStub");
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
#endif

    return code;
}

JitCode*
JitRuntime::generateProfilerExitFrameTailStub(JSContext* cx)
{
    MacroAssembler masm;

    Register scratch1 = t0;
    Register scratch2 = t1;
    Register scratch3 = t2;
    Register scratch4 = t3;

    //
    // The code generated below expects that the current stack pointer points
    // to an Ion or Baseline frame, at the state it would be immediately
    // before a ret().  Thus, after this stub's business is done, it executes
    // a ret() and returns directly to the caller script, on behalf of the
    // callee script that jumped to this code.
    //
    // Thus the expected stack is:
    //
    //                                   StackPointer ----+
    //                                                    v
    // ..., ActualArgc, CalleeToken, Descriptor, ReturnAddr
    // MEM-HI                                       MEM-LOW
    //
    //
    // The generated jitcode is responsible for overwriting the
    // jitActivation->lastProfilingFrame field with a pointer to the previous
    // Ion or Baseline jit-frame that was pushed before this one. It is also
    // responsible for overwriting jitActivation->lastProfilingCallSite with
    // the return address into that frame.  The frame could either be an
    // immediate "caller" frame, or it could be a frame in a previous
    // JitActivation (if the current frame was entered from C++, and the C++
    // was entered by some caller jit-frame further down the stack).
    //
    // So this jitcode is responsible for "walking up" the jit stack, finding
    // the previous Ion or Baseline JS frame, and storing its address and the
    // return address into the appropriate fields on the current jitActivation.
    //
    // There are a fixed number of different path types that can lead to the
    // current frame, which is either a baseline or ion frame:
    //
    // <Baseline-Or-Ion>
    // ^
    // |
    // ^--- Ion
    // |
    // ^--- Baseline Stub <---- Baseline
    // |
    // ^--- Argument Rectifier
    // |    ^
    // |    |
    // |    ^--- Ion
    // |    |
    // |    ^--- Baseline Stub <---- Baseline
    // |
    // ^--- Entry Frame (From C++)
    //
    Register actReg = scratch4;
    AbsoluteAddress activationAddr(GetJitContext()->runtime->addressOfProfilingActivation());
    masm.loadPtr(activationAddr, actReg);

    Address lastProfilingFrame(actReg, JitActivation::offsetOfLastProfilingFrame());
    Address lastProfilingCallSite(actReg, JitActivation::offsetOfLastProfilingCallSite());

#ifdef DEBUG
    // Ensure that the frame we are exiting is the current lastProfilingFrame.
    {
        masm.loadPtr(lastProfilingFrame, scratch1);
        Label checkOk;
        masm.branchPtr(Assembler::Equal, scratch1, ImmWord(0), &checkOk);
        masm.branchPtr(Assembler::Equal, StackPointer, scratch1, &checkOk);
        masm.assumeUnreachable(
            "Mismatch between stored lastProfilingFrame and current stack pointer.");
        masm.bind(&checkOk);
    }
#endif

    // Load the frame descriptor into |scratch1|, figure out what to do depending on its type.
    masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfDescriptor()), scratch1);

    // Going into the conditionals, we will have:
    //      FrameDescriptor.size in scratch1
    //      FrameDescriptor.type in scratch2
    masm.ma_and(scratch2, scratch1, Imm32((1 << FRAMETYPE_BITS) - 1));
    masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch1);

    // Handling of each case is dependent on FrameDescriptor.type
    Label handle_IonJS;
    Label handle_BaselineStub;
    Label handle_Rectifier;
    Label handle_IonAccessorIC;
    Label handle_Entry;
    Label end;

    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_IonJS), &handle_IonJS);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineJS), &handle_IonJS);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineStub), &handle_BaselineStub);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Rectifier), &handle_Rectifier);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_IonAccessorIC), &handle_IonAccessorIC);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Entry), &handle_Entry);

    masm.assumeUnreachable("Invalid caller frame type when exiting from Ion frame.");

    //
    // JitFrame_IonJS
    //
    // Stack layout:
    //                  ...
    //                  Ion-Descriptor
    //     Prev-FP ---> Ion-ReturnAddr
    //                  ... previous frame data ... |- Descriptor.Size
    //                  ... arguments ...           |
    //                  ActualArgc          |
    //                  CalleeToken         |- JitFrameLayout::Size()
    //                  Descriptor          |
    //        FP -----> ReturnAddr          |
    //
    masm.bind(&handle_IonJS);
    {
        // |scratch1| contains Descriptor.size

        // Returning directly to an IonJS frame.  Store the return address to
        // that frame in lastProfilingCallSite.
        masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfReturnAddress()), scratch2);
        masm.storePtr(scratch2, lastProfilingCallSite);

        // Store return frame in lastProfilingFrame.
        // scratch2 := StackPointer + Descriptor.size*1 + JitFrameLayout::Size();
        masm.as_addu(scratch2, StackPointer, scratch1);
        masm.ma_addu(scratch2, scratch2, Imm32(JitFrameLayout::Size()));
        masm.storePtr(scratch2, lastProfilingFrame);
        masm.ret();
    }

    //
    // JitFrame_BaselineStub
    //
    // Look past the stub and store the frame pointer to
    // the baselineJS frame prior to it.
    //
    // Stack layout:
    //              ...
    //              BL-Descriptor
    // Prev-FP ---> BL-ReturnAddr
    //      +-----> BL-PrevFramePointer
    //      |       ... BL-FrameData ...
    //      |       BLStub-Descriptor
    //      |       BLStub-ReturnAddr
    //      |       BLStub-StubPointer          |
    //      +------ BLStub-SavedFramePointer    |- Descriptor.Size
    //              ... arguments ...           |
    //              ActualArgc          |
    //              CalleeToken         |- JitFrameLayout::Size()
    //              Descriptor          |
    //    FP -----> ReturnAddr          |
    //
    // We take advantage of the fact that the stub frame saves the frame
    // pointer pointing to the baseline frame, so a bunch of calculation can
    // be avoided.
    //
    masm.bind(&handle_BaselineStub);
    {
        masm.as_addu(scratch3, StackPointer, scratch1);
        Address stubFrameReturnAddr(scratch3,
                                    JitFrameLayout::Size() +
                                    BaselineStubFrameLayout::offsetOfReturnAddress());
        masm.loadPtr(stubFrameReturnAddr, scratch2);
        masm.storePtr(scratch2, lastProfilingCallSite);

        Address stubFrameSavedFramePtr(scratch3,
                                       JitFrameLayout::Size() - (2 * sizeof(void*)));
        masm.loadPtr(stubFrameSavedFramePtr, scratch2);
        masm.addPtr(Imm32(sizeof(void*)), scratch2); // Skip past BL-PrevFramePtr
        masm.storePtr(scratch2, lastProfilingFrame);
        masm.ret();
    }


    //
    // JitFrame_Rectifier
    //
    // The rectifier frame can be preceded by either an IonJS or a
    // BaselineStub frame.
    //
    // Stack layout if caller of rectifier was Ion:
    //
    //              Ion-Descriptor
    //              Ion-ReturnAddr
    //              ... ion frame data ... |- Rect-Descriptor.Size
    //              < COMMON LAYOUT >
    //
    // Stack layout if caller of rectifier was Baseline:
    //
    //              BL-Descriptor
    // Prev-FP ---> BL-ReturnAddr
    //      +-----> BL-SavedFramePointer
    //      |       ... baseline frame data ...
    //      |       BLStub-Descriptor
    //      |       BLStub-ReturnAddr
    //      |       BLStub-StubPointer          |
    //      +------ BLStub-SavedFramePointer    |- Rect-Descriptor.Size
    //              ... args to rectifier ...   |
    //              < COMMON LAYOUT >
    //
    // Common stack layout:
    //
    //              ActualArgc          |
    //              CalleeToken         |- RectifierFrameLayout::Size()
    //              Rect-Descriptor     |
    //              Rect-ReturnAddr     |
    //              ... rectifier data & args ... |- Descriptor.Size
    //              ActualArgc      |
    //              CalleeToken     |- JitFrameLayout::Size()
    //              Descriptor      |
    //    FP -----> ReturnAddr      |
    //
    masm.bind(&handle_Rectifier);
    {
        // scratch2 := StackPointer + Descriptor.size*1 + JitFrameLayout::Size();
        masm.as_addu(scratch2, StackPointer, scratch1);
        masm.add32(Imm32(JitFrameLayout::Size()), scratch2);
        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfDescriptor()), scratch3);
        masm.ma_srl(scratch1, scratch3, Imm32(FRAMESIZE_SHIFT));
        masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch3);

        // Now |scratch1| contains Rect-Descriptor.Size
        // and |scratch2| points to Rectifier frame
        // and |scratch3| contains Rect-Descriptor.Type

        // Check for either Ion or BaselineStub frame.
        Label handle_Rectifier_BaselineStub;
        masm.branch32(Assembler::NotEqual, scratch3, Imm32(JitFrame_IonJS),
                      &handle_Rectifier_BaselineStub);

        // Handle Rectifier <- IonJS
        // scratch3 := RectFrame[ReturnAddr]
        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfReturnAddress()), scratch3);
        masm.storePtr(scratch3, lastProfilingCallSite);

        // scratch3 := RectFrame + Rect-Descriptor.Size + RectifierFrameLayout::Size()
        masm.as_addu(scratch3, scratch2, scratch1);
        masm.add32(Imm32(RectifierFrameLayout::Size()), scratch3);
        masm.storePtr(scratch3, lastProfilingFrame);
        masm.ret();

        // Handle Rectifier <- BaselineStub <- BaselineJS
        masm.bind(&handle_Rectifier_BaselineStub);
#ifdef DEBUG
        {
            Label checkOk;
            masm.branch32(Assembler::Equal, scratch3, Imm32(JitFrame_BaselineStub), &checkOk);
            masm.assumeUnreachable("Unrecognized frame preceding baselineStub.");
            masm.bind(&checkOk);
        }
#endif
        masm.as_addu(scratch3, scratch2, scratch1);
        Address stubFrameReturnAddr(scratch3, RectifierFrameLayout::Size() +
                                              BaselineStubFrameLayout::offsetOfReturnAddress());
        masm.loadPtr(stubFrameReturnAddr, scratch2);
        masm.storePtr(scratch2, lastProfilingCallSite);

        Address stubFrameSavedFramePtr(scratch3,
                                       RectifierFrameLayout::Size() - (2 * sizeof(void*)));
        masm.loadPtr(stubFrameSavedFramePtr, scratch2);
        masm.addPtr(Imm32(sizeof(void*)), scratch2);
        masm.storePtr(scratch2, lastProfilingFrame);
        masm.ret();
    }

    // JitFrame_IonAccessorIC
    //
    // The caller is always an IonJS frame.
    //
    //              Ion-Descriptor
    //              Ion-ReturnAddr
    //              ... ion frame data ... |- AccFrame-Descriptor.Size
    //              StubCode             |
    //              AccFrame-Descriptor  |- IonAccessorICFrameLayout::Size()
    //              AccFrame-ReturnAddr  |
    //              ... accessor frame data & args ... |- Descriptor.Size
    //              ActualArgc      |
    //              CalleeToken     |- JitFrameLayout::Size()
    //              Descriptor      |
    //    FP -----> ReturnAddr      |
    masm.bind(&handle_IonAccessorIC);
    {
        // scratch2 := StackPointer + Descriptor.size + JitFrameLayout::Size()
        masm.as_addu(scratch2, StackPointer, scratch1);
        masm.addPtr(Imm32(JitFrameLayout::Size()), scratch2);

        // scratch3 := AccFrame-Descriptor.Size
        masm.loadPtr(Address(scratch2, IonAccessorICFrameLayout::offsetOfDescriptor()), scratch3);
#ifdef DEBUG
        // Assert previous frame is an IonJS frame.
        masm.movePtr(scratch3, scratch1);
        masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch1);
        {
            Label checkOk;
            masm.branch32(Assembler::Equal, scratch1, Imm32(JitFrame_IonJS), &checkOk);
            masm.assumeUnreachable("IonAccessorIC frame must be preceded by IonJS frame");
            masm.bind(&checkOk);
        }
#endif
        masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch3);

        // lastProfilingCallSite := AccFrame-ReturnAddr
        masm.loadPtr(Address(scratch2, IonAccessorICFrameLayout::offsetOfReturnAddress()), scratch1);
        masm.storePtr(scratch1, lastProfilingCallSite);

        // lastProfilingFrame := AccessorFrame + AccFrame-Descriptor.Size +
        //                       IonAccessorICFrameLayout::Size()
        masm.as_addu(scratch1, scratch2, scratch3);
        masm.addPtr(Imm32(IonAccessorICFrameLayout::Size()), scratch1);
        masm.storePtr(scratch1, lastProfilingFrame);
        masm.ret();
    }

    //
    // JitFrame_Entry
    //
    // If at an entry frame, store null into both fields.
    //
    masm.bind(&handle_Entry);
    {
        masm.movePtr(ImmPtr(nullptr), scratch1);
        masm.storePtr(scratch1, lastProfilingCallSite);
        masm.storePtr(scratch1, lastProfilingFrame);
        masm.ret();
    }

    Linker linker(masm);
    AutoFlushICache afc("ProfilerExitFrameTailStub");
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "ProfilerExitFrameStub");
#endif

    return code;
}