1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sts=4 et sw=4 tw=99:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6
7 #include "mozilla/DebugOnly.h"
8
9 #include "jscompartment.h"
10
11 #include "jit/Bailouts.h"
12 #include "jit/JitCompartment.h"
13 #include "jit/JitFrames.h"
14 #include "jit/JitSpewer.h"
15 #include "jit/Linker.h"
16 #include "jit/mips64/Bailouts-mips64.h"
17 #include "jit/mips64/SharedICHelpers-mips64.h"
18 #ifdef JS_ION_PERF
19 # include "jit/PerfSpewer.h"
20 #endif
21 #include "jit/VMFunctions.h"
22
23 #include "jit/MacroAssembler-inl.h"
24
25 using namespace js;
26 using namespace js::jit;
27
// All registers to save and restore. This includes the stack pointer, since we
// use the ability to reference register values on the stack by index.
static const LiveRegisterSet AllRegs =
    LiveRegisterSet(GeneralRegisterSet(Registers::AllMask),
                    FloatRegisterSet(FloatRegisters::AllMask));

// This is the 64-bit MIPS port: pointers must be exactly 8 bytes wide.
static_assert(sizeof(uintptr_t) == sizeof(uint64_t), "Not 32-bit clean.");
35
// Layout of the callee-saved register spill area created by GeneratePrologue
// and torn down by GenerateReturn. The field order defines the on-stack
// layout (accessed via offsetof below), so do not reorder members.
struct EnterJITRegs
{
    // Non-volatile (callee-saved) floating point registers.
    double f31;
    double f30;
    double f29;
    double f28;
    double f27;
    double f26;
    double f25;
    double f24;

    // non-volatile registers.
    uint64_t ra;
    uint64_t s7;
    uint64_t s6;
    uint64_t s5;
    uint64_t s4;
    uint64_t s3;
    uint64_t s2;
    uint64_t s1;
    uint64_t s0;
    // Save reg_vp(a7) on stack, use it after call jit code.
    uint64_t a7;
};
60
// Emit the EnterJIT epilogue: reload every callee-saved register spilled by
// GeneratePrologue, pop the EnterJITRegs area, and jump back to the C++
// caller through ra.
// NOTE(review): |returnCode| is not referenced by this function — presumably
// kept for signature parity with other ports; confirm before removing.
static void
GenerateReturn(MacroAssembler& masm, int returnCode)
{
    // The prologue must have pushed exactly one EnterJITRegs.
    MOZ_ASSERT(masm.framePushed() == sizeof(EnterJITRegs));

    // Restore non-volatile registers
    masm.as_ld(s0, StackPointer, offsetof(EnterJITRegs, s0));
    masm.as_ld(s1, StackPointer, offsetof(EnterJITRegs, s1));
    masm.as_ld(s2, StackPointer, offsetof(EnterJITRegs, s2));
    masm.as_ld(s3, StackPointer, offsetof(EnterJITRegs, s3));
    masm.as_ld(s4, StackPointer, offsetof(EnterJITRegs, s4));
    masm.as_ld(s5, StackPointer, offsetof(EnterJITRegs, s5));
    masm.as_ld(s6, StackPointer, offsetof(EnterJITRegs, s6));
    masm.as_ld(s7, StackPointer, offsetof(EnterJITRegs, s7));
    masm.as_ld(ra, StackPointer, offsetof(EnterJITRegs, ra));

    // Restore non-volatile floating point registers
    masm.as_ld(f24, StackPointer, offsetof(EnterJITRegs, f24));
    masm.as_ld(f25, StackPointer, offsetof(EnterJITRegs, f25));
    masm.as_ld(f26, StackPointer, offsetof(EnterJITRegs, f26));
    masm.as_ld(f27, StackPointer, offsetof(EnterJITRegs, f27));
    masm.as_ld(f28, StackPointer, offsetof(EnterJITRegs, f28));
    masm.as_ld(f29, StackPointer, offsetof(EnterJITRegs, f29));
    masm.as_ld(f30, StackPointer, offsetof(EnterJITRegs, f30));
    masm.as_ld(f31, StackPointer, offsetof(EnterJITRegs, f31));

    // Free the spill area before returning to C++.
    masm.freeStack(sizeof(EnterJITRegs));

    masm.branch(ra);
}
91
// Emit the EnterJIT prologue: reserve an EnterJITRegs area on the stack and
// spill all callee-saved integer and floating point registers into it, plus
// a7 (reg_vp), which generateEnterJIT reloads after the JIT call returns.
static void
GeneratePrologue(MacroAssembler& masm)
{
    masm.reserveStack(sizeof(EnterJITRegs));

    // Callee-saved integer registers, the return address, and reg_vp.
    masm.as_sd(s0, StackPointer, offsetof(EnterJITRegs, s0));
    masm.as_sd(s1, StackPointer, offsetof(EnterJITRegs, s1));
    masm.as_sd(s2, StackPointer, offsetof(EnterJITRegs, s2));
    masm.as_sd(s3, StackPointer, offsetof(EnterJITRegs, s3));
    masm.as_sd(s4, StackPointer, offsetof(EnterJITRegs, s4));
    masm.as_sd(s5, StackPointer, offsetof(EnterJITRegs, s5));
    masm.as_sd(s6, StackPointer, offsetof(EnterJITRegs, s6));
    masm.as_sd(s7, StackPointer, offsetof(EnterJITRegs, s7));
    masm.as_sd(ra, StackPointer, offsetof(EnterJITRegs, ra));
    masm.as_sd(a7, StackPointer, offsetof(EnterJITRegs, a7));

    // Callee-saved floating point registers.
    masm.as_sd(f24, StackPointer, offsetof(EnterJITRegs, f24));
    masm.as_sd(f25, StackPointer, offsetof(EnterJITRegs, f25));
    masm.as_sd(f26, StackPointer, offsetof(EnterJITRegs, f26));
    masm.as_sd(f27, StackPointer, offsetof(EnterJITRegs, f27));
    masm.as_sd(f28, StackPointer, offsetof(EnterJITRegs, f28));
    masm.as_sd(f29, StackPointer, offsetof(EnterJITRegs, f29));
    masm.as_sd(f30, StackPointer, offsetof(EnterJITRegs, f30));
    masm.as_sd(f31, StackPointer, offsetof(EnterJITRegs, f31));
}
117
118
// Generates a trampoline for calling JIT-compiled code from a C++ function.
// The trampoline uses the EnterJitCode signature, taking its arguments in
// the standard MIPS64 n64 calling convention.
JitCode *
JitRuntime::generateEnterJIT(JSContext* cx, EnterJitType type)
{
    // Incoming C++ arguments, in EnterJitCode signature order (n64 integer
    // argument registers a0..a7).
    const Register reg_code = IntArgReg0;
    const Register reg_argc = IntArgReg1;
    const Register reg_argv = IntArgReg2;
    const mozilla::DebugOnly<Register> reg_frame = IntArgReg3;
    const Register reg_token = IntArgReg4;
    const Register reg_chain = IntArgReg5;
    const Register reg_values = IntArgReg6;
    const Register reg_vp = IntArgReg7;
    MacroAssembler masm(cx);

    MOZ_ASSERT(OsrFrameReg == reg_frame);

    // Spill all callee-saved state (see EnterJITRegs); also saves a7 so
    // reg_vp can be recovered after the JIT call clobbers it.
    GeneratePrologue(masm);

    // Save stack pointer into s4
    masm.movePtr(StackPointer, s4);

    // Save stack pointer as baseline frame.
    if (type == EnterJitBaseline)
        masm.movePtr(StackPointer, BaselineFrameReg);

    // Load the number of actual arguments into s3.
    masm.unboxInt32(Address(reg_vp, 0), s3);

    /***************************************************************
    Loop over argv vector, push arguments onto stack in reverse order
    ***************************************************************/

    // if we are constructing, that also needs to include newTarget
    {
        Label noNewTarget;
        masm.branchTest32(Assembler::Zero, reg_token, Imm32(CalleeToken_FunctionConstructing),
                          &noNewTarget);

        masm.add32(Imm32(1), reg_argc);

        masm.bind(&noNewTarget);
    }

    // Make stack aligned: if argc is odd, pre-decrement sp by one Value
    // (movn only takes the adjusted sp when s0 — argc & 1 — is nonzero).
    masm.ma_and(s0, reg_argc, Imm32(1));
    masm.ma_dsubu(s1, StackPointer, Imm32(sizeof(Value)));
    masm.as_movn(StackPointer, s1, s0);

    masm.as_dsll(s0, reg_argc, 3); // Value* argv
    masm.addPtr(reg_argv, s0); // s0 = &argv[argc]

    // Loop over arguments, copying them from an unknown buffer onto the Ion
    // stack so they can be accessed from JIT'ed code.
    Label header, footer;
    // If there aren't any arguments, don't do anything
    masm.ma_b(s0, reg_argv, &footer, Assembler::BelowOrEqual, ShortJump);
    {
        masm.bind(&header);

        // Walk s0 down from &argv[argc] toward argv, pushing each Value.
        masm.subPtr(Imm32(sizeof(Value)), s0);
        masm.subPtr(Imm32(sizeof(Value)), StackPointer);

        ValueOperand value = ValueOperand(s6);
        masm.loadValue(Address(s0, 0), value);
        masm.storeValue(value, Address(StackPointer, 0));

        masm.ma_b(s0, reg_argv, &header, Assembler::Above, ShortJump);
    }
    masm.bind(&footer);

    // Push the actual-argument count and the callee token.
    masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
    masm.storePtr(s3, Address(StackPointer, sizeof(uintptr_t))); // actual arguments
    masm.storePtr(reg_token, Address(StackPointer, 0)); // callee token

    // s4 still holds the entry sp; turn the distance into a frame descriptor.
    masm.subPtr(StackPointer, s4);
    masm.makeFrameDescriptor(s4, JitFrame_Entry);
    masm.push(s4); // descriptor

    CodeLabel returnLabel;
    CodeLabel oomReturnLabel;
    if (type == EnterJitBaseline) {
        // Handle OSR.
        AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
        regs.take(OsrFrameReg);
        regs.take(BaselineFrameReg);
        regs.take(reg_code);
        regs.take(ReturnReg);
        regs.take(JSReturnOperand);

        // A null OsrFrameReg means this is a plain (non-OSR) entry.
        Label notOsr;
        masm.ma_b(OsrFrameReg, OsrFrameReg, &notOsr, Assembler::Zero, ShortJump);

        Register numStackValues = reg_values;
        regs.take(numStackValues);
        Register scratch = regs.takeAny();

        // Push return address.
        masm.subPtr(Imm32(sizeof(uintptr_t)), StackPointer);
        masm.ma_li(scratch, returnLabel.patchAt());
        masm.storePtr(scratch, Address(StackPointer, 0));

        // Push previous frame pointer.
        masm.subPtr(Imm32(sizeof(uintptr_t)), StackPointer);
        masm.storePtr(BaselineFrameReg, Address(StackPointer, 0));

        // Reserve frame.
        Register framePtr = BaselineFrameReg;
        masm.subPtr(Imm32(BaselineFrame::Size()), StackPointer);
        masm.movePtr(StackPointer, framePtr);

        // Reserve space for locals and stack values.
        masm.ma_dsll(scratch, numStackValues, Imm32(3));
        masm.subPtr(scratch, StackPointer);

        // Enter exit frame.
        masm.addPtr(Imm32(BaselineFrame::Size() + BaselineFrame::FramePointerOffset), scratch);
        masm.makeFrameDescriptor(scratch, JitFrame_BaselineJS);

        // Push frame descriptor and fake return address.
        masm.reserveStack(2 * sizeof(uintptr_t));
        masm.storePtr(scratch, Address(StackPointer, sizeof(uintptr_t))); // Frame descriptor
        masm.storePtr(zero, Address(StackPointer, 0)); // fake return address

        // No GC things to mark, push a bare token.
        masm.enterFakeExitFrame(ExitFrameLayoutBareToken);

        // Save framePtr and jitcode across the C++ call below.
        masm.reserveStack(2 * sizeof(uintptr_t));
        masm.storePtr(framePtr, Address(StackPointer, sizeof(uintptr_t))); // BaselineFrame
        masm.storePtr(reg_code, Address(StackPointer, 0)); // jitcode

        // Initialize the freshly reserved BaselineFrame from the interpreter
        // frame we are OSR-ing out of.
        masm.setupUnalignedABICall(scratch);
        masm.passABIArg(BaselineFrameReg); // BaselineFrame
        masm.passABIArg(OsrFrameReg); // InterpreterFrame
        masm.passABIArg(numStackValues);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, jit::InitBaselineFrameForOsr));

        regs.add(OsrFrameReg);
        Register jitcode = regs.takeAny();
        masm.loadPtr(Address(StackPointer, 0), jitcode);
        masm.loadPtr(Address(StackPointer, sizeof(uintptr_t)), framePtr);
        masm.freeStack(2 * sizeof(uintptr_t));

        Label error;
        masm.freeStack(ExitFrameLayout::SizeWithFooter());
        masm.addPtr(Imm32(BaselineFrame::Size()), framePtr);
        masm.branchIfFalseBool(ReturnReg, &error);

        // If OSR-ing, then emit instrumentation for setting lastProfilerFrame
        // if profiler instrumentation is enabled.
        {
            Label skipProfilingInstrumentation;
            Register realFramePtr = numStackValues;
            AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
            masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
                          &skipProfilingInstrumentation);
            masm.ma_daddu(realFramePtr, framePtr, Imm32(sizeof(void*)));
            masm.profilerEnterFrame(realFramePtr, scratch);
            masm.bind(&skipProfilingInstrumentation);
        }

        masm.jump(jitcode);

        // OOM: load error value, discard return address and previous frame
        // pointer and return.
        masm.bind(&error);
        masm.movePtr(framePtr, StackPointer);
        masm.addPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
        masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
        masm.ma_li(scratch, oomReturnLabel.patchAt());
        masm.jump(scratch);

        masm.bind(&notOsr);
        // Load the scope chain in R1.
        MOZ_ASSERT(R1.scratchReg() != reg_code);
        masm.ma_move(R1.scratchReg(), reg_chain);
    }

    // The call will push the return address on the stack, thus we check that
    // the stack would be aligned once the call is complete.
    masm.assertStackAlignment(JitStackAlignment, sizeof(uintptr_t));

    // Call the function with pushing return address to stack.
    masm.callJitNoProfiler(reg_code);

    if (type == EnterJitBaseline) {
        // Baseline OSR will return here.
        masm.bind(returnLabel.target());
        masm.addCodeLabel(returnLabel);
        masm.bind(oomReturnLabel.target());
        masm.addCodeLabel(oomReturnLabel);
    }

    // Pop arguments off the stack.
    // s0 <- 8*argc (size of all arguments we pushed on the stack)
    masm.pop(s0);
    masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), s0);
    masm.addPtr(s0, StackPointer);

    // Store the returned value into the vp; reg_vp (a7) was clobbered by the
    // JIT call, so reload it from the prologue's spill slot.
    masm.as_ld(reg_vp, StackPointer, offsetof(EnterJITRegs, a7));
    masm.storeValue(JSReturnOperand, Address(reg_vp, 0));

    // Restore non-volatile registers and return.
    GenerateReturn(masm, ShortJump);

    Linker linker(masm);
    AutoFlushICache afc("GenerateEnterJIT");
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "EnterJIT");
#endif

    return code;
}
336
// Generates the thunk jumped to when JIT code is invalidated: it snapshots
// the full register state, calls InvalidationBailout() to build bailout
// info, unwinds the invalidated frame, and tails into the bailout-tail stub.
JitCode*
JitRuntime::generateInvalidator(JSContext* cx)
{
    MacroAssembler masm(cx);

    // Stack has to be aligned here. If not, we will have to fix it.
    masm.checkStackAlignment();

    // Push registers such that we can access them from [base + code].
    masm.PushRegsInMask(AllRegs);

    // Pass pointer to InvalidationBailoutStack structure.
    masm.movePtr(StackPointer, a0);

    // Reserve place for return value and BailoutInfo pointer
    masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
    // Pass pointer to return value.
    masm.ma_daddu(a1, StackPointer, Imm32(sizeof(uintptr_t)));
    // Pass pointer to BailoutInfo
    masm.movePtr(StackPointer, a2);

    masm.setupAlignedABICall();
    masm.passABIArg(a0);
    masm.passABIArg(a1);
    masm.passABIArg(a2);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, InvalidationBailout));

    // a2 <- BailoutInfo, a1 <- frame size computed by InvalidationBailout.
    masm.loadPtr(Address(StackPointer, 0), a2);
    masm.loadPtr(Address(StackPointer, sizeof(uintptr_t)), a1);
    // Remove the return address, the IonScript, the register state
    // (InvalidationBailoutStack) and the space that was allocated for the
    // return value.
    masm.addPtr(Imm32(sizeof(InvalidationBailoutStack) + 2 * sizeof(uintptr_t)), StackPointer);
    // remove the space that this frame was using before the bailout
    // (computed by InvalidationBailout)
    masm.addPtr(a1, StackPointer);

    // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2.
    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
    masm.branch(bailoutTail);

    Linker linker(masm);
    AutoFlushICache afc("Invalidator");
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
    JitSpew(JitSpew_IonInvalidate, "   invalidation thunk created at %p", (void*) code->raw());

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "Invalidator");
#endif

    return code;
}
389
// Generates the arguments rectifier: called when a function is invoked with
// fewer actual arguments than formals. It pads the stack with |undefined|
// up to nformals (plus alignment padding and, when constructing, new.target),
// re-pushes the JitFrameLayout, and calls the target's JIT code.
JitCode*
JitRuntime::generateArgumentsRectifier(JSContext* cx, void** returnAddrOut)
{
    // Do not erase the frame pointer in this function.

    MacroAssembler masm(cx);
    // Caller:
    // [arg2] [arg1] [this] [[argc] [callee] [descr] [raddr]] <- sp
    // '--- s3 ---'

    // ArgumentsRectifierReg contains the |nargs| pushed onto the current
    // frame. Including |this|, there are (|nargs| + 1) arguments to copy.
    MOZ_ASSERT(ArgumentsRectifierReg == s3);

    // Add |this|, in the counter of known arguments.
    masm.addPtr(Imm32(1), ArgumentsRectifierReg);

    Register numActArgsReg = a6;
    Register calleeTokenReg = a7;
    Register numArgsReg = a5;

    // Load |nformals| into numArgsReg.
    masm.loadPtr(Address(StackPointer, RectifierFrameLayout::offsetOfCalleeToken()),
                 calleeTokenReg);
    masm.mov(calleeTokenReg, numArgsReg);
    masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), numArgsReg);
    masm.load16ZeroExtend(Address(numArgsReg, JSFunction::offsetOfNargs()), numArgsReg);

    // Stash another copy in t3, since we are going to do destructive operations
    // on numArgsReg
    masm.mov(numArgsReg, t3);

    // t2 <- isConstructing bit (0 or 1), used both as an extra-slot count
    // and for padding arithmetic below.
    static_assert(CalleeToken_FunctionConstructing == 1,
                  "Ensure that we can use the constructing bit to count the value");
    masm.mov(calleeTokenReg, t2);
    masm.ma_and(t2, Imm32(uint32_t(CalleeToken_FunctionConstructing)));

    // Including |this|, and |new.target|, there are (|nformals| + 1 + isConstructing)
    // arguments to push to the stack. Then we push a JitFrameLayout. We
    // compute the padding expressed in the number of extra |undefined| values
    // to push on the stack.
    static_assert(sizeof(JitFrameLayout) % JitStackAlignment == 0,
                  "No need to consider the JitFrameLayout for aligning the stack");
    static_assert(JitStackAlignment % sizeof(Value) == 0,
                  "Ensure that we can pad the stack by pushing extra UndefinedValue");

    // numArgsReg <- total slot count, rounded up to the stack alignment.
    MOZ_ASSERT(IsPowerOfTwo(JitStackValueAlignment));
    masm.add32(Imm32(JitStackValueAlignment - 1 /* for padding */ + 1 /* for |this| */), numArgsReg);
    masm.add32(t2, numArgsReg);
    masm.and32(Imm32(~(JitStackValueAlignment - 1)), numArgsReg);

    // Load the number of |undefined|s to push into t1.
    masm.as_dsubu(t1, numArgsReg, s3);

    // Caller:
    // [arg2] [arg1] [this] [[argc] [callee] [descr] [raddr]] <- sp <- t2
    // '------ s3 -------'
    //
    // Rectifier frame:
    // [undef] [undef] [undef] [arg2] [arg1] [this] [[argc] [callee] [descr] [raddr]]
    // '-------- t1 ---------' '------- s3 -------'

    // Copy number of actual arguments into numActArgsReg
    masm.loadPtr(Address(StackPointer, RectifierFrameLayout::offsetOfNumActualArgs()),
                 numActArgsReg);


    masm.moveValue(UndefinedValue(), ValueOperand(t0));

    masm.movePtr(StackPointer, t2); // Save %sp.

    // Push undefined. (including the padding)
    {
        Label undefLoopTop;

        masm.bind(&undefLoopTop);
        masm.sub32(Imm32(1), t1);
        masm.subPtr(Imm32(sizeof(Value)), StackPointer);
        masm.storeValue(ValueOperand(t0), Address(StackPointer, 0));

        masm.ma_b(t1, t1, &undefLoopTop, Assembler::NonZero, ShortJump);
    }

    // Get the topmost argument.
    static_assert(sizeof(Value) == 8, "TimesEight is used to skip arguments");

    // | - sizeof(Value)| is used to put rcx such that we can read the last
    // argument, and not the value which is after.
    masm.ma_dsll(t0, s3, Imm32(3)); // t0 <- nargs * 8
    masm.as_daddu(t1, t2, t0); // t1 <- t2(saved sp) + nargs * 8
    masm.addPtr(Imm32(sizeof(RectifierFrameLayout) - sizeof(Value)), t1);

    // Copy & Push arguments, |nargs| + 1 times (to include |this|).
    {
        Label copyLoopTop;

        masm.bind(&copyLoopTop);
        masm.sub32(Imm32(1), s3);
        masm.subPtr(Imm32(sizeof(Value)), StackPointer);
        masm.loadValue(Address(t1, 0), ValueOperand(t0));
        masm.storeValue(ValueOperand(t0), Address(StackPointer, 0));
        masm.subPtr(Imm32(sizeof(Value)), t1);

        masm.ma_b(s3, s3, &copyLoopTop, Assembler::NonZero, ShortJump);
    }

    // if constructing, copy newTarget
    {
        Label notConstructing;

        masm.branchTest32(Assembler::Zero, calleeTokenReg, Imm32(CalleeToken_FunctionConstructing),
                          &notConstructing);

        // thisFrame[numFormals] = prevFrame[argc]
        ValueOperand newTarget(t0);

        // +1 for |this|. We want vp[argc], so don't subtract 1
        BaseIndex newTargetSrc(t2, numActArgsReg, TimesEight, sizeof(RectifierFrameLayout) + sizeof(Value));
        masm.loadValue(newTargetSrc, newTarget);

        // Again, 1 for |this|
        BaseIndex newTargetDest(StackPointer, t3, TimesEight, sizeof(Value));
        masm.storeValue(newTarget, newTargetDest);

        masm.bind(&notConstructing);
    }

    // Caller:
    // [arg2] [arg1] [this] [[argc] [callee] [descr] [raddr]] <- t2
    //
    //
    // Rectifier frame:
    // [undef] [undef] [undef] [arg2] [arg1] [this] <- sp [[argc] [callee] [descr] [raddr]]

    // Construct sizeDescriptor.
    masm.subPtr(StackPointer, t2);
    masm.makeFrameDescriptor(t2, JitFrame_Rectifier);

    // Construct JitFrameLayout.
    masm.subPtr(Imm32(3 * sizeof(uintptr_t)), StackPointer);
    // Push actual arguments.
    masm.storePtr(numActArgsReg, Address(StackPointer, 2 * sizeof(uintptr_t)));
    // Push callee token.
    masm.storePtr(calleeTokenReg, Address(StackPointer, sizeof(uintptr_t)));
    // Push frame descriptor.
    masm.storePtr(t2, Address(StackPointer, 0));

    // Call the target function.
    // Note that this code assumes the function is JITted.
    masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), calleeTokenReg);
    masm.loadPtr(Address(calleeTokenReg, JSFunction::offsetOfNativeOrScript()), t1);
    masm.loadBaselineOrIonRaw(t1, t1, nullptr);
    uint32_t returnOffset = masm.callJitNoProfiler(t1);

    // Remove the rectifier frame.
    // t2 <- descriptor with FrameType.
    masm.loadPtr(Address(StackPointer, 0), t2);
    masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), t2); // t2 <- descriptor.

    // Discard descriptor, calleeToken and number of actual arguments.
    masm.addPtr(Imm32(3 * sizeof(uintptr_t)), StackPointer);

    // Discard pushed arguments.
    masm.addPtr(t2, StackPointer);

    masm.ret();
    Linker linker(masm);
    AutoFlushICache afc("ArgumentsRectifier");
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

    // Report the return point inside the rectifier so profiling can
    // attribute frames correctly.
    if (returnAddrOut)
        *returnAddrOut = (void*) (code->raw() + returnOffset);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
#endif

    return code;
}
569
570 /* - When bailout is done via out of line code (lazy bailout).
571 * Frame size is stored in $ra (look at
572 * CodeGeneratorMIPS64::generateOutOfLineCode()) and thunk code should save it
573 * on stack. Other difference is that members snapshotOffset_ and padding_ are
574 * pushed to the stack by CodeGeneratorMIPS64::visitOutOfLineBailout(). Field
575 * frameClassId_ is forced to be NO_FRAME_SIZE_CLASS_ID
576 * (See: JitRuntime::generateBailoutHandler).
577 */
// Build a BailoutStack on the stack: frameSize_ (carried in ra by the
// out-of-line bailout code) followed by the complete register state, then
// place its address in |spArg| as the first C++ argument.
static void
PushBailoutFrame(MacroAssembler& masm, Register spArg)
{
    // Push the frameSize_ stored in ra
    // See: CodeGeneratorMIPS64::generateOutOfLineCode()
    masm.push(ra);

    // Push registers such that we can access them from [base + code].
    masm.PushRegsInMask(AllRegs);

    // Put pointer to BailoutStack as first argument to the Bailout()
    masm.movePtr(StackPointer, spArg);
}
591
// Emit the common bailout path: snapshot registers, call Bailout() to build
// BailoutInfo, pop the bailed-out frame, and tail into the bailout-tail stub.
// NOTE(review): |frameClass| is not referenced — MIPS64 always bails with
// NO_FRAME_SIZE_CLASS_ID (see generateBailoutHandler); confirm before removal.
static void
GenerateBailoutThunk(JSContext* cx, MacroAssembler& masm, uint32_t frameClass)
{
    PushBailoutFrame(masm, a0);

    // Put pointer to BailoutInfo
    static const uint32_t sizeOfBailoutInfo = sizeof(uintptr_t) * 2;
    masm.subPtr(Imm32(sizeOfBailoutInfo), StackPointer);
    masm.movePtr(StackPointer, a1);

    masm.setupAlignedABICall();
    masm.passABIArg(a0);
    masm.passABIArg(a1);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, Bailout));

    // Get BailoutInfo pointer
    masm.loadPtr(Address(StackPointer, 0), a2);

    // Stack is:
    //     [frame]
    //     snapshotOffset
    //     frameSize
    //     [bailoutFrame]
    //     [bailoutInfo]
    //
    // Remove both the bailout frame and the topmost Ion frame's stack.
    // Load frameSize from stack
    masm.loadPtr(Address(StackPointer,
                         sizeOfBailoutInfo + BailoutStack::offsetOfFrameSize()), a1);
    // Remove complete BailoutStack class and data after it
    masm.addPtr(Imm32(sizeof(BailoutStack) + sizeOfBailoutInfo), StackPointer);
    // Remove frame size from stack
    masm.addPtr(a1, StackPointer);

    // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2.
    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
    masm.branch(bailoutTail);
}
630
JitCode*
JitRuntime::generateBailoutTable(JSContext* cx, uint32_t frameClass)
{
    // Bailout tables are only used on ports with frame-size classes; MIPS64
    // always bails out through the single handler below.
    MOZ_CRASH("MIPS64 does not use bailout tables");
}
636
637 JitCode*
generateBailoutHandler(JSContext * cx)638 JitRuntime::generateBailoutHandler(JSContext* cx)
639 {
640 MacroAssembler masm(cx);
641 GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);
642
643 Linker linker(masm);
644 AutoFlushICache afc("BailoutHandler");
645 JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
646
647 #ifdef JS_ION_PERF
648 writePerfSpewerJitCodeProfile(code, "BailoutHandler");
649 #endif
650
651 return code;
652 }
653
// Generates (and caches) a wrapper stub that lets JIT code call the C++ VM
// function |f|: it links an exit frame, reserves out-parameter space,
// marshals the stack-passed arguments into the native ABI, calls the
// function, checks for failure, and loads the out-parameter into the return
// registers before popping the exit frame.
JitCode*
JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
{
    MOZ_ASSERT(functionWrappers_);
    MOZ_ASSERT(functionWrappers_->initialized());
    // Return the cached wrapper if this VMFunction was already compiled.
    VMWrapperMap::AddPtr p = functionWrappers_->lookupForAdd(&f);
    if (p)
        return p->value();

    MacroAssembler masm(cx);

    AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);

    static_assert((Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
                  "Wrapper register set should be a superset of Volatile register set.");

    // The context is the first argument; a0 is the first argument register.
    Register cxreg = a0;
    regs.take(cxreg);

    // We're aligned to an exit frame, so link it up.
    masm.enterExitFrame(&f);
    masm.loadJSContext(cxreg);

    // Save the base of the argument set stored on the stack.
    Register argsBase = InvalidReg;
    if (f.explicitArgs) {
        argsBase = t1; // Use temporary register.
        regs.take(argsBase);
        masm.ma_daddu(argsBase, StackPointer, Imm32(ExitFrameLayout::SizeWithFooter()));
    }

    // Reserve space for the outparameter.
    Register outReg = InvalidReg;
    switch (f.outParam) {
      case Type_Value:
        outReg = regs.takeAny();
        masm.reserveStack(sizeof(Value));
        masm.movePtr(StackPointer, outReg);
        break;

      case Type_Handle:
        outReg = regs.takeAny();
        masm.PushEmptyRooted(f.outParamRootType);
        masm.movePtr(StackPointer, outReg);
        break;

      case Type_Bool:
      case Type_Int32:
        outReg = regs.takeAny();
        // Reserve 4-byte space to make stack aligned to 8-byte.
        masm.reserveStack(2 * sizeof(int32_t));
        masm.movePtr(StackPointer, outReg);
        break;

      case Type_Pointer:
        outReg = regs.takeAny();
        masm.reserveStack(sizeof(uintptr_t));
        masm.movePtr(StackPointer, outReg);
        break;

      case Type_Double:
        outReg = regs.takeAny();
        masm.reserveStack(sizeof(double));
        masm.movePtr(StackPointer, outReg);
        break;

      default:
        MOZ_ASSERT(f.outParam == Type_Void);
        break;
    }

    masm.setupUnalignedABICall(regs.getAny());
    masm.passABIArg(cxreg);

    size_t argDisp = 0;

    // Copy any arguments.
    for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
        MoveOperand from;
        switch (f.argProperties(explicitArg)) {
          case VMFunction::WordByValue:
            if (f.argPassedInFloatReg(explicitArg))
                masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::DOUBLE);
            else
                masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::GENERAL);
            argDisp += sizeof(void*);
            break;
          case VMFunction::WordByRef:
            masm.passABIArg(MoveOperand(argsBase, argDisp, MoveOperand::EFFECTIVE_ADDRESS),
                            MoveOp::GENERAL);
            argDisp += sizeof(void*);
            break;
          case VMFunction::DoubleByValue:
          case VMFunction::DoubleByRef:
            MOZ_CRASH("NYI: MIPS64 callVM should not be used with 128bits values.");
            break;
        }
    }

    // Copy the implicit outparam, if any.
    if (InvalidReg != outReg)
        masm.passABIArg(outReg);

    masm.callWithABI(f.wrapped);

    // Test for failure.
    switch (f.failType()) {
      case Type_Object:
        // nullptr return signals failure.
        masm.branchTestPtr(Assembler::Zero, v0, v0, masm.failureLabel());
        break;
      case Type_Bool:
        // Called functions return bools, which are 0/false and non-zero/true
        masm.branchIfFalseBool(v0, masm.failureLabel());
        break;
      default:
        MOZ_CRASH("unknown failure kind");
    }

    // Load the outparam and free any allocated stack.
    switch (f.outParam) {
      case Type_Handle:
        masm.popRooted(f.outParamRootType, ReturnReg, JSReturnOperand);
        break;

      case Type_Value:
        masm.loadValue(Address(StackPointer, 0), JSReturnOperand);
        masm.freeStack(sizeof(Value));
        break;

      case Type_Int32:
        masm.load32(Address(StackPointer, 0), ReturnReg);
        masm.freeStack(2 * sizeof(int32_t));
        break;

      case Type_Pointer:
        masm.loadPtr(Address(StackPointer, 0), ReturnReg);
        masm.freeStack(sizeof(uintptr_t));
        break;

      case Type_Bool:
        masm.load8ZeroExtend(Address(StackPointer, 0), ReturnReg);
        masm.freeStack(2 * sizeof(int32_t));
        break;

      case Type_Double:
        if (cx->runtime()->jitSupportsFloatingPoint) {
            masm.as_ld(ReturnDoubleReg, StackPointer, 0);
        } else {
            masm.assumeUnreachable("Unable to load into float reg, with no FP support.");
        }
        masm.freeStack(sizeof(double));
        break;

      default:
        MOZ_ASSERT(f.outParam == Type_Void);
        break;
    }

    // Pop the exit frame plus the VM function's stack-passed arguments.
    masm.leaveExitFrame();
    masm.retn(Imm32(sizeof(ExitFrameLayout) +
                    f.explicitStackSlots() * sizeof(void*) +
                    f.extraValuesToPop * sizeof(Value)));

    Linker linker(masm);
    AutoFlushICache afc("VMWrapper");
    JitCode* wrapper = linker.newCode<NoGC>(cx, OTHER_CODE);
    if (!wrapper)
        return nullptr;

    // linker.newCode may trigger a GC and sweep functionWrappers_ so we have
    // to use relookupOrAdd instead of add.
    if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
        return nullptr;

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(wrapper, "VMWrapper");
#endif

    return wrapper;
}
835
// Generates the GC pre-barrier stub for values of the given MIRType: saves
// all volatile registers, calls the type's marking function with
// (runtime, cell-in-PreBarrierReg), then restores and returns.
JitCode*
JitRuntime::generatePreBarrier(JSContext* cx, MIRType type)
{
    MacroAssembler masm(cx);

    // Only save volatile FP registers when the hardware has FP support.
    LiveRegisterSet save;
    if (cx->runtime()->jitSupportsFloatingPoint) {
        save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                                 FloatRegisterSet(FloatRegisters::VolatileMask));
    } else {
        save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                                 FloatRegisterSet());
    }
    masm.PushRegsInMask(save);

    // The cell to mark arrives in a1 (PreBarrierReg); runtime goes in a0.
    MOZ_ASSERT(PreBarrierReg == a1);
    masm.movePtr(ImmPtr(cx->runtime()), a0);

    masm.setupUnalignedABICall(a2);
    masm.passABIArg(a0);
    masm.passABIArg(a1);
    masm.callWithABI(IonMarkFunction(type));

    masm.PopRegsInMask(save);
    masm.ret();

    Linker linker(masm);
    AutoFlushICache afc("PreBarrier");
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "PreBarrier");
#endif

    return code;
}
872
// VM function descriptor for HandleDebugTrap, invoked by the debug trap
// handler below through the generic VM wrapper machinery.
typedef bool (*HandleDebugTrapFn)(JSContext*, BaselineFrame*, uint8_t*, bool*);
static const VMFunction HandleDebugTrapInfo = FunctionInfo<HandleDebugTrapFn>(HandleDebugTrap);
875
// Generates the handler invoked at baseline debug traps (breakpoints and
// debugger step points). It calls HandleDebugTrap via a VM wrapper; a true
// result forces a return from the current JS frame, false resumes at the
// trapping pc.
JitCode*
JitRuntime::generateDebugTrapHandler(JSContext* cx)
{
    MacroAssembler masm(cx);

    Register scratch1 = t0;
    Register scratch2 = t1;

    // Load BaselineFrame pointer in scratch1 (s5 holds the baseline frame
    // register; the frame starts BaselineFrame::Size() below it).
    masm.movePtr(s5, scratch1);
    masm.subPtr(Imm32(BaselineFrame::Size()), scratch1);

    // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
    // the stub frame has a nullptr ICStub pointer, since this pointer is
    // marked during GC.
    masm.movePtr(ImmPtr(nullptr), ICStubReg);
    EmitBaselineEnterStubFrame(masm, scratch2);

    JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
    if (!code)
        return nullptr;

    // Push the VM call arguments: the return address (pc) and the frame.
    masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
    masm.storePtr(ra, Address(StackPointer, sizeof(uintptr_t)));
    masm.storePtr(scratch1, Address(StackPointer, 0));

    EmitBaselineCallVM(code, masm);

    EmitBaselineLeaveStubFrame(masm);

    // If the stub returns |true|, we have to perform a forced return
    // (return from the JS frame). If the stub returns |false|, just return
    // from the trap stub so that execution continues at the current pc.
    Label forcedReturn;
    masm.branchTest32(Assembler::NonZero, ReturnReg, ReturnReg, &forcedReturn);

    // ra was restored by EmitLeaveStubFrame
    masm.branch(ra);

    // Forced return: load the frame's return value and pop the frame.
    masm.bind(&forcedReturn);
    masm.loadValue(Address(s5, BaselineFrame::reverseOffsetOfReturnValue()),
                   JSReturnOperand);
    masm.movePtr(s5, StackPointer);
    masm.pop(s5);

    // Before returning, if profiling is turned on, make sure that lastProfilingFrame
    // is set to the correct caller frame.
    {
        Label skipProfilingInstrumentation;
        AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
        masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skipProfilingInstrumentation);
        masm.profilerExitFrame();
        masm.bind(&skipProfilingInstrumentation);
    }

    masm.ret();

    Linker linker(masm);
    AutoFlushICache afc("DebugTrapHandler");
    JitCode* codeDbg = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler");
#endif

    return codeDbg;
}
943
944
945 JitCode*
generateExceptionTailStub(JSContext * cx,void * handler)946 JitRuntime::generateExceptionTailStub(JSContext* cx, void* handler)
947 {
948 MacroAssembler masm;
949
950 masm.handleFailureWithHandlerTail(handler);
951
952 Linker linker(masm);
953 AutoFlushICache afc("ExceptionTailStub");
954 JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
955
956 #ifdef JS_ION_PERF
957 writePerfSpewerJitCodeProfile(code, "ExceptionTailStub");
958 #endif
959
960 return code;
961 }
962
963 JitCode*
generateBailoutTailStub(JSContext * cx)964 JitRuntime::generateBailoutTailStub(JSContext* cx)
965 {
966 MacroAssembler masm;
967
968 masm.generateBailoutTail(a1, a2);
969
970 Linker linker(masm);
971 AutoFlushICache afc("BailoutTailStub");
972 JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
973
974 #ifdef JS_ION_PERF
975 writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
976 #endif
977
978 return code;
979 }
980
// Generates the tail stub executed when profiling instrumentation exits a JS
// frame: it classifies the caller frame from the frame descriptor and updates
// jitActivation->lastProfilingFrame / lastProfilingCallSite before performing
// the final ret().  See the large comment below for the full stack-walk
// contract.
JitCode*
JitRuntime::generateProfilerExitFrameTailStub(JSContext* cx)
{
    MacroAssembler masm;

    // Temporaries used while walking the frame chain below.
    Register scratch1 = t0;
    Register scratch2 = t1;
    Register scratch3 = t2;
    Register scratch4 = t3;

    //
    // The code generated below expects that the current stack pointer points
    // to an Ion or Baseline frame, at the state it would be immediately
    // before a ret(). Thus, after this stub's business is done, it executes
    // a ret() and returns directly to the caller script, on behalf of the
    // callee script that jumped to this code.
    //
    // Thus the expected stack is:
    //
    //                                   StackPointer ----+
    //                                                    v
    // ..., ActualArgc, CalleeToken, Descriptor, ReturnAddr
    // MEM-HI                                       MEM-LOW
    //
    //
    // The generated jitcode is responsible for overwriting the
    // jitActivation->lastProfilingFrame field with a pointer to the previous
    // Ion or Baseline jit-frame that was pushed before this one. It is also
    // responsible for overwriting jitActivation->lastProfilingCallSite with
    // the return address into that frame. The frame could either be an
    // immediate "caller" frame, or it could be a frame in a previous
    // JitActivation (if the current frame was entered from C++, and the C++
    // was entered by some caller jit-frame further down the stack).
    //
    // So this jitcode is responsible for "walking up" the jit stack, finding
    // the previous Ion or Baseline JS frame, and storing its address and the
    // return address into the appropriate fields on the current jitActivation.
    //
    // There are a fixed number of different path types that can lead to the
    // current frame, which is either a baseline or ion frame:
    //
    // <Baseline-Or-Ion>
    // ^
    // |
    // ^--- Ion
    // |
    // ^--- Baseline Stub <---- Baseline
    // |
    // ^--- Argument Rectifier
    // |    ^
    // |    |
    // |    ^--- Ion
    // |    |
    // |    ^--- Baseline Stub <---- Baseline
    // |
    // ^--- Entry Frame (From C++)
    //
    Register actReg = scratch4;
    AbsoluteAddress activationAddr(GetJitContext()->runtime->addressOfProfilingActivation());
    masm.loadPtr(activationAddr, actReg);

    Address lastProfilingFrame(actReg, JitActivation::offsetOfLastProfilingFrame());
    Address lastProfilingCallSite(actReg, JitActivation::offsetOfLastProfilingCallSite());

#ifdef DEBUG
    // Ensure that frame we are exiting is current lastProfilingFrame
    {
        masm.loadPtr(lastProfilingFrame, scratch1);
        Label checkOk;
        masm.branchPtr(Assembler::Equal, scratch1, ImmWord(0), &checkOk);
        masm.branchPtr(Assembler::Equal, StackPointer, scratch1, &checkOk);
        masm.assumeUnreachable(
            "Mismatch between stored lastProfilingFrame and current stack pointer.");
        masm.bind(&checkOk);
    }
#endif

    // Load the frame descriptor into |scratch1|, figure out what to do depending on its type.
    masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfDescriptor()), scratch1);

    // Going into the conditionals, we will have:
    //      FrameDescriptor.size in scratch1
    //      FrameDescriptor.type in scratch2
    masm.ma_and(scratch2, scratch1, Imm32((1 << FRAMETYPE_BITS) - 1));
    masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch1);

    // Handling of each case is dependent on FrameDescriptor.type
    Label handle_IonJS;
    Label handle_BaselineStub;
    Label handle_Rectifier;
    Label handle_IonAccessorIC;
    Label handle_Entry;
    Label end;

    // BaselineJS frames share the IonJS path: the caller layout is the same.
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_IonJS), &handle_IonJS);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineJS), &handle_IonJS);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineStub), &handle_BaselineStub);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Rectifier), &handle_Rectifier);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_IonAccessorIC), &handle_IonAccessorIC);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Entry), &handle_Entry);

    masm.assumeUnreachable("Invalid caller frame type when exiting from Ion frame.");

    //
    // JitFrame_IonJS
    //
    // Stack layout:
    //                  ...
    //                  Ion-Descriptor
    //     Prev-FP ---> Ion-ReturnAddr
    //                  ... previous frame data ... |- Descriptor.Size
    //                  ... arguments ...           |
    //                  ActualArgc          |
    //                  CalleeToken         |- JitFrameLayout::Size()
    //                  Descriptor          |
    //        FP -----> ReturnAddr          |
    //
    masm.bind(&handle_IonJS);
    {
        // |scratch1| contains Descriptor.size

        // returning directly to an IonJS frame.  Store return addr to frame
        // in lastProfilingCallSite.
        masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfReturnAddress()), scratch2);
        masm.storePtr(scratch2, lastProfilingCallSite);

        // Store return frame in lastProfilingFrame.
        // scratch2 := StackPointer + Descriptor.size*1 + JitFrameLayout::Size();
        masm.as_daddu(scratch2, StackPointer, scratch1);
        masm.ma_daddu(scratch2, scratch2, Imm32(JitFrameLayout::Size()));
        masm.storePtr(scratch2, lastProfilingFrame);
        masm.ret();
    }

    //
    // JitFrame_BaselineStub
    //
    // Look past the stub and store the frame pointer to
    // the baselineJS frame prior to it.
    //
    // Stack layout:
    //              ...
    //              BL-Descriptor
    // Prev-FP ---> BL-ReturnAddr
    //      +-----> BL-PrevFramePointer
    //      |       ... BL-FrameData ...
    //      |       BLStub-Descriptor
    //      |       BLStub-ReturnAddr
    //      |       BLStub-StubPointer          |
    //      +------ BLStub-SavedFramePointer    |- Descriptor.Size
    //              ... arguments ...           |
    //              ActualArgc          |
    //              CalleeToken         |- JitFrameLayout::Size()
    //              Descriptor          |
    //    FP -----> ReturnAddr          |
    //
    // We take advantage of the fact that the stub frame saves the frame
    // pointer pointing to the baseline frame, so a bunch of calculation can
    // be avoided.
    //
    masm.bind(&handle_BaselineStub);
    {
        masm.as_daddu(scratch3, StackPointer, scratch1);
        Address stubFrameReturnAddr(scratch3,
                                    JitFrameLayout::Size() +
                                    BaselineStubFrameLayout::offsetOfReturnAddress());
        masm.loadPtr(stubFrameReturnAddr, scratch2);
        masm.storePtr(scratch2, lastProfilingCallSite);

        // BLStub-SavedFramePointer sits two words below the stub frame's
        // header (see the layout diagram above).
        Address stubFrameSavedFramePtr(scratch3,
                                       JitFrameLayout::Size() - (2 * sizeof(void*)));
        masm.loadPtr(stubFrameSavedFramePtr, scratch2);
        masm.addPtr(Imm32(sizeof(void*)), scratch2); // Skip past BL-PrevFramePtr
        masm.storePtr(scratch2, lastProfilingFrame);
        masm.ret();
    }


    //
    // JitFrame_Rectifier
    //
    // The rectifier frame can be preceded by either an IonJS or a
    // BaselineStub frame.
    //
    // Stack layout if caller of rectifier was Ion:
    //
    //              Ion-Descriptor
    //              Ion-ReturnAddr
    //              ... ion frame data ... |- Rect-Descriptor.Size
    //              < COMMON LAYOUT >
    //
    // Stack layout if caller of rectifier was Baseline:
    //
    //              BL-Descriptor
    // Prev-FP ---> BL-ReturnAddr
    //      +-----> BL-SavedFramePointer
    //      |       ... baseline frame data ...
    //      |       BLStub-Descriptor
    //      |       BLStub-ReturnAddr
    //      |       BLStub-StubPointer          |
    //      +------ BLStub-SavedFramePointer    |- Rect-Descriptor.Size
    //              ... args to rectifier ...   |
    //              < COMMON LAYOUT >
    //
    // Common stack layout:
    //
    //              ActualArgc          |
    //              CalleeToken         |- RectifierFrameLayout::Size()
    //              Rect-Descriptor     |
    //              Rect-ReturnAddr     |
    //              ... rectifier data & args ... |- Descriptor.Size
    //              ActualArgc          |
    //              CalleeToken         |- JitFrameLayout::Size()
    //              Descriptor          |
    //    FP -----> ReturnAddr          |
    //
    masm.bind(&handle_Rectifier);
    {
        // scratch2 := StackPointer + Descriptor.size*1 + JitFrameLayout::Size();
        masm.as_daddu(scratch2, StackPointer, scratch1);
        masm.addPtr(Imm32(JitFrameLayout::Size()), scratch2);
        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfDescriptor()), scratch3);
        masm.ma_dsrl(scratch1, scratch3, Imm32(FRAMESIZE_SHIFT));
        masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch3);

        // Now |scratch1| contains Rect-Descriptor.Size
        // and |scratch2| points to Rectifier frame
        // and |scratch3| contains Rect-Descriptor.Type

        // Check for either Ion or BaselineStub frame.
        Label handle_Rectifier_BaselineStub;
        masm.branch32(Assembler::NotEqual, scratch3, Imm32(JitFrame_IonJS),
                      &handle_Rectifier_BaselineStub);

        // Handle Rectifier <- IonJS
        // scratch3 := RectFrame[ReturnAddr]
        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfReturnAddress()), scratch3);
        masm.storePtr(scratch3, lastProfilingCallSite);

        // scratch3 := RectFrame + Rect-Descriptor.Size + RectifierFrameLayout::Size()
        masm.as_daddu(scratch3, scratch2, scratch1);
        masm.addPtr(Imm32(RectifierFrameLayout::Size()), scratch3);
        masm.storePtr(scratch3, lastProfilingFrame);
        masm.ret();

        // Handle Rectifier <- BaselineStub <- BaselineJS
        masm.bind(&handle_Rectifier_BaselineStub);
#ifdef DEBUG
        {
            Label checkOk;
            masm.branch32(Assembler::Equal, scratch3, Imm32(JitFrame_BaselineStub), &checkOk);
            masm.assumeUnreachable("Unrecognized frame preceding baselineStub.");
            masm.bind(&checkOk);
        }
#endif
        masm.as_daddu(scratch3, scratch2, scratch1);
        Address stubFrameReturnAddr(scratch3, RectifierFrameLayout::Size() +
                                              BaselineStubFrameLayout::offsetOfReturnAddress());
        masm.loadPtr(stubFrameReturnAddr, scratch2);
        masm.storePtr(scratch2, lastProfilingCallSite);

        // As in the plain BaselineStub case, recover the saved frame pointer
        // and skip past BL-PrevFramePtr.
        Address stubFrameSavedFramePtr(scratch3,
                                       RectifierFrameLayout::Size() - (2 * sizeof(void*)));
        masm.loadPtr(stubFrameSavedFramePtr, scratch2);
        masm.addPtr(Imm32(sizeof(void*)), scratch2);
        masm.storePtr(scratch2, lastProfilingFrame);
        masm.ret();
    }

    // JitFrame_IonAccessorIC
    //
    // The caller is always an IonJS frame.
    //
    //              Ion-Descriptor
    //              Ion-ReturnAddr
    //              ... ion frame data ... |- AccFrame-Descriptor.Size
    //              StubCode             |
    //              AccFrame-Descriptor  |- IonAccessorICFrameLayout::Size()
    //              AccFrame-ReturnAddr  |
    //              ... accessor frame data & args ... |- Descriptor.Size
    //              ActualArgc          |
    //              CalleeToken         |- JitFrameLayout::Size()
    //              Descriptor          |
    //    FP -----> ReturnAddr          |
    masm.bind(&handle_IonAccessorIC);
    {
        // scratch2 := StackPointer + Descriptor.size + JitFrameLayout::Size()
        masm.as_daddu(scratch2, StackPointer, scratch1);
        masm.addPtr(Imm32(JitFrameLayout::Size()), scratch2);

        // scratch3 := AccFrame-Descriptor.Size
        masm.loadPtr(Address(scratch2, IonAccessorICFrameLayout::offsetOfDescriptor()), scratch3);
#ifdef DEBUG
        // Assert previous frame is an IonJS frame.
        masm.movePtr(scratch3, scratch1);
        masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch1);
        {
            Label checkOk;
            masm.branch32(Assembler::Equal, scratch1, Imm32(JitFrame_IonJS), &checkOk);
            masm.assumeUnreachable("IonAccessorIC frame must be preceded by IonJS frame");
            masm.bind(&checkOk);
        }
#endif
        masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch3);

        // lastProfilingCallSite := AccFrame-ReturnAddr
        masm.loadPtr(Address(scratch2, IonAccessorICFrameLayout::offsetOfReturnAddress()), scratch1);
        masm.storePtr(scratch1, lastProfilingCallSite);

        // lastProfilingFrame := AccessorFrame + AccFrame-Descriptor.Size +
        //                       IonAccessorICFrameLayout::Size()
        masm.as_daddu(scratch1, scratch2, scratch3);
        masm.addPtr(Imm32(IonAccessorICFrameLayout::Size()), scratch1);
        masm.storePtr(scratch1, lastProfilingFrame);
        masm.ret();
    }

    //
    // JitFrame_Entry
    //
    // If at an entry frame, store null into both fields.
    //
    masm.bind(&handle_Entry);
    {
        masm.movePtr(ImmPtr(nullptr), scratch1);
        masm.storePtr(scratch1, lastProfilingCallSite);
        masm.storePtr(scratch1, lastProfilingFrame);
        masm.ret();
    }

    Linker linker(masm);
    AutoFlushICache afc("ProfilerExitFrameTailStub");
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "ProfilerExitFrameStub");
#endif

    return code;
}
1321