1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sts=4 et sw=4 tw=99:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6
7 #include "jscompartment.h"
8
9 #include "jit/Bailouts.h"
10 #include "jit/BaselineJIT.h"
11 #include "jit/JitCompartment.h"
12 #include "jit/JitFrames.h"
13 #include "jit/JitSpewer.h"
14 #include "jit/Linker.h"
15 #ifdef JS_ION_PERF
16 # include "jit/PerfSpewer.h"
17 #endif
18 #include "jit/VMFunctions.h"
19 #include "jit/x86/SharedICHelpers-x86.h"
20
21 #include "jsscriptinlines.h"
22
23 #include "jit/MacroAssembler-inl.h"
24
25 using namespace js;
26 using namespace js::jit;
27
// All registers to save and restore. This includes the stack pointer, since we
// use the ability to reference register values on the stack by index.
// Used by the invalidator and bailout paths to capture a full RegisterDump.
static const LiveRegisterSet AllRegs =
    LiveRegisterSet(GeneralRegisterSet(Registers::AllMask),
                    FloatRegisterSet(FloatRegisters::AllMask));
33
// Offsets, relative to %ebp after the EnterJIT prologue (push ebp; mov esp,
// ebp), of the cdecl arguments to EnterJitCode. Slot 0 is the saved %ebp and
// slot 1 the return address, so the first argument lives at 2 * sizeof(void*).
enum EnterJitEbpArgumentOffset {
    ARG_JITCODE = 2 * sizeof(void*),      // Code entry point to call.
    ARG_ARGC = 3 * sizeof(void*),         // Number of argument Values.
    ARG_ARGV = 4 * sizeof(void*),         // Pointer to the argument vector.
    ARG_STACKFRAME = 5 * sizeof(void*),   // InterpreterFrame (for baseline OSR).
    ARG_CALLEETOKEN = 6 * sizeof(void*),  // CalleeToken (tagged callee).
    ARG_SCOPECHAIN = 7 * sizeof(void*),   // Scope chain object.
    ARG_STACKVALUES = 8 * sizeof(void*),  // numStackValues (for baseline OSR).
    ARG_RESULT = 9 * sizeof(void*)        // Address of the result Value.
};
44
45
// Generates a trampoline for calling Jit compiled code from a C++ function.
// The trampoline uses the EnterJitCode signature, with the standard cdecl
// calling convention: arguments are read off %ebp at the offsets declared by
// EnterJitEbpArgumentOffset above. For EnterJitBaseline, the trampoline also
// supports on-stack replacement (OSR) of an interpreter frame.
JitCode*
JitRuntime::generateEnterJIT(JSContext* cx, EnterJitType type)
{
    MacroAssembler masm(cx);
    // On entry we are one word (the return address) past ABI alignment.
    masm.assertStackAlignment(ABIStackAlignment, -int32_t(sizeof(uintptr_t)) /* return address */);

    // Save old stack frame pointer, set new stack frame pointer.
    masm.push(ebp);
    masm.movl(esp, ebp);

    // Save non-volatile registers. These must be saved by the trampoline,
    // rather than the JIT'd code, because they are scanned by the conservative
    // scanner.
    masm.push(ebx);
    masm.push(esi);
    masm.push(edi);

    // Keep track of the stack which has to be unwound after returning from the
    // compiled function.
    masm.movl(esp, esi);

    // Load the number of values to be copied (argc) into eax
    masm.loadPtr(Address(ebp, ARG_ARGC), eax);

    // If we are constructing, that also needs to include newTarget
    {
        Label noNewTarget;
        masm.loadPtr(Address(ebp, ARG_CALLEETOKEN), edx);
        masm.branchTest32(Assembler::Zero, edx, Imm32(CalleeToken_FunctionConstructing),
                          &noNewTarget);

        // Constructing: count one extra Value for newTarget.
        masm.addl(Imm32(1), eax);

        masm.bind(&noNewTarget);
    }

    // eax <- 8*numValues, eax is now the offset between argv and the last value.
    masm.shll(Imm32(3), eax);

    // Guarantee stack alignment of Jit frames.
    //
    // This code compensates for the offset created by the copy of the vector of
    // arguments, such that the jit frame will be aligned once the return
    // address is pushed on the stack.
    //
    // In the computation of the offset, we omit the size of the JitFrameLayout
    // which is pushed on the stack, as the JitFrameLayout size is a multiple of
    // the JitStackAlignment.
    masm.movl(esp, ecx);
    masm.subl(eax, ecx);
    static_assert(sizeof(JitFrameLayout) % JitStackAlignment == 0,
                  "No need to consider the JitFrameLayout for aligning the stack");

    // ecx = ecx & 15, holds alignment.
    masm.andl(Imm32(JitStackAlignment - 1), ecx);
    masm.subl(ecx, esp);

    /***************************************************************
    Loop over argv vector, push arguments onto stack in reverse order
    ***************************************************************/

    // ebx = argv   --argv pointer is in ebp + 16
    masm.loadPtr(Address(ebp, ARG_ARGV), ebx);

    // eax = argv[8(argc)]   --eax now points one value past the last argument
    masm.addl(ebx, eax);

    // while (eax > ebx)   --while still looping through arguments
    {
        Label header, footer;
        masm.bind(&header);

        masm.cmp32(eax, ebx);
        masm.j(Assembler::BelowOrEqual, &footer);

        // eax -= 8   --move to previous argument
        masm.subl(Imm32(8), eax);

        // Push what eax points to on stack, a Value is 2 words
        masm.push(Operand(eax, 4));
        masm.push(Operand(eax, 0));

        masm.jmp(&header);
        masm.bind(&footer);
    }


    // Push the number of actual arguments. |result| is used to store the
    // actual number of arguments without adding an extra argument to the enter
    // JIT.
    masm.mov(Operand(ebp, ARG_RESULT), eax);
    masm.unboxInt32(Address(eax, 0x0), eax);
    masm.push(eax);

    // Push the callee token.
    masm.push(Operand(ebp, ARG_CALLEETOKEN));

    // Load the InterpreterFrame address into the OsrFrameReg.
    // This address is also used for setting the constructing bit on all paths.
    masm.loadPtr(Address(ebp, ARG_STACKFRAME), OsrFrameReg);

    /*****************************************************************
    Push the number of bytes we've pushed so far on the stack and call
    *****************************************************************/
    // Create a frame descriptor.
    masm.subl(esp, esi);
    masm.makeFrameDescriptor(esi, JitFrame_Entry);
    masm.push(esi);

    CodeLabel returnLabel;
    CodeLabel oomReturnLabel;
    if (type == EnterJitBaseline) {
        // Handle OSR: a non-null OsrFrameReg means we must build a baseline
        // frame from the interpreter frame and jump into the middle of the
        // baseline script.
        AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
        regs.take(JSReturnOperand);
        regs.takeUnchecked(OsrFrameReg);
        regs.take(ebp);
        regs.take(ReturnReg);

        Register scratch = regs.takeAny();

        Label notOsr;
        masm.branchTestPtr(Assembler::Zero, OsrFrameReg, OsrFrameReg, &notOsr);

        Register numStackValues = regs.takeAny();
        masm.loadPtr(Address(ebp, ARG_STACKVALUES), numStackValues);

        Register jitcode = regs.takeAny();
        masm.loadPtr(Address(ebp, ARG_JITCODE), jitcode);

        // Push return address.
        masm.mov(returnLabel.patchAt(), scratch);
        masm.push(scratch);

        // Push previous frame pointer.
        masm.push(ebp);

        // Reserve frame.
        Register framePtr = ebp;
        masm.subPtr(Imm32(BaselineFrame::Size()), esp);
        masm.mov(esp, framePtr);

#ifdef XP_WIN
        // Can't push large frames blindly on windows. Touch frame memory incrementally.
        masm.mov(numStackValues, scratch);
        masm.shll(Imm32(3), scratch);
        masm.subPtr(scratch, framePtr);
        {
            masm.movePtr(esp, scratch);
            masm.subPtr(Imm32(WINDOWS_BIG_FRAME_TOUCH_INCREMENT), scratch);

            Label touchFrameLoop;
            Label touchFrameLoopEnd;
            masm.bind(&touchFrameLoop);
            masm.branchPtr(Assembler::Below, scratch, framePtr, &touchFrameLoopEnd);
            masm.store32(Imm32(0), Address(scratch, 0));
            masm.subPtr(Imm32(WINDOWS_BIG_FRAME_TOUCH_INCREMENT), scratch);
            masm.jump(&touchFrameLoop);
            masm.bind(&touchFrameLoopEnd);
        }
        masm.mov(esp, framePtr);
#endif

        // Reserve space for locals and stack values.
        masm.mov(numStackValues, scratch);
        masm.shll(Imm32(3), scratch);
        masm.subPtr(scratch, esp);

        // Enter exit frame.
        masm.addPtr(Imm32(BaselineFrame::Size() + BaselineFrame::FramePointerOffset), scratch);
        masm.makeFrameDescriptor(scratch, JitFrame_BaselineJS);
        masm.push(scratch); // Fake return address.
        masm.push(Imm32(0));
        // No GC things to mark on the stack, push a bare token.
        masm.enterFakeExitFrame(ExitFrameLayoutBareToken);

        masm.push(framePtr);
        masm.push(jitcode);

        masm.setupUnalignedABICall(scratch);
        masm.passABIArg(framePtr); // BaselineFrame
        masm.passABIArg(OsrFrameReg); // InterpreterFrame
        masm.passABIArg(numStackValues);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, jit::InitBaselineFrameForOsr));

        masm.pop(jitcode);
        masm.pop(framePtr);

        MOZ_ASSERT(jitcode != ReturnReg);

        Label error;
        masm.addPtr(Imm32(ExitFrameLayout::SizeWithFooter()), esp);
        masm.addPtr(Imm32(BaselineFrame::Size()), framePtr);
        masm.branchIfFalseBool(ReturnReg, &error);

        // If OSR-ing, then emit instrumentation for setting lastProfilerFrame
        // if profiler instrumentation is enabled.
        {
            Label skipProfilingInstrumentation;
            Register realFramePtr = numStackValues;
            AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
            masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
                          &skipProfilingInstrumentation);
            masm.lea(Operand(framePtr, sizeof(void*)), realFramePtr);
            masm.profilerEnterFrame(realFramePtr, scratch);
            masm.bind(&skipProfilingInstrumentation);
        }

        masm.jump(jitcode);

        // OOM: load error value, discard return address and previous frame
        // pointer and return.
        masm.bind(&error);
        masm.mov(framePtr, esp);
        masm.addPtr(Imm32(2 * sizeof(uintptr_t)), esp);
        masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
        masm.mov(oomReturnLabel.patchAt(), scratch);
        masm.jump(scratch);

        // Non-OSR entry: just load the scope chain for the callee.
        masm.bind(&notOsr);
        masm.loadPtr(Address(ebp, ARG_SCOPECHAIN), R1.scratchReg());
    }

    // The call will push the return address on the stack, thus we check that
    // the stack would be aligned once the call is complete.
    masm.assertStackAlignment(JitStackAlignment, sizeof(uintptr_t));

    /***************************************************************
        Call passed-in code, get return value and fill in the
        passed in return value pointer
    ***************************************************************/
    masm.call(Address(ebp, ARG_JITCODE));

    if (type == EnterJitBaseline) {
        // Baseline OSR will return here.
        masm.use(returnLabel.target());
        masm.addCodeLabel(returnLabel);
        masm.use(oomReturnLabel.target());
        masm.addCodeLabel(oomReturnLabel);
    }

    // Pop arguments off the stack.
    // eax <- 8*argc (size of all arguments we pushed on the stack)
    masm.pop(eax);
    masm.shrl(Imm32(FRAMESIZE_SHIFT), eax); // Unmark EntryFrame.
    masm.addl(eax, esp);

    // |ebp| could have been clobbered by the inner function.
    // Grab the address for the Value result from the argument stack.
    //  +20 ... arguments ...
    //  +16 <return>
    //  +12 ebp <- original %ebp pointing here.
    //  +8  ebx
    //  +4  esi
    //  +0  edi
    masm.loadPtr(Address(esp, ARG_RESULT + 3 * sizeof(void*)), eax);
    masm.storeValue(JSReturnOperand, Operand(eax, 0));

    /**************************************************************
        Return stack and registers to correct state
    **************************************************************/

    // Restore non-volatile registers
    masm.pop(edi);
    masm.pop(esi);
    masm.pop(ebx);

    // Restore old stack frame pointer
    masm.pop(ebp);
    masm.ret();

    Linker linker(masm);
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "EnterJIT");
#endif

    return code;
}
329
// Generates the invalidation thunk: the code jumped to when an executing Ion
// frame has been invalidated. It captures the machine state, bails out via
// InvalidationBailout, tears down the dead frame and jumps to the shared
// bailout tail.
JitCode*
JitRuntime::generateInvalidator(JSContext* cx)
{
    AutoJitContextAlloc ajca(cx);
    MacroAssembler masm(cx);

    // We do the minimum amount of work in assembly and shunt the rest
    // off to InvalidationBailout. Assembly does:
    //
    // - Pop the return address from the invalidation epilogue call.
    // - Push the machine state onto the stack.
    // - Call the InvalidationBailout routine with the stack pointer.
    // - Now that the frame has been bailed out, convert the invalidated
    //   frame into an exit frame.
    // - Do the normal check-return-code-and-thunk-to-the-interpreter dance.

    // Discard the return address pushed by the invalidation epilogue call.
    masm.addl(Imm32(sizeof(uintptr_t)), esp);

    // Push registers such that we can access them from [base + code].
    masm.PushRegsInMask(AllRegs);

    masm.movl(esp, eax); // Argument to jit::InvalidationBailout.

    // Make space for InvalidationBailout's frameSize outparam.
    masm.reserveStack(sizeof(size_t));
    masm.movl(esp, ebx);

    // Make space for InvalidationBailout's bailoutInfo outparam.
    masm.reserveStack(sizeof(void*));
    masm.movl(esp, ecx);

    masm.setupUnalignedABICall(edx);
    masm.passABIArg(eax);
    masm.passABIArg(ebx);
    masm.passABIArg(ecx);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, InvalidationBailout));

    masm.pop(ecx); // Get bailoutInfo outparam.
    masm.pop(ebx); // Get the frameSize outparam.

    // Pop the machine state and the dead frame.
    masm.lea(Operand(esp, ebx, TimesOne, sizeof(InvalidationBailoutStack)), esp);

    // Jump to shared bailout tail. The BailoutInfo pointer has to be in ecx.
    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
    masm.jmp(bailoutTail);

    Linker linker(masm);
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
    JitSpew(JitSpew_IonInvalidate, "   invalidation thunk created at %p", (void*) code->raw());

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "Invalidator");
#endif

    return code;
}
387
// Generates the arguments rectifier: the stub called when a function is
// invoked with fewer actual arguments than formals. It pads the missing
// formals with |undefined| (keeping JitStackAlignment), re-copies the actual
// arguments and |this| (and newTarget when constructing), builds a new
// JitFrameLayout and calls the target. If |returnAddrOut| is non-null, it
// receives the address following the inner call (used by bailout recovery).
JitCode*
JitRuntime::generateArgumentsRectifier(JSContext* cx, void** returnAddrOut)
{
    MacroAssembler masm(cx);
    // Caller:
    // [arg2] [arg1] [this] [[argc] [callee] [descr] [raddr]] <- esp
    // '-- #esi ---'

    // ArgumentsRectifierReg contains the |nargs| pushed onto the current frame.
    // Including |this|, there are (|nargs| + 1) arguments to copy.
    MOZ_ASSERT(ArgumentsRectifierReg == esi);

    // Load the number of |undefined|s to push into %ecx.
    masm.loadPtr(Address(esp, RectifierFrameLayout::offsetOfCalleeToken()), eax);
    masm.mov(eax, ecx);
    masm.andl(Imm32(CalleeTokenMask), ecx);
    masm.movzwl(Operand(ecx, JSFunction::offsetOfNargs()), ecx);

    // The frame pointer and its padding are pushed on the stack.
    // Including |this|, there are (|nformals| + 1) arguments to push to the
    // stack. Then we push a JitFrameLayout. We compute the padding expressed
    // in the number of extra |undefined| values to push on the stack.
    static_assert(sizeof(JitFrameLayout) % JitStackAlignment == 0,
                  "No need to consider the JitFrameLayout for aligning the stack");
    static_assert((sizeof(Value) + 2 * sizeof(void*)) % JitStackAlignment == 0,
                  "No need to consider |this| and the frame pointer and its padding for aligning the stack");
    static_assert(JitStackAlignment % sizeof(Value) == 0,
                  "Ensure that we can pad the stack by pushing extra UndefinedValue");

    MOZ_ASSERT(IsPowerOfTwo(JitStackValueAlignment));
    masm.addl(Imm32(JitStackValueAlignment - 1 /* for padding */), ecx);

    // Account for newTarget, if necessary.
    static_assert(CalleeToken_FunctionConstructing == 1,
                  "Ensure that we can use the constructing bit to count an extra push");
    masm.mov(eax, edx);
    masm.andl(Imm32(CalleeToken_FunctionConstructing), edx);
    masm.addl(edx, ecx);

    // Round down to the alignment, then subtract the actual argument count:
    // ecx is now the number of |undefined| values to push.
    masm.andl(Imm32(~(JitStackValueAlignment - 1)), ecx);
    masm.subl(esi, ecx);

    // Copy the number of actual arguments.
    masm.loadPtr(Address(esp, RectifierFrameLayout::offsetOfNumActualArgs()), edx);

    // ebx:edi <- tag:payload of UndefinedValue, pushed repeatedly below.
    masm.moveValue(UndefinedValue(), ebx, edi);

    // NOTE: The fact that x86 ArgumentsRectifier saves the FramePointer is relied upon
    // by the baseline bailout code. If this changes, fix that code! See
    // BaselineJIT.cpp/BaselineStackBuilder::calculatePrevFramePtr, and
    // BaselineJIT.cpp/InitFromBailout. Check for the |#if defined(JS_CODEGEN_X86)| portions.
    masm.push(FramePointer);
    masm.movl(esp, FramePointer); // Save %esp.
    masm.push(FramePointer /* padding */);

    // Caller:
    // [arg2] [arg1] [this] [[argc] [callee] [descr] [raddr]]
    // '-- #esi ---'
    //
    // Rectifier frame:
    // [ebp'] <- ebp [padding] <- esp [undef] [undef] [arg2] [arg1] [this]
    //                                '--- #ecx ----' '-- #esi ---'
    //
    // [[argc] [callee] [descr] [raddr]]

    // Push undefined.
    {
        Label undefLoopTop;
        masm.bind(&undefLoopTop);

        masm.push(ebx); // type(undefined);
        masm.push(edi); // payload(undefined);
        masm.subl(Imm32(1), ecx);
        masm.j(Assembler::NonZero, &undefLoopTop);
    }

    // Get the topmost argument. We did a push of %ebp earlier, so be sure to
    // account for this in the offset
    BaseIndex b = BaseIndex(FramePointer, esi, TimesEight,
                            sizeof(RectifierFrameLayout) + sizeof(void*));
    masm.lea(Operand(b), ecx);

    // Push arguments, |nargs| + 1 times (to include |this|).
    masm.addl(Imm32(1), esi);
    {
        Label copyLoopTop;

        masm.bind(&copyLoopTop);
        masm.push(Operand(ecx, sizeof(Value)/2));
        masm.push(Operand(ecx, 0x0));
        masm.subl(Imm32(sizeof(Value)), ecx);
        masm.subl(Imm32(1), esi);
        masm.j(Assembler::NonZero, &copyLoopTop);
    }

    // When constructing, also copy newTarget into the padded slot just above
    // the copied arguments.
    {
        Label notConstructing;

        masm.mov(eax, ebx);
        masm.branchTest32(Assembler::Zero, ebx, Imm32(CalleeToken_FunctionConstructing),
                          &notConstructing);

        BaseValueIndex src(FramePointer, edx,
                           sizeof(RectifierFrameLayout) +
                           sizeof(Value) +
                           sizeof(void*));

        masm.andl(Imm32(CalleeTokenMask), ebx);
        masm.movzwl(Operand(ebx, JSFunction::offsetOfNargs()), ebx);

        BaseValueIndex dst(esp, ebx, sizeof(Value));

        ValueOperand newTarget(ecx, edi);

        masm.loadValue(src, newTarget);
        masm.storeValue(newTarget, dst);

        masm.bind(&notConstructing);
    }

    // Construct descriptor, accounting for pushed frame pointer above
    masm.lea(Operand(FramePointer, sizeof(void*)), ebx);
    masm.subl(esp, ebx);
    masm.makeFrameDescriptor(ebx, JitFrame_Rectifier);

    // Construct JitFrameLayout.
    masm.push(edx); // number of actual arguments
    masm.push(eax); // callee token
    masm.push(ebx); // descriptor

    // Call the target function.
    // Note that this assumes the function is JITted.
    masm.andl(Imm32(CalleeTokenMask), eax);
    masm.loadPtr(Address(eax, JSFunction::offsetOfNativeOrScript()), eax);
    masm.loadBaselineOrIonRaw(eax, eax, nullptr);
    uint32_t returnOffset = masm.callJitNoProfiler(eax);

    // Remove the rectifier frame.
    masm.pop(ebx);            // ebx <- descriptor with FrameType.
    masm.shrl(Imm32(FRAMESIZE_SHIFT), ebx); // ebx <- descriptor.
    masm.pop(edi);            // Discard calleeToken.
    masm.pop(edi);            // Discard number of actual arguments.

    // Discard pushed arguments, but not the pushed frame pointer.
    BaseIndex unwind = BaseIndex(esp, ebx, TimesOne, -int32_t(sizeof(void*)));
    masm.lea(Operand(unwind), esp);

    masm.pop(FramePointer);
    masm.ret();

    Linker linker(masm);
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
#endif

    if (returnAddrOut)
        *returnAddrOut = (void*) (code->raw() + returnOffset);
    return code;
}
549
// Pushes the full machine state (a RegisterDump) plus the bailout frame class
// onto the stack, and leaves the resulting stack pointer — the first argument
// to jit::Bailout — in |spArg|.
static void
PushBailoutFrame(MacroAssembler& masm, uint32_t frameClass, Register spArg)
{
    // Push registers such that we can access them from [base + code].
    if (JitSupportsSimd()) {
        masm.PushRegsInMask(AllRegs);
    } else {
        // When SIMD isn't supported, PushRegsInMask reduces the set of float
        // registers to be double-sized, while the RegisterDump expects each of
        // the float registers to have the maximal possible size
        // (Simd128DataSize). To work around this, we just spill the double
        // registers by hand here, using the register dump offset directly.
        for (GeneralRegisterBackwardIterator iter(AllRegs.gprs()); iter.more(); iter++)
            masm.Push(*iter);

        masm.reserveStack(sizeof(RegisterDump::FPUArray));
        for (FloatRegisterBackwardIterator iter(AllRegs.fpus()); iter.more(); iter++) {
            FloatRegister reg = *iter;
            Address spillAddress(StackPointer, reg.getRegisterDumpOffsetInBytes());
            masm.storeDouble(reg, spillAddress);
        }
    }

    // Push the bailout table number.
    masm.push(Imm32(frameClass));

    // The current stack pointer is the first argument to jit::Bailout.
    masm.movl(esp, spArg);
}
579
// Emits the shared tail of a bailout: capture the machine state via
// PushBailoutFrame, call jit::Bailout, then strip both the bailout frame and
// the bailed-out Ion frame's stack before jumping to the shared bailout tail.
static void
GenerateBailoutThunk(JSContext* cx, MacroAssembler& masm, uint32_t frameClass)
{
    PushBailoutFrame(masm, frameClass, eax);

    // Make space for Bailout's bailoutInfo outparam.
    masm.reserveStack(sizeof(void*));
    masm.movl(esp, ebx);

    // Call the bailout function. This will correct the size of the bailout.
    masm.setupUnalignedABICall(ecx);
    masm.passABIArg(eax);
    masm.passABIArg(ebx);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, Bailout));

    masm.pop(ecx); // Get bailoutInfo outparam.

    // Common size of stuff we've pushed.
    static const uint32_t BailoutDataSize = 0
        + sizeof(void*) // frameClass
        + sizeof(RegisterDump);

    // Remove both the bailout frame and the topmost Ion frame's stack.
    if (frameClass == NO_FRAME_SIZE_CLASS_ID) {
        // We want the frameSize. Stack is:
        //     ... frame ...
        //     snapshotOffset
        //     frameSize
        //     ... bailoutFrame ...
        masm.addl(Imm32(BailoutDataSize), esp);
        masm.pop(ebx);
        masm.addl(Imm32(sizeof(uint32_t)), esp); // Skip the snapshotOffset.
        masm.addl(ebx, esp);
    } else {
        // Fixed-size frame class: the frame size is known statically. Stack is:
        //     ... frame ...
        //     bailoutId
        //     ... bailoutFrame ...
        uint32_t frameSize = FrameSizeClass::FromClass(frameClass).frameSize();
        masm.addl(Imm32(BailoutDataSize + sizeof(void*) + frameSize), esp);
    }

    // Jump to shared bailout tail. The BailoutInfo pointer has to be in ecx.
    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
    masm.jmp(bailoutTail);
}
626
627 JitCode*
generateBailoutTable(JSContext * cx,uint32_t frameClass)628 JitRuntime::generateBailoutTable(JSContext* cx, uint32_t frameClass)
629 {
630 MacroAssembler masm;
631
632 Label bailout;
633 for (size_t i = 0; i < BAILOUT_TABLE_SIZE; i++)
634 masm.call(&bailout);
635 masm.bind(&bailout);
636
637 GenerateBailoutThunk(cx, masm, frameClass);
638
639 Linker linker(masm);
640 JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
641
642 #ifdef JS_ION_PERF
643 writePerfSpewerJitCodeProfile(code, "BailoutHandler");
644 #endif
645
646 return code;
647 }
648
649 JitCode*
generateBailoutHandler(JSContext * cx)650 JitRuntime::generateBailoutHandler(JSContext* cx)
651 {
652 MacroAssembler masm;
653 GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);
654
655 Linker linker(masm);
656 JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
657
658 #ifdef JS_ION_PERF
659 writePerfSpewerJitCodeProfile(code, "BailoutHandler");
660 #endif
661
662 return code;
663 }
664
665 JitCode*
generateVMWrapper(JSContext * cx,const VMFunction & f)666 JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
667 {
668 MOZ_ASSERT(functionWrappers_);
669 MOZ_ASSERT(functionWrappers_->initialized());
670 VMWrapperMap::AddPtr p = functionWrappers_->lookupForAdd(&f);
671 if (p)
672 return p->value();
673
674 // Generate a separated code for the wrapper.
675 MacroAssembler masm;
676
677 // Avoid conflicts with argument registers while discarding the result after
678 // the function call.
679 AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);
680
681 // Wrapper register set is a superset of Volatile register set.
682 JS_STATIC_ASSERT((Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0);
683
684 // The context is the first argument.
685 Register cxreg = regs.takeAny();
686
687 // Stack is:
688 // ... frame ...
689 // +8 [args]
690 // +4 descriptor
691 // +0 returnAddress
692 //
693 // We're aligned to an exit frame, so link it up.
694 masm.enterExitFrame(&f);
695 masm.loadJSContext(cxreg);
696
697 // Save the current stack pointer as the base for copying arguments.
698 Register argsBase = InvalidReg;
699 if (f.explicitArgs) {
700 argsBase = regs.takeAny();
701 masm.lea(Operand(esp, ExitFrameLayout::SizeWithFooter()), argsBase);
702 }
703
704 // Reserve space for the outparameter.
705 Register outReg = InvalidReg;
706 switch (f.outParam) {
707 case Type_Value:
708 outReg = regs.takeAny();
709 masm.Push(UndefinedValue());
710 masm.movl(esp, outReg);
711 break;
712
713 case Type_Handle:
714 outReg = regs.takeAny();
715 masm.PushEmptyRooted(f.outParamRootType);
716 masm.movl(esp, outReg);
717 break;
718
719 case Type_Int32:
720 case Type_Pointer:
721 case Type_Bool:
722 outReg = regs.takeAny();
723 masm.reserveStack(sizeof(int32_t));
724 masm.movl(esp, outReg);
725 break;
726
727 case Type_Double:
728 outReg = regs.takeAny();
729 masm.reserveStack(sizeof(double));
730 masm.movl(esp, outReg);
731 break;
732
733 default:
734 MOZ_ASSERT(f.outParam == Type_Void);
735 break;
736 }
737
738 masm.setupUnalignedABICall(regs.getAny());
739 masm.passABIArg(cxreg);
740
741 size_t argDisp = 0;
742
743 // Copy arguments.
744 for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
745 MoveOperand from;
746 switch (f.argProperties(explicitArg)) {
747 case VMFunction::WordByValue:
748 masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::GENERAL);
749 argDisp += sizeof(void*);
750 break;
751 case VMFunction::DoubleByValue:
752 // We don't pass doubles in float registers on x86, so no need
753 // to check for argPassedInFloatReg.
754 masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::GENERAL);
755 argDisp += sizeof(void*);
756 masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::GENERAL);
757 argDisp += sizeof(void*);
758 break;
759 case VMFunction::WordByRef:
760 masm.passABIArg(MoveOperand(argsBase, argDisp, MoveOperand::EFFECTIVE_ADDRESS),
761 MoveOp::GENERAL);
762 argDisp += sizeof(void*);
763 break;
764 case VMFunction::DoubleByRef:
765 masm.passABIArg(MoveOperand(argsBase, argDisp, MoveOperand::EFFECTIVE_ADDRESS),
766 MoveOp::GENERAL);
767 argDisp += 2 * sizeof(void*);
768 break;
769 }
770 }
771
772 // Copy the implicit outparam, if any.
773 if (outReg != InvalidReg)
774 masm.passABIArg(outReg);
775
776 masm.callWithABI(f.wrapped);
777
778 // Test for failure.
779 switch (f.failType()) {
780 case Type_Object:
781 masm.branchTestPtr(Assembler::Zero, eax, eax, masm.failureLabel());
782 break;
783 case Type_Bool:
784 masm.testb(eax, eax);
785 masm.j(Assembler::Zero, masm.failureLabel());
786 break;
787 default:
788 MOZ_CRASH("unknown failure kind");
789 }
790
791 // Load the outparam and free any allocated stack.
792 switch (f.outParam) {
793 case Type_Handle:
794 masm.popRooted(f.outParamRootType, ReturnReg, JSReturnOperand);
795 break;
796
797 case Type_Value:
798 masm.Pop(JSReturnOperand);
799 break;
800
801 case Type_Int32:
802 case Type_Pointer:
803 masm.Pop(ReturnReg);
804 break;
805
806 case Type_Bool:
807 masm.Pop(ReturnReg);
808 masm.movzbl(ReturnReg, ReturnReg);
809 break;
810
811 case Type_Double:
812 if (cx->runtime()->jitSupportsFloatingPoint)
813 masm.Pop(ReturnDoubleReg);
814 else
815 masm.assumeUnreachable("Unable to pop to float reg, with no FP support.");
816 break;
817
818 default:
819 MOZ_ASSERT(f.outParam == Type_Void);
820 break;
821 }
822 masm.leaveExitFrame();
823 masm.retn(Imm32(sizeof(ExitFrameLayout) +
824 f.explicitStackSlots() * sizeof(void*) +
825 f.extraValuesToPop * sizeof(Value)));
826
827 Linker linker(masm);
828 JitCode* wrapper = linker.newCode<NoGC>(cx, OTHER_CODE);
829 if (!wrapper)
830 return nullptr;
831
832 #ifdef JS_ION_PERF
833 writePerfSpewerJitCodeProfile(wrapper, "VMWrapper");
834 #endif
835
836 // linker.newCode may trigger a GC and sweep functionWrappers_ so we have to
837 // use relookupOrAdd instead of add.
838 if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
839 return nullptr;
840
841 return wrapper;
842 }
843
844 JitCode*
generatePreBarrier(JSContext * cx,MIRType type)845 JitRuntime::generatePreBarrier(JSContext* cx, MIRType type)
846 {
847 MacroAssembler masm;
848
849 LiveRegisterSet save;
850 if (cx->runtime()->jitSupportsFloatingPoint) {
851 save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
852 FloatRegisterSet(FloatRegisters::VolatileMask));
853 } else {
854 save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
855 FloatRegisterSet());
856 }
857 masm.PushRegsInMask(save);
858
859 MOZ_ASSERT(PreBarrierReg == edx);
860 masm.movl(ImmPtr(cx->runtime()), ecx);
861
862 masm.setupUnalignedABICall(eax);
863 masm.passABIArg(ecx);
864 masm.passABIArg(edx);
865 masm.callWithABI(IonMarkFunction(type));
866
867 masm.PopRegsInMask(save);
868 masm.ret();
869
870 Linker linker(masm);
871 JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
872
873 #ifdef JS_ION_PERF
874 writePerfSpewerJitCodeProfile(code, "PreBarrier");
875 #endif
876
877 return code;
878 }
879
// Signature of HandleDebugTrap and its VMFunction wrapper descriptor, used by
// generateDebugTrapHandler below to call into the VM via a stub frame.
typedef bool (*HandleDebugTrapFn)(JSContext*, BaselineFrame*, uint8_t*, bool*);
static const VMFunction HandleDebugTrapInfo = FunctionInfo<HandleDebugTrapFn>(HandleDebugTrap);
882
// Generates the debug trap handler invoked from baseline code. It enters a
// stub frame, calls the HandleDebugTrap VM function, and either resumes at
// the current pc (stub returned false) or performs a forced return from the
// JS frame (stub returned true). Returns nullptr if the HandleDebugTrap
// wrapper could not be generated.
JitCode*
JitRuntime::generateDebugTrapHandler(JSContext* cx)
{
    MacroAssembler masm;
#ifndef JS_USE_LINK_REGISTER
    // The first value contains the return address,
    // which we pull into ICTailCallReg for tail calls.
    masm.setFramePushed(sizeof(intptr_t));
#endif

    Register scratch1 = eax;
    Register scratch2 = ecx;
    Register scratch3 = edx;

    // Load the return address in scratch1.
    masm.loadPtr(Address(esp, 0), scratch1);

    // Load BaselineFrame pointer in scratch2.
    masm.mov(ebp, scratch2);
    masm.subPtr(Imm32(BaselineFrame::Size()), scratch2);

    // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
    // the stub frame has a nullptr ICStub pointer, since this pointer is
    // marked during GC.
    masm.movePtr(ImmPtr(nullptr), ICStubReg);
    EmitBaselineEnterStubFrame(masm, scratch3);

    JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
    if (!code)
        return nullptr;

    // Push the wrapper's arguments (return address and BaselineFrame).
    masm.push(scratch1);
    masm.push(scratch2);
    EmitBaselineCallVM(code, masm);

    EmitBaselineLeaveStubFrame(masm);

    // If the stub returns |true|, we have to perform a forced return
    // (return from the JS frame). If the stub returns |false|, just return
    // from the trap stub so that execution continues at the current pc.
    Label forcedReturn;
    masm.branchTest32(Assembler::NonZero, ReturnReg, ReturnReg, &forcedReturn);
    masm.ret();

    masm.bind(&forcedReturn);
    masm.loadValue(Address(ebp, BaselineFrame::reverseOffsetOfReturnValue()),
                   JSReturnOperand);
    masm.mov(ebp, esp);
    masm.pop(ebp);

    // Before returning, if profiling is turned on, make sure that lastProfilingFrame
    // is set to the correct caller frame.
    {
        Label skipProfilingInstrumentation;
        AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
        masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skipProfilingInstrumentation);
        masm.profilerExitFrame();
        masm.bind(&skipProfilingInstrumentation);
    }

    masm.ret();

    Linker linker(masm);
    JitCode* codeDbg = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler");
#endif

    return codeDbg;
}
954
955 JitCode*
generateExceptionTailStub(JSContext * cx,void * handler)956 JitRuntime::generateExceptionTailStub(JSContext* cx, void* handler)
957 {
958 MacroAssembler masm;
959
960 masm.handleFailureWithHandlerTail(handler);
961
962 Linker linker(masm);
963 JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
964
965 #ifdef JS_ION_PERF
966 writePerfSpewerJitCodeProfile(code, "ExceptionTailStub");
967 #endif
968
969 return code;
970 }
971
972 JitCode*
generateBailoutTailStub(JSContext * cx)973 JitRuntime::generateBailoutTailStub(JSContext* cx)
974 {
975 MacroAssembler masm;
976
977 masm.generateBailoutTail(edx, ecx);
978
979 Linker linker(masm);
980 JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
981
982 #ifdef JS_ION_PERF
983 writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
984 #endif
985
986 return code;
987 }
988
JitCode*
JitRuntime::generateProfilerExitFrameTailStub(JSContext* cx)
{
    MacroAssembler masm;

    Register scratch1 = eax;
    Register scratch2 = ebx;
    Register scratch3 = esi;
    Register scratch4 = edi;

    //
    // The code generated below expects that the current stack pointer points
    // to an Ion or Baseline frame, at the state it would be immediately
    // before a ret(). Thus, after this stub's business is done, it executes
    // a ret() and returns directly to the caller script, on behalf of the
    // callee script that jumped to this code.
    //
    // Thus the expected stack is:
    //
    //                                   StackPointer ----+
    //                                                    v
    // ..., ActualArgc, CalleeToken, Descriptor, ReturnAddr
    // MEM-HI                                       MEM-LOW
    //
    //
    // The generated jitcode is responsible for overwriting the
    // jitActivation->lastProfilingFrame field with a pointer to the previous
    // Ion or Baseline jit-frame that was pushed before this one. It is also
    // responsible for overwriting jitActivation->lastProfilingCallSite with
    // the return address into that frame. The frame could either be an
    // immediate "caller" frame, or it could be a frame in a previous
    // JitActivation (if the current frame was entered from C++, and the C++
    // was entered by some caller jit-frame further down the stack).
    //
    // So this jitcode is responsible for "walking up" the jit stack, finding
    // the previous Ion or Baseline JS frame, and storing its address and the
    // return address into the appropriate fields on the current jitActivation.
    //
    // There are a fixed number of different path types that can lead to the
    // current frame, which is either a baseline or ion frame:
    //
    // <Baseline-Or-Ion>
    // ^
    // |
    // ^--- Ion
    // |
    // ^--- Baseline Stub <---- Baseline
    // |
    // ^--- Argument Rectifier
    // |    ^
    // |    |
    // |    ^--- Ion
    // |    |
    // |    ^--- Baseline Stub <---- Baseline
    // |
    // ^--- Entry Frame (From C++)
    //
    Register actReg = scratch4;
    AbsoluteAddress activationAddr(GetJitContext()->runtime->addressOfProfilingActivation());
    masm.loadPtr(activationAddr, actReg);

    Address lastProfilingFrame(actReg, JitActivation::offsetOfLastProfilingFrame());
    Address lastProfilingCallSite(actReg, JitActivation::offsetOfLastProfilingCallSite());

#ifdef DEBUG
    // Ensure that the frame we are exiting is the current lastProfilingFrame.
    // A null lastProfilingFrame is also accepted.
    {
        masm.loadPtr(lastProfilingFrame, scratch1);
        Label checkOk;
        masm.branchPtr(Assembler::Equal, scratch1, ImmWord(0), &checkOk);
        masm.branchPtr(Assembler::Equal, StackPointer, scratch1, &checkOk);
        masm.assumeUnreachable(
            "Mismatch between stored lastProfilingFrame and current stack pointer.");
        masm.bind(&checkOk);
    }
#endif

    // Load the frame descriptor into |scratch1|, figure out what to do
    // depending on its type.
    masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfDescriptor()), scratch1);

    // Going into the conditionals, we will have:
    //      FrameDescriptor.size in scratch1
    //      FrameDescriptor.type in scratch2
    masm.movePtr(scratch1, scratch2);
    masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch1);
    masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch2);

    // Handling of each case is dependent on FrameDescriptor.type
    Label handle_IonJS;
    Label handle_BaselineStub;
    Label handle_Rectifier;
    Label handle_IonAccessorIC;
    Label handle_Entry;
    Label end;

    // Note: BaselineJS frames share the IonJS handling path below.
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_IonJS), &handle_IonJS);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineJS), &handle_IonJS);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineStub), &handle_BaselineStub);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Rectifier), &handle_Rectifier);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_IonAccessorIC), &handle_IonAccessorIC);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Entry), &handle_Entry);

    masm.assumeUnreachable("Invalid caller frame type when exiting from Ion frame.");

    //
    // JitFrame_IonJS
    //
    // Stack layout:
    //                  ...
    //                  Ion-Descriptor
    //     Prev-FP ---> Ion-ReturnAddr
    //                  ... previous frame data ... |- Descriptor.Size
    //                  ... arguments ...           |
    //                  ActualArgc          |
    //                  CalleeToken         |- JitFrameLayout::Size()
    //                  Descriptor          |
    //        FP -----> ReturnAddr          |
    //
    masm.bind(&handle_IonJS);
    {
        // |scratch1| contains Descriptor.size

        // returning directly to an IonJS frame.  Store return addr to frame
        // in lastProfilingCallSite.
        masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfReturnAddress()), scratch2);
        masm.storePtr(scratch2, lastProfilingCallSite);

        // Store return frame in lastProfilingFrame.
        // scratch2 := StackPointer + Descriptor.size*1 + JitFrameLayout::Size();
        // (TimesOne: Descriptor.size is already in bytes.)
        masm.lea(Operand(StackPointer, scratch1, TimesOne, JitFrameLayout::Size()), scratch2);
        masm.storePtr(scratch2, lastProfilingFrame);
        masm.ret();
    }

    //
    // JitFrame_BaselineStub
    //
    // Look past the stub and store the frame pointer to
    // the baselineJS frame prior to it.
    //
    // Stack layout:
    //              ...
    //              BL-Descriptor
    // Prev-FP ---> BL-ReturnAddr
    //      +-----> BL-PrevFramePointer
    //      |       ... BL-FrameData ...
    //      |       BLStub-Descriptor
    //      |       BLStub-ReturnAddr
    //      |       BLStub-StubPointer          |
    //      +------ BLStub-SavedFramePointer    |- Descriptor.Size
    //              ... arguments ...           |
    //              ActualArgc          |
    //              CalleeToken         |- JitFrameLayout::Size()
    //              Descriptor          |
    //    FP -----> ReturnAddr          |
    //
    // We take advantage of the fact that the stub frame saves the frame
    // pointer pointing to the baseline frame, so a bunch of calculation can
    // be avoided.
    //
    masm.bind(&handle_BaselineStub);
    {
        BaseIndex stubFrameReturnAddr(StackPointer, scratch1, TimesOne,
                                      JitFrameLayout::Size() +
                                      BaselineStubFrameLayout::offsetOfReturnAddress());
        masm.loadPtr(stubFrameReturnAddr, scratch2);
        masm.storePtr(scratch2, lastProfilingCallSite);

        BaseIndex stubFrameSavedFramePtr(StackPointer, scratch1, TimesOne,
                                         JitFrameLayout::Size() - (2 * sizeof(void*)));
        masm.loadPtr(stubFrameSavedFramePtr, scratch2);
        masm.addPtr(Imm32(sizeof(void*)), scratch2); // Skip past BL-PrevFramePtr
        masm.storePtr(scratch2, lastProfilingFrame);
        masm.ret();
    }


    //
    // JitFrame_Rectifier
    //
    // The rectifier frame can be preceded by either an IonJS or a
    // BaselineStub frame.
    //
    // Stack layout if caller of rectifier was Ion:
    //
    //              Ion-Descriptor
    //              Ion-ReturnAddr
    //              ... ion frame data ... |- Rect-Descriptor.Size
    //              < COMMON LAYOUT >
    //
    // Stack layout if caller of rectifier was Baseline:
    //
    //              BL-Descriptor
    // Prev-FP ---> BL-ReturnAddr
    //      +-----> BL-SavedFramePointer
    //      |       ... baseline frame data ...
    //      |       BLStub-Descriptor
    //      |       BLStub-ReturnAddr
    //      |       BLStub-StubPointer          |
    //      +------ BLStub-SavedFramePointer    |- Rect-Descriptor.Size
    //              ... args to rectifier ...   |
    //              < COMMON LAYOUT >
    //
    // Common stack layout:
    //
    //              ActualArgc          |
    //              CalleeToken         |- RectifierFrameLayout::Size()
    //              Rect-Descriptor     |
    //              Rect-ReturnAddr     |
    //              ... rectifier data & args ... |- Descriptor.Size
    //              ActualArgc      |
    //              CalleeToken     |- JitFrameLayout::Size()
    //              Descriptor      |
    //    FP -----> ReturnAddr      |
    //
    masm.bind(&handle_Rectifier);
    {
        // scratch2 := StackPointer + Descriptor.size + JitFrameLayout::Size()
        masm.lea(Operand(StackPointer, scratch1, TimesOne, JitFrameLayout::Size()), scratch2);
        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfDescriptor()), scratch3);
        masm.movePtr(scratch3, scratch1);
        masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch3);
        masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch1);

        // Now |scratch1| contains Rect-Descriptor.Size
        // and |scratch2| points to Rectifier frame
        // and |scratch3| contains Rect-Descriptor.Type

        // Check for either Ion or BaselineStub frame.
        Label handle_Rectifier_BaselineStub;
        masm.branch32(Assembler::NotEqual, scratch3, Imm32(JitFrame_IonJS),
                      &handle_Rectifier_BaselineStub);

        // Handle Rectifier <- IonJS
        // scratch3 := RectFrame[ReturnAddr]
        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfReturnAddress()), scratch3);
        masm.storePtr(scratch3, lastProfilingCallSite);

        // scratch3 := RectFrame + Rect-Descriptor.Size + RectifierFrameLayout::Size()
        masm.lea(Operand(scratch2, scratch1, TimesOne, RectifierFrameLayout::Size()), scratch3);
        masm.storePtr(scratch3, lastProfilingFrame);
        masm.ret();

        // Handle Rectifier <- BaselineStub <- BaselineJS
        masm.bind(&handle_Rectifier_BaselineStub);
#ifdef DEBUG
        // In DEBUG builds, assert the only other frame type that can precede
        // a rectifier is a BaselineStub frame.
        {
            Label checkOk;
            masm.branch32(Assembler::Equal, scratch3, Imm32(JitFrame_BaselineStub), &checkOk);
            masm.assumeUnreachable("Unrecognized frame preceding baselineStub.");
            masm.bind(&checkOk);
        }
#endif
        BaseIndex stubFrameReturnAddr(scratch2, scratch1, TimesOne,
                                         RectifierFrameLayout::Size() +
                                         BaselineStubFrameLayout::offsetOfReturnAddress());
        masm.loadPtr(stubFrameReturnAddr, scratch3);
        masm.storePtr(scratch3, lastProfilingCallSite);

        BaseIndex stubFrameSavedFramePtr(scratch2, scratch1, TimesOne,
                                         RectifierFrameLayout::Size() - (2 * sizeof(void*)));
        masm.loadPtr(stubFrameSavedFramePtr, scratch3);
        masm.addPtr(Imm32(sizeof(void*)), scratch3); // Skip past BL-PrevFramePtr
        masm.storePtr(scratch3, lastProfilingFrame);
        masm.ret();
    }

    // JitFrame_IonAccessorIC
    //
    // The caller is always an IonJS frame.
    //
    //              Ion-Descriptor
    //              Ion-ReturnAddr
    //              ... ion frame data ... |- AccFrame-Descriptor.Size
    //              StubCode             |
    //              AccFrame-Descriptor  |- IonAccessorICFrameLayout::Size()
    //              AccFrame-ReturnAddr  |
    //              ... accessor frame data & args ... |- Descriptor.Size
    //              ActualArgc      |
    //              CalleeToken     |- JitFrameLayout::Size()
    //              Descriptor      |
    //    FP -----> ReturnAddr      |
    masm.bind(&handle_IonAccessorIC);
    {
        // scratch2 := StackPointer + Descriptor.size + JitFrameLayout::Size()
        masm.lea(Operand(StackPointer, scratch1, TimesOne, JitFrameLayout::Size()), scratch2);

        // scratch3 := AccFrame-Descriptor.Size
        masm.loadPtr(Address(scratch2, IonAccessorICFrameLayout::offsetOfDescriptor()), scratch3);
#ifdef DEBUG
        // Assert previous frame is an IonJS frame.
        masm.movePtr(scratch3, scratch1);
        masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch1);
        {
            Label checkOk;
            masm.branch32(Assembler::Equal, scratch1, Imm32(JitFrame_IonJS), &checkOk);
            masm.assumeUnreachable("IonAccessorIC frame must be preceded by IonJS frame");
            masm.bind(&checkOk);
        }
#endif
        masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch3);

        // lastProfilingCallSite := AccFrame-ReturnAddr
        masm.loadPtr(Address(scratch2, IonAccessorICFrameLayout::offsetOfReturnAddress()), scratch1);
        masm.storePtr(scratch1, lastProfilingCallSite);

        // lastProfilingFrame := AccessorFrame + AccFrame-Descriptor.Size +
        //                       IonAccessorICFrameLayout::Size()
        masm.lea(Operand(scratch2, scratch3, TimesOne, IonAccessorICFrameLayout::Size()), scratch1);
        masm.storePtr(scratch1, lastProfilingFrame);
        masm.ret();
    }

    //
    // JitFrame_Entry
    //
    // If at an entry frame, store null into both fields.
    // (The jit activation is being exited back to C++.)
    //
    masm.bind(&handle_Entry);
    {
        masm.movePtr(ImmPtr(nullptr), scratch1);
        masm.storePtr(scratch1, lastProfilingCallSite);
        masm.storePtr(scratch1, lastProfilingFrame);
        masm.ret();
    }

    Linker linker(masm);
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "ProfilerExitFrameStub");
#endif

    return code;
}
1325