1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2  * vim: set ts=8 sts=4 et sw=4 tw=99:
3  * This Source Code Form is subject to the terms of the Mozilla Public
4  * License, v. 2.0. If a copy of the MPL was not distributed with this
5  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 
7 #ifndef jit_arm_SharedICHelpers_arm_h
8 #define jit_arm_SharedICHelpers_arm_h
9 
10 #include "jit/BaselineFrame.h"
11 #include "jit/BaselineIC.h"
12 #include "jit/MacroAssembler.h"
13 #include "jit/SharedICRegisters.h"
14 
15 namespace js {
16 namespace jit {
17 
// Distance from sp to the top Value inside an IC stub (no return address on the stack on ARM).
// On platforms that push a return address, this would be sizeof(void*); ARM keeps the return
// address in lr instead.
static const size_t ICStackValueOffset = 0;
20 
inline void
EmitRestoreTailCallReg(MacroAssembler& masm)
{
    // No-op on ARM because link register is always holding the return address.
}
26 
inline void
EmitRepushTailCallReg(MacroAssembler& masm)
{
    // No-op on ARM because link register is always holding the return address.
}
32 
// Emit a call into an IC chain. The ICEntry offset is not known yet when this
// code is generated, so a patchable move is emitted and its CodeOffset is
// reported through *patchOffset for the caller to patch later.
inline void
EmitCallIC(CodeOffset* patchOffset, MacroAssembler& masm)
{
    // Move ICEntry offset into ICStubReg
    CodeOffset offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
    *patchOffset = offset;

    // Load stub pointer into ICStubReg
    masm.loadPtr(Address(ICStubReg, ICEntry::offsetOfFirstStub()), ICStubReg);

    // Load stubcode pointer from BaselineStubEntry.
    // R2 won't be active when we call ICs, so we can use r0.
    MOZ_ASSERT(R2 == ValueOperand(r1, r0));
    masm.loadPtr(Address(ICStubReg, ICStub::offsetOfStubCode()), r0);

    // Call the stubcode via a direct branch-and-link.
    masm.ma_blx(r0);
}
51 
// Tail-jump into the first type-monitor stub of the current IC. Unlike
// EmitCallIC this is a plain branch: the monitor stub returns directly to the
// original caller via the address already in lr.
inline void
EmitEnterTypeMonitorIC(MacroAssembler& masm,
                       size_t monitorStubOffset = ICMonitoredStub::offsetOfFirstMonitorStub())
{
    // This is expected to be called from within an IC, when ICStubReg is
    // properly initialized to point to the stub.
    masm.loadPtr(Address(ICStubReg, (uint32_t) monitorStubOffset), ICStubReg);

    // Load stubcode pointer from BaselineStubEntry.
    // R2 won't be active when we call ICs, so we can use r0.
    MOZ_ASSERT(R2 == ValueOperand(r1, r0));
    masm.loadPtr(Address(ICStubReg, ICStub::offsetOfStubCode()), r0);

    // Jump to the stubcode.
    masm.branch(r0);
}
68 
inline void
EmitReturnFromIC(MacroAssembler& masm)
{
    // Return by moving the link register into the program counter.
    masm.ma_mov(lr, pc);
}
74 
inline void
EmitChangeICReturnAddress(MacroAssembler& masm, Register reg)
{
    // On ARM the IC return address lives in lr, so redirecting the return is
    // just a register move.
    masm.ma_mov(reg, lr);
}
80 
// Tail-call a VM wrapper from a Baseline IC. argSize is the size in bytes of
// the VMFunction arguments already pushed on the stack.
inline void
EmitBaselineTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t argSize)
{
    // We assume during this that R0 and R1 have been pushed, and that R2 is
    // unused.
    MOZ_ASSERT(R2 == ValueOperand(r1, r0));

    // Compute frame size: r0 = BaselineFrameReg + FramePointerOffset - sp.
    masm.movePtr(BaselineFrameReg, r0);
    masm.ma_add(Imm32(BaselineFrame::FramePointerOffset), r0);
    masm.ma_sub(BaselineStackReg, r0);

    // Store frame size without VMFunction arguments for GC marking.
    masm.ma_sub(r0, Imm32(argSize), r1);
    masm.store32(r1, Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFrameSize()));

    // Push frame descriptor and perform the tail call.
    // ICTailCallReg (lr) already contains the return address (as we keep
    // it there through the stub calls), but the VMWrapper code being called
    // expects the return address to also be pushed on the stack.
    MOZ_ASSERT(ICTailCallReg == lr);
    masm.makeFrameDescriptor(r0, JitFrame_BaselineJS);
    masm.push(r0);
    masm.push(lr);
    masm.branch(target);
}
107 
// Tail-call a VM wrapper from an Ion IC. stackSize is the number of bytes
// pushed on top of the caller's frame descriptor.
inline void
EmitIonTailCallVM(JitCode* target, MacroAssembler& masm, uint32_t stackSize)
{
    // We assume during this that R0 and R1 have been pushed, and that R2 is
    // unused.
    MOZ_ASSERT(R2 == ValueOperand(r1, r0));

    // Recover the caller's frame size from the descriptor on the stack, then
    // account for the extra data pushed on top of it (minus the return
    // address slot, which is popped on return).
    masm.loadPtr(Address(sp, stackSize), r0);
    masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), r0);
    masm.add32(Imm32(stackSize + JitStubFrameLayout::Size() - sizeof(intptr_t)), r0);

    // Push frame descriptor and perform the tail call.
    // ICTailCallReg (lr) already contains the return address (as we keep
    // it there through the stub calls), but the VMWrapper code being called
    // expects the return address to also be pushed on the stack.
    MOZ_ASSERT(ICTailCallReg == lr);
    masm.makeFrameDescriptor(r0, JitFrame_IonJS);
    masm.push(r0);
    masm.push(lr);
    masm.branch(target);
}
129 
// Build a JitFrame_BaselineStub frame descriptor in |reg| describing the
// current stub frame.
inline void
EmitBaselineCreateStubFrameDescriptor(MacroAssembler& masm, Register reg)
{
    // Compute stub frame size. We have to add two pointers: the stub reg and
    // previous frame pointer pushed by EmitEnterStubFrame.
    masm.mov(BaselineFrameReg, reg);
    masm.ma_add(Imm32(sizeof(void*) * 2), reg);
    masm.ma_sub(BaselineStackReg, reg);

    masm.makeFrameDescriptor(reg, JitFrame_BaselineStub);
}
141 
// Call a VM wrapper from within a Baseline stub frame (set up by
// EmitBaselineEnterStubFrame). Clobbers r0 for the frame descriptor.
inline void
EmitBaselineCallVM(JitCode* target, MacroAssembler& masm)
{
    EmitBaselineCreateStubFrameDescriptor(masm, r0);
    masm.push(r0);
    masm.call(target);
}
149 
// Call a VM wrapper from an Ion IC. stackSlots is the number of pointer-sized
// argument slots already pushed for the VMFunction.
inline void
EmitIonCallVM(JitCode* target, size_t stackSlots, MacroAssembler& masm)
{
    uint32_t descriptor = MakeFrameDescriptor(masm.framePushed(), JitFrame_IonStub);
    masm.Push(Imm32(descriptor));
    masm.callJit(target);

    // Remove rest of the frame left on the stack. We remove the return address
    // which is implicitly popped when returning.
    size_t framePop = sizeof(ExitFrameLayout) - sizeof(void*);

    // Pop arguments from framePushed.
    masm.implicitPop(stackSlots * sizeof(void*) + framePop);
}
164 
// Size of values pushed by EmitBaselineEnterStubFrame (descriptor, return
// address, stub reg, frame pointer).
static const uint32_t STUB_FRAME_SIZE = 4 * sizeof(void*);
// Offset from the stub-frame sp to the saved ICStubReg slot.
static const uint32_t STUB_FRAME_SAVED_STUB_OFFSET = sizeof(void*);
168 
// Push a Baseline stub frame: frame descriptor, return address, stub pointer
// and old frame pointer, then make BaselineFrameReg point at the new frame.
// |scratch| is clobbered; it must not alias ICTailCallReg (lr).
inline void
EmitBaselineEnterStubFrame(MacroAssembler& masm, Register scratch)
{
    MOZ_ASSERT(scratch != ICTailCallReg);

    // Compute frame size.
    masm.mov(BaselineFrameReg, scratch);
    masm.ma_add(Imm32(BaselineFrame::FramePointerOffset), scratch);
    masm.ma_sub(BaselineStackReg, scratch);

    masm.store32(scratch, Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFrameSize()));

    // Note: when making changes here, don't forget to update STUB_FRAME_SIZE if
    // needed.

    // Push frame descriptor and return address.
    masm.makeFrameDescriptor(scratch, JitFrame_BaselineJS);
    masm.Push(scratch);
    masm.Push(ICTailCallReg);

    // Save old frame pointer, stack pointer and stub reg.
    masm.Push(ICStubReg);
    masm.Push(BaselineFrameReg);
    masm.mov(BaselineStackReg, BaselineFrameReg);

    // We pushed 4 words, so the stack is still aligned to 8 bytes.
    masm.checkStackAlignment();
}
197 
// Push an Ion stub frame: just the return address (from lr) and the stub
// pointer. |scratch| is unused on ARM but kept for cross-platform signature
// parity.
inline void
EmitIonEnterStubFrame(MacroAssembler& masm, Register scratch)
{
    MOZ_ASSERT(ICTailCallReg == lr);
    masm.Push(ICTailCallReg);
    masm.Push(ICStubReg);
}
205 
// Tear down the frame pushed by EmitBaselineEnterStubFrame, restoring
// BaselineFrameReg, ICStubReg and the return address (lr).
inline void
EmitBaselineLeaveStubFrame(MacroAssembler& masm, bool calledIntoIon = false)
{
    ScratchRegisterScope scratch(masm);

    // Ion frames do not save and restore the frame pointer. If we called into
    // Ion, we have to restore the stack pointer from the frame descriptor. If
    // we performed a VM call, the descriptor has been popped already so in that
    // case we use the frame pointer.
    if (calledIntoIon) {
        // Pop the descriptor and decode the frame size out of it.
        masm.Pop(scratch);
        masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch);
        masm.add32(scratch, BaselineStackReg);
    } else {
        masm.mov(BaselineFrameReg, BaselineStackReg);
    }

    masm.Pop(BaselineFrameReg);
    masm.Pop(ICStubReg);

    // Load the return address.
    masm.Pop(ICTailCallReg);

    // Discard the frame descriptor.
    masm.Pop(scratch);
}
232 
// Tear down the frame pushed by EmitIonEnterStubFrame (reverse push order).
inline void
EmitIonLeaveStubFrame(MacroAssembler& masm)
{
    masm.Pop(ICStubReg);
    masm.Pop(ICTailCallReg);
}
239 
240 inline void
EmitStowICValues(MacroAssembler & masm,int values)241 EmitStowICValues(MacroAssembler& masm, int values)
242 {
243     MOZ_ASSERT(values >= 0 && values <= 2);
244     switch(values) {
245       case 1:
246         // Stow R0.
247         masm.Push(R0);
248         break;
249       case 2:
250         // Stow R0 and R1.
251         masm.Push(R0);
252         masm.Push(R1);
253         break;
254     }
255 }
256 
257 inline void
258 EmitUnstowICValues(MacroAssembler& masm, int values, bool discard = false)
259 {
260     MOZ_ASSERT(values >= 0 && values <= 2);
261     switch(values) {
262       case 1:
263         // Unstow R0.
264         if (discard)
265             masm.addPtr(Imm32(sizeof(Value)), BaselineStackReg);
266         else
267             masm.popValue(R0);
268         break;
269       case 2:
270         // Unstow R0 and R1.
271         if (discard) {
272             masm.addPtr(Imm32(sizeof(Value) * 2), BaselineStackReg);
273         } else {
274             masm.popValue(R1);
275             masm.popValue(R0);
276         }
277         break;
278     }
279     masm.adjustFrame(-values * sizeof(Value));
280 }
281 
// Call the type-update IC chain for the value in R0. On failure, falls back
// to the VM function |code| with the object Value found at |objectOffset|
// from sp (relative to the state on entry).
inline void
EmitCallTypeUpdateIC(MacroAssembler& masm, JitCode* code, uint32_t objectOffset)
{
    MOZ_ASSERT(R2 == ValueOperand(r1, r0));

    // R0 contains the value that needs to be typechecked. The object we're
    // updating is a boxed Value on the stack, at offset objectOffset from esp,
    // excluding the return address.

    // Save the current ICStubReg to stack, as well as the TailCallReg,
    // since on ARM, the LR is live.
    masm.push(ICStubReg);
    masm.push(ICTailCallReg);

    // This is expected to be called from within an IC, when ICStubReg is
    // properly initialized to point to the stub.
    masm.loadPtr(Address(ICStubReg, ICUpdatedStub::offsetOfFirstUpdateStub()),
                 ICStubReg);

    // TODO: Change r0 uses below to use masm's configurable scratch register instead.

    // Load stubcode pointer from ICStubReg into r0.
    masm.loadPtr(Address(ICStubReg, ICStub::offsetOfStubCode()), r0);

    // Call the stubcode.
    masm.ma_blx(r0);

    // Restore the old stub reg and tailcall reg.
    masm.pop(ICTailCallReg);
    masm.pop(ICStubReg);

    // The update IC will store 0 or 1 in R1.scratchReg() reflecting if the
    // value in R0 type-checked properly or not.
    Label success;
    masm.cmp32(R1.scratchReg(), Imm32(1));
    masm.j(Assembler::Equal, &success);

    // If the IC failed, then call the update fallback function.
    EmitBaselineEnterStubFrame(masm, R1.scratchReg());

    // Reload the object Value now sitting above the stub frame.
    masm.loadValue(Address(BaselineStackReg, STUB_FRAME_SIZE + objectOffset), R1);

    // Push VM call arguments: value, object, stub.
    masm.Push(R0);
    masm.Push(R1);
    masm.Push(ICStubReg);

    // Load previous frame pointer, push BaselineFrame*.
    masm.loadPtr(Address(BaselineFrameReg, 0), R0.scratchReg());
    masm.pushBaselineFramePtr(R0.scratchReg(), R0.scratchReg());

    EmitBaselineCallVM(code, masm);
    EmitBaselineLeaveStubFrame(masm);

    // Success at end.
    masm.bind(&success);
}
338 
// Emit a patchable pre-barrier for the GC thing stored at |addr|.
// AddrType is any addressing mode accepted by patchableCallPreBarrier
// (e.g. Address or BaseIndex).
template <typename AddrType>
inline void
EmitPreBarrier(MacroAssembler& masm, const AddrType& addr, MIRType type)
{
    // On ARM, lr is clobbered by patchableCallPreBarrier. Save it first.
    masm.push(lr);
    masm.patchableCallPreBarrier(addr, type);
    masm.pop(lr);
}
348 
// Emitted when a stub's guard fails: advance to the next stub in the IC chain
// and jump straight into its code.
inline void
EmitStubGuardFailure(MacroAssembler& masm)
{
    MOZ_ASSERT(R2 == ValueOperand(r1, r0));

    // NOTE: This routine assumes that the stub guard code left the stack in the
    // same state it was in when it was entered.

    // BaselineStubEntry points to the current stub.

    // Load next stub into ICStubReg.
    masm.loadPtr(Address(ICStubReg, ICStub::offsetOfNext()), ICStubReg);

    // Load stubcode pointer from BaselineStubEntry into scratch register.
    masm.loadPtr(Address(ICStubReg, ICStub::offsetOfStubCode()), r0);

    // Return address is already loaded, just jump to the next stubcode.
    MOZ_ASSERT(ICTailCallReg == lr);
    masm.branch(r0);
}
369 
370 
371 } // namespace jit
372 } // namespace js
373 
374 #endif /* jit_arm_SharedICHelpers_arm_h */
375