/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jit/arm64/MoveEmitter-arm64.h"

#include "jit/MacroAssembler-inl.h"

using namespace js;
using namespace js::jit;

13 MemOperand
toMemOperand(const MoveOperand & operand) const14 MoveEmitterARM64::toMemOperand(const MoveOperand& operand) const
15 {
16     MOZ_ASSERT(operand.isMemory());
17     ARMRegister base(operand.base(), 64);
18     if (operand.base() == masm.getStackPointer())
19         return MemOperand(base, operand.disp() + (masm.framePushed() - pushedAtStart_));
20     return MemOperand(base, operand.disp());
21 }
22 
23 void
emit(const MoveResolver & moves)24 MoveEmitterARM64::emit(const MoveResolver& moves)
25 {
26     if (moves.numCycles()) {
27         masm.reserveStack(sizeof(void*));
28         pushedAtCycle_ = masm.framePushed();
29     }
30 
31     for (size_t i = 0; i < moves.numMoves(); i++)
32         emitMove(moves.getMove(i));
33 }
34 
35 void
finish()36 MoveEmitterARM64::finish()
37 {
38     assertDone();
39     masm.freeStack(masm.framePushed() - pushedAtStart_);
40     MOZ_ASSERT(masm.framePushed() == pushedAtStart_);
41 }
42 
43 void
emitMove(const MoveOp & move)44 MoveEmitterARM64::emitMove(const MoveOp& move)
45 {
46     const MoveOperand& from = move.from();
47     const MoveOperand& to = move.to();
48 
49     if (move.isCycleBegin()) {
50         MOZ_ASSERT(!inCycle_ && !move.isCycleEnd());
51         breakCycle(from, to, move.endCycleType());
52         inCycle_ = true;
53     } else if (move.isCycleEnd()) {
54         MOZ_ASSERT(inCycle_);
55         completeCycle(from, to, move.type());
56         inCycle_ = false;
57         return;
58     }
59 
60     switch (move.type()) {
61       case MoveOp::FLOAT32:
62         emitFloat32Move(from, to);
63         break;
64       case MoveOp::DOUBLE:
65         emitDoubleMove(from, to);
66         break;
67       case MoveOp::INT32:
68         emitInt32Move(from, to);
69         break;
70       case MoveOp::GENERAL:
71         emitGeneralMove(from, to);
72         break;
73       default:
74         MOZ_CRASH("Unexpected move type");
75     }
76 }
77 
78 void
emitFloat32Move(const MoveOperand & from,const MoveOperand & to)79 MoveEmitterARM64::emitFloat32Move(const MoveOperand& from, const MoveOperand& to)
80 {
81     if (from.isFloatReg()) {
82         if (to.isFloatReg())
83             masm.Fmov(toFPReg(to, MoveOp::FLOAT32), toFPReg(from, MoveOp::FLOAT32));
84         else
85             masm.Str(toFPReg(from, MoveOp::FLOAT32), toMemOperand(to));
86         return;
87     }
88 
89     if (to.isFloatReg()) {
90         masm.Ldr(toFPReg(to, MoveOp::FLOAT32), toMemOperand(from));
91         return;
92     }
93 
94     vixl::UseScratchRegisterScope temps(&masm.asVIXL());
95     const ARMFPRegister scratch32 = temps.AcquireS();
96     masm.Ldr(scratch32, toMemOperand(from));
97     masm.Str(scratch32, toMemOperand(to));
98 }
99 
100 void
emitDoubleMove(const MoveOperand & from,const MoveOperand & to)101 MoveEmitterARM64::emitDoubleMove(const MoveOperand& from, const MoveOperand& to)
102 {
103     if (from.isFloatReg()) {
104         if (to.isFloatReg())
105             masm.Fmov(toFPReg(to, MoveOp::DOUBLE), toFPReg(from, MoveOp::DOUBLE));
106         else
107             masm.Str(toFPReg(from, MoveOp::DOUBLE), toMemOperand(to));
108         return;
109     }
110 
111     if (to.isFloatReg()) {
112         masm.Ldr(toFPReg(to, MoveOp::DOUBLE), toMemOperand(from));
113         return;
114     }
115 
116     vixl::UseScratchRegisterScope temps(&masm.asVIXL());
117     const ARMFPRegister scratch = temps.AcquireD();
118     masm.Ldr(scratch, toMemOperand(from));
119     masm.Str(scratch, toMemOperand(to));
120 }
121 
122 void
emitInt32Move(const MoveOperand & from,const MoveOperand & to)123 MoveEmitterARM64::emitInt32Move(const MoveOperand& from, const MoveOperand& to)
124 {
125     if (from.isGeneralReg()) {
126         if (to.isGeneralReg())
127             masm.Mov(toARMReg32(to), toARMReg32(from));
128         else
129             masm.Str(toARMReg32(from), toMemOperand(to));
130         return;
131     }
132 
133     if (to.isGeneralReg()) {
134         masm.Ldr(toARMReg32(to), toMemOperand(from));
135         return;
136     }
137 
138     vixl::UseScratchRegisterScope temps(&masm.asVIXL());
139     const ARMRegister scratch32 = temps.AcquireW();
140     masm.Ldr(scratch32, toMemOperand(from));
141     masm.Str(scratch32, toMemOperand(to));
142 }
143 
144 void
emitGeneralMove(const MoveOperand & from,const MoveOperand & to)145 MoveEmitterARM64::emitGeneralMove(const MoveOperand& from, const MoveOperand& to)
146 {
147     if (from.isGeneralReg()) {
148         MOZ_ASSERT(to.isGeneralReg() || to.isMemory());
149         if (to.isGeneralReg())
150             masm.Mov(toARMReg64(to), toARMReg64(from));
151         else
152             masm.Str(toARMReg64(from), toMemOperand(to));
153         return;
154     }
155 
156     // {Memory OR EffectiveAddress} -> Register move.
157     if (to.isGeneralReg()) {
158         MOZ_ASSERT(from.isMemoryOrEffectiveAddress());
159         if (from.isMemory())
160             masm.Ldr(toARMReg64(to), toMemOperand(from));
161         else
162             masm.Add(toARMReg64(to), toARMReg64(from), Operand(from.disp()));
163         return;
164     }
165 
166     vixl::UseScratchRegisterScope temps(&masm.asVIXL());
167     const ARMRegister scratch64 = temps.AcquireX();
168 
169     // Memory -> Memory move.
170     if (from.isMemory()) {
171         MOZ_ASSERT(to.isMemory());
172         masm.Ldr(scratch64, toMemOperand(from));
173         masm.Str(scratch64, toMemOperand(to));
174         return;
175     }
176 
177     // EffectiveAddress -> Memory move.
178     MOZ_ASSERT(from.isEffectiveAddress());
179     MOZ_ASSERT(to.isMemory());
180     masm.Add(scratch64, toARMReg64(from), Operand(from.disp()));
181     masm.Str(scratch64, toMemOperand(to));
182 }
183 
184 MemOperand
cycleSlot()185 MoveEmitterARM64::cycleSlot()
186 {
187     // Using SP as stack pointer requires alignment preservation below.
188     MOZ_ASSERT(!masm.GetStackPointer64().Is(sp));
189 
190     // emit() already allocated a slot for resolving the cycle.
191     MOZ_ASSERT(pushedAtCycle_ != -1);
192 
193     return MemOperand(masm.GetStackPointer64(), masm.framePushed() - pushedAtCycle_);
194 }
195 
196 void
breakCycle(const MoveOperand & from,const MoveOperand & to,MoveOp::Type type)197 MoveEmitterARM64::breakCycle(const MoveOperand& from, const MoveOperand& to, MoveOp::Type type)
198 {
199     switch (type) {
200       case MoveOp::FLOAT32:
201         if (to.isMemory()) {
202             vixl::UseScratchRegisterScope temps(&masm.asVIXL());
203             const ARMFPRegister scratch32 = temps.AcquireS();
204             masm.Ldr(scratch32, toMemOperand(to));
205             masm.Str(scratch32, cycleSlot());
206         } else {
207             masm.Str(toFPReg(to, type), cycleSlot());
208         }
209         break;
210 
211       case MoveOp::DOUBLE:
212         if (to.isMemory()) {
213             vixl::UseScratchRegisterScope temps(&masm.asVIXL());
214             const ARMFPRegister scratch64 = temps.AcquireD();
215             masm.Ldr(scratch64, toMemOperand(to));
216             masm.Str(scratch64, cycleSlot());
217         } else {
218             masm.Str(toFPReg(to, type), cycleSlot());
219         }
220         break;
221 
222       case MoveOp::INT32:
223         if (to.isMemory()) {
224             vixl::UseScratchRegisterScope temps(&masm.asVIXL());
225             const ARMRegister scratch32 = temps.AcquireW();
226             masm.Ldr(scratch32, toMemOperand(to));
227             masm.Str(scratch32, cycleSlot());
228         } else {
229             masm.Str(toARMReg32(to), cycleSlot());
230         }
231         break;
232 
233       case MoveOp::GENERAL:
234         if (to.isMemory()) {
235             vixl::UseScratchRegisterScope temps(&masm.asVIXL());
236             const ARMRegister scratch64 = temps.AcquireX();
237             masm.Ldr(scratch64, toMemOperand(to));
238             masm.Str(scratch64, cycleSlot());
239         } else {
240             masm.Str(toARMReg64(to), cycleSlot());
241         }
242         break;
243 
244       default:
245         MOZ_CRASH("Unexpected move type");
246     }
247 }
248 
249 void
completeCycle(const MoveOperand & from,const MoveOperand & to,MoveOp::Type type)250 MoveEmitterARM64::completeCycle(const MoveOperand& from, const MoveOperand& to, MoveOp::Type type)
251 {
252     switch (type) {
253       case MoveOp::FLOAT32:
254         if (to.isMemory()) {
255             vixl::UseScratchRegisterScope temps(&masm.asVIXL());
256             const ARMFPRegister scratch32 = temps.AcquireS();
257             masm.Ldr(scratch32, cycleSlot());
258             masm.Str(scratch32, toMemOperand(to));
259         } else {
260             masm.Ldr(toFPReg(to, type), cycleSlot());
261         }
262         break;
263 
264       case MoveOp::DOUBLE:
265         if (to.isMemory()) {
266             vixl::UseScratchRegisterScope temps(&masm.asVIXL());
267             const ARMFPRegister scratch = temps.AcquireD();
268             masm.Ldr(scratch, cycleSlot());
269             masm.Str(scratch, toMemOperand(to));
270         } else {
271             masm.Ldr(toFPReg(to, type), cycleSlot());
272         }
273         break;
274 
275       case MoveOp::INT32:
276         if (to.isMemory()) {
277             vixl::UseScratchRegisterScope temps(&masm.asVIXL());
278             const ARMRegister scratch32 = temps.AcquireW();
279             masm.Ldr(scratch32, cycleSlot());
280             masm.Str(scratch32, toMemOperand(to));
281         } else {
282             masm.Ldr(toARMReg64(to), cycleSlot());
283         }
284         break;
285 
286       case MoveOp::GENERAL:
287         if (to.isMemory()) {
288             vixl::UseScratchRegisterScope temps(&masm.asVIXL());
289             const ARMRegister scratch64 = temps.AcquireX();
290             masm.Ldr(scratch64, cycleSlot());
291             masm.Str(scratch64, toMemOperand(to));
292         } else {
293             masm.Ldr(toARMReg64(to), cycleSlot());
294         }
295         break;
296 
297       default:
298         MOZ_CRASH("Unexpected move type");
299     }
300 }
301