// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been modified
// significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.

#ifndef V8_CODEGEN_ARM_ASSEMBLER_ARM_INL_H_
#define V8_CODEGEN_ARM_ASSEMBLER_ARM_INL_H_

#include "src/codegen/arm/assembler-arm.h"

#include "src/codegen/assembler.h"
#include "src/debug/debug.h"
#include "src/objects/objects-inl.h"
#include "src/objects/smi.h"

namespace v8 {
namespace internal {

bool CpuFeatures::SupportsOptimizer() { return true; }

bool CpuFeatures::SupportsWasmSimd128() { return IsSupported(NEON); }

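// With VFP32DREGS the full bank of 32 d-registers (d0-d31) is usable;
// otherwise only d0-d15 are available.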
int DoubleRegister::NumRegisters() {
  return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
}

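// Adjust the relocated value after the code object containing this
// RelocInfo has been moved by |delta| bytes.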
void RelocInfo::apply(intptr_t delta) {
  if (RelocInfo::IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // relocate entry
  } else if (RelocInfo::IsRelativeCodeTarget(rmode_)) {
    Instruction* branch = Instruction::At(pc_);
    int32_t branch_offset = branch->GetBranchOffset() - delta;
    branch->SetBranchOffset(branch_offset);
  }
}

Address RelocInfo::target_address() {
  DCHECK(IsCodeTargetMode(rmode_) || IsRuntimeEntry(rmode_) ||
         IsWasmCall(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

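// Returns the address of the word that holds the target: the constant pool
// slot for ldr-based loads, or pc_ itself for movw/movt pairs and
// pc-relative branches.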
Address RelocInfo::target_address_address() {
  DCHECK(HasTargetAddressAddress());
  if (Assembler::IsMovW(Memory<int32_t>(pc_))) {
    return pc_;
  } else if (Assembler::IsLdrPcImmediateOffset(Memory<int32_t>(pc_))) {
    return constant_pool_entry_address();
  } else {
    DCHECK(Assembler::IsBOrBlPcImmediateOffset(Memory<int32_t>(pc_)));
    DCHECK(IsRelativeCodeTarget(rmode_));
    return pc_;
  }
}

Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::constant_pool_entry_address(pc_, constant_pool_);
}

int RelocInfo::target_address_size() { return kPointerSize; }

HeapObject RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == FULL_EMBEDDED_OBJECT);
  return HeapObject::cast(
      Object(Assembler::target_address_at(pc_, constant_pool_)));
}

HeapObject RelocInfo::target_object_no_host(Isolate* isolate) {
  return target_object();
}

Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
  if (IsCodeTarget(rmode_) || rmode_ == FULL_EMBEDDED_OBJECT) {
    return Handle<HeapObject>(reinterpret_cast<Address*>(
        Assembler::target_address_at(pc_, constant_pool_)));
  }
  DCHECK(IsRelativeCodeTarget(rmode_));
  return origin->relative_code_target_object_handle_at(pc_);
}

void RelocInfo::set_target_object(Heap* heap, HeapObject target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == FULL_EMBEDDED_OBJECT);
  Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null() &&
      !FLAG_disable_write_barriers) {
    WriteBarrierForCode(host(), this, target);
  }
}

Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, constant_pool_);
}

void RelocInfo::set_target_external_reference(
    Address target, ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  Assembler::set_target_address_at(pc_, constant_pool_, target,
                                   icache_flush_mode);
}

Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return Memory<Address>(pc_);
}

Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return pc_;
}

Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}

void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}

Address RelocInfo::target_off_heap_target() {
  DCHECK(IsOffHeapTarget(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

void RelocInfo::WipeOut() {
  DCHECK(IsFullEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsOffHeapTarget(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory<Address>(pc_) = kNullAddress;
  } else {
    Assembler::set_target_address_at(pc_, constant_pool_, kNullAddress);
  }
}

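// For relative code targets the branch offset encodes an index into the
// assembler's code target list (scaled by kInstrSize) rather than a real
// pc-relative displacement.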
Handle<Code> Assembler::relative_code_target_object_handle_at(
    Address pc) const {
  Instruction* branch = Instruction::At(pc);
  int code_target_index = branch->GetBranchOffset() / kInstrSize;
  return GetCodeTarget(code_target_index);
}

Operand Operand::Zero() { return Operand(static_cast<int32_t>(0)); }

Operand::Operand(const ExternalReference& f)
    : rmode_(RelocInfo::EXTERNAL_REFERENCE) {
  value_.immediate = static_cast<int32_t>(f.address());
}

Operand::Operand(Smi value) : rmode_(RelocInfo::NONE) {
  value_.immediate = static_cast<intptr_t>(value.ptr());
}

Operand::Operand(Register rm) : rm_(rm), shift_op_(LSL), shift_imm_(0) {}

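// Grow the instruction buffer once no more than kGap bytes of space remain,
// and give the pending constant pool a chance to be emitted.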
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
  MaybeCheckConstPool();
}

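// Write one 32-bit instruction at the current position and advance pc_.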
void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}

void Assembler::deserialization_set_special_target_at(
    Address constant_pool_entry, Code code, Address target) {
  DCHECK(!Builtins::IsIsolateIndependentBuiltin(code));
  Memory<Address>(constant_pool_entry) = target;
}

int Assembler::deserialization_special_target_size(Address location) {
  return kSpecialTargetSize;
}

void Assembler::deserialization_set_target_internal_reference_at(
    Address pc, Address target, RelocInfo::Mode mode) {
  Memory<Address>(pc) = target;
}

bool Assembler::is_constant_pool_load(Address pc) {
  return IsLdrPcImmediateOffset(Memory<int32_t>(pc));
}

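// Compute the address of the constant pool slot referenced by the
// pc-relative ldr at |pc|. kPcLoadDelta accounts for the ARM convention
// that a pc-relative offset is taken from the instruction address plus 8.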
Address Assembler::constant_pool_entry_address(Address pc,
                                               Address constant_pool) {
  DCHECK(Assembler::IsLdrPcImmediateOffset(Memory<int32_t>(pc)));
  Instr instr = Memory<int32_t>(pc);
  return pc + GetLdrRegisterImmediateOffset(instr) + Instruction::kPcLoadDelta;
}

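// Read the target address encoded at |pc|. Depending on how the constant
// was emitted it lives in the constant pool (ldr), in a movw/movt pair, in
// a mov/orr sequence, or as a pc-relative branch offset.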
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Return the value in the constant pool.
    return Memory<Address>(constant_pool_entry_address(pc, constant_pool));
  } else if (CpuFeatures::IsSupported(ARMv7) && IsMovW(Memory<int32_t>(pc))) {
    // This is a movw / movt immediate load. Return the immediate.
    DCHECK(IsMovW(Memory<int32_t>(pc)) &&
           IsMovT(Memory<int32_t>(pc + kInstrSize)));
    Instruction* movw_instr = Instruction::At(pc);
    Instruction* movt_instr = Instruction::At(pc + kInstrSize);
    return static_cast<Address>((movt_instr->ImmedMovwMovtValue() << 16) |
                                movw_instr->ImmedMovwMovtValue());
  } else if (IsMovImmed(Memory<int32_t>(pc))) {
    // This is a mov / orr immediate load. Return the immediate.
    DCHECK(IsMovImmed(Memory<int32_t>(pc)) &&
           IsOrrImmed(Memory<int32_t>(pc + kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 3 * kInstrSize)));
    Instr mov_instr = instr_at(pc);
    Instr orr_instr_1 = instr_at(pc + kInstrSize);
    Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
    Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
    Address ret = static_cast<Address>(
        DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
        DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3));
    return ret;
  } else {
    Instruction* branch = Instruction::At(pc);
    int32_t delta = branch->GetBranchOffset();
    return pc + delta + Instruction::kPcLoadDelta;
  }
}

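// Patch the target address encoded at |pc|, handling the same encodings as
// target_address_at() above, and flush the instruction cache whenever
// instructions (rather than constant pool data) are rewritten.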
void Assembler::set_target_address_at(Address pc, Address constant_pool,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Update the entry in the constant pool.
    Memory<Address>(constant_pool_entry_address(pc, constant_pool)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code as
    // follows:
    //   FlushInstructionCache(pc, sizeof(target));
    // However, on ARM, no instruction is actually patched in the case
    // of embedded constants of the form:
    //   ldr   ip, [pp, #...]
    // since the instruction accessing this address in the constant pool
    // remains unchanged.
  } else if (CpuFeatures::IsSupported(ARMv7) && IsMovW(Memory<int32_t>(pc))) {
    // This is a movw / movt immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovW(Memory<int32_t>(pc)));
    DCHECK(IsMovT(Memory<int32_t>(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = static_cast<uint32_t>(target);
    instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF);
    instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16);
    DCHECK(IsMovW(Memory<int32_t>(pc)));
    DCHECK(IsMovT(Memory<int32_t>(pc + kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      FlushInstructionCache(pc, 2 * kInstrSize);
    }
  } else if (IsMovImmed(Memory<int32_t>(pc))) {
    // This is a mov / orr immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovImmed(Memory<int32_t>(pc)) &&
           IsOrrImmed(Memory<int32_t>(pc + kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 3 * kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = static_cast<uint32_t>(target);
    instr_ptr[0] = PatchShiftImm(instr_ptr[0], immediate & kImm8Mask);
    instr_ptr[1] = PatchShiftImm(instr_ptr[1], immediate & (kImm8Mask << 8));
    instr_ptr[2] = PatchShiftImm(instr_ptr[2], immediate & (kImm8Mask << 16));
    instr_ptr[3] = PatchShiftImm(instr_ptr[3], immediate & (kImm8Mask << 24));
    DCHECK(IsMovImmed(Memory<int32_t>(pc)) &&
           IsOrrImmed(Memory<int32_t>(pc + kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 3 * kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      FlushInstructionCache(pc, 4 * kInstrSize);
    }
  } else {
    intptr_t branch_offset = target - pc - Instruction::kPcLoadDelta;
    Instruction* branch = Instruction::At(pc);
    branch->SetBranchOffset(branch_offset);
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      FlushInstructionCache(pc, kInstrSize);
    }
  }
}

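// EnsureSpace is a stack-allocated helper that triggers a buffer check (and
// possible growth) before instructions are emitted.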
EnsureSpace::EnsureSpace(Assembler* assembler) { assembler->CheckBuffer(); }

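// A scratch register of type T can only be handed out if its complete VFP
// register list mask (covering all registers it aliases) is still marked
// available.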
template <typename T>
bool UseScratchRegisterScope::CanAcquireVfp() const {
  VfpRegList* available = assembler_->GetScratchVfpRegisterList();
  DCHECK_NOT_NULL(available);
  for (int index = 0; index < T::kNumRegisters; index++) {
    T reg = T::from_code(index);
    uint64_t mask = reg.ToVfpRegList();
    if ((*available & mask) == mask) {
      return true;
    }
  }
  return false;
}

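// Hands out the lowest-numbered available scratch register of type T and
// removes its alias mask from the scratch list; use CanAcquireVfp() first
// if availability is not guaranteed.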
template <typename T>
T UseScratchRegisterScope::AcquireVfp() {
  VfpRegList* available = assembler_->GetScratchVfpRegisterList();
  DCHECK_NOT_NULL(available);
  for (int index = 0; index < T::kNumRegisters; index++) {
    T reg = T::from_code(index);
    uint64_t mask = reg.ToVfpRegList();
    if ((*available & mask) == mask) {
      *available &= ~mask;
      return reg;
    }
  }
  UNREACHABLE();
}

}  // namespace internal
}  // namespace v8

#endif  // V8_CODEGEN_ARM_ASSEMBLER_ARM_INL_H_