// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the license above has been modified
// significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.

#ifndef V8_CODEGEN_ARM_ASSEMBLER_ARM_INL_H_
#define V8_CODEGEN_ARM_ASSEMBLER_ARM_INL_H_

#include "src/codegen/arm/assembler-arm.h"

#include "src/codegen/assembler.h"
#include "src/debug/debug.h"
#include "src/objects/objects-inl.h"
#include "src/objects/smi.h"

namespace v8 {
namespace internal {

bool CpuFeatures::SupportsOptimizer() { return true; }

int DoubleRegister::SupportedRegisterCount() {
  return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
}

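// Relocate the value this RelocInfo refers to by |delta| bytes: absolute
// internal references are patched in place, while relative code targets get
// their branch offset adjusted.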
void RelocInfo::apply(intptr_t delta) {
  if (RelocInfo::IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // Relocate entry.
  } else if (RelocInfo::IsRelativeCodeTarget(rmode_)) {
    Instruction* branch = Instruction::At(pc_);
    int32_t branch_offset = branch->GetBranchOffset() - delta;
    branch->SetBranchOffset(branch_offset);
  }
}

Address RelocInfo::target_address() {
  DCHECK(IsCodeTargetMode(rmode_) || IsRuntimeEntry(rmode_) ||
         IsWasmCall(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

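// Returns the address of the slot that holds the target: pc_ itself when the
// target is encoded in a movw/movt pair or a pc-relative branch, or the
// constant pool slot when it is loaded with a pc-relative ldr.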
Address RelocInfo::target_address_address() {
  DCHECK(HasTargetAddressAddress());
  if (Assembler::IsMovW(Memory<int32_t>(pc_))) {
    return pc_;
  } else if (Assembler::IsLdrPcImmediateOffset(Memory<int32_t>(pc_))) {
    return constant_pool_entry_address();
  } else {
    DCHECK(Assembler::IsBOrBlPcImmediateOffset(Memory<int32_t>(pc_)));
    DCHECK(IsRelativeCodeTarget(rmode_));
    return pc_;
  }
}

Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::constant_pool_entry_address(pc_, constant_pool_);
}

int RelocInfo::target_address_size() { return kPointerSize; }

HeapObject RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_) ||
         IsDataEmbeddedObject(rmode_));
  if (IsDataEmbeddedObject(rmode_)) {
    return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_)));
  }
  return HeapObject::cast(
      Object(Assembler::target_address_at(pc_, constant_pool_)));
}

HeapObject RelocInfo::target_object_no_host(PtrComprCageBase cage_base) {
  return target_object();
}

Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
  if (IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_)) {
    return Handle<HeapObject>(reinterpret_cast<Address*>(
        Assembler::target_address_at(pc_, constant_pool_)));
  } else if (IsDataEmbeddedObject(rmode_)) {
    return Handle<HeapObject>::cast(ReadUnalignedValue<Handle<Object>>(pc_));
  }
  DCHECK(IsRelativeCodeTarget(rmode_));
  return origin->relative_code_target_object_handle_at(pc_);
}

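// Stores a new target object either directly at pc_ (data embedded objects)
// or through the constant pool / patched instructions, and records a write
// barrier for the host code object unless barriers are disabled.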
void RelocInfo::set_target_object(Heap* heap, HeapObject target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_) ||
         IsDataEmbeddedObject(rmode_));
  if (IsDataEmbeddedObject(rmode_)) {
    WriteUnalignedValue(pc_, target.ptr());
    // No need to flush the icache since no instructions were changed.
  } else {
    Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(),
                                     icache_flush_mode);
  }
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null() &&
      !FLAG_disable_write_barriers) {
    WriteBarrierForCode(host(), this, target);
  }
}

Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, constant_pool_);
}

void RelocInfo::set_target_external_reference(
    Address target, ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  Assembler::set_target_address_at(pc_, constant_pool_, target,
                                   icache_flush_mode);
}

Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return Memory<Address>(pc_);
}

Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return pc_;
}

Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}

void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}

Address RelocInfo::target_off_heap_target() {
  DCHECK(IsOffHeapTarget(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

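// Resets the recorded target to kNullAddress: internal references are zeroed
// in place, all other supported modes go through
// Assembler::set_target_address_at.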
void RelocInfo::WipeOut() {
  DCHECK(IsFullEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsOffHeapTarget(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory<Address>(pc_) = kNullAddress;
  } else {
    Assembler::set_target_address_at(pc_, constant_pool_, kNullAddress);
  }
}

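// Maps a pc-relative branch back to the Code object it targets: the branch
// offset, in instructions, is used as an index into the assembler's code
// target list.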
Handle<Code> Assembler::relative_code_target_object_handle_at(
    Address pc) const {
  Instruction* branch = Instruction::At(pc);
  int code_target_index = branch->GetBranchOffset() / kInstrSize;
  return GetCodeTarget(code_target_index);
}

Operand Operand::Zero() { return Operand(static_cast<int32_t>(0)); }

Operand::Operand(const ExternalReference& f)
    : rmode_(RelocInfo::EXTERNAL_REFERENCE) {
  value_.immediate = static_cast<int32_t>(f.address());
}

Operand::Operand(Smi value) : rmode_(RelocInfo::NONE) {
  value_.immediate = static_cast<intptr_t>(value.ptr());
}

Operand::Operand(Register rm) : rm_(rm), shift_op_(LSL), shift_imm_(0) {}

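// Grows the instruction buffer when fewer than kGap bytes of space remain
// and gives the constant pool a chance to be emitted.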
void Assembler::CheckBuffer() {
  if (V8_UNLIKELY(buffer_space() <= kGap)) {
    GrowBuffer();
  }
  MaybeCheckConstPool();
}

void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}

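// Writes |target| directly into the given constant pool slot during
// deserialization; no instruction bytes change, so no icache flush is needed
// here.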
void Assembler::deserialization_set_special_target_at(
    Address constant_pool_entry, Code code, Address target) {
  DCHECK(!Builtins::IsIsolateIndependentBuiltin(code));
  Memory<Address>(constant_pool_entry) = target;
}

int Assembler::deserialization_special_target_size(Address location) {
  return kSpecialTargetSize;
}

void Assembler::deserialization_set_target_internal_reference_at(
    Address pc, Address target, RelocInfo::Mode mode) {
  Memory<Address>(pc) = target;
}

bool Assembler::is_constant_pool_load(Address pc) {
  return IsLdrPcImmediateOffset(Memory<int32_t>(pc));
}

Address Assembler::constant_pool_entry_address(Address pc,
                                               Address constant_pool) {
  DCHECK(Assembler::IsLdrPcImmediateOffset(Memory<int32_t>(pc)));
  Instr instr = Memory<int32_t>(pc);
  return pc + GetLdrRegisterImmediateOffset(instr) + Instruction::kPcLoadDelta;
}

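// Reads the target address encoded at |pc|. The target may live in a
// constant pool slot (pc-relative ldr), in a movw/movt pair, in a
// mov/orr/orr/orr sequence, or be implied by a pc-relative branch offset.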
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Return the value in the constant pool.
    return Memory<Address>(constant_pool_entry_address(pc, constant_pool));
  } else if (CpuFeatures::IsSupported(ARMv7) && IsMovW(Memory<int32_t>(pc))) {
    // This is a movw / movt immediate load. Return the immediate.
    DCHECK(IsMovW(Memory<int32_t>(pc)) &&
           IsMovT(Memory<int32_t>(pc + kInstrSize)));
    Instruction* movw_instr = Instruction::At(pc);
    Instruction* movt_instr = Instruction::At(pc + kInstrSize);
    return static_cast<Address>((movt_instr->ImmedMovwMovtValue() << 16) |
                                movw_instr->ImmedMovwMovtValue());
  } else if (IsMovImmed(Memory<int32_t>(pc))) {
    // This is a mov / orr immediate load. Return the immediate.
    DCHECK(IsMovImmed(Memory<int32_t>(pc)) &&
           IsOrrImmed(Memory<int32_t>(pc + kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 3 * kInstrSize)));
    Instr mov_instr = instr_at(pc);
    Instr orr_instr_1 = instr_at(pc + kInstrSize);
    Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
    Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
    Address ret = static_cast<Address>(
        DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
        DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3));
    return ret;
  } else {
    Instruction* branch = Instruction::At(pc);
    int32_t delta = branch->GetBranchOffset();
    return pc + delta + Instruction::kPcLoadDelta;
  }
}

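// Patches the target address encoded at |pc| using the same four encodings
// recognized by target_address_at(), flushing the icache when instruction
// bits are modified (unless the caller asks to skip the flush).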
void Assembler::set_target_address_at(Address pc, Address constant_pool,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Update the entry in the constant pool.
    Memory<Address>(constant_pool_entry_address(pc, constant_pool)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code:
    //   FlushInstructionCache(pc, sizeof(target));
    // However, on ARM, no instruction is actually patched for embedded
    // constants of the form:
    //   ldr   ip, [pp, #...]
    // since the instruction accessing this address in the constant pool
    // remains unchanged.
  } else if (CpuFeatures::IsSupported(ARMv7) && IsMovW(Memory<int32_t>(pc))) {
    // This is a movw / movt immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovW(Memory<int32_t>(pc)));
    DCHECK(IsMovT(Memory<int32_t>(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = static_cast<uint32_t>(target);
    instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF);
    instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16);
    DCHECK(IsMovW(Memory<int32_t>(pc)));
    DCHECK(IsMovT(Memory<int32_t>(pc + kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      FlushInstructionCache(pc, 2 * kInstrSize);
    }
  } else if (IsMovImmed(Memory<int32_t>(pc))) {
    // This is a mov / orr immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovImmed(Memory<int32_t>(pc)) &&
           IsOrrImmed(Memory<int32_t>(pc + kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 3 * kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = static_cast<uint32_t>(target);
    instr_ptr[0] = PatchShiftImm(instr_ptr[0], immediate & kImm8Mask);
    instr_ptr[1] = PatchShiftImm(instr_ptr[1], immediate & (kImm8Mask << 8));
    instr_ptr[2] = PatchShiftImm(instr_ptr[2], immediate & (kImm8Mask << 16));
    instr_ptr[3] = PatchShiftImm(instr_ptr[3], immediate & (kImm8Mask << 24));
    DCHECK(IsMovImmed(Memory<int32_t>(pc)) &&
           IsOrrImmed(Memory<int32_t>(pc + kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 3 * kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      FlushInstructionCache(pc, 4 * kInstrSize);
    }
  } else {
    intptr_t branch_offset = target - pc - Instruction::kPcLoadDelta;
    Instruction* branch = Instruction::At(pc);
    branch->SetBranchOffset(branch_offset);
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      FlushInstructionCache(pc, kInstrSize);
    }
  }
}

EnsureSpace::EnsureSpace(Assembler* assembler) { assembler->CheckBuffer(); }

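// The VFP scratch helpers below scan the assembler's scratch VFP register
// list for a register of type T whose aliasing bits are all still available;
// AcquireVfp() additionally removes those bits from the list.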
template <typename T>
bool UseScratchRegisterScope::CanAcquireVfp() const {
  VfpRegList* available = assembler_->GetScratchVfpRegisterList();
  DCHECK_NOT_NULL(available);
  for (int index = 0; index < T::kNumRegisters; index++) {
    T reg = T::from_code(index);
    uint64_t mask = reg.ToVfpRegList();
    if ((*available & mask) == mask) {
      return true;
    }
  }
  return false;
}

template <typename T>
T UseScratchRegisterScope::AcquireVfp() {
  VfpRegList* available = assembler_->GetScratchVfpRegisterList();
  DCHECK_NOT_NULL(available);
  for (int index = 0; index < T::kNumRegisters; index++) {
    T reg = T::from_code(index);
    uint64_t mask = reg.ToVfpRegList();
    if ((*available & mask) == mask) {
      *available &= ~mask;
      return reg;
    }
  }
  UNREACHABLE();
}

}  // namespace internal
}  // namespace v8

#endif  // V8_CODEGEN_ARM_ASSEMBLER_ARM_INL_H_