// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been modified
// significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.

#ifndef V8_CODEGEN_ARM_ASSEMBLER_ARM_INL_H_
#define V8_CODEGEN_ARM_ASSEMBLER_ARM_INL_H_

#include "src/codegen/arm/assembler-arm.h"

#include "src/codegen/assembler.h"
#include "src/debug/debug.h"
#include "src/objects/objects-inl.h"
#include "src/objects/smi.h"

namespace v8 {
namespace internal {

bool CpuFeatures::SupportsOptimizer() { return true; }

int DoubleRegister::SupportedRegisterCount() {
  return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
}

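// Relocates this entry when the host code object is moved by {delta} bytes:
// absolute internal references are patched in place, while pc-relative code
// targets get their branch offset adjusted to keep pointing at the same
// absolute target.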
void RelocInfo::apply(intptr_t delta) {
  if (RelocInfo::IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // relocate entry
  } else if (RelocInfo::IsRelativeCodeTarget(rmode_)) {
    Instruction* branch = Instruction::At(pc_);
    int32_t branch_offset = branch->GetBranchOffset() - delta;
    branch->SetBranchOffset(branch_offset);
  }
}

Address RelocInfo::target_address() {
  DCHECK(IsCodeTargetMode(rmode_) || IsRuntimeEntry(rmode_) ||
         IsWasmCall(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

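// Returns the address of the slot that holds the target: the constant pool
// entry for pc-relative ldr loads, otherwise pc_ itself (movw/movt pairs and
// relative branches encode the target directly in the instruction stream).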
Address RelocInfo::target_address_address() {
  DCHECK(HasTargetAddressAddress());
  if (Assembler::IsMovW(Memory<int32_t>(pc_))) {
    return pc_;
  } else if (Assembler::IsLdrPcImmediateOffset(Memory<int32_t>(pc_))) {
    return constant_pool_entry_address();
  } else {
    DCHECK(Assembler::IsBOrBlPcImmediateOffset(Memory<int32_t>(pc_)));
    DCHECK(IsRelativeCodeTarget(rmode_));
    return pc_;
  }
}

Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::constant_pool_entry_address(pc_, constant_pool_);
}

int RelocInfo::target_address_size() { return kPointerSize; }

HeapObject RelocInfo::target_object(PtrComprCageBase cage_base) {
  DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_) ||
         IsDataEmbeddedObject(rmode_));
  if (IsDataEmbeddedObject(rmode_)) {
    return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_)));
  }
  return HeapObject::cast(
      Object(Assembler::target_address_at(pc_, constant_pool_)));
}

Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
  if (IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_)) {
    return Handle<HeapObject>(reinterpret_cast<Address*>(
        Assembler::target_address_at(pc_, constant_pool_)));
  } else if (IsDataEmbeddedObject(rmode_)) {
    return Handle<HeapObject>::cast(ReadUnalignedValue<Handle<Object>>(pc_));
  }
  DCHECK(IsRelativeCodeTarget(rmode_));
  return origin->relative_code_target_object_handle_at(pc_);
}

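// Stores a new target object. Data-embedded objects are written directly at
// pc_ (no instructions change, so no icache flush is needed); all other modes
// go through set_target_address_at. The write barrier is only emitted when
// requested and the host code object is known.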
void RelocInfo::set_target_object(Heap* heap, HeapObject target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_) ||
         IsDataEmbeddedObject(rmode_));
  if (IsDataEmbeddedObject(rmode_)) {
    WriteUnalignedValue(pc_, target.ptr());
    // No need to flush icache since no instructions were changed.
  } else {
    Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(),
                                     icache_flush_mode);
  }
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null() &&
      !FLAG_disable_write_barriers) {
    WriteBarrierForCode(host(), this, target);
  }
}

Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, constant_pool_);
}

void RelocInfo::set_target_external_reference(
    Address target, ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  Assembler::set_target_address_at(pc_, constant_pool_, target,
                                   icache_flush_mode);
}

Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return Memory<Address>(pc_);
}

Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return pc_;
}

Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}

void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}

Address RelocInfo::target_off_heap_target() {
  DCHECK(IsOffHeapTarget(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

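// Clears the target of this reloc entry by overwriting it with kNullAddress,
// either directly at pc_ (internal references) or via the regular target
// patching path.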
void RelocInfo::WipeOut() {
  DCHECK(IsFullEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsOffHeapTarget(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory<Address>(pc_) = kNullAddress;
  } else {
    Assembler::set_target_address_at(pc_, constant_pool_, kNullAddress);
  }
}

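// A relative code target encodes an index into the code target table as the
// branch offset, measured in instructions; decode it and look up the handle.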
Handle<Code> Assembler::relative_code_target_object_handle_at(
    Address pc) const {
  Instruction* branch = Instruction::At(pc);
  int code_target_index = branch->GetBranchOffset() / kInstrSize;
  return GetCodeTarget(code_target_index);
}

Operand Operand::Zero() { return Operand(static_cast<int32_t>(0)); }

Operand::Operand(const ExternalReference& f)
    : rmode_(RelocInfo::EXTERNAL_REFERENCE) {
  value_.immediate = static_cast<int32_t>(f.address());
}

Operand::Operand(Smi value) : rmode_(RelocInfo::NO_INFO) {
  value_.immediate = static_cast<intptr_t>(value.ptr());
}

Operand::Operand(Register rm) : rm_(rm), shift_op_(LSL), shift_imm_(0) {}

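// Grows the instruction buffer when the remaining space drops to kGap or
// below, and gives the constant pool a chance to be emitted.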
void Assembler::CheckBuffer() {
  if (V8_UNLIKELY(buffer_space() <= kGap)) {
    GrowBuffer();
  }
  MaybeCheckConstPool();
}

void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}

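// Deserializer helpers: special targets are reached through the constant pool
// here, so patching a target is a plain store into the pool entry.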
void Assembler::deserialization_set_special_target_at(
    Address constant_pool_entry, Code code, Address target) {
  DCHECK(!Builtins::IsIsolateIndependentBuiltin(code));
  Memory<Address>(constant_pool_entry) = target;
}

int Assembler::deserialization_special_target_size(Address location) {
  return kSpecialTargetSize;
}

void Assembler::deserialization_set_target_internal_reference_at(
    Address pc, Address target, RelocInfo::Mode mode) {
  Memory<Address>(pc) = target;
}

bool Assembler::is_constant_pool_load(Address pc) {
  return IsLdrPcImmediateOffset(Memory<int32_t>(pc));
}

Address Assembler::constant_pool_entry_address(Address pc,
                                               Address constant_pool) {
  DCHECK(Assembler::IsLdrPcImmediateOffset(Memory<int32_t>(pc)));
  Instr instr = Memory<int32_t>(pc);
  return pc + GetLdrRegisterImmediateOffset(instr) + Instruction::kPcLoadDelta;
}

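// Reads the target address at pc, which may be encoded in one of four ways:
// a pc-relative ldr from the constant pool, a movw/movt pair (ARMv7), a mov
// followed by three orr instructions (pre-ARMv7), or a pc-relative branch
// whose offset points at the target.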
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Return the value in the constant pool.
    return Memory<Address>(constant_pool_entry_address(pc, constant_pool));
  } else if (CpuFeatures::IsSupported(ARMv7) && IsMovW(Memory<int32_t>(pc))) {
    // This is a movw / movt immediate load. Return the immediate.
    DCHECK(IsMovW(Memory<int32_t>(pc)) &&
           IsMovT(Memory<int32_t>(pc + kInstrSize)));
    Instruction* movw_instr = Instruction::At(pc);
    Instruction* movt_instr = Instruction::At(pc + kInstrSize);
    return static_cast<Address>((movt_instr->ImmedMovwMovtValue() << 16) |
                                movw_instr->ImmedMovwMovtValue());
  } else if (IsMovImmed(Memory<int32_t>(pc))) {
    // This is a mov / orr immediate load. Return the immediate.
    DCHECK(IsMovImmed(Memory<int32_t>(pc)) &&
           IsOrrImmed(Memory<int32_t>(pc + kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 3 * kInstrSize)));
    Instr mov_instr = instr_at(pc);
    Instr orr_instr_1 = instr_at(pc + kInstrSize);
    Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
    Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
    Address ret = static_cast<Address>(
        DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
        DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3));
    return ret;
  } else {
    Instruction* branch = Instruction::At(pc);
    int32_t delta = branch->GetBranchOffset();
    return pc + delta + Instruction::kPcLoadDelta;
  }
}

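// Patches the target address at pc, mirroring the encodings handled by
// target_address_at above: constant pool entries are updated in memory
// (no instruction changes, so no icache flush), while movw/movt pairs,
// mov/orr sequences, and branches are re-encoded and the icache is flushed
// unless explicitly skipped.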
void Assembler::set_target_address_at(Address pc, Address constant_pool,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Update the entry in the constant pool.
    Memory<Address>(constant_pool_entry_address(pc, constant_pool)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code as
    // follows:
    //   FlushInstructionCache(pc, sizeof(target));
    // However, on ARM, no instruction is actually patched in the case
    // of embedded constants of the form:
    //   ldr   ip, [pp, #...]
    // since the instruction accessing this address in the constant pool
    // remains unchanged.
  } else if (CpuFeatures::IsSupported(ARMv7) && IsMovW(Memory<int32_t>(pc))) {
    // This is a movw / movt immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovW(Memory<int32_t>(pc)));
    DCHECK(IsMovT(Memory<int32_t>(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = static_cast<uint32_t>(target);
    instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF);
    instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16);
    DCHECK(IsMovW(Memory<int32_t>(pc)));
    DCHECK(IsMovT(Memory<int32_t>(pc + kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      FlushInstructionCache(pc, 2 * kInstrSize);
    }
  } else if (IsMovImmed(Memory<int32_t>(pc))) {
    // This is a mov / orr immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovImmed(Memory<int32_t>(pc)) &&
           IsOrrImmed(Memory<int32_t>(pc + kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 3 * kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = static_cast<uint32_t>(target);
    instr_ptr[0] = PatchShiftImm(instr_ptr[0], immediate & kImm8Mask);
    instr_ptr[1] = PatchShiftImm(instr_ptr[1], immediate & (kImm8Mask << 8));
    instr_ptr[2] = PatchShiftImm(instr_ptr[2], immediate & (kImm8Mask << 16));
    instr_ptr[3] = PatchShiftImm(instr_ptr[3], immediate & (kImm8Mask << 24));
    DCHECK(IsMovImmed(Memory<int32_t>(pc)) &&
           IsOrrImmed(Memory<int32_t>(pc + kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 3 * kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      FlushInstructionCache(pc, 4 * kInstrSize);
    }
  } else {
    intptr_t branch_offset = target - pc - Instruction::kPcLoadDelta;
    Instruction* branch = Instruction::At(pc);
    branch->SetBranchOffset(branch_offset);
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      FlushInstructionCache(pc, kInstrSize);
    }
  }
}

EnsureSpace::EnsureSpace(Assembler* assembler) { assembler->CheckBuffer(); }

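// Scans the assembler's scratch VFP register list for a free register of
// type T; all of the VFP registers aliased by a candidate must be available
// for it to count as free.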
template <typename T>
bool UseScratchRegisterScope::CanAcquireVfp() const {
  VfpRegList* available = assembler_->GetScratchVfpRegisterList();
  DCHECK_NOT_NULL(available);
  for (int index = 0; index < T::kNumRegisters; index++) {
    T reg = T::from_code(index);
    uint64_t mask = reg.ToVfpRegList();
    if ((*available & mask) == mask) {
      return true;
    }
  }
  return false;
}

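// Acquires the lowest-numbered free scratch VFP register of type T by
// clearing its aliased bits in the scratch list. Callers must ensure a
// register is available (see CanAcquireVfp); otherwise this is unreachable.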
template <typename T>
T UseScratchRegisterScope::AcquireVfp() {
  VfpRegList* available = assembler_->GetScratchVfpRegisterList();
  DCHECK_NOT_NULL(available);
  for (int index = 0; index < T::kNumRegisters; index++) {
    T reg = T::from_code(index);
    uint64_t mask = reg.ToVfpRegList();
    if ((*available & mask) == mask) {
      *available &= ~mask;
      return reg;
    }
  }
  UNREACHABLE();
}

}  // namespace internal
}  // namespace v8

#endif  // V8_CODEGEN_ARM_ASSEMBLER_ARM_INL_H_