1
2// Copyright (c) 1994-2006 Sun Microsystems Inc.
3// All Rights Reserved.
4//
5// Redistribution and use in source and binary forms, with or without
6// modification, are permitted provided that the following conditions are
7// met:
8//
9// - Redistributions of source code must retain the above copyright notice,
10// this list of conditions and the following disclaimer.
11//
12// - Redistribution in binary form must reproduce the above copyright
13// notice, this list of conditions and the following disclaimer in the
14// documentation and/or other materials provided with the distribution.
15//
16// - Neither the name of Sun Microsystems or the names of contributors may
17// be used to endorse or promote products derived from this software without
18// specific prior written permission.
19//
20// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
21// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
22// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
23// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
24// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
25// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
26// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
27// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
28// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
29// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
30// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31
32// The original source code covered by the above license above has been
33// modified significantly by Google Inc.
34// Copyright 2012 the V8 project authors. All rights reserved.
35
36#ifndef V8_CODEGEN_MIPS64_ASSEMBLER_MIPS64_INL_H_
37#define V8_CODEGEN_MIPS64_ASSEMBLER_MIPS64_INL_H_
38
39#include "src/codegen/mips64/assembler-mips64.h"
40
41#include "src/codegen/assembler.h"
42#include "src/debug/debug.h"
43#include "src/objects/objects-inl.h"
44
45namespace v8 {
46namespace internal {
47
48bool CpuFeatures::SupportsOptimizer() { return IsSupported(FPU); }
49
50// -----------------------------------------------------------------------------
51// Operand and MemOperand.
52
// True when this operand wraps a register rather than an immediate value.
bool Operand::is_reg() const { return rm_.is_valid(); }
54
55int64_t Operand::immediate() const {
56  DCHECK(!is_reg());
57  DCHECK(!IsHeapObjectRequest());
58  return value_.immediate;
59}
60
61// -----------------------------------------------------------------------------
62// RelocInfo.
63
64void RelocInfo::apply(intptr_t delta) {
65  if (IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_)) {
66    // Absolute code pointer inside code object moves with the code object.
67    Assembler::RelocateInternalReference(rmode_, pc_, delta);
68  }
69}
70
// Returns the call/jump target this entry refers to, decoded from the
// patched instruction sequence at pc_.
Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}
75
// Returns the address just past the instructions that encode the target.
Address RelocInfo::target_address_address() {
  DCHECK(HasTargetAddressAddress());
  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LUI/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written. In this case the target_address_address function should
  // return the end of the instructions to be patched, allowing the
  // deserializer to deserialize the instructions as raw bytes and put them in
  // place, ready to be patched with the target. After jump optimization,
  // that is the address of the instruction that follows J/JAL/JR/JALR
  // instruction.
  return pc_ + Assembler::kInstructionsFor64BitConstant * kInstrSize;
}
94
// MIPS64 does not use an embedded constant pool, so there is no entry
// address to return.
Address RelocInfo::constant_pool_entry_address() { UNREACHABLE(); }
96
// Size in bytes of a serialized special target (see kSpecialTargetSize).
int RelocInfo::target_address_size() { return Assembler::kSpecialTargetSize; }
98
// Used by the deserializer: patches |target| into the instruction stream at
// |instruction_payload|, using |code|'s constant pool when |code| is non-null.
void Assembler::deserialization_set_special_target_at(
    Address instruction_payload, Code code, Address target) {
  set_target_address_at(instruction_payload,
                        !code.is_null() ? code.constant_pool() : kNullAddress,
                        target);
}
105
// Number of raw bytes the deserializer must skip for a special target.
// Constant on this architecture, independent of |instruction_payload|.
int Assembler::deserialization_special_target_size(
    Address instruction_payload) {
  return kSpecialTargetSize;
}
110
// Rewrites the jump target of the j/jal instruction at |pc| to point at
// |target|, which must lie in the same 256 MB segment.
void Assembler::set_target_internal_reference_encoded_at(Address pc,
                                                         Address target) {
  // Encoded internal references are j/jal instructions.
  Instr instr = Assembler::instr_at(pc + 0 * kInstrSize);

  // Keep only the low 28 bits of the target; the upper bits are implied by
  // the segment of the jump instruction itself.
  uint64_t imm28 = target & static_cast<uint64_t>(kImm28Mask);

  // Clear the old 26-bit immediate, then install the new word-index.
  instr &= ~kImm26Mask;
  uint64_t imm26 = imm28 >> 2;
  DCHECK(is_uint26(imm26));

  instr_at_put(pc, instr | (imm26 & kImm26Mask));
  // Currently used only by deserializer, and all code will be flushed
  // after complete deserialization, no need to flush on each reference.
}
126
// Used by the deserializer: writes an internal reference at |pc|, either by
// re-encoding a j/jal instruction (INTERNAL_REFERENCE_ENCODED) or by storing
// the absolute address directly (INTERNAL_REFERENCE).
void Assembler::deserialization_set_target_internal_reference_at(
    Address pc, Address target, RelocInfo::Mode mode) {
  if (mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    DCHECK(IsJ(instr_at(pc)));
    set_target_internal_reference_encoded_at(pc, target);
  } else {
    DCHECK(mode == RelocInfo::INTERNAL_REFERENCE);
    Memory<Address>(pc) = target;
  }
}
137
// Returns the heap object this entry refers to. For DATA_EMBEDDED_OBJECT the
// pointer is stored raw (possibly unaligned) at pc_; otherwise it is decoded
// from the instruction sequence. |cage_base| is unused in this body.
HeapObject RelocInfo::target_object(PtrComprCageBase cage_base) {
  DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_) ||
         IsDataEmbeddedObject(rmode_));
  if (IsDataEmbeddedObject(rmode_)) {
    return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_)));
  }
  return HeapObject::cast(
      Object(Assembler::target_address_at(pc_, constant_pool_)));
}
147
// Returns a handle to the referenced object. For DATA_EMBEDDED_OBJECT the
// handle itself is stored (unaligned) at pc_; otherwise the decoded target
// address is reinterpreted as a handle location.
Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
  if (IsDataEmbeddedObject(rmode_)) {
    return Handle<HeapObject>::cast(ReadUnalignedValue<Handle<Object>>(pc_));
  } else {
    DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_));
    return Handle<HeapObject>(reinterpret_cast<Address*>(
        Assembler::target_address_at(pc_, constant_pool_)));
  }
}
157
// Points this entry at |target|: either stores the pointer raw at pc_
// (DATA_EMBEDDED_OBJECT, no icache flush needed) or patches the instruction
// sequence. Afterwards the write barrier is notified unless suppressed.
void RelocInfo::set_target_object(Heap* heap, HeapObject target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_) ||
         IsDataEmbeddedObject(rmode_));
  if (IsDataEmbeddedObject(rmode_)) {
    WriteUnalignedValue(pc_, target.ptr());
    // No need to flush icache since no instructions were changed.
  } else {
    Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(),
                                     icache_flush_mode);
  }
  // Record the store so the GC sees the new code->object edge, unless the
  // caller opted out or there is no host code object.
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null() &&
      !FLAG_disable_write_barriers) {
    WriteBarrierForCode(host(), this, target);
  }
}
175
// Returns the external (C++) address this entry refers to.
Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, constant_pool_);
}
180
// Patches the instruction sequence so this entry refers to |target|.
void RelocInfo::set_target_external_reference(
    Address target, ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  Assembler::set_target_address_at(pc_, constant_pool_, target,
                                   icache_flush_mode);
}
187
// Returns the referenced address: read directly from memory for plain
// internal references, or reconstructed from a j/jal immediate for encoded
// ones (the jump shares pc_'s 256 MB segment).
Address RelocInfo::target_internal_reference() {
  if (rmode_ == INTERNAL_REFERENCE) {
    return Memory<Address>(pc_);
  } else {
    // Encoded internal references are j/jal instructions.
    DCHECK(rmode_ == INTERNAL_REFERENCE_ENCODED);
    Instr instr = Assembler::instr_at(pc_ + 0 * kInstrSize);
    instr &= kImm26Mask;
    uint64_t imm28 = instr << 2;  // 26-bit word index -> 28-bit byte offset.
    uint64_t segment = pc_ & ~static_cast<uint64_t>(kImm28Mask);
    return static_cast<Address>(segment | imm28);
  }
}
201
// Returns the location of the internal reference itself (pc_), not the
// address it refers to.
Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE || rmode_ == INTERNAL_REFERENCE_ENCODED);
  return pc_;
}
206
// Returns the runtime-entry address; same decoding as target_address().
Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}
211
212void RelocInfo::set_target_runtime_entry(Address target,
213                                         WriteBarrierMode write_barrier_mode,
214                                         ICacheFlushMode icache_flush_mode) {
215  DCHECK(IsRuntimeEntry(rmode_));
216  if (target_address() != target)
217    set_target_address(target, write_barrier_mode, icache_flush_mode);
218}
219
// Returns the off-heap (embedded builtin) target address.
Address RelocInfo::target_off_heap_target() {
  DCHECK(IsOffHeapTarget(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}
224
// Clears the target of this entry to kNullAddress, choosing the storage
// scheme that matches the relocation mode (raw word, encoded j/jal, or
// patched instruction sequence).
void RelocInfo::WipeOut() {
  DCHECK(IsFullEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_) ||
         IsOffHeapTarget(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory<Address>(pc_) = kNullAddress;
  } else if (IsInternalReferenceEncoded(rmode_)) {
    Assembler::set_target_internal_reference_encoded_at(pc_, kNullAddress);
  } else {
    Assembler::set_target_address_at(pc_, constant_pool_, kNullAddress);
  }
}
238
239// -----------------------------------------------------------------------------
240// Assembler.
241
242void Assembler::CheckBuffer() {
243  if (buffer_space() <= kGap) {
244    GrowBuffer();
245  }
246}
247
// Prepares for emitting raw data: grows the buffer if allowed, and if the
// previous instruction was a compact branch, fills its forbidden slot with
// a nop so the data word cannot land there.
void Assembler::CheckForEmitInForbiddenSlot() {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  if (IsPrevInstrCompactBranch()) {
    // Nop instruction to precede a CTI in forbidden slot:
    Instr nop = SPECIAL | SLL;
    *reinterpret_cast<Instr*>(pc_) = nop;
    pc_ += kInstrSize;

    ClearCompactBranchState();
  }
}
261
// Writes instruction |x| into the buffer. If the previous instruction was a
// compact branch and |x| may not legally occupy its forbidden slot, a nop is
// inserted first. Tracks compact-branch state for the next emission and
// gives the trampoline pool a chance to be emitted.
void Assembler::EmitHelper(Instr x, CompactBranchType is_compact_branch) {
  if (IsPrevInstrCompactBranch()) {
    if (Instruction::IsForbiddenAfterBranchInstr(x)) {
      // Nop instruction to precede a CTI in forbidden slot:
      Instr nop = SPECIAL | SLL;
      *reinterpret_cast<Instr*>(pc_) = nop;
      pc_ += kInstrSize;
    }
    ClearCompactBranchState();
  }
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  if (is_compact_branch == CompactBranchType::COMPACT_BRANCH) {
    EmittedCompactBranchInstruction();
  }
  CheckTrampolinePoolQuick();
}
279
// Forward declaration of the uint8_t specialization so byte emission picks
// up the specialized definition below rather than this generic template.
template <>
inline void Assembler::EmitHelper(uint8_t x);

// Generic raw-data emission: copies |x| verbatim into the instruction
// stream and advances pc_, then checks the trampoline pool.
template <typename T>
void Assembler::EmitHelper(T x) {
  *reinterpret_cast<T*>(pc_) = x;
  pc_ += sizeof(x);
  CheckTrampolinePoolQuick();
}
289
// Byte emission: the trampoline-pool check only runs when pc_ reaches an
// instruction-size boundary, i.e. once a full instruction word of bytes has
// been emitted.
template <>
void Assembler::EmitHelper(uint8_t x) {
  *reinterpret_cast<uint8_t*>(pc_) = x;
  pc_ += sizeof(x);
  if (reinterpret_cast<intptr_t>(pc_) % kInstrSize == 0) {
    CheckTrampolinePoolQuick();
  }
}
298
// Emits one instruction, growing the buffer first unless growth is blocked
// (e.g. during a buffer-sensitive sequence).
void Assembler::emit(Instr x, CompactBranchType is_compact_branch) {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  EmitHelper(x, is_compact_branch);
}
305
// Emits a raw 64-bit data word, first making sure it cannot land in the
// forbidden slot of a preceding compact branch.
void Assembler::emit(uint64_t data) {
  CheckForEmitInForbiddenSlot();
  EmitHelper(data);
}
310
// Constructing an EnsureSpace grows the assembler's buffer if it is near
// capacity, guaranteeing room for the instructions emitted in its scope.
EnsureSpace::EnsureSpace(Assembler* assembler) { assembler->CheckBuffer(); }
312
313}  // namespace internal
314}  // namespace v8
315
316#endif  // V8_CODEGEN_MIPS64_ASSEMBLER_MIPS64_INL_H_
317