// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been
// modified significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.

#ifndef V8_CODEGEN_MIPS_ASSEMBLER_MIPS_INL_H_
#define V8_CODEGEN_MIPS_ASSEMBLER_MIPS_INL_H_

#include "src/codegen/mips/assembler-mips.h"

#include "src/codegen/assembler.h"
#include "src/debug/debug.h"
#include "src/objects/objects-inl.h"

namespace v8 {
namespace internal {

bool CpuFeatures::SupportsOptimizer() { return IsSupported(FPU); }

// -----------------------------------------------------------------------------
// Operand and MemOperand.

bool Operand::is_reg() const { return rm_.is_valid(); }

int32_t Operand::immediate() const {
  DCHECK(!is_reg());
  DCHECK(!IsHeapObjectRequest());
  return value_.immediate;
}

// -----------------------------------------------------------------------------
// RelocInfo.

void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    Assembler::RelocateInternalReference(rmode_, pc_, delta);
  } else if (IsRelativeCodeTarget(rmode_)) {
    Assembler::RelocateRelativeReference(rmode_, pc_, delta);
  }
}

Address RelocInfo::target_address() {
  DCHECK(IsCodeTargetMode(rmode_) || IsRuntimeEntry(rmode_) ||
         IsWasmCall(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

Address RelocInfo::target_address_address() {
  DCHECK(HasTargetAddressAddress());
  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LUI/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written. In this case the target_address_address function should
  // return the end of the instructions to be patched, allowing the
  // deserializer to deserialize the instructions as raw bytes and put them in
  // place, ready to be patched with the target. After jump optimization,
  // that is the address of the instruction that follows the J/JAL/JR/JALR
  // instruction.
  if (IsMipsArchVariant(kMips32r6)) {
    // On R6 we don't move to the end of the instructions to be patched, but
    // one instruction before, because if these instructions are at the end of
    // the code object it can cause errors in the deserializer.
    return pc_ + (Assembler::kInstructionsFor32BitConstant - 1) * kInstrSize;
  } else {
    return pc_ + Assembler::kInstructionsFor32BitConstant * kInstrSize;
  }
}

Address RelocInfo::constant_pool_entry_address() { UNREACHABLE(); }

int RelocInfo::target_address_size() { return Assembler::kSpecialTargetSize; }

void Assembler::deserialization_set_special_target_at(
    Address instruction_payload, Code code, Address target) {
  set_target_address_at(instruction_payload,
                        !code.is_null() ? code.constant_pool() : kNullAddress,
                        target);
}

int Assembler::deserialization_special_target_size(
    Address instruction_payload) {
  return kSpecialTargetSize;
}

void Assembler::set_target_internal_reference_encoded_at(Address pc,
                                                         Address target) {
  Instr instr1 = Assembler::instr_at(pc + 0 * kInstrSize);
  Instr instr2 = Assembler::instr_at(pc + 1 * kInstrSize);
  DCHECK(Assembler::IsLui(instr1));
  DCHECK(Assembler::IsOri(instr2) || Assembler::IsJicOrJialc(instr2));
  instr1 &= ~kImm16Mask;
  instr2 &= ~kImm16Mask;
  int32_t imm = static_cast<int32_t>(target);
  DCHECK_EQ(imm & 3, 0);
  if (Assembler::IsJicOrJialc(instr2)) {
    // Encoded internal references are lui/jic load of 32-bit absolute address.
    uint32_t lui_offset_u, jic_offset_u;
    Assembler::UnpackTargetAddressUnsigned(imm, &lui_offset_u, &jic_offset_u);

    Assembler::instr_at_put(pc + 0 * kInstrSize, instr1 | lui_offset_u);
    Assembler::instr_at_put(pc + 1 * kInstrSize, instr2 | jic_offset_u);
  } else {
    // Encoded internal references are lui/ori load of 32-bit absolute address.
    PatchLuiOriImmediate(pc, imm, instr1, 0 * kInstrSize, instr2,
                         1 * kInstrSize);
  }

  // Currently used only by deserializer, and all code will be flushed
  // after complete deserialization, no need to flush on each reference.
}

void Assembler::deserialization_set_target_internal_reference_at(
    Address pc, Address target, RelocInfo::Mode mode) {
  if (RelocInfo::IsInternalReferenceEncoded(mode)) {
    DCHECK(IsLui(instr_at(pc)));
    set_target_internal_reference_encoded_at(pc, target);
  } else {
    DCHECK(RelocInfo::IsInternalReference(mode));
    Memory<Address>(pc) = target;
  }
}

HeapObject RelocInfo::target_object(PtrComprCageBase cage_base) {
  DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_) ||
         IsDataEmbeddedObject(rmode_));
  if (IsDataEmbeddedObject(rmode_)) {
    return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_)));
  }
  return HeapObject::cast(
      Object(Assembler::target_address_at(pc_, constant_pool_)));
}

Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
  if (IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_)) {
    return Handle<HeapObject>(reinterpret_cast<Address*>(
        Assembler::target_address_at(pc_, constant_pool_)));
  } else if (IsDataEmbeddedObject(rmode_)) {
    return Handle<HeapObject>::cast(ReadUnalignedValue<Handle<Object>>(pc_));
  }
  DCHECK(IsRelativeCodeTarget(rmode_));
  return origin->relative_code_target_object_handle_at(pc_);
}

void RelocInfo::set_target_object(Heap* heap, HeapObject target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_) ||
         IsDataEmbeddedObject(rmode_));
  if (IsDataEmbeddedObject(rmode_)) {
    WriteUnalignedValue(pc_, target.ptr());
    // No need to flush icache since no instructions were changed.
  } else {
    Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(),
                                     icache_flush_mode);
  }
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null() &&
      !FLAG_disable_write_barriers) {
    WriteBarrierForCode(host(), this, target);
  }
}

Address RelocInfo::target_external_reference() {
  DCHECK(IsExternalReference(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

void RelocInfo::set_target_external_reference(
    Address target, ICacheFlushMode icache_flush_mode) {
  DCHECK(IsExternalReference(rmode_));
  Assembler::set_target_address_at(pc_, constant_pool_, target,
                                   icache_flush_mode);
}

Address RelocInfo::target_internal_reference() {
  if (IsInternalReference(rmode_)) {
    return Memory<Address>(pc_);
  } else {
    // Encoded internal references are lui/ori or lui/jic load of 32-bit
    // absolute address.
    DCHECK(IsInternalReferenceEncoded(rmode_));
    Instr instr1 = Assembler::instr_at(pc_ + 0 * kInstrSize);
    Instr instr2 = Assembler::instr_at(pc_ + 1 * kInstrSize);
    DCHECK(Assembler::IsLui(instr1));
    DCHECK(Assembler::IsOri(instr2) || Assembler::IsJicOrJialc(instr2));
    if (Assembler::IsJicOrJialc(instr2)) {
      return static_cast<Address>(
          Assembler::CreateTargetAddress(instr1, instr2));
    }
    return static_cast<Address>(Assembler::GetLuiOriImmediate(instr1, instr2));
  }
}

Address RelocInfo::target_internal_reference_address() {
  DCHECK(IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
  return pc_;
}

Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}

void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}

Address RelocInfo::target_off_heap_target() {
  DCHECK(IsOffHeapTarget(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

void RelocInfo::WipeOut() {
  DCHECK(IsFullEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_) ||
         IsOffHeapTarget(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory<Address>(pc_) = kNullAddress;
  } else if (IsInternalReferenceEncoded(rmode_)) {
    Assembler::set_target_internal_reference_encoded_at(pc_, kNullAddress);
  } else {
    Assembler::set_target_address_at(pc_, constant_pool_, kNullAddress);
  }
}

Handle<Code> Assembler::relative_code_target_object_handle_at(
    Address pc) const {
  Instr instr1 = instr_at(pc);
  Instr instr2 = instr_at(pc + kInstrSize);
  DCHECK(IsLui(instr1));
  DCHECK(IsOri(instr2) || IsNal(instr2));
  DCHECK(IsNal(instr2) || IsNal(instr_at(pc - kInstrSize)));
  if (IsNal(instr2)) {
    instr2 = instr_at(pc + 2 * kInstrSize);
  }
  // Interpret 2 instructions generated by li (lui/ori).
  int code_target_index = GetLuiOriImmediate(instr1, instr2);
  return GetCodeTarget(code_target_index);
}

// -----------------------------------------------------------------------------
// Assembler.
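
// Illustrative sketch only (not part of the original source): the emit helpers
// below pad the "forbidden slot" that follows an r6 compact branch. If the
// next instruction to be emitted is a control-transfer instruction, EmitHelper
// inserts a nop first, producing a stream roughly like:
//
//   bnezc  a0, target      // compact branch, no delay slot
//   nop                    // inserted because the following CTI is forbidden
//                          // directly after a compact branch
//   jalr   t9              // the instruction that triggered the padding
//
// Register names and the particular branch above are hypothetical examples,
// chosen only to show the shape of the padded sequence.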

void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
}

void Assembler::CheckForEmitInForbiddenSlot() {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  if (IsPrevInstrCompactBranch()) {
    // Nop instruction to precede a CTI in forbidden slot:
    Instr nop = SPECIAL | SLL;
    *reinterpret_cast<Instr*>(pc_) = nop;
    pc_ += kInstrSize;

    ClearCompactBranchState();
  }
}

void Assembler::EmitHelper(Instr x, CompactBranchType is_compact_branch) {
  if (IsPrevInstrCompactBranch()) {
    if (Instruction::IsForbiddenAfterBranchInstr(x)) {
      // Nop instruction to precede a CTI in forbidden slot:
      Instr nop = SPECIAL | SLL;
      *reinterpret_cast<Instr*>(pc_) = nop;
      pc_ += kInstrSize;
    }
    ClearCompactBranchState();
  }
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  if (is_compact_branch == CompactBranchType::COMPACT_BRANCH) {
    EmittedCompactBranchInstruction();
  }
  CheckTrampolinePoolQuick();
}

template <>
inline void Assembler::EmitHelper(uint8_t x);

template <typename T>
void Assembler::EmitHelper(T x) {
  *reinterpret_cast<T*>(pc_) = x;
  pc_ += sizeof(x);
  CheckTrampolinePoolQuick();
}

template <>
void Assembler::EmitHelper(uint8_t x) {
  *reinterpret_cast<uint8_t*>(pc_) = x;
  pc_ += sizeof(x);
  if (reinterpret_cast<intptr_t>(pc_) % kInstrSize == 0) {
    CheckTrampolinePoolQuick();
  }
}

void Assembler::emit(Instr x, CompactBranchType is_compact_branch) {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  EmitHelper(x, is_compact_branch);
}

EnsureSpace::EnsureSpace(Assembler* assembler) { assembler->CheckBuffer(); }

}  // namespace internal
}  // namespace v8

#endif  // V8_CODEGEN_MIPS_ASSEMBLER_MIPS_INL_H_