// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CODEGEN_X64_ASSEMBLER_X64_INL_H_
#define V8_CODEGEN_X64_ASSEMBLER_X64_INL_H_

#include "src/codegen/x64/assembler-x64.h"

#include "src/base/cpu.h"
#include "src/base/memory.h"
#include "src/debug/debug.h"
#include "src/objects/objects-inl.h"

namespace v8 {
namespace internal {

bool CpuFeatures::SupportsOptimizer() { return true; }

// -----------------------------------------------------------------------------
// Implementation of Assembler

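// The emit* helpers below store an immediate of the given width at the
// current assembly position (pc_) and advance pc_ past it. The instruction
// stream has no alignment guarantees, hence WriteUnalignedValue.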
void Assembler::emitl(uint32_t x) {
  WriteUnalignedValue(reinterpret_cast<Address>(pc_), x);
  pc_ += sizeof(uint32_t);
}

void Assembler::emitq(uint64_t x) {
  WriteUnalignedValue(reinterpret_cast<Address>(pc_), x);
  pc_ += sizeof(uint64_t);
}

void Assembler::emitw(uint16_t x) {
  WriteUnalignedValue(reinterpret_cast<Address>(pc_), x);
  pc_ += sizeof(uint16_t);
}

// TODO(ishell): Rename accordingly once RUNTIME_ENTRY is renamed.
void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) {
  DCHECK(RelocInfo::IsRuntimeEntry(rmode));
  DCHECK_NE(options().code_range_base, 0);
  RecordRelocInfo(rmode);
  uint32_t offset = static_cast<uint32_t>(entry - options().code_range_base);
  emitl(offset);
}

void Assembler::emit(Immediate x) {
  if (!RelocInfo::IsNoInfo(x.rmode_)) {
    RecordRelocInfo(x.rmode_);
  }
  emitl(x.value_);
}

void Assembler::emit(Immediate64 x) {
  if (!RelocInfo::IsNoInfo(x.rmode_)) {
    RecordRelocInfo(x.rmode_);
  }
  emitq(static_cast<uint64_t>(x.value_));
}

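// REX prefix layout: 0b0100WRXB. 0x48 has REX.W set (64-bit operand size);
// bit R extends the ModR/M reg field, X the SIB index field, and B the
// ModR/M rm (or SIB base) field, each supplying a register's fourth bit.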
void Assembler::emit_rex_64(Register reg, Register rm_reg) {
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
}

void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}

void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}

void Assembler::emit_rex_64(XMMRegister reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}

void Assembler::emit_rex_64(Register reg, Operand op) {
  emit(0x48 | reg.high_bit() << 2 | op.data().rex);
}

void Assembler::emit_rex_64(XMMRegister reg, Operand op) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | op.data().rex);
}

void Assembler::emit_rex_64(Register rm_reg) {
  DCHECK_EQ(rm_reg.code() & 0xf, rm_reg.code());
  emit(0x48 | rm_reg.high_bit());
}

void Assembler::emit_rex_64(Operand op) { emit(0x48 | op.data().rex); }

void Assembler::emit_rex_32(Register reg, Register rm_reg) {
  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
}

void Assembler::emit_rex_32(Register reg, Operand op) {
  emit(0x40 | reg.high_bit() << 2 | op.data().rex);
}

void Assembler::emit_rex_32(Register rm_reg) { emit(0x40 | rm_reg.high_bit()); }

void Assembler::emit_rex_32(Operand op) { emit(0x40 | op.data().rex); }

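// The emit_optional_rex_32 variants emit a REX prefix only when one of the
// operands actually requires it, i.e. uses an extended register (r8-r15 or
// xmm8-xmm15); 32-bit operations on the low eight registers take no prefix.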
void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
  byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
  if (rex_bits != 0) emit(0x40 | rex_bits);
}

void Assembler::emit_optional_rex_32(Register reg, Operand op) {
  byte rex_bits = reg.high_bit() << 2 | op.data().rex;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}

void Assembler::emit_optional_rex_32(XMMRegister reg, Operand op) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | op.data().rex;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}

void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}

void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}

void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}

void Assembler::emit_optional_rex_32(Register rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}

void Assembler::emit_optional_rex_32(XMMRegister rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}

void Assembler::emit_optional_rex_32(Operand op) {
  if (op.data().rex != 0) emit(0x40 | op.data().rex);
}

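// Byte operations on spl, bpl, sil, dil, and r8b-r15b are only encodable
// when a REX prefix is present; al, cl, dl, and bl are reachable without one.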
void Assembler::emit_optional_rex_8(Register reg) {
  if (!reg.is_byte_register()) {
    // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
    emit_rex_32(reg);
  }
}

void Assembler::emit_optional_rex_8(Register reg, Operand op) {
  if (!reg.is_byte_register()) {
    // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
    emit_rex_32(reg, op);
  } else {
    emit_optional_rex_32(reg, op);
  }
}

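// VEX prefixes come in a 2-byte (0xC5) and a 3-byte (0xC4) form. The register
// extension bits (R, X, B) and the extra operand vvvv are stored inverted
// (one's complement), which is why the helpers below negate them before
// emitting.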
// byte 1 of 3-byte VEX
void Assembler::emit_vex3_byte1(XMMRegister reg, XMMRegister rm,
                                LeadingOpcode m) {
  byte rxb = static_cast<byte>(~((reg.high_bit() << 2) | rm.high_bit())) << 5;
  emit(rxb | m);
}

// byte 1 of 3-byte VEX
void Assembler::emit_vex3_byte1(XMMRegister reg, Operand rm, LeadingOpcode m) {
  byte rxb = static_cast<byte>(~((reg.high_bit() << 2) | rm.data().rex)) << 5;
  emit(rxb | m);
}

// byte 1 of 2-byte VEX
void Assembler::emit_vex2_byte1(XMMRegister reg, XMMRegister v, VectorLength l,
                                SIMDPrefix pp) {
  byte rv = static_cast<byte>(~((reg.high_bit() << 4) | v.code())) << 3;
  emit(rv | l | pp);
}

// byte 2 of 3-byte VEX
void Assembler::emit_vex3_byte2(VexW w, XMMRegister v, VectorLength l,
                                SIMDPrefix pp) {
  emit(w | ((~v.code() & 0xf) << 3) | l | pp);
}

void Assembler::emit_vex_prefix(XMMRegister reg, XMMRegister vreg,
                                XMMRegister rm, VectorLength l, SIMDPrefix pp,
                                LeadingOpcode mm, VexW w) {
  if (rm.high_bit() || mm != k0F || w != kW0) {
    emit_vex3_byte0();
    emit_vex3_byte1(reg, rm, mm);
    emit_vex3_byte2(w, vreg, l, pp);
  } else {
    emit_vex2_byte0();
    emit_vex2_byte1(reg, vreg, l, pp);
  }
}

void Assembler::emit_vex_prefix(Register reg, Register vreg, Register rm,
                                VectorLength l, SIMDPrefix pp, LeadingOpcode mm,
                                VexW w) {
  XMMRegister ireg = XMMRegister::from_code(reg.code());
  XMMRegister ivreg = XMMRegister::from_code(vreg.code());
  XMMRegister irm = XMMRegister::from_code(rm.code());
  emit_vex_prefix(ireg, ivreg, irm, l, pp, mm, w);
}

void Assembler::emit_vex_prefix(XMMRegister reg, XMMRegister vreg, Operand rm,
                                VectorLength l, SIMDPrefix pp, LeadingOpcode mm,
                                VexW w) {
  if (rm.data().rex || mm != k0F || w != kW0) {
    emit_vex3_byte0();
    emit_vex3_byte1(reg, rm, mm);
    emit_vex3_byte2(w, vreg, l, pp);
  } else {
    emit_vex2_byte0();
    emit_vex2_byte1(reg, vreg, l, pp);
  }
}

void Assembler::emit_vex_prefix(Register reg, Register vreg, Operand rm,
                                VectorLength l, SIMDPrefix pp, LeadingOpcode mm,
                                VexW w) {
  XMMRegister ireg = XMMRegister::from_code(reg.code());
  XMMRegister ivreg = XMMRegister::from_code(vreg.code());
  emit_vex_prefix(ireg, ivreg, rm, l, pp, mm, w);
}

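// Call and jump targets on x64 are encoded as signed 32-bit displacements
// relative to the end of the 4-byte offset field, i.e. relative to pc + 4.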
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  return ReadUnalignedValue<int32_t>(pc) + pc + 4;
}

void Assembler::set_target_address_at(Address pc, Address constant_pool,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  WriteUnalignedValue(pc, relative_target_offset(target, pc));
  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
    FlushInstructionCache(pc, sizeof(int32_t));
  }
}

int32_t Assembler::relative_target_offset(Address target, Address pc) {
  Address offset = target - pc - 4;
  DCHECK(is_int32(offset));
  return static_cast<int32_t>(offset);
}

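// The deserialization helpers below patch targets in place while a snapshot
// is being materialized, without recording new relocation information.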
void Assembler::deserialization_set_target_internal_reference_at(
    Address pc, Address target, RelocInfo::Mode mode) {
  WriteUnalignedValue(pc, target);
}

void Assembler::deserialization_set_special_target_at(
    Address instruction_payload, Code code, Address target) {
  set_target_address_at(instruction_payload,
                        !code.is_null() ? code.constant_pool() : kNullAddress,
                        target);
}

int Assembler::deserialization_special_target_size(
    Address instruction_payload) {
  return kSpecialTargetSize;
}

Handle<CodeT> Assembler::code_target_object_handle_at(Address pc) {
  return GetCodeTarget(ReadUnalignedValue<int32_t>(pc));
}

Handle<HeapObject> Assembler::compressed_embedded_object_handle_at(Address pc) {
  return GetEmbeddedObject(ReadUnalignedValue<uint32_t>(pc));
}

Address Assembler::runtime_entry_at(Address pc) {
  return ReadUnalignedValue<int32_t>(pc) + options().code_range_base;
}

// -----------------------------------------------------------------------------
// Implementation of RelocInfo

// The modes possibly affected by apply must be in kApplyMask.
void RelocInfo::apply(intptr_t delta) {
  if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    WriteUnalignedValue(
        pc_, ReadUnalignedValue<int32_t>(pc_) - static_cast<int32_t>(delta));
  } else if (IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    WriteUnalignedValue(pc_, ReadUnalignedValue<Address>(pc_) + delta);
  }
}

Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_) ||
         IsWasmStubCall(rmode_) || IsFullEmbeddedObject(rmode_) ||
         IsCompressedEmbeddedObject(rmode_) || IsExternalReference(rmode_) ||
         IsOffHeapTarget(rmode_));
  return pc_;
}

Address RelocInfo::constant_pool_entry_address() { UNREACHABLE(); }

int RelocInfo::target_address_size() {
  if (IsCodedSpecially()) {
    return Assembler::kSpecialTargetSize;
  } else {
    return IsCompressedEmbeddedObject(rmode_) ? kTaggedSize
                                              : kSystemPointerSize;
  }
}

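// With pointer compression enabled, an embedded object may be stored as a
// 32-bit tagged value that has to be decompressed relative to the cage base
// before it can be used as a full pointer.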
HeapObject RelocInfo::target_object(PtrComprCageBase cage_base) {
  DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_));
  if (IsCompressedEmbeddedObject(rmode_)) {
    Tagged_t compressed = ReadUnalignedValue<Tagged_t>(pc_);
    DCHECK(!HAS_SMI_TAG(compressed));
    Object obj(DecompressTaggedPointer(cage_base, compressed));
    // Embedding of compressed Code objects must not happen when external code
    // space is enabled, because CodeDataContainers must be used instead.
    DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL,
                   !IsCodeSpaceObject(HeapObject::cast(obj)));
    return HeapObject::cast(obj);
  }
  DCHECK(IsFullEmbeddedObject(rmode_) || IsDataEmbeddedObject(rmode_));
  return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_)));
}

Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_));
  if (IsCodeTarget(rmode_)) {
    return origin->code_target_object_handle_at(pc_);
  } else {
    if (IsCompressedEmbeddedObject(rmode_)) {
      return origin->compressed_embedded_object_handle_at(pc_);
    }
    DCHECK(IsFullEmbeddedObject(rmode_) || IsDataEmbeddedObject(rmode_));
    return Handle<HeapObject>::cast(ReadUnalignedValue<Handle<Object>>(pc_));
  }
}

Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return ReadUnalignedValue<Address>(pc_);
}

void RelocInfo::set_target_external_reference(
    Address target, ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  WriteUnalignedValue(pc_, target);
  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
    FlushInstructionCache(pc_, sizeof(Address));
  }
}

Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return ReadUnalignedValue<Address>(pc_);
}

Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return pc_;
}

void RelocInfo::set_target_object(Heap* heap, HeapObject target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_));
  if (IsCompressedEmbeddedObject(rmode_)) {
    DCHECK(COMPRESS_POINTERS_BOOL);
    Tagged_t tagged = CompressTagged(target.ptr());
    WriteUnalignedValue(pc_, tagged);
  } else {
    DCHECK(IsFullEmbeddedObject(rmode_) || IsDataEmbeddedObject(rmode_));
    WriteUnalignedValue(pc_, target.ptr());
  }
  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
    FlushInstructionCache(pc_, sizeof(Address));
  }
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null() &&
      !FLAG_disable_write_barriers) {
    WriteBarrierForCode(host(), this, target);
  }
}

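// Runtime entries are stored as 32-bit offsets from the code range base (see
// emit_runtime_entry above), so resolving one needs the assembler's
// options().code_range_base.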
Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return origin->runtime_entry_at(pc_);
}

void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target) {
    set_target_address(target, write_barrier_mode, icache_flush_mode);
  }
}

Address RelocInfo::target_off_heap_target() {
  DCHECK(IsOffHeapTarget(rmode_));
  return ReadUnalignedValue<Address>(pc_);
}

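// WipeOut overwrites the relocated value with a null-like placeholder so that
// stale pointers are not observed later. For pc-relative code targets, the
// equivalent of zero is a displacement of 0, i.e. a target of pc_ + 4.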
void RelocInfo::WipeOut() {
  if (IsFullEmbeddedObject(rmode_) || IsExternalReference(rmode_) ||
      IsInternalReference(rmode_) || IsOffHeapTarget(rmode_)) {
    WriteUnalignedValue(pc_, kNullAddress);
  } else if (IsCompressedEmbeddedObject(rmode_)) {
    Address smi_address = Smi::FromInt(0).ptr();
    WriteUnalignedValue(pc_, CompressTagged(smi_address));
  } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    // Effectively write zero into the relocation.
    Assembler::set_target_address_at(pc_, constant_pool_,
                                     pc_ + sizeof(int32_t));
  } else {
    UNREACHABLE();
  }
}

}  // namespace internal
}  // namespace v8

#endif  // V8_CODEGEN_X64_ASSEMBLER_X64_INL_H_