1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef INCLUDED_FROM_MACRO_ASSEMBLER_H
6 #error This header must be included via macro-assembler.h
7 #endif
8
9 #ifndef V8_CODEGEN_X64_MACRO_ASSEMBLER_X64_H_
10 #define V8_CODEGEN_X64_MACRO_ASSEMBLER_X64_H_
11
12 #include "src/base/flags.h"
13 #include "src/codegen/bailout-reason.h"
14 #include "src/codegen/shared-ia32-x64/macro-assembler-shared-ia32-x64.h"
15 #include "src/codegen/x64/assembler-x64.h"
16 #include "src/common/globals.h"
17 #include "src/execution/isolate-data.h"
18 #include "src/objects/contexts.h"
19 #include "src/objects/tagged-index.h"
20
21 namespace v8 {
22 namespace internal {
23
24 // Convenience for platform-independent signatures.
25 using MemOperand = Operand;
26
27 class StringConstantBase;
28
29 struct SmiIndex {
SmiIndexv8::internal::SmiIndex30 SmiIndex(Register index_register, ScaleFactor scale)
31 : reg(index_register), scale(scale) {}
32 Register reg;
33 ScaleFactor scale;
34 };
35
// TODO(victorgomes): Move definition to macro-assembler.h, once all other
// platforms are updated.
enum class StackLimitKind { kInterruptStackLimit, kRealStackLimit };
39
40 // Convenient class to access arguments below the stack pointer.
41 class StackArgumentsAccessor {
42 public:
43 // argc = the number of arguments not including the receiver.
StackArgumentsAccessor(Register argc)44 explicit StackArgumentsAccessor(Register argc) : argc_(argc) {
45 DCHECK_NE(argc_, no_reg);
46 }
47
48 // Argument 0 is the receiver (despite argc not including the receiver).
operator [](int index) const49 Operand operator[](int index) const { return GetArgumentOperand(index); }
50
51 Operand GetArgumentOperand(int index) const;
GetReceiverOperand() const52 Operand GetReceiverOperand() const { return GetArgumentOperand(0); }
53
54 private:
55 const Register argc_;
56
57 DISALLOW_IMPLICIT_CONSTRUCTORS(StackArgumentsAccessor);
58 };
59
60 class V8_EXPORT_PRIVATE TurboAssembler
61 : public SharedTurboAssemblerBase<TurboAssembler> {
62 public:
63 using SharedTurboAssemblerBase<TurboAssembler>::SharedTurboAssemblerBase;
64
PushReturnAddressFrom(Register src)65 void PushReturnAddressFrom(Register src) { pushq(src); }
PopReturnAddressTo(Register dst)66 void PopReturnAddressTo(Register dst) { popq(dst); }
67
68 void Ret();
69
70 // Call incsspq with {number_of_words} only if the cpu supports it.
71 // NOTE: This shouldn't be embedded in optimized code, since the check
72 // for CPU support would be redundant (we could check at compiler time).
73 void IncsspqIfSupported(Register number_of_words, Register scratch);
74
75 // Return and drop arguments from stack, where the number of arguments
76 // may be bigger than 2^16 - 1. Requires a scratch register.
77 void Ret(int bytes_dropped, Register scratch);
78
79 // Operations on roots in the root-array.
80 Operand RootAsOperand(RootIndex index);
81 void LoadRoot(Register destination, RootIndex index) final;
LoadRoot(Operand destination, RootIndex index)82 void LoadRoot(Operand destination, RootIndex index) {
83 LoadRoot(kScratchRegister, index);
84 movq(destination, kScratchRegister);
85 }
86
87 void Push(Register src);
88 void Push(Operand src);
89 void Push(Immediate value);
90 void Push(Smi smi);
Push(TaggedIndex index)91 void Push(TaggedIndex index) {
92 Push(Immediate(static_cast<uint32_t>(index.ptr())));
93 }
94 void Push(Handle<HeapObject> source);
95
96 enum class PushArrayOrder { kNormal, kReverse };
97 // `array` points to the first element (the lowest address).
98 // `array` and `size` are not modified.
99 void PushArray(Register array, Register size, Register scratch,
100 PushArrayOrder order = PushArrayOrder::kNormal);
101
102 // Before calling a C-function from generated code, align arguments on stack.
103 // After aligning the frame, arguments must be stored in rsp[0], rsp[8],
104 // etc., not pushed. The argument count assumes all arguments are word sized.
105 // The number of slots reserved for arguments depends on platform. On Windows
106 // stack slots are reserved for the arguments passed in registers. On other
107 // platforms stack slots are only reserved for the arguments actually passed
108 // on the stack.
109 void PrepareCallCFunction(int num_arguments);
110
111 // Calls a C function and cleans up the space for arguments allocated
112 // by PrepareCallCFunction. The called function is not allowed to trigger a
113 // garbage collection, since that might move the code and invalidate the
114 // return address (unless this is somehow accounted for by the called
115 // function).
116 void CallCFunction(ExternalReference function, int num_arguments);
117 void CallCFunction(Register function, int num_arguments);
118
119 // Calculate the number of stack slots to reserve for arguments when calling a
120 // C function.
121 int ArgumentStackSlotsForCFunctionCall(int num_arguments);
122
123 void CheckPageFlag(Register object, Register scratch, int mask, Condition cc,
124 Label* condition_met,
125 Label::Distance condition_met_distance = Label::kFar);
126
127 // Define movq here instead of using AVX_OP. movq is defined using templates
128 // and there is a function template `void movq(P1)`, while technically
129 // impossible, will be selected when deducing the arguments for AvxHelper.
130 void Movq(XMMRegister dst, Register src);
131 void Movq(Register dst, XMMRegister src);
132
133 void Cvtss2sd(XMMRegister dst, XMMRegister src);
134 void Cvtss2sd(XMMRegister dst, Operand src);
135 void Cvtsd2ss(XMMRegister dst, XMMRegister src);
136 void Cvtsd2ss(XMMRegister dst, Operand src);
137 void Cvttsd2si(Register dst, XMMRegister src);
138 void Cvttsd2si(Register dst, Operand src);
139 void Cvttsd2siq(Register dst, XMMRegister src);
140 void Cvttsd2siq(Register dst, Operand src);
141 void Cvttss2si(Register dst, XMMRegister src);
142 void Cvttss2si(Register dst, Operand src);
143 void Cvttss2siq(Register dst, XMMRegister src);
144 void Cvttss2siq(Register dst, Operand src);
145 void Cvtlui2ss(XMMRegister dst, Register src);
146 void Cvtlui2ss(XMMRegister dst, Operand src);
147 void Cvtlui2sd(XMMRegister dst, Register src);
148 void Cvtlui2sd(XMMRegister dst, Operand src);
149 void Cvtqui2ss(XMMRegister dst, Register src);
150 void Cvtqui2ss(XMMRegister dst, Operand src);
151 void Cvtqui2sd(XMMRegister dst, Register src);
152 void Cvtqui2sd(XMMRegister dst, Operand src);
153 void Cvttsd2uiq(Register dst, Operand src, Label* fail = nullptr);
154 void Cvttsd2uiq(Register dst, XMMRegister src, Label* fail = nullptr);
155 void Cvttss2uiq(Register dst, Operand src, Label* fail = nullptr);
156 void Cvttss2uiq(Register dst, XMMRegister src, Label* fail = nullptr);
157
158 // cvtsi2sd and cvtsi2ss instructions only write to the low 64/32-bit of dst
159 // register, which hinders register renaming and makes dependence chains
160 // longer. So we use xorpd to clear the dst register before cvtsi2sd for
161 // non-AVX and a scratch XMM register as first src for AVX to solve this
162 // issue.
163 void Cvtqsi2ss(XMMRegister dst, Register src);
164 void Cvtqsi2ss(XMMRegister dst, Operand src);
165 void Cvtqsi2sd(XMMRegister dst, Register src);
166 void Cvtqsi2sd(XMMRegister dst, Operand src);
167 void Cvtlsi2ss(XMMRegister dst, Register src);
168 void Cvtlsi2ss(XMMRegister dst, Operand src);
169 void Cvtlsi2sd(XMMRegister dst, Register src);
170 void Cvtlsi2sd(XMMRegister dst, Operand src);
171
172 void Cmpeqss(XMMRegister dst, XMMRegister src);
173 void Cmpeqsd(XMMRegister dst, XMMRegister src);
174
175 void PextrdPreSse41(Register dst, XMMRegister src, uint8_t imm8);
176 void Pextrq(Register dst, XMMRegister src, int8_t imm8);
177
178 void PinsrdPreSse41(XMMRegister dst, Register src2, uint8_t imm8,
179 uint32_t* load_pc_offset = nullptr);
180 void PinsrdPreSse41(XMMRegister dst, Operand src2, uint8_t imm8,
181 uint32_t* load_pc_offset = nullptr);
182
183 void Pinsrq(XMMRegister dst, XMMRegister src1, Register src2, uint8_t imm8,
184 uint32_t* load_pc_offset = nullptr);
185 void Pinsrq(XMMRegister dst, XMMRegister src1, Operand src2, uint8_t imm8,
186 uint32_t* load_pc_offset = nullptr);
187
188 void Lzcntq(Register dst, Register src);
189 void Lzcntq(Register dst, Operand src);
190 void Lzcntl(Register dst, Register src);
191 void Lzcntl(Register dst, Operand src);
192 void Tzcntq(Register dst, Register src);
193 void Tzcntq(Register dst, Operand src);
194 void Tzcntl(Register dst, Register src);
195 void Tzcntl(Register dst, Operand src);
196 void Popcntl(Register dst, Register src);
197 void Popcntl(Register dst, Operand src);
198 void Popcntq(Register dst, Register src);
199 void Popcntq(Register dst, Operand src);
200
201 void Cmp(Register dst, Smi src);
202 void Cmp(Operand dst, Smi src);
203 void Cmp(Register dst, int32_t src);
204
205 // ---------------------------------------------------------------------------
206 // Conversions between tagged smi values and non-tagged integer values.
207
208 // Tag an word-size value. The result must be known to be a valid smi value.
209 void SmiTag(Register reg);
210 // Requires dst != src
211 void SmiTag(Register dst, Register src);
212
213 // Simple comparison of smis. Both sides must be known smis to use these,
214 // otherwise use Cmp.
215 void SmiCompare(Register smi1, Register smi2);
216 void SmiCompare(Register dst, Smi src);
217 void SmiCompare(Register dst, Operand src);
218 void SmiCompare(Operand dst, Register src);
219 void SmiCompare(Operand dst, Smi src);
220
221 // Functions performing a check on a known or potential smi. Returns
222 // a condition that is satisfied if the check is successful.
223 Condition CheckSmi(Register src);
224 Condition CheckSmi(Operand src);
225
226 // Abort execution if argument is a smi, enabled via --debug-code.
227 void AssertNotSmi(Register object);
228
229 // Abort execution if argument is not a smi, enabled via --debug-code.
230 void AssertSmi(Register object);
231 void AssertSmi(Operand object);
232
233 // Test-and-jump functions. Typically combines a check function
234 // above with a conditional jump.
235
236 // Jump to label if the value is a tagged smi.
237 void JumpIfSmi(Register src, Label* on_smi,
238 Label::Distance near_jump = Label::kFar);
239
240 // Jump to label if the value is not a tagged smi.
241 void JumpIfNotSmi(Register src, Label* on_not_smi,
242 Label::Distance near_jump = Label::kFar);
243
244 // Jump to label if the value is not a tagged smi.
245 void JumpIfNotSmi(Operand src, Label* on_not_smi,
246 Label::Distance near_jump = Label::kFar);
247
248 // Operations on tagged smi values.
249
250 // Smis represent a subset of integers. The subset is always equivalent to
251 // a two's complement interpretation of a fixed number of bits.
252
253 // Add an integer constant to a tagged smi, giving a tagged smi as result.
254 // No overflow testing on the result is done.
255 void SmiAddConstant(Operand dst, Smi constant);
256
257 // Specialized operations
258
259 // Converts, if necessary, a smi to a combination of number and
260 // multiplier to be used as a scaled index.
261 // The src register contains a *positive* smi value. The shift is the
262 // power of two to multiply the index value by (e.g. to index by
263 // smi-value * kSystemPointerSize, pass the smi and kSystemPointerSizeLog2).
264 // The returned index register may be either src or dst, depending
265 // on what is most efficient. If src and dst are different registers,
266 // src is always unchanged.
267 SmiIndex SmiToIndex(Register dst, Register src, int shift);
268
JumpIfEqual(Register a, int32_t b, Label* dest)269 void JumpIfEqual(Register a, int32_t b, Label* dest) {
270 cmpl(a, Immediate(b));
271 j(equal, dest);
272 }
273
JumpIfLessThan(Register a, int32_t b, Label* dest)274 void JumpIfLessThan(Register a, int32_t b, Label* dest) {
275 cmpl(a, Immediate(b));
276 j(less, dest);
277 }
278
279 #ifdef V8_MAP_PACKING
280 void UnpackMapWord(Register r);
281 #endif
282
283 void LoadMap(Register destination, Register object);
284
Move(Register dst, intptr_t x)285 void Move(Register dst, intptr_t x) {
286 if (x == 0) {
287 xorl(dst, dst);
288 // The following shorter sequence for uint8 causes performance
289 // regressions:
290 // xorl(dst, dst); movb(dst,
291 // Immediate(static_cast<uint32_t>(x)));
292 } else if (is_uint32(x)) {
293 movl(dst, Immediate(static_cast<uint32_t>(x)));
294 } else if (is_int32(x)) {
295 // "movq reg64, imm32" is sign extending.
296 movq(dst, Immediate(static_cast<int32_t>(x)));
297 } else {
298 movq(dst, Immediate64(x));
299 }
300 }
301 void Move(Operand dst, intptr_t x);
302 void Move(Register dst, Smi source);
303
Move(Operand dst, Smi source)304 void Move(Operand dst, Smi source) {
305 Register constant = GetSmiConstant(source);
306 movq(dst, constant);
307 }
308
Move(Register dst, TaggedIndex source)309 void Move(Register dst, TaggedIndex source) { Move(dst, source.ptr()); }
310
Move(Operand dst, TaggedIndex source)311 void Move(Operand dst, TaggedIndex source) { Move(dst, source.ptr()); }
312
313 void Move(Register dst, ExternalReference ext);
314
315 void Move(XMMRegister dst, uint32_t src);
316 void Move(XMMRegister dst, uint64_t src);
Move(XMMRegister dst, float src)317 void Move(XMMRegister dst, float src) { Move(dst, bit_cast<uint32_t>(src)); }
Move(XMMRegister dst, double src)318 void Move(XMMRegister dst, double src) { Move(dst, bit_cast<uint64_t>(src)); }
319 void Move(XMMRegister dst, uint64_t high, uint64_t low);
320
321 // Move if the registers are not identical.
322 void Move(Register target, Register source);
323 void Move(XMMRegister target, XMMRegister source);
324
325 void Move(Register target, Operand source);
326 void Move(Register target, Immediate source);
327
328 void Move(Register dst, Handle<HeapObject> source,
329 RelocInfo::Mode rmode = RelocInfo::FULL_EMBEDDED_OBJECT);
330 void Move(Operand dst, Handle<HeapObject> source,
331 RelocInfo::Mode rmode = RelocInfo::FULL_EMBEDDED_OBJECT);
332
333 // Loads a pointer into a register with a relocation mode.
Move(Register dst, Address ptr, RelocInfo::Mode rmode)334 void Move(Register dst, Address ptr, RelocInfo::Mode rmode) {
335 // This method must not be used with heap object references. The stored
336 // address is not GC safe. Use the handle version instead.
337 DCHECK(rmode == RelocInfo::NO_INFO || rmode > RelocInfo::LAST_GCED_ENUM);
338 movq(dst, Immediate64(ptr, rmode));
339 }
340
341 // Move src0 to dst0 and src1 to dst1, handling possible overlaps.
342 void MovePair(Register dst0, Register src0, Register dst1, Register src1);
343
344 void MoveStringConstant(
345 Register result, const StringConstantBase* string,
346 RelocInfo::Mode rmode = RelocInfo::FULL_EMBEDDED_OBJECT);
347
348 // Convert smi to word-size sign-extended value.
349 void SmiUntag(Register reg);
350 // Requires dst != src
351 void SmiUntag(Register dst, Register src);
352 void SmiUntag(Register dst, Operand src);
353
354 // Convert smi to 32-bit value.
355 void SmiToInt32(Register reg);
356
357 // Loads the address of the external reference into the destination
358 // register.
359 void LoadAddress(Register destination, ExternalReference source);
360
361 void LoadFromConstantsTable(Register destination, int constant_index) final;
362 void LoadRootRegisterOffset(Register destination, intptr_t offset) final;
363 void LoadRootRelative(Register destination, int32_t offset) final;
364
365 // Operand pointing to an external reference.
366 // May emit code to set up the scratch register. The operand is
367 // only guaranteed to be correct as long as the scratch register
368 // isn't changed.
369 // If the operand is used more than once, use a scratch register
370 // that is guaranteed not to be clobbered.
371 Operand ExternalReferenceAsOperand(ExternalReference reference,
372 Register scratch = kScratchRegister);
373
Call(Register reg)374 void Call(Register reg) { call(reg); }
375 void Call(Operand op);
376 void Call(Handle<CodeT> code_object, RelocInfo::Mode rmode);
377 void Call(Address destination, RelocInfo::Mode rmode);
378 void Call(ExternalReference ext);
Call(Label* target)379 void Call(Label* target) { call(target); }
380
381 Operand EntryFromBuiltinAsOperand(Builtin builtin_index);
382 Operand EntryFromBuiltinIndexAsOperand(Register builtin_index);
383 void CallBuiltinByIndex(Register builtin_index);
384 void CallBuiltin(Builtin builtin);
385 void TailCallBuiltin(Builtin builtin);
386
387 void LoadCodeObjectEntry(Register destination, Register code_object);
388 void CallCodeObject(Register code_object);
389 void JumpCodeObject(Register code_object,
390 JumpMode jump_mode = JumpMode::kJump);
391
392 // Load code entry point from the CodeDataContainer object.
393 void LoadCodeDataContainerEntry(Register destination,
394 Register code_data_container_object);
395 // Load code entry point from the CodeDataContainer object and compute
396 // Code object pointer out of it. Must not be used for CodeDataContainers
397 // corresponding to builtins, because their entry points values point to
398 // the embedded instruction stream in .text section.
399 void LoadCodeDataContainerCodeNonBuiltin(Register destination,
400 Register code_data_container_object);
401 void CallCodeDataContainerObject(Register code_data_container_object);
402 void JumpCodeDataContainerObject(Register code_data_container_object,
403 JumpMode jump_mode = JumpMode::kJump);
404
405 // Helper functions that dispatch either to Call/JumpCodeObject or to
406 // Call/JumpCodeDataContainerObject.
407 // TODO(v8:11880): remove since CodeT targets are now default.
408 void LoadCodeTEntry(Register destination, Register code);
409 void CallCodeTObject(Register code);
410 void JumpCodeTObject(Register code, JumpMode jump_mode = JumpMode::kJump);
411
412 void Jump(Address destination, RelocInfo::Mode rmode);
413 void Jump(const ExternalReference& reference);
414 void Jump(Operand op);
415 void Jump(Handle<CodeT> code_object, RelocInfo::Mode rmode,
416 Condition cc = always);
417
418 void BailoutIfDeoptimized(Register scratch);
419 void CallForDeoptimization(Builtin target, int deopt_id, Label* exit,
420 DeoptimizeKind kind, Label* ret,
421 Label* jump_deoptimization_entry_label);
422
423 void Trap();
424 void DebugBreak();
425
426 void CompareRoot(Register with, RootIndex index);
427 void CompareRoot(Operand with, RootIndex index);
428
429 // Generates function and stub prologue code.
430 void StubPrologue(StackFrame::Type type);
431 void Prologue();
432
433 // Helpers for argument handling
434 enum ArgumentsCountMode { kCountIncludesReceiver, kCountExcludesReceiver };
435 enum ArgumentsCountType { kCountIsInteger, kCountIsSmi, kCountIsBytes };
436 void DropArguments(Register count, Register scratch, ArgumentsCountType type,
437 ArgumentsCountMode mode);
438 void DropArgumentsAndPushNewReceiver(Register argc, Register receiver,
439 Register scratch,
440 ArgumentsCountType type,
441 ArgumentsCountMode mode);
442 void DropArgumentsAndPushNewReceiver(Register argc, Operand receiver,
443 Register scratch,
444 ArgumentsCountType type,
445 ArgumentsCountMode mode);
446
447 // Calls Abort(msg) if the condition cc is not satisfied.
448 // Use --debug_code to enable.
449 void Assert(Condition cc, AbortReason reason);
450
451 // Like Assert(), but without condition.
452 // Use --debug_code to enable.
453 void AssertUnreachable(AbortReason reason);
454
455 // Abort execution if a 64 bit register containing a 32 bit payload does not
456 // have zeros in the top 32 bits, enabled via --debug-code.
457 void AssertZeroExtended(Register reg);
458
459 // Like Assert(), but always enabled.
460 void Check(Condition cc, AbortReason reason);
461
462 // Print a message to stdout and abort execution.
463 void Abort(AbortReason msg);
464
465 // Check that the stack is aligned.
466 void CheckStackAlignment();
467
468 // Activation support.
469 void EnterFrame(StackFrame::Type type);
EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg)470 void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg) {
471 // Out-of-line constant pool not implemented on x64.
472 UNREACHABLE();
473 }
474 void LeaveFrame(StackFrame::Type type);
475
476 // Allocate stack space of given size (i.e. decrement {rsp} by the value
477 // stored in the given register, or by a constant). If you need to perform a
478 // stack check, do it before calling this function because this function may
479 // write into the newly allocated space. It may also overwrite the given
480 // register's value, in the version that takes a register.
481 #if defined(V8_TARGET_OS_WIN) || defined(V8_TARGET_OS_MACOS)
482 void AllocateStackSpace(Register bytes_scratch);
483 void AllocateStackSpace(int bytes);
484 #else
AllocateStackSpace(Register bytes)485 void AllocateStackSpace(Register bytes) { subq(rsp, bytes); }
AllocateStackSpace(int bytes)486 void AllocateStackSpace(int bytes) {
487 DCHECK_GE(bytes, 0);
488 if (bytes == 0) return;
489 subq(rsp, Immediate(bytes));
490 }
491 #endif
492
InitializeRootRegister()493 void InitializeRootRegister() {
494 ExternalReference isolate_root = ExternalReference::isolate_root(isolate());
495 Move(kRootRegister, isolate_root);
496 #ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
497 LoadRootRelative(kPtrComprCageBaseRegister,
498 IsolateData::cage_base_offset());
499 #endif
500 }
501
502 void MaybeSaveRegisters(RegList registers);
503 void MaybeRestoreRegisters(RegList registers);
504
505 void CallEphemeronKeyBarrier(Register object, Register slot_address,
506 SaveFPRegsMode fp_mode);
507
508 void CallRecordWriteStubSaveRegisters(
509 Register object, Register slot_address,
510 RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
511 StubCallMode mode = StubCallMode::kCallBuiltinPointer);
512 void CallRecordWriteStub(
513 Register object, Register slot_address,
514 RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
515 StubCallMode mode = StubCallMode::kCallBuiltinPointer);
516
517 #ifdef V8_IS_TSAN
518 void CallTSANStoreStub(Register address, Register value,
519 SaveFPRegsMode fp_mode, int size, StubCallMode mode,
520 std::memory_order order);
521 void CallTSANRelaxedLoadStub(Register address, SaveFPRegsMode fp_mode,
522 int size, StubCallMode mode);
523 #endif // V8_IS_TSAN
524
525 void MoveNumber(Register dst, double value);
526 void MoveNonSmi(Register dst, double value);
527
528 // Calculate how much stack space (in bytes) are required to store caller
529 // registers excluding those specified in the arguments.
530 int RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
531 Register exclusion1 = no_reg,
532 Register exclusion2 = no_reg,
533 Register exclusion3 = no_reg) const;
534
535 // PushCallerSaved and PopCallerSaved do not arrange the registers in any
536 // particular order so they are not useful for calls that can cause a GC.
537 // The caller can exclude up to 3 registers that do not need to be saved and
538 // restored.
539
540 // Push caller saved registers on the stack, and return the number of bytes
541 // stack pointer is adjusted.
542 int PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
543 Register exclusion2 = no_reg,
544 Register exclusion3 = no_reg);
545 // Restore caller saved registers from the stack, and return the number of
546 // bytes stack pointer is adjusted.
547 int PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
548 Register exclusion2 = no_reg,
549 Register exclusion3 = no_reg);
550
551 // Compute the start of the generated instruction stream from the current PC.
552 // This is an alternative to embedding the {CodeObject} handle as a reference.
553 void ComputeCodeStartAddress(Register dst);
554
555 // Control-flow integrity:
556
557 // Define a function entrypoint. This doesn't emit any code for this
558 // architecture, as control-flow integrity is not supported for it.
CodeEntry()559 void CodeEntry() {}
560 // Define an exception handler.
ExceptionHandler()561 void ExceptionHandler() {}
562 // Define an exception handler and bind a label.
BindExceptionHandler(Label* label)563 void BindExceptionHandler(Label* label) { bind(label); }
564
565 // ---------------------------------------------------------------------------
566 // Pointer compression support
567
568 // Loads a field containing a HeapObject and decompresses it if pointer
569 // compression is enabled.
570 void LoadTaggedPointerField(Register destination, Operand field_operand);
571
572 // Loads a field containing a Smi and decompresses it if pointer compression
573 // is enabled.
574 void LoadTaggedSignedField(Register destination, Operand field_operand);
575
576 // Loads a field containing any tagged value and decompresses it if necessary.
577 void LoadAnyTaggedField(Register destination, Operand field_operand);
578
579 // Loads a field containing a HeapObject, decompresses it if necessary and
580 // pushes full pointer to the stack. When pointer compression is enabled,
581 // uses |scratch| to decompress the value.
582 void PushTaggedPointerField(Operand field_operand, Register scratch);
583
584 // Loads a field containing any tagged value, decompresses it if necessary and
585 // pushes the full pointer to the stack. When pointer compression is enabled,
586 // uses |scratch| to decompress the value.
587 void PushTaggedAnyField(Operand field_operand, Register scratch);
588
589 // Loads a field containing smi value and untags it.
590 void SmiUntagField(Register dst, Operand src);
591
592 // Compresses tagged value if necessary and stores it to given on-heap
593 // location.
594 void StoreTaggedField(Operand dst_field_operand, Immediate immediate);
595 void StoreTaggedField(Operand dst_field_operand, Register value);
596 void StoreTaggedSignedField(Operand dst_field_operand, Smi value);
597 void AtomicStoreTaggedField(Operand dst_field_operand, Register value);
598
599 // The following macros work even when pointer compression is not enabled.
600 void DecompressTaggedSigned(Register destination, Operand field_operand);
601 void DecompressTaggedPointer(Register destination, Operand field_operand);
602 void DecompressTaggedPointer(Register destination, Register source);
603 void DecompressAnyTagged(Register destination, Operand field_operand);
604
605 // ---------------------------------------------------------------------------
606 // V8 Sandbox support
607
608 // Transform a SandboxedPointer from/to its encoded form, which is used when
609 // the pointer is stored on the heap and ensures that the pointer will always
610 // point into the sandbox.
611 void EncodeSandboxedPointer(Register value);
612 void DecodeSandboxedPointer(Register value);
613
614 // Load and decode a SandboxedPointer from the heap.
615 void LoadSandboxedPointerField(Register destination, Operand field_operand);
616 // Encode and store a SandboxedPointer to the heap.
617 void StoreSandboxedPointerField(Operand dst_field_operand, Register value);
618
619 enum class IsolateRootLocation { kInScratchRegister, kInRootRegister };
620 // Loads a field containing off-heap pointer and does necessary decoding
621 // if sandboxed external pointers are enabled.
622 void LoadExternalPointerField(Register destination, Operand field_operand,
623 ExternalPointerTag tag, Register scratch,
624 IsolateRootLocation isolateRootLocation =
625 IsolateRootLocation::kInRootRegister);
626
627 protected:
628 static const int kSmiShift = kSmiTagSize + kSmiShiftSize;
629
630 // Returns a register holding the smi value. The register MUST NOT be
631 // modified. It may be the "smi 1 constant" register.
632 Register GetSmiConstant(Smi value);
633
634 // Drops arguments assuming that the return address was already popped.
635 void DropArguments(Register count, ArgumentsCountType type = kCountIsInteger,
636 ArgumentsCountMode mode = kCountExcludesReceiver);
637 };
638
639 // MacroAssembler implements a collection of frequently used macros.
640 class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
641 public:
642 using TurboAssembler::TurboAssembler;
643
644 // Loads and stores the value of an external reference.
645 // Special case code for load and store to take advantage of
646 // load_rax/store_rax if possible/necessary.
647 // For other operations, just use:
648 // Operand operand = ExternalReferenceAsOperand(extref);
649 // operation(operand, ..);
650 void Load(Register destination, ExternalReference source);
651 void Store(ExternalReference destination, Register source);
652
653 // Pushes the address of the external reference onto the stack.
654 void PushAddress(ExternalReference source);
655
656 // Operations on roots in the root-array.
657 // Load a root value where the index (or part of it) is variable.
658 // The variable_offset register is added to the fixed_offset value
659 // to get the index into the root-array.
660 void PushRoot(RootIndex index);
661
662 // Compare the object in a register to a value and jump if they are equal.
JumpIfRoot(Register with, RootIndex index, Label* if_equal, Label::Distance if_equal_distance = Label::kFar)663 void JumpIfRoot(Register with, RootIndex index, Label* if_equal,
664 Label::Distance if_equal_distance = Label::kFar) {
665 CompareRoot(with, index);
666 j(equal, if_equal, if_equal_distance);
667 }
JumpIfRoot(Operand with, RootIndex index, Label* if_equal, Label::Distance if_equal_distance = Label::kFar)668 void JumpIfRoot(Operand with, RootIndex index, Label* if_equal,
669 Label::Distance if_equal_distance = Label::kFar) {
670 CompareRoot(with, index);
671 j(equal, if_equal, if_equal_distance);
672 }
673
674 // Compare the object in a register to a value and jump if they are not equal.
JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal, Label::Distance if_not_equal_distance = Label::kFar)675 void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal,
676 Label::Distance if_not_equal_distance = Label::kFar) {
677 CompareRoot(with, index);
678 j(not_equal, if_not_equal, if_not_equal_distance);
679 }
JumpIfNotRoot(Operand with, RootIndex index, Label* if_not_equal, Label::Distance if_not_equal_distance = Label::kFar)680 void JumpIfNotRoot(Operand with, RootIndex index, Label* if_not_equal,
681 Label::Distance if_not_equal_distance = Label::kFar) {
682 CompareRoot(with, index);
683 j(not_equal, if_not_equal, if_not_equal_distance);
684 }
685
  // ---------------------------------------------------------------------------
  // GC Support

  // Notify the garbage collector that we wrote a pointer into an object.
  // |object| is the object being stored into, |value| is the object being
  // stored. value and scratch registers are clobbered by the operation.
  // The offset is the offset from the start of the object, not the offset from
  // the tagged HeapObject pointer. For use with FieldOperand(reg, off).
  // NOTE(review): |slot_address| looks like a scratch for the slot's address —
  // confirm clobber behavior against the .cc implementation.
  void RecordWriteField(
      Register object, int offset, Register value, Register slot_address,
      SaveFPRegsMode save_fp,
      RememberedSetAction remembered_set_action = RememberedSetAction::kEmit,
      SmiCheck smi_check = SmiCheck::kInline);

  // For page containing |object| mark region covering |address|
  // dirty. |object| is the object being stored into, |value| is the
  // object being stored. The address and value registers are clobbered by the
  // operation. RecordWrite filters out smis so it does not update
  // the write barrier if the value is a smi.
  void RecordWrite(
      Register object, Register slot_address, Register value,
      SaveFPRegsMode save_fp,
      RememberedSetAction remembered_set_action = RememberedSetAction::kEmit,
      SmiCheck smi_check = SmiCheck::kInline);

  // Enter specific kind of exit frame; either in normal or
  // debug mode. Expects the number of arguments in register rax and
  // sets up the number of arguments in register rdi and the pointer
  // to the first argument in register rsi.
  //
  // Allocates arg_stack_space * kSystemPointerSize memory (not GCed) on the
  // stack accessible via StackSpaceOperand.
  void EnterExitFrame(int arg_stack_space = 0, bool save_doubles = false,
                      StackFrame::Type frame_type = StackFrame::EXIT);

  // Enter specific kind of exit frame. Allocates
  // (arg_stack_space * kSystemPointerSize) memory (not GCed) on the stack
  // accessible via StackSpaceOperand.
  void EnterApiExitFrame(int arg_stack_space);

  // Leave the current exit frame. Expects/provides the return value in
  // register rax:rdx (untouched) and the pointer to the first
  // argument in register rsi (if pop_arguments == true).
  void LeaveExitFrame(bool save_doubles = false, bool pop_arguments = true);

  // Leave the current exit frame. Expects/provides the return value in
  // register rax (untouched).
  void LeaveApiExitFrame();
734
  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Invoke the JavaScript function code by either calling or jumping,
  // selected by |type|.
  void InvokeFunctionCode(Register function, Register new_target,
                          Register expected_parameter_count,
                          Register actual_parameter_count, InvokeType type);

  // On function call, call into the debugger.
  void CallDebugOnFunctionCode(Register fun, Register new_target,
                               Register expected_parameter_count,
                               Register actual_parameter_count);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  // This overload takes no expected parameter count; presumably it is derived
  // from |function| — confirm in the .cc implementation.
  void InvokeFunction(Register function, Register new_target,
                      Register actual_parameter_count, InvokeType type);

  // Same as above, but additionally takes an explicit
  // |expected_parameter_count|.
  void InvokeFunction(Register function, Register new_target,
                      Register expected_parameter_count,
                      Register actual_parameter_count, InvokeType type);
756
  // ---------------------------------------------------------------------------
  // Macro instructions.

  // Keep TurboAssembler's Cmp overloads visible next to the ones added here.
  using TurboAssembler::Cmp;
  void Cmp(Register dst, Handle<Object> source);
  void Cmp(Operand dst, Handle<Object> source);

  // Checks if value is in range [lower_limit, higher_limit] using a single
  // comparison. Flags CF=1 or ZF=1 indicate the value is in the range
  // (condition below_equal).
  void CompareRange(Register value, unsigned lower_limit,
                    unsigned higher_limit);
  // Branches to |on_in_range| when |value| is inside
  // [lower_limit, higher_limit] (see CompareRange above).
  void JumpIfIsInRange(Register value, unsigned lower_limit,
                       unsigned higher_limit, Label* on_in_range,
                       Label::Distance near_jump = Label::kFar);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the rsp register.
  void Drop(int stack_elements);
  // Emit code to discard a positive number of pointer-sized elements
  // from the stack under the return address which remains on the top,
  // clobbering the rsp register.
  void DropUnderReturnAddress(int stack_elements,
                              Register scratch = kScratchRegister);
  void PushQuad(Operand src);
  void PushImm32(int32_t imm32);
  void Pop(Register dst);
  void Pop(Operand dst);
  void PopQuad(Operand dst);

  // Generates a trampoline to jump to the off-heap instruction stream.
  void JumpToOffHeapInstructionStream(Address entry);

  // Compare object type for heap object.
  // Always use unsigned comparisons: above and below, not less and greater.
  // Incoming register is heap_object and outgoing register is map.
  // They may be the same register, and may be kScratchRegister.
  void CmpObjectType(Register heap_object, InstanceType type, Register map);

  // Compare instance type for map.
  // Always use unsigned comparisons: above and below, not less and greater.
  void CmpInstanceType(Register map, InstanceType type);

  // Compare instance type ranges for a map (low and high inclusive)
  // Always use unsigned comparisons: below_equal for a positive result.
  void CmpInstanceTypeRange(Register map, Register instance_type_out,
                            InstanceType low, InstanceType high);
804
805 template <typename Field>
DecodeField(Register reg)806 void DecodeField(Register reg) {
807 static const int shift = Field::kShift;
808 static const int mask = Field::kMask >> Field::kShift;
809 if (shift != 0) {
810 shrq(reg, Immediate(shift));
811 }
812 andq(reg, Immediate(mask));
813 }
814
  // The assertions below only take effect when --debug-code is enabled; each
  // aborts execution when its type check fails.

  // Abort execution if argument is not a CodeT, enabled via --debug-code.
  void AssertCodeT(Register object);

  // Abort execution if argument is not a Constructor, enabled via --debug-code.
  void AssertConstructor(Register object);

  // Abort execution if argument is not a JSFunction, enabled via --debug-code.
  void AssertFunction(Register object);

  // Abort execution if argument is not a callable JSFunction, enabled via
  // --debug-code.
  void AssertCallableFunction(Register object);

  // Abort execution if argument is not a JSBoundFunction,
  // enabled via --debug-code.
  void AssertBoundFunction(Register object);

  // Abort execution if argument is not a JSGeneratorObject (or subclass),
  // enabled via --debug-code.
  void AssertGeneratorObject(Register object);

  // Abort execution if argument is not undefined or an AllocationSite, enabled
  // via --debug-code.
  void AssertUndefinedOrAllocationSite(Register object);

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new stack handler and link it into stack handler chain.
  void PushStackHandler();

  // Unlink the stack handler on top of the stack from the stack handler chain.
  void PopStackHandler();
848
  // ---------------------------------------------------------------------------
  // Support functions.

  // Load the global proxy from the current context (the native context's
  // GLOBAL_PROXY_INDEX slot) into |dst|.
  void LoadGlobalProxy(Register dst) {
    LoadNativeContextSlot(dst, Context::GLOBAL_PROXY_INDEX);
  }

  // Load the native context slot |index| into |dst|.
  void LoadNativeContextSlot(Register dst, int index);
859
  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a runtime routine, passing |num_arguments| explicitly.
  void CallRuntime(const Runtime::Function* f, int num_arguments,
                   SaveFPRegsMode save_doubles = SaveFPRegsMode::kIgnore);
866
867 // Convenience function: Same as above, but takes the fid instead.
CallRuntime(Runtime::FunctionId fid, SaveFPRegsMode save_doubles = SaveFPRegsMode::kIgnore)868 void CallRuntime(Runtime::FunctionId fid,
869 SaveFPRegsMode save_doubles = SaveFPRegsMode::kIgnore) {
870 const Runtime::Function* function = Runtime::FunctionForId(fid);
871 CallRuntime(function, function->nargs, save_doubles);
872 }
873
874 // Convenience function: Same as above, but takes the fid instead.
CallRuntime(Runtime::FunctionId fid, int num_arguments, SaveFPRegsMode save_doubles = SaveFPRegsMode::kIgnore)875 void CallRuntime(Runtime::FunctionId fid, int num_arguments,
876 SaveFPRegsMode save_doubles = SaveFPRegsMode::kIgnore) {
877 CallRuntime(Runtime::FunctionForId(fid), num_arguments, save_doubles);
878 }
879
  // Convenience function: tail call a runtime routine (jump)
  void TailCallRuntime(Runtime::FunctionId fid);

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& ext,
                               bool builtin_exit_frame = false);
886
887 // ---------------------------------------------------------------------------
888 // StatsCounter support
IncrementCounter(StatsCounter* counter, int value)889 void IncrementCounter(StatsCounter* counter, int value) {
890 if (!FLAG_native_code_counters) return;
891 EmitIncrementCounter(counter, value);
892 }
893 void EmitIncrementCounter(StatsCounter* counter, int value);
DecrementCounter(StatsCounter* counter, int value)894 void DecrementCounter(StatsCounter* counter, int value) {
895 if (!FLAG_native_code_counters) return;
896 EmitDecrementCounter(counter, value);
897 }
898 void EmitDecrementCounter(StatsCounter* counter, int value);
899
  // ---------------------------------------------------------------------------
  // Stack limit utilities

  // Returns an Operand referring to the stack limit of the given |kind|.
  Operand StackLimitAsOperand(StackLimitKind kind);
  // Checks the remaining stack space against |num_args| and branches to
  // |stack_overflow| when it is insufficient.
  void StackOverflowCheck(
      Register num_args, Label* stack_overflow,
      Label::Distance stack_overflow_distance = Label::kFar);

  // ---------------------------------------------------------------------------
  // In-place weak references.
  // Unwraps the weak reference in |in_out| in place; jumps to
  // |target_if_cleared| when the reference has been cleared.
  void LoadWeakValue(Register in_out, Label* target_if_cleared);
910
 private:
  // Helper functions for generating invokes.
  void InvokePrologue(Register expected_parameter_count,
                      Register actual_parameter_count, Label* done,
                      InvokeType type);

  // Shared prologue for the EnterExitFrame variants.
  // NOTE(review): |saved_rax_reg| presumably receives the incoming rax value —
  // confirm against the .cc implementation.
  void EnterExitFramePrologue(Register saved_rax_reg,
                              StackFrame::Type frame_type);

  // Allocates arg_stack_space * kSystemPointerSize memory (not GCed) on the
  // stack accessible via StackSpaceOperand.
  void EnterExitFrameEpilogue(int arg_stack_space, bool save_doubles);

  // Shared epilogue for the LeaveExitFrame variants.
  void LeaveExitFrameEpilogue();

  DISALLOW_IMPLICIT_CONSTRUCTORS(MacroAssembler);
};
928
929 // -----------------------------------------------------------------------------
930 // Static helper functions.
931
932 // Generate an Operand for loading a field from an object.
FieldOperand(Register object, int offset)933 inline Operand FieldOperand(Register object, int offset) {
934 return Operand(object, offset - kHeapObjectTag);
935 }
936
937 // Generate an Operand for loading an indexed field from an object.
FieldOperand(Register object, Register index, ScaleFactor scale, int offset)938 inline Operand FieldOperand(Register object, Register index, ScaleFactor scale,
939 int offset) {
940 return Operand(object, index, scale, offset - kHeapObjectTag);
941 }
942
943 // Provides access to exit frame stack space (not GCed).
StackSpaceOperand(int index)944 inline Operand StackSpaceOperand(int index) {
945 #ifdef V8_TARGET_OS_WIN
946 const int kShaddowSpace = 4;
947 return Operand(rsp, (index + kShaddowSpace) * kSystemPointerSize);
948 #else
949 return Operand(rsp, index * kSystemPointerSize);
950 #endif
951 }
952
// Returns an Operand for the stack slot at rsp + |disp|; per its name, used
// to address the return address on the stack.
inline Operand StackOperandForReturnAddress(int32_t disp) {
  return Operand(rsp, disp);
}
956
// Expands to member access through |masm|, so shared code-generation macros
// can emit instructions via a MacroAssembler pointer.
#define ACCESS_MASM(masm) masm->

}  // namespace internal
}  // namespace v8

#endif  // V8_CODEGEN_X64_MACRO_ASSEMBLER_X64_H_
963