// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_BASELINE_X64_BASELINE_ASSEMBLER_X64_INL_H_
#define V8_BASELINE_X64_BASELINE_ASSEMBLER_X64_INL_H_

#include "src/base/macros.h"
#include "src/baseline/baseline-assembler.h"
#include "src/codegen/x64/register-x64.h"

namespace v8 {
namespace internal {
namespace baseline {

namespace detail {

// Avoid using kScratchRegister (== r10), since the macro assembler uses it
// without going through this scope, so allocating it here would conflict.
static constexpr Register kScratchRegisters[] = {r8, r9, r11, r12, r15};
static constexpr int kNumScratchRegisters = arraysize(kScratchRegisters);

}  // namespace detail

class BaselineAssembler::ScratchRegisterScope {
 public:
  explicit ScratchRegisterScope(BaselineAssembler* assembler)
      : assembler_(assembler),
        prev_scope_(assembler->scratch_register_scope_),
        registers_used_(prev_scope_ == nullptr ? 0
                                               : prev_scope_->registers_used_) {
    assembler_->scratch_register_scope_ = this;
  }
  ~ScratchRegisterScope() { assembler_->scratch_register_scope_ = prev_scope_; }

  Register AcquireScratch() {
    DCHECK_LT(registers_used_, detail::kNumScratchRegisters);
    return detail::kScratchRegisters[registers_used_++];
  }

 private:
  BaselineAssembler* assembler_;
  ScratchRegisterScope* prev_scope_;
  int registers_used_;
};
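
// Usage sketch (illustrative, not from the original source): scopes nest, so
// an inner scope keeps allocating after the registers its enclosing scope has
// already handed out:
//
//   BaselineAssembler::ScratchRegisterScope outer(&basm);
//   Register a = outer.AcquireScratch();    // r8
//   {
//     BaselineAssembler::ScratchRegisterScope inner(&basm);
//     Register b = inner.AcquireScratch();  // r9, not r8
//   }  // inner's registers are released here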

// TODO(v8:11461): Unify condition names in the MacroAssembler.
enum class Condition : uint32_t {
  kEqual = equal,
  kNotEqual = not_equal,

  kLessThan = less,
  kGreaterThan = greater,
  kLessThanEqual = less_equal,
  kGreaterThanEqual = greater_equal,

  kUnsignedLessThan = below,
  kUnsignedGreaterThan = above,
  kUnsignedLessThanEqual = below_equal,
  kUnsignedGreaterThanEqual = above_equal,

  kOverflow = overflow,
  kNoOverflow = no_overflow,

  kZero = zero,
  kNotZero = not_zero,
};

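// The static_cast below is sound because each enumerator above is initialized
// directly from the corresponding x64 condition code, so the numeric values of
// the two enums coincide.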
inline internal::Condition AsMasmCondition(Condition cond) {
  return static_cast<internal::Condition>(cond);
}

namespace detail {

#define __ masm_->

#ifdef DEBUG
inline bool Clobbers(Register target, MemOperand op) {
  return op.AddressUsesRegister(target);
}
#endif

}  // namespace detail

MemOperand BaselineAssembler::RegisterFrameOperand(
    interpreter::Register interpreter_register) {
  return MemOperand(rbp, interpreter_register.ToOperand() * kSystemPointerSize);
}
void BaselineAssembler::RegisterFrameAddress(
    interpreter::Register interpreter_register, Register rscratch) {
  return __ leaq(rscratch, MemOperand(rbp, interpreter_register.ToOperand() *
                                               kSystemPointerSize));
}
MemOperand BaselineAssembler::FeedbackVectorOperand() {
  return MemOperand(rbp, BaselineFrameConstants::kFeedbackVectorFromFp);
}

void BaselineAssembler::Bind(Label* label) { __ bind(label); }
void BaselineAssembler::BindWithoutJumpTarget(Label* label) { __ bind(label); }

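// On this port, binding a label without a jump-target marker is the same as a
// plain Bind, and JumpTarget() emits nothing; other ports (e.g. arm64 with
// BTI) emit a landing pad here.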
void BaselineAssembler::JumpTarget() {
  // NOP on x64.
}

void BaselineAssembler::Jump(Label* target, Label::Distance distance) {
  __ jmp(target, distance);
}
void BaselineAssembler::JumpIfRoot(Register value, RootIndex index,
                                   Label* target, Label::Distance distance) {
  __ JumpIfRoot(value, index, target, distance);
}
void BaselineAssembler::JumpIfNotRoot(Register value, RootIndex index,
                                      Label* target, Label::Distance distance) {
  __ JumpIfNotRoot(value, index, target, distance);
}
void BaselineAssembler::JumpIfSmi(Register value, Label* target,
                                  Label::Distance distance) {
  __ JumpIfSmi(value, target, distance);
}
void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
                                     Label::Distance distance) {
  __ JumpIfNotSmi(value, target, distance);
}

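// With short_builtin_calls enabled (the code range is close enough to the
// embedded builtins for rel32 addressing), calls and tail calls are emitted
// pc-relative; otherwise they indirect through the builtin entry table.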
void BaselineAssembler::CallBuiltin(Builtin builtin) {
  if (masm()->options().short_builtin_calls) {
    // Generate pc-relative call.
    __ CallBuiltin(builtin);
  } else {
    ASM_CODE_COMMENT_STRING(masm_,
                            __ CommentForOffHeapTrampoline("call", builtin));
    __ Call(__ EntryFromBuiltinAsOperand(builtin));
  }
}

void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
  if (masm()->options().short_builtin_calls) {
    // Generate pc-relative jump.
    __ TailCallBuiltin(builtin);
  } else {
    ASM_CODE_COMMENT_STRING(
        masm_, __ CommentForOffHeapTrampoline("tail call", builtin));
    __ Jump(__ EntryFromBuiltinAsOperand(builtin));
  }
}

void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
                                      Label* target, Label::Distance distance) {
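  // A mask that fits in the low byte can use the shorter testb encoding; this
  // is purely an encoding-size optimization.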
  if ((mask & 0xff) == mask) {
    __ testb(value, Immediate(mask));
  } else {
    __ testl(value, Immediate(mask));
  }
  __ j(AsMasmCondition(cc), target, distance);
}

void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
                               Label* target, Label::Distance distance) {
  __ cmpq(lhs, rhs);
  __ j(AsMasmCondition(cc), target, distance);
}
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
                                         InstanceType instance_type,
                                         Register map, Label* target,
                                         Label::Distance distance) {
  __ AssertNotSmi(object);
  __ CmpObjectType(object, instance_type, map);
  __ j(AsMasmCondition(cc), target, distance);
}
void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                           InstanceType instance_type,
                                           Label* target,
                                           Label::Distance distance) {
  if (FLAG_debug_code) {
    __ AssertNotSmi(map);
    __ CmpObjectType(map, MAP_TYPE, kScratchRegister);
    __ Assert(equal, AbortReason::kUnexpectedValue);
  }
  __ CmpInstanceType(map, instance_type);
  __ j(AsMasmCondition(cc), target, distance);
}
void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
                                      MemOperand operand, Label* target,
                                      Label::Distance distance) {
  __ cmpq(value, operand);
  __ j(AsMasmCondition(cc), target, distance);
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Smi smi,
                                  Label* target, Label::Distance distance) {
  __ SmiCompare(lhs, smi);
  __ j(AsMasmCondition(cc), target, distance);
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
                                  Label* target, Label::Distance distance) {
  __ SmiCompare(lhs, rhs);
  __ j(AsMasmCondition(cc), target, distance);
}

void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
                                        Label* target,
                                        Label::Distance distance) {
  __ cmpq(left, Immediate(right));
  __ j(AsMasmCondition(cc), target, distance);
}

// JumpIfTagged compares (possibly compressed) tagged values, so it uses
// cmp_tagged rather than a full-width cmpq.
void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
                                     MemOperand operand, Label* target,
                                     Label::Distance distance) {
  __ cmp_tagged(value, operand);
  __ j(AsMasmCondition(cc), target, distance);
}
void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
                                     Register value, Label* target,
                                     Label::Distance distance) {
  __ cmp_tagged(operand, value);
  __ j(AsMasmCondition(cc), target, distance);
}
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
                                   Label* target, Label::Distance distance) {
  __ cmpb(value, Immediate(byte));
  __ j(AsMasmCondition(cc), target, distance);
}

void BaselineAssembler::Move(interpreter::Register output, Register source) {
  return __ movq(RegisterFrameOperand(output), source);
}
void BaselineAssembler::Move(Register output, TaggedIndex value) {
  __ Move(output, value);
}
void BaselineAssembler::Move(MemOperand output, Register source) {
  __ movq(output, source);
}
void BaselineAssembler::Move(Register output, ExternalReference reference) {
  __ Move(output, reference);
}
void BaselineAssembler::Move(Register output, Handle<HeapObject> value) {
  __ Move(output, value);
}
void BaselineAssembler::Move(Register output, int32_t value) {
  __ Move(output, value);
}
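// Both helpers below copy a tagged-size value: mov_tagged is a 32-bit move
// when pointer compression is enabled and a full 64-bit move otherwise.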
void BaselineAssembler::MoveMaybeSmi(Register output, Register source) {
  __ mov_tagged(output, source);
}
void BaselineAssembler::MoveSmi(Register output, Register source) {
  __ mov_tagged(output, source);
}

namespace detail {
inline void PushSingle(MacroAssembler* masm, RootIndex source) {
  masm->PushRoot(source);
}
inline void PushSingle(MacroAssembler* masm, Register reg) { masm->Push(reg); }
inline void PushSingle(MacroAssembler* masm, TaggedIndex value) {
  masm->Push(value);
}
inline void PushSingle(MacroAssembler* masm, Smi value) { masm->Push(value); }
inline void PushSingle(MacroAssembler* masm, Handle<HeapObject> object) {
  masm->Push(object);
}
inline void PushSingle(MacroAssembler* masm, int32_t immediate) {
  masm->Push(Immediate(immediate));
}
inline void PushSingle(MacroAssembler* masm, MemOperand operand) {
  masm->Push(operand);
}
inline void PushSingle(MacroAssembler* masm, interpreter::Register source) {
  return PushSingle(masm, BaselineAssembler::RegisterFrameOperand(source));
}

template <typename Arg>
struct PushHelper {
  static int Push(BaselineAssembler* basm, Arg arg) {
    PushSingle(basm->masm(), arg);
    return 1;
  }
  static int PushReverse(BaselineAssembler* basm, Arg arg) {
    return Push(basm, arg);
  }
};

template <>
struct PushHelper<interpreter::RegisterList> {
  static int Push(BaselineAssembler* basm, interpreter::RegisterList list) {
    for (int reg_index = 0; reg_index < list.register_count(); ++reg_index) {
      PushSingle(basm->masm(), list[reg_index]);
    }
    return list.register_count();
  }
  static int PushReverse(BaselineAssembler* basm,
                         interpreter::RegisterList list) {
    for (int reg_index = list.register_count() - 1; reg_index >= 0;
         --reg_index) {
      PushSingle(basm->masm(), list[reg_index]);
    }
    return list.register_count();
  }
};

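// PushAllHelper recursively peels one argument off the parameter pack: Push
// emits the arguments left to right, PushReverse right to left, and both
// return the total number of stack slots pushed.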
template <typename... Args>
struct PushAllHelper;
template <>
struct PushAllHelper<> {
  static int Push(BaselineAssembler* masm) { return 0; }
  static int PushReverse(BaselineAssembler* masm) { return 0; }
};
template <typename Arg, typename... Args>
struct PushAllHelper<Arg, Args...> {
  static int Push(BaselineAssembler* masm, Arg arg, Args... args) {
    int nargs = PushHelper<Arg>::Push(masm, arg);
    return nargs + PushAllHelper<Args...>::Push(masm, args...);
  }
  static int PushReverse(BaselineAssembler* masm, Arg arg, Args... args) {
    int nargs = PushAllHelper<Args...>::PushReverse(masm, args...);
    return nargs + PushHelper<Arg>::PushReverse(masm, arg);
  }
};

}  // namespace detail

template <typename... T>
int BaselineAssembler::Push(T... vals) {
  return detail::PushAllHelper<T...>::Push(this, vals...);
}

template <typename... T>
void BaselineAssembler::PushReverse(T... vals) {
  detail::PushAllHelper<T...>::PushReverse(this, vals...);
}

template <typename... T>
void BaselineAssembler::Pop(T... registers) {
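  // C++17 fold expression: pop each register in the order given.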
  (__ Pop(registers), ...);
}

void BaselineAssembler::LoadTaggedPointerField(Register output, Register source,
                                               int offset) {
  __ LoadTaggedPointerField(output, FieldOperand(source, offset));
}
void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
                                              int offset) {
  __ LoadTaggedSignedField(output, FieldOperand(source, offset));
}
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
                                           int offset) {
  __ LoadAnyTaggedField(output, FieldOperand(source, offset));
}
void BaselineAssembler::LoadWord16FieldZeroExtend(Register output,
                                                  Register source, int offset) {
  __ movzxwq(output, FieldOperand(source, offset));
}
void BaselineAssembler::LoadWord8Field(Register output, Register source,
                                       int offset) {
  __ movb(output, FieldOperand(source, offset));
}
void BaselineAssembler::StoreTaggedSignedField(Register target, int offset,
                                               Smi value) {
  __ StoreTaggedSignedField(FieldOperand(target, offset), value);
}
void BaselineAssembler::StoreTaggedFieldWithWriteBarrier(Register target,
                                                         int offset,
                                                         Register value) {
  ASM_CODE_COMMENT(masm_);
  Register scratch = WriteBarrierDescriptor::SlotAddressRegister();
  DCHECK(!AreAliased(target, value, scratch));
  __ StoreTaggedField(FieldOperand(target, offset), value);
  __ RecordWriteField(target, offset, value, scratch, SaveFPRegsMode::kIgnore);
}
void BaselineAssembler::StoreTaggedFieldNoWriteBarrier(Register target,
                                                       int offset,
                                                       Register value) {
  __ StoreTaggedField(FieldOperand(target, offset), value);
}

void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    int32_t weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFunction(feedback_cell);
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);
  __ addl(FieldOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset),
          Immediate(weight));
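  // Reuse the flags set by the addl above: a non-negative result
  // (greater_equal) means the interrupt budget is not yet exhausted.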
  if (skip_interrupt_label) {
    DCHECK_LT(weight, 0);
    __ j(greater_equal, skip_interrupt_label);
  }
}

void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    Register weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFunction(feedback_cell);
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);
  __ addl(FieldOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset),
          weight);
  if (skip_interrupt_label) __ j(greater_equal, skip_interrupt_label);
}

void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
  if (rhs.value() == 0) return;
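  // With 31-bit Smis (pointer compression) the tagged value fits in a 32-bit
  // immediate; with full 64-bit Smis the payload lives in the upper word and
  // has to be materialized in a scratch register first.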
  if (SmiValuesAre31Bits()) {
    __ addl(lhs, Immediate(rhs));
  } else {
    ScratchRegisterScope scratch_scope(this);
    Register rhs_reg = scratch_scope.AcquireScratch();
    __ Move(rhs_reg, rhs);
    __ addq(lhs, rhs_reg);
  }
}

void BaselineAssembler::Word32And(Register output, Register lhs, int rhs) {
  Move(output, lhs);
  __ andq(output, Immediate(rhs));
}

void BaselineAssembler::Switch(Register reg, int case_value_base,
                               Label** labels, int num_labels) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scope(this);
  Register table = scope.AcquireScratch();
  Label fallthrough, jump_table;
  if (case_value_base != 0) {
    __ subq(reg, Immediate(case_value_base));
  }
  __ cmpq(reg, Immediate(num_labels));
  __ j(above_equal, &fallthrough);
  __ leaq(table, MemOperand(&jump_table));
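  // Each entry emitted by dq below is a full 8-byte code pointer, hence the
  // times_8 scaling of the case index.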
  __ jmp(MemOperand(table, reg, times_8, 0));
  // Emit the jump table inline, under the assumption that it's not too big.
  __ Align(kSystemPointerSize);
  __ bind(&jump_table);
  for (int i = 0; i < num_labels; ++i) {
    __ dq(labels[i]);
  }
  __ bind(&fallthrough);
}

#undef __
#define __ basm.

void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
  ASM_CODE_COMMENT(masm);
  BaselineAssembler basm(masm);

  Register weight = BaselineLeaveFrameDescriptor::WeightRegister();
  Register params_size = BaselineLeaveFrameDescriptor::ParamsSizeRegister();

  {
    ASM_CODE_COMMENT_STRING(masm, "Update Interrupt Budget");

    Label skip_interrupt_label;
    __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, &skip_interrupt_label);
    {
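      // params_size holds a raw word count; tag it so the slot it is spilled
      // to stays a valid tagged value across the runtime call, which may
      // trigger a GC that scans the stack.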
      __ masm()->SmiTag(params_size);
      __ Push(params_size, kInterpreterAccumulatorRegister);

      __ LoadContext(kContextRegister);
      __ Push(MemOperand(rbp, InterpreterFrameConstants::kFunctionOffset));
      __ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1);

      __ Pop(kInterpreterAccumulatorRegister, params_size);
      __ masm()->SmiUntag(params_size);
    }
    __ Bind(&skip_interrupt_label);
  }

  BaselineAssembler::ScratchRegisterScope scope(&basm);
  Register scratch = scope.AcquireScratch();

  Register actual_params_size = scratch;
  // Load the actual number of arguments, including the receiver.
  __ masm()->movq(actual_params_size,
                  MemOperand(rbp, StandardFrameConstants::kArgCOffset));

  // If the actual count is larger than the formal parameter count, use it
  // when dropping the stack arguments.
  Label corrected_args_count;
  __ masm()->cmpq(params_size, actual_params_size);
  __ masm()->j(greater_equal, &corrected_args_count);
  __ masm()->movq(params_size, actual_params_size);
  __ Bind(&corrected_args_count);

  // Leave the frame (also dropping the register file).
  __ masm()->LeaveFrame(StackFrame::BASELINE);

  // Drop receiver + arguments.
  __ masm()->DropArguments(params_size, scratch,
                           TurboAssembler::kCountIsInteger,
                           TurboAssembler::kCountIncludesReceiver);
  __ masm()->Ret();
}

#undef __

inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
    Register reg) {
  assembler_->masm()->cmp_tagged(reg, kInterpreterAccumulatorRegister);
  assembler_->masm()->Assert(equal, AbortReason::kUnexpectedValue);
}

}  // namespace baseline
}  // namespace internal
}  // namespace v8

#endif  // V8_BASELINE_X64_BASELINE_ASSEMBLER_X64_INL_H_