// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_BASELINE_MIPS64_BASELINE_ASSEMBLER_MIPS64_INL_H_
#define V8_BASELINE_MIPS64_BASELINE_ASSEMBLER_MIPS64_INL_H_

#include "src/baseline/baseline-assembler.h"
#include "src/codegen/interface-descriptors.h"
#include "src/codegen/mips64/assembler-mips64-inl.h"

namespace v8 {
namespace internal {
namespace baseline {

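// Scratch register scope for baseline code. Scopes nest via a linked list of
// previous scopes; only the outermost scope widens the scratch set, so inner
// scopes simply hand out registers from the wrapped MacroAssembler scope.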
class BaselineAssembler::ScratchRegisterScope {
 public:
  explicit ScratchRegisterScope(BaselineAssembler* assembler)
      : assembler_(assembler),
        prev_scope_(assembler->scratch_register_scope_),
        wrapped_scope_(assembler->masm()) {
    if (!assembler_->scratch_register_scope_) {
      // If we haven't opened a scratch scope yet, for the first one add a
      // couple of extra registers.
      wrapped_scope_.Include({t0, t1, t2, t3});
    }
    assembler_->scratch_register_scope_ = this;
  }
  ~ScratchRegisterScope() { assembler_->scratch_register_scope_ = prev_scope_; }

  Register AcquireScratch() { return wrapped_scope_.Acquire(); }

 private:
  BaselineAssembler* assembler_;
  ScratchRegisterScope* prev_scope_;
  UseScratchRegisterScope wrapped_scope_;
};

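// Baseline condition codes. The enumerator values deliberately mirror the
// MIPS64 backend's Condition values one-to-one, which lets AsMasmCondition()
// below translate with a plain static_cast.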
enum class Condition : uint32_t {
  kEqual = eq,
  kNotEqual = ne,

  kLessThan = lt,
  kGreaterThan = gt,
  kLessThanEqual = le,
  kGreaterThanEqual = ge,

  kUnsignedLessThan = Uless,
  kUnsignedGreaterThan = Ugreater,
  kUnsignedLessThanEqual = Uless_equal,
  kUnsignedGreaterThanEqual = Ugreater_equal,

  kOverflow = overflow,
  kNoOverflow = no_overflow,

  kZero = eq,
  kNotZero = ne,
};

inline internal::Condition AsMasmCondition(Condition cond) {
  STATIC_ASSERT(sizeof(internal::Condition) == sizeof(Condition));
  return static_cast<internal::Condition>(cond);
}

namespace detail {

#ifdef DEBUG
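// Debug-only helper used by DCHECKs in the shared BaselineAssembler code:
// reports whether writing |target| would clobber the base register of the
// memory operand before the operand is consumed.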
inline bool Clobbers(Register target, MemOperand op) {
  return op.is_reg() && op.rm() == target;
}
#endif

}  // namespace detail

#define __ masm_->

MemOperand BaselineAssembler::RegisterFrameOperand(
    interpreter::Register interpreter_register) {
  return MemOperand(fp, interpreter_register.ToOperand() * kSystemPointerSize);
}
void BaselineAssembler::RegisterFrameAddress(
    interpreter::Register interpreter_register, Register rscratch) {
  return __ Daddu(rscratch, fp,
                  interpreter_register.ToOperand() * kSystemPointerSize);
}
MemOperand BaselineAssembler::FeedbackVectorOperand() {
  return MemOperand(fp, BaselineFrameConstants::kFeedbackVectorFromFp);
}

void BaselineAssembler::Bind(Label* label) { __ bind(label); }

void BaselineAssembler::BindWithoutJumpTarget(Label* label) { __ bind(label); }

void BaselineAssembler::JumpTarget() {
  // NOP.
}
void BaselineAssembler::Jump(Label* target, Label::Distance distance) {
  __ Branch(target);
}
void BaselineAssembler::JumpIfRoot(Register value, RootIndex index,
                                   Label* target, Label::Distance) {
  __ JumpIfRoot(value, index, target);
}
void BaselineAssembler::JumpIfNotRoot(Register value, RootIndex index,
                                      Label* target, Label::Distance) {
  __ JumpIfNotRoot(value, index, target);
}
void BaselineAssembler::JumpIfSmi(Register value, Label* target,
                                  Label::Distance) {
  __ JumpIfSmi(value, target);
}
void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
                                     Label::Distance) {
  __ JumpIfNotSmi(value, target);
}
void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
                                        Label* target,
                                        Label::Distance distance) {
  JumpIf(cc, left, Operand(right), target, distance);
}

void BaselineAssembler::CallBuiltin(Builtin builtin) {
  ASM_CODE_COMMENT_STRING(masm_,
                          __ CommentForOffHeapTrampoline("call", builtin));
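  // t9 is the conventional call-target register in the MIPS ABI, so routing
  // the builtin's entry point through it keeps the call position-independent.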
  Register temp = t9;
  __ LoadEntryFromBuiltin(builtin, temp);
  __ Call(temp);
}

void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
  ASM_CODE_COMMENT_STRING(masm_,
                          __ CommentForOffHeapTrampoline("tail call", builtin));
  Register temp = t9;
  __ LoadEntryFromBuiltin(builtin, temp);
  __ Jump(temp);
}

void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
                                      Label* target, Label::Distance) {
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ And(scratch, value, Operand(mask));
  __ Branch(target, AsMasmCondition(cc), scratch, Operand(zero_reg));
}

void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
                               Label* target, Label::Distance) {
  __ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
}
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
                                         InstanceType instance_type,
                                         Register map, Label* target,
                                         Label::Distance) {
  ScratchRegisterScope temps(this);
  Register type = temps.AcquireScratch();
  __ GetObjectType(object, map, type);
  __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
}
void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                           InstanceType instance_type,
                                           Label* target, Label::Distance) {
  ScratchRegisterScope temps(this);
  Register type = temps.AcquireScratch();
  if (FLAG_debug_code) {
    __ AssertNotSmi(map);
    __ GetObjectType(map, type, type);
    __ Assert(eq, AbortReason::kUnexpectedValue, type, Operand(MAP_TYPE));
  }
  // The instance type is a 16-bit field, so load it zero-extended rather than
  // as a full word.
  __ Lhu(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
}
void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
                                      MemOperand operand, Label* target,
                                      Label::Distance) {
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ Ld(scratch, operand);
  __ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
                                  Label* target, Label::Distance) {
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ li(scratch, Operand(smi));
  __ SmiUntag(scratch);
  __ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
                                  Label* target, Label::Distance) {
  __ AssertSmi(lhs);
  __ AssertSmi(rhs);
  __ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
}
void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
                                     MemOperand operand, Label* target,
                                     Label::Distance) {
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ Ld(scratch, operand);
  __ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
}
void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
                                     Register value, Label* target,
                                     Label::Distance) {
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ Ld(scratch, operand);
  __ Branch(target, AsMasmCondition(cc), scratch, Operand(value));
}
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
                                   Label* target, Label::Distance) {
  __ Branch(target, AsMasmCondition(cc), value, Operand(byte));
}

void BaselineAssembler::Move(interpreter::Register output, Register source) {
  Move(RegisterFrameOperand(output), source);
}
void BaselineAssembler::Move(Register output, TaggedIndex value) {
  __ li(output, Operand(value.ptr()));
}
void BaselineAssembler::Move(MemOperand output, Register source) {
  __ Sd(source, output);
}
void BaselineAssembler::Move(Register output, ExternalReference reference) {
  __ li(output, Operand(reference));
}
void BaselineAssembler::Move(Register output, Handle<HeapObject> value) {
  __ li(output, Operand(value));
}
void BaselineAssembler::Move(Register output, int32_t value) {
  __ li(output, Operand(value));
}
void BaselineAssembler::MoveMaybeSmi(Register output, Register source) {
  __ Move(output, source);
}
void BaselineAssembler::MoveSmi(Register output, Register source) {
  __ Move(output, source);
}

namespace detail {

template <typename Arg>
inline Register ToRegister(BaselineAssembler* basm,
                           BaselineAssembler::ScratchRegisterScope* scope,
                           Arg arg) {
  Register reg = scope->AcquireScratch();
  basm->Move(reg, arg);
  return reg;
}
inline Register ToRegister(BaselineAssembler* basm,
                           BaselineAssembler::ScratchRegisterScope* scope,
                           Register reg) {
  return reg;
}

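// Variadic helpers that push arguments to the stack one at a time. Push emits
// pushes in argument order; PushReverse emits them back to front, so that the
// first argument ends up on top of the stack.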
template <typename... Args>
struct PushAllHelper;
template <>
struct PushAllHelper<> {
  static int Push(BaselineAssembler* basm) { return 0; }
  static int PushReverse(BaselineAssembler* basm) { return 0; }
};
// TODO(ishell): try to pack sequence of pushes into one instruction by
// looking at register codes. For example, Push(r1, r2, r5, r0, r3, r4)
// could be generated as two pushes: Push(r1, r2, r5) and Push(r0, r3, r4).
template <typename Arg>
struct PushAllHelper<Arg> {
  static int Push(BaselineAssembler* basm, Arg arg) {
    BaselineAssembler::ScratchRegisterScope scope(basm);
    basm->masm()->Push(ToRegister(basm, &scope, arg));
    return 1;
  }
  static int PushReverse(BaselineAssembler* basm, Arg arg) {
    return Push(basm, arg);
  }
};
// TODO(ishell): try to pack sequence of pushes into one instruction by
// looking at register codes. For example, Push(r1, r2, r5, r0, r3, r4)
// could be generated as two pushes: Push(r1, r2, r5) and Push(r0, r3, r4).
template <typename Arg, typename... Args>
struct PushAllHelper<Arg, Args...> {
  static int Push(BaselineAssembler* basm, Arg arg, Args... args) {
    PushAllHelper<Arg>::Push(basm, arg);
    return 1 + PushAllHelper<Args...>::Push(basm, args...);
  }
  static int PushReverse(BaselineAssembler* basm, Arg arg, Args... args) {
    int nargs = PushAllHelper<Args...>::PushReverse(basm, args...);
    PushAllHelper<Arg>::Push(basm, arg);
    return nargs + 1;
  }
};
template <>
struct PushAllHelper<interpreter::RegisterList> {
  static int Push(BaselineAssembler* basm, interpreter::RegisterList list) {
    for (int reg_index = 0; reg_index < list.register_count(); ++reg_index) {
      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
    }
    return list.register_count();
  }
  static int PushReverse(BaselineAssembler* basm,
                         interpreter::RegisterList list) {
    for (int reg_index = list.register_count() - 1; reg_index >= 0;
         --reg_index) {
      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
    }
    return list.register_count();
  }
};

template <typename... T>
struct PopAllHelper;
template <>
struct PopAllHelper<> {
  static void Pop(BaselineAssembler* basm) {}
};
// TODO(ishell): try to pack sequence of pops into one instruction by
// looking at register codes. For example, Pop(r1, r2, r5, r0, r3, r4)
// could be generated as two pops: Pop(r1, r2, r5) and Pop(r0, r3, r4).
template <>
struct PopAllHelper<Register> {
  static void Pop(BaselineAssembler* basm, Register reg) {
    basm->masm()->Pop(reg);
  }
};
template <typename... T>
struct PopAllHelper<Register, T...> {
  static void Pop(BaselineAssembler* basm, Register reg, T... tail) {
    PopAllHelper<Register>::Pop(basm, reg);
    PopAllHelper<T...>::Pop(basm, tail...);
  }
};

}  // namespace detail

template <typename... T>
int BaselineAssembler::Push(T... vals) {
  return detail::PushAllHelper<T...>::Push(this, vals...);
}

template <typename... T>
void BaselineAssembler::PushReverse(T... vals) {
  detail::PushAllHelper<T...>::PushReverse(this, vals...);
}

template <typename... T>
void BaselineAssembler::Pop(T... registers) {
  detail::PopAllHelper<T...>::Pop(this, registers...);
}

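// MIPS64 runs without pointer compression, so every tagged field is a full
// 64-bit word and the three tagged loads below all compile to a plain Ld.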
void BaselineAssembler::LoadTaggedPointerField(Register output, Register source,
                                               int offset) {
  __ Ld(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
                                              int offset) {
  __ Ld(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
                                           int offset) {
  __ Ld(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::LoadWord16FieldZeroExtend(Register output,
                                                  Register source, int offset) {
  __ Lhu(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::LoadWord8Field(Register output, Register source,
                                       int offset) {
  __ Lb(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::StoreTaggedSignedField(Register target, int offset,
                                               Smi value) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ li(scratch, Operand(value));
  __ Sd(scratch, FieldMemOperand(target, offset));
}
void BaselineAssembler::StoreTaggedFieldWithWriteBarrier(Register target,
                                                         int offset,
                                                         Register value) {
  ASM_CODE_COMMENT(masm_);
  __ Sd(value, FieldMemOperand(target, offset));
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ RecordWriteField(target, offset, value, scratch, kRAHasNotBeenSaved,
                      SaveFPRegsMode::kIgnore);
}
void BaselineAssembler::StoreTaggedFieldNoWriteBarrier(Register target,
                                                       int offset,
                                                       Register value) {
  __ Sd(value, FieldMemOperand(target, offset));
}

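// Adds |weight| (negative while code executes) to the feedback cell's
// interrupt budget and takes |skip_interrupt_label| while the budget is still
// non-negative; falling through means the budget is exhausted and an
// interrupt check is due.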
void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    int32_t weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFunction(feedback_cell);
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);

  Register interrupt_budget = scratch_scope.AcquireScratch();
  __ Lw(interrupt_budget,
        FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  __ Addu(interrupt_budget, interrupt_budget, weight);
  __ Sw(interrupt_budget,
        FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  if (skip_interrupt_label) {
    DCHECK_LT(weight, 0);
    __ Branch(skip_interrupt_label, ge, interrupt_budget, Operand(zero_reg));
  }
}
void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    Register weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFunction(feedback_cell);
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);

  Register interrupt_budget = scratch_scope.AcquireScratch();
  __ Lw(interrupt_budget,
        FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  __ Addu(interrupt_budget, interrupt_budget, weight);
  __ Sw(interrupt_budget,
        FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  if (skip_interrupt_label) {
    __ Branch(skip_interrupt_label, ge, interrupt_budget, Operand(zero_reg));
  }
}

void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
  __ Daddu(lhs, lhs, Operand(rhs));
}

void BaselineAssembler::Word32And(Register output, Register lhs, int rhs) {
  __ And(output, lhs, Operand(rhs));
}

void BaselineAssembler::Switch(Register reg, int case_value_base,
                               Label** labels, int num_labels) {
  ASM_CODE_COMMENT(masm_);
  Label fallthrough;
  if (case_value_base != 0) {
    __ Dsubu(reg, reg, Operand(case_value_base));
  }

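  // Cases outside [0, num_labels) fall through; the unsigned comparison also
  // rejects values that became negative after rebasing.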
  __ Branch(&fallthrough, AsMasmCondition(Condition::kUnsignedGreaterThanEqual),
            reg, Operand(num_labels));

  __ GenerateSwitchTable(reg, num_labels,
                         [labels](size_t i) { return labels[i]; });

  __ bind(&fallthrough);
}

#undef __

#define __ basm.

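// Return sequence: pays off the accumulated interrupt budget (calling the
// budget-interrupt runtime function if it is exhausted, while preserving the
// accumulator and the parameter count), then drops the frame and arguments.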
void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
  ASM_CODE_COMMENT(masm);
  BaselineAssembler basm(masm);

  Register weight = BaselineLeaveFrameDescriptor::WeightRegister();
  Register params_size = BaselineLeaveFrameDescriptor::ParamsSizeRegister();

  {
    ASM_CODE_COMMENT_STRING(masm, "Update Interrupt Budget");

    Label skip_interrupt_label;
    __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, &skip_interrupt_label);
    __ masm()->SmiTag(params_size);
    __ masm()->Push(params_size, kInterpreterAccumulatorRegister);

    __ LoadContext(kContextRegister);
    __ LoadFunction(kJSFunctionRegister);
    __ masm()->Push(kJSFunctionRegister);
    __ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1);

    __ masm()->Pop(params_size, kInterpreterAccumulatorRegister);
    __ masm()->SmiUntag(params_size);

    __ Bind(&skip_interrupt_label);
  }

  BaselineAssembler::ScratchRegisterScope temps(&basm);
  Register actual_params_size = temps.AcquireScratch();
  // Compute the size of the actual parameters + receiver (in bytes).
  __ Move(actual_params_size,
          MemOperand(fp, StandardFrameConstants::kArgCOffset));

  // If actual is bigger than formal, then we should use it to free up the stack
  // arguments.
  Label corrected_args_count;
  __ masm()->Branch(&corrected_args_count, ge, params_size,
                    Operand(actual_params_size));
  __ masm()->Move(params_size, actual_params_size);
  __ Bind(&corrected_args_count);

  // Leave the frame (also dropping the register file).
  __ masm()->LeaveFrame(StackFrame::BASELINE);

  // Drop receiver + arguments.
  __ masm()->DropArguments(params_size, TurboAssembler::kCountIsInteger,
                           TurboAssembler::kCountIncludesReceiver);

  __ masm()->Ret();
}

#undef __

inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
    Register reg) {
  assembler_->masm()->Assert(eq, AbortReason::kUnexpectedValue, reg,
                             Operand(kInterpreterAccumulatorRegister));
}

}  // namespace baseline
}  // namespace internal
}  // namespace v8

#endif  // V8_BASELINE_MIPS64_BASELINE_ASSEMBLER_MIPS64_INL_H_