1// Copyright 2021 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_BASELINE_MIPS_BASELINE_ASSEMBLER_MIPS_INL_H_
6#define V8_BASELINE_MIPS_BASELINE_ASSEMBLER_MIPS_INL_H_
7
8#include "src/baseline/baseline-assembler.h"
9#include "src/codegen/interface-descriptors.h"
10#include "src/codegen/mips/assembler-mips-inl.h"
11
12namespace v8 {
13namespace internal {
14namespace baseline {
15
// RAII scope handing out scratch registers. Scopes nest: each one links to
// the previously active scope via |prev_scope_| so the assembler always
// tracks the innermost scope, and the outermost scope widens the underlying
// UseScratchRegisterScope with a few extra registers.
class BaselineAssembler::ScratchRegisterScope {
 public:
  explicit ScratchRegisterScope(BaselineAssembler* assembler)
      : assembler_(assembler),
        prev_scope_(assembler->scratch_register_scope_),
        wrapped_scope_(assembler->masm()) {
    if (!assembler_->scratch_register_scope_) {
      // If we haven't opened a scratch scope yet, for the first one add a
      // couple of extra registers.
      wrapped_scope_.Include({t4, t5, t6, t7});
    }
    assembler_->scratch_register_scope_ = this;
  }
  // Restores the previously active scope on destruction.
  ~ScratchRegisterScope() { assembler_->scratch_register_scope_ = prev_scope_; }

  // Hands out a register from the wrapped macro-assembler scratch pool.
  Register AcquireScratch() { return wrapped_scope_.Acquire(); }

 private:
  BaselineAssembler* assembler_;
  ScratchRegisterScope* prev_scope_;
  UseScratchRegisterScope wrapped_scope_;
};
38
// Baseline condition codes, mapped one-to-one onto the MIPS macro-assembler
// condition values.
enum class Condition : uint32_t {
  kEqual = eq,
  kNotEqual = ne,

  // Signed comparisons.
  kLessThan = lt,
  kGreaterThan = gt,
  kLessThanEqual = le,
  kGreaterThanEqual = ge,

  // Unsigned comparisons.
  kUnsignedLessThan = Uless,
  kUnsignedGreaterThan = Ugreater,
  kUnsignedLessThanEqual = Uless_equal,
  kUnsignedGreaterThanEqual = Ugreater_equal,

  kOverflow = overflow,
  kNoOverflow = no_overflow,

  // Zero tests reuse the equality conditions (callers compare against
  // zero_reg, see e.g. TestAndBranch below).
  kZero = eq,
  kNotZero = ne,
};
59
// Converts a baseline Condition to the macro-assembler's condition type.
inline internal::Condition AsMasmCondition(Condition cond) {
  // The size check matters on architectures like arm, where each
  // internal::Condition value is an encoded bit-field value: the static_cast
  // relies on the two enums having identical representations.
  STATIC_ASSERT(sizeof(internal::Condition) == sizeof(Condition));
  return static_cast<internal::Condition>(cond);
}
66
67namespace detail {
68
69#ifdef DEBUG
70inline bool Clobbers(Register target, MemOperand op) {
71  return op.is_reg() && op.rm() == target;
72}
73#endif
74
75}  // namespace detail
76
77#define __ masm_->
78
79MemOperand BaselineAssembler::RegisterFrameOperand(
80    interpreter::Register interpreter_register) {
81  return MemOperand(fp, interpreter_register.ToOperand() * kSystemPointerSize);
82}
83void BaselineAssembler::RegisterFrameAddress(
84    interpreter::Register interpreter_register, Register rscratch) {
85  return __ Addu(rscratch, fp,
86                 interpreter_register.ToOperand() * kSystemPointerSize);
87}
88MemOperand BaselineAssembler::FeedbackVectorOperand() {
89  return MemOperand(fp, BaselineFrameConstants::kFeedbackVectorFromFp);
90}
91
void BaselineAssembler::Bind(Label* label) { __ bind(label); }

// Identical to Bind on MIPS: there is no landing-pad instruction to omit
// (cf. architectures with branch-target identification).
void BaselineAssembler::BindWithoutJumpTarget(Label* label) { __ bind(label); }

// Nothing to emit: MIPS does not mark indirect-branch targets.
void BaselineAssembler::JumpTarget() {
  // NOP.
}
// Note: the Label::Distance hints are ignored throughout this file; the MIPS
// macro assembler selects branch encodings itself.
void BaselineAssembler::Jump(Label* target, Label::Distance distance) {
  __ Branch(target);
}
// Branches to |target| if |value| equals the root at |index|.
void BaselineAssembler::JumpIfRoot(Register value, RootIndex index,
                                   Label* target, Label::Distance) {
  __ JumpIfRoot(value, index, target);
}
void BaselineAssembler::JumpIfNotRoot(Register value, RootIndex index,
                                      Label* target, Label::Distance) {
  __ JumpIfNotRoot(value, index, target);
}
// Branches to |target| if |value| has a smi tag.
void BaselineAssembler::JumpIfSmi(Register value, Label* target,
                                  Label::Distance) {
  __ JumpIfSmi(value, target);
}
void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
                                     Label::Distance) {
  __ JumpIfNotSmi(value, target);
}
118void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
119                                        Label* target,
120                                        Label::Distance distance) {
121  JumpIf(cc, left, Operand(right), target, distance);
122}
123
124void BaselineAssembler::CallBuiltin(Builtin builtin) {
125  ASM_CODE_COMMENT_STRING(masm_,
126                          __ CommentForOffHeapTrampoline("call", builtin));
127  Register temp = t9;
128  __ LoadEntryFromBuiltin(builtin, temp);
129  __ Call(temp);
130}
131
132void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
133  ASM_CODE_COMMENT_STRING(masm_,
134                          __ CommentForOffHeapTrampoline("tail call", builtin));
135  Register temp = t9;
136  __ LoadEntryFromBuiltin(builtin, temp);
137  __ Jump(temp);
138}
139
140void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
141                                      Label* target, Label::Distance) {
142  ScratchRegisterScope temps(this);
143  Register scratch = temps.AcquireScratch();
144  __ And(scratch, value, Operand(mask));
145  __ Branch(target, AsMasmCondition(cc), scratch, Operand(zero_reg));
146}
147
148void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
149                               Label* target, Label::Distance) {
150  __ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
151}
152void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
153                                         InstanceType instance_type,
154                                         Register map, Label* target,
155                                         Label::Distance) {
156  ScratchRegisterScope temps(this);
157  Register type = temps.AcquireScratch();
158  __ GetObjectType(object, map, type);
159  __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
160}
161void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
162                                           InstanceType instance_type,
163                                           Label* target, Label::Distance) {
164  ScratchRegisterScope temps(this);
165  Register type = temps.AcquireScratch();
166  if (FLAG_debug_code) {
167    __ AssertNotSmi(map);
168    __ GetObjectType(map, type, type);
169    __ Assert(eq, AbortReason::kUnexpectedValue, type, Operand(MAP_TYPE));
170  }
171  __ Lw(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
172  __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
173}
174void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
175                                      MemOperand operand, Label* target,
176                                      Label::Distance) {
177  ScratchRegisterScope temps(this);
178  Register scratch = temps.AcquireScratch();
179  __ Lw(scratch, operand);
180  __ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
181}
// Compares |value| against the Smi constant |smi| and branches on |cc|.
void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
                                  Label* target, Label::Distance) {
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  // Materialize the Smi constant, then strip its tag before comparing.
  // NOTE(review): this compares |value| against the *untagged* constant,
  // which only makes sense if callers pass an untagged value here — confirm
  // against the call sites (other ports compare tagged values directly).
  __ li(scratch, Operand(smi));
  __ SmiUntag(scratch);
  __ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
}
// Compares two registers that must both already hold smis (checked in debug
// builds only) and branches on |cc|.
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
                                  Label* target, Label::Distance) {
  __ AssertSmi(lhs);
  __ AssertSmi(rhs);
  __ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
}
196void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
197                                     MemOperand operand, Label* target,
198                                     Label::Distance) {
199  ScratchRegisterScope temps(this);
200  Register scratch = temps.AcquireScratch();
201  __ Lw(scratch, operand);
202  __ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
203}
204void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
205                                     Register value, Label* target,
206                                     Label::Distance) {
207  ScratchRegisterScope temps(this);
208  Register scratch = temps.AcquireScratch();
209  __ Lw(scratch, operand);
210  __ Branch(target, AsMasmCondition(cc), scratch, Operand(value));
211}
// Branches on |value| <cc> |byte| (plain immediate comparison).
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
                                   Label* target, Label::Distance) {
  __ Branch(target, AsMasmCondition(cc), value, Operand(byte));
}
216
// Stores |source| into the frame slot of interpreter register |output|.
void BaselineAssembler::Move(interpreter::Register output, Register source) {
  Move(RegisterFrameOperand(output), source);
}
void BaselineAssembler::Move(Register output, TaggedIndex value) {
  __ li(output, Operand(value.ptr()));
}
void BaselineAssembler::Move(MemOperand output, Register source) {
  __ Sw(source, output);
}
void BaselineAssembler::Move(Register output, ExternalReference reference) {
  __ li(output, Operand(reference));
}
void BaselineAssembler::Move(Register output, Handle<HeapObject> value) {
  __ li(output, Operand(value));
}
void BaselineAssembler::Move(Register output, int32_t value) {
  __ li(output, Operand(value));
}
// On MIPS a plain register move works for tagged and untagged values alike,
// so both of these are simple moves.
void BaselineAssembler::MoveMaybeSmi(Register output, Register source) {
  __ Move(output, source);
}
void BaselineAssembler::MoveSmi(Register output, Register source) {
  __ Move(output, source);
}
241
242namespace detail {
243
244template <typename Arg>
245inline Register ToRegister(BaselineAssembler* basm,
246                           BaselineAssembler::ScratchRegisterScope* scope,
247                           Arg arg) {
248  Register reg = scope->AcquireScratch();
249  basm->Move(reg, arg);
250  return reg;
251}
252inline Register ToRegister(BaselineAssembler* basm,
253                           BaselineAssembler::ScratchRegisterScope* scope,
254                           Register reg) {
255  return reg;
256}
257
// Recursive helper for BaselineAssembler::Push/PushReverse. Each Push
// method returns the number of stack slots pushed.
template <typename... Args>
struct PushAllHelper;
template <>
struct PushAllHelper<> {
  // Base case: nothing to push.
  static int Push(BaselineAssembler* basm) { return 0; }
  static int PushReverse(BaselineAssembler* basm) { return 0; }
};
265// TODO(ishell): try to pack sequence of pushes into one instruction by
266// looking at regiser codes. For example, Push(r1, r2, r5, r0, r3, r4)
267// could be generated as two pushes: Push(r1, r2, r5) and Push(r0, r3, r4).
268template <typename Arg>
269struct PushAllHelper<Arg> {
270  static int Push(BaselineAssembler* basm, Arg arg) {
271    BaselineAssembler::ScratchRegisterScope scope(basm);
272    basm->masm()->Push(ToRegister(basm, &scope, arg));
273    return 1;
274  }
275  static int PushReverse(BaselineAssembler* basm, Arg arg) {
276    return Push(basm, arg);
277  }
278};
// TODO(ishell): try to pack sequence of pushes into one instruction by
// looking at register codes. For example, Push(r1, r2, r5, r0, r3, r4)
// could be generated as two pushes: Push(r1, r2, r5) and Push(r0, r3, r4).
template <typename Arg, typename... Args>
struct PushAllHelper<Arg, Args...> {
  // Pushes |arg| first, then the remaining arguments; returns the total
  // number of slots pushed.
  static int Push(BaselineAssembler* basm, Arg arg, Args... args) {
    PushAllHelper<Arg>::Push(basm, arg);
    return 1 + PushAllHelper<Args...>::Push(basm, args...);
  }
  // Pushes the tail first so the arguments end up on the stack in reverse
  // order.
  static int PushReverse(BaselineAssembler* basm, Arg arg, Args... args) {
    int nargs = PushAllHelper<Args...>::PushReverse(basm, args...);
    PushAllHelper<Arg>::Push(basm, arg);
    return nargs + 1;
  }
};
294template <>
295struct PushAllHelper<interpreter::RegisterList> {
296  static int Push(BaselineAssembler* basm, interpreter::RegisterList list) {
297    for (int reg_index = 0; reg_index < list.register_count(); ++reg_index) {
298      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
299    }
300    return list.register_count();
301  }
302  static int PushReverse(BaselineAssembler* basm,
303                         interpreter::RegisterList list) {
304    for (int reg_index = list.register_count() - 1; reg_index >= 0;
305         --reg_index) {
306      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
307    }
308    return list.register_count();
309  }
310};
311
// Recursive helper for BaselineAssembler::Pop.
template <typename... T>
struct PopAllHelper;
template <>
struct PopAllHelper<> {
  // Base case: nothing to pop.
  static void Pop(BaselineAssembler* basm) {}
};
// TODO(ishell): try to pack sequence of pops into one instruction by
// looking at register codes. For example, Pop(r1, r2, r5, r0, r3, r4)
// could be generated as two pops: Pop(r1, r2, r5) and Pop(r0, r3, r4).
template <>
struct PopAllHelper<Register> {
  static void Pop(BaselineAssembler* basm, Register reg) {
    basm->masm()->Pop(reg);
  }
};
template <typename... T>
struct PopAllHelper<Register, T...> {
  // Pops into the head register, then recurses on the tail.
  static void Pop(BaselineAssembler* basm, Register reg, T... tail) {
    PopAllHelper<Register>::Pop(basm, reg);
    PopAllHelper<T...>::Pop(basm, tail...);
  }
};
334
335}  // namespace detail
336
// Pushes |vals| left-to-right; returns the number of slots pushed.
template <typename... T>
int BaselineAssembler::Push(T... vals) {
  return detail::PushAllHelper<T...>::Push(this, vals...);
}

// Pushes |vals| right-to-left (the slot count is discarded).
template <typename... T>
void BaselineAssembler::PushReverse(T... vals) {
  detail::PushAllHelper<T...>::PushReverse(this, vals...);
}

// Pops into the given registers, left-to-right.
template <typename... T>
void BaselineAssembler::Pop(T... registers) {
  detail::PopAllHelper<T...>::Pop(this, registers...);
}
351
// On mips32 tagged values are full 32-bit words (no pointer compression),
// so all three tagged-field loads are plain word loads.
void BaselineAssembler::LoadTaggedPointerField(Register output, Register source,
                                               int offset) {
  __ Lw(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
                                              int offset) {
  __ Lw(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
                                           int offset) {
  __ Lw(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::LoadWord16FieldZeroExtend(Register output,
                                                  Register source, int offset) {
  // lhu zero-extends the halfword, matching the "ZeroExtend" contract.
  __ lhu(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::LoadWord8Field(Register output, Register source,
                                       int offset) {
  // NOTE(review): lb sign-extends the byte — confirm callers expect
  // sign-extension (there is no zero-extending byte variant in this file).
  __ lb(output, FieldMemOperand(source, offset));
}
372void BaselineAssembler::StoreTaggedSignedField(Register target, int offset,
373                                               Smi value) {
374  ASM_CODE_COMMENT(masm_);
375  ScratchRegisterScope temps(this);
376  Register scratch = temps.AcquireScratch();
377  __ li(scratch, Operand(value));
378  __ Sw(scratch, FieldMemOperand(target, offset));
379}
void BaselineAssembler::StoreTaggedFieldWithWriteBarrier(Register target,
                                                         int offset,
                                                         Register value) {
  ASM_CODE_COMMENT(masm_);
  // Store first, then record the write so the GC sees the new reference.
  __ Sw(value, FieldMemOperand(target, offset));
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ RecordWriteField(target, offset, value, scratch, kRAHasNotBeenSaved,
                      SaveFPRegsMode::kIgnore);
}
// No write barrier is emitted; callers must guarantee one is not required
// for this store.
void BaselineAssembler::StoreTaggedFieldNoWriteBarrier(Register target,
                                                       int offset,
                                                       Register value) {
  __ Sw(value, FieldMemOperand(target, offset));
}
395
// Adds the constant |weight| to the function's interrupt budget and, if
// given, branches to |skip_interrupt_label| while the budget stays >= 0.
void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    int32_t weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  // Load the current function's feedback cell.
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFunction(feedback_cell);
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);

  // budget += weight; the budget is stored as a raw 32-bit word.
  Register interrupt_budget = scratch_scope.AcquireScratch();
  __ Lw(interrupt_budget,
        FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  __ Addu(interrupt_budget, interrupt_budget, weight);
  __ Sw(interrupt_budget,
        FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  if (skip_interrupt_label) {
    // A skip label only makes sense when the budget is being decremented.
    DCHECK_LT(weight, 0);
    __ Branch(skip_interrupt_label, ge, interrupt_budget, Operand(zero_reg));
  }
}
// Same as the int32_t overload, but the weight is a dynamic register value
// (so no sign check is possible here).
void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    Register weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  // Load the current function's feedback cell.
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFunction(feedback_cell);
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);

  // budget += weight; the budget is stored as a raw 32-bit word.
  Register interrupt_budget = scratch_scope.AcquireScratch();
  __ Lw(interrupt_budget,
        FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  __ Addu(interrupt_budget, interrupt_budget, weight);
  __ Sw(interrupt_budget,
        FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  // Branch past the interrupt while the budget is still non-negative.
  if (skip_interrupt_label)
    __ Branch(skip_interrupt_label, ge, interrupt_budget, Operand(zero_reg));
}
434
// lhs += rhs, where |rhs| is materialized as a tagged Smi constant.
void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
  __ Addu(lhs, lhs, Operand(rhs));
}

// output = lhs & rhs.
void BaselineAssembler::Word32And(Register output, Register lhs, int rhs) {
  __ And(output, lhs, Operand(rhs));
}
442
// Emits a jump table dispatching on |reg| over [case_value_base,
// case_value_base + num_labels); out-of-range values fall through.
void BaselineAssembler::Switch(Register reg, int case_value_base,
                               Label** labels, int num_labels) {
  ASM_CODE_COMMENT(masm_);
  Label fallthrough;
  // Rebase so case values start at 0. Note this clobbers |reg|.
  if (case_value_base != 0) {
    __ Subu(reg, reg, Operand(case_value_base));
  }

  // Unsigned comparison also sends now-negative (below-base) values to the
  // fallthrough path.
  __ Branch(&fallthrough, AsMasmCondition(Condition::kUnsignedGreaterThanEqual),
            reg, Operand(num_labels));

  __ GenerateSwitchTable(reg, num_labels,
                         [labels](size_t i) { return labels[i]; });

  __ bind(&fallthrough);
}
459
460#undef __
461
462#define __ basm.
463
464void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
465  ASM_CODE_COMMENT(masm);
466  BaselineAssembler basm(masm);
467
468  Register weight = BaselineLeaveFrameDescriptor::WeightRegister();
469  Register params_size = BaselineLeaveFrameDescriptor::ParamsSizeRegister();
470
471  {
472    ASM_CODE_COMMENT_STRING(masm, "Update Interrupt Budget");
473
474    Label skip_interrupt_label;
475    __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, &skip_interrupt_label);
476    __ masm()->SmiTag(params_size);
477    __ masm()->Push(params_size, kInterpreterAccumulatorRegister);
478
479    __ LoadContext(kContextRegister);
480    __ LoadFunction(kJSFunctionRegister);
481    __ masm()->Push(kJSFunctionRegister);
482    __ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1);
483
484    __ masm()->Pop(params_size, kInterpreterAccumulatorRegister);
485    __ masm()->SmiUntag(params_size);
486
487  __ Bind(&skip_interrupt_label);
488  }
489
490  BaselineAssembler::ScratchRegisterScope temps(&basm);
491  Register actual_params_size = temps.AcquireScratch();
492  // Compute the size of the actual parameters + receiver (in bytes).
493  __ Move(actual_params_size,
494          MemOperand(fp, StandardFrameConstants::kArgCOffset));
495
496  // If actual is bigger than formal, then we should use it to free up the stack
497  // arguments.
498  Label corrected_args_count;
499  __ masm()->Branch(&corrected_args_count, ge, params_size,
500                    Operand(actual_params_size));
501  __ masm()->Move(params_size, actual_params_size);
502  __ Bind(&corrected_args_count);
503
504  // Leave the frame (also dropping the register file).
505  __ masm()->LeaveFrame(StackFrame::BASELINE);
506
507  // Drop receiver + arguments.
508  __ masm()->DropArguments(params_size, TurboAssembler::kCountIsInteger,
509                           TurboAssembler::kCountIncludesReceiver);
510
511  __ masm()->Ret();
512}
513
514#undef __
515
// Debug-mode check that |reg| still holds the interpreter accumulator.
inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
    Register reg) {
  assembler_->masm()->Assert(eq, AbortReason::kUnexpectedValue, reg,
                             Operand(kInterpreterAccumulatorRegister));
}
521
522}  // namespace baseline
523}  // namespace internal
524}  // namespace v8
525
526#endif  // V8_BASELINE_MIPS_BASELINE_ASSEMBLER_MIPS_INL_H_
527