1// Copyright 2021 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_BASELINE_ARM_BASELINE_ASSEMBLER_ARM_INL_H_
6#define V8_BASELINE_ARM_BASELINE_ASSEMBLER_ARM_INL_H_
7
8#include "src/baseline/baseline-assembler.h"
9#include "src/codegen/arm/assembler-arm-inl.h"
10#include "src/codegen/interface-descriptors.h"
11
12namespace v8 {
13namespace internal {
14namespace baseline {
15
// RAII scratch-register scope for baseline code generation on arm. Wraps the
// MacroAssembler's UseScratchRegisterScope; the outermost scope additionally
// widens the scratch pool with r8, r9 and kInterpreterBytecodeOffsetRegister
// (assumed free while baseline code runs — TODO confirm against the baseline
// register assignments). Scopes form a linked list via prev_scope_ so the
// assembler always tracks the innermost active scope.
class BaselineAssembler::ScratchRegisterScope {
 public:
  explicit ScratchRegisterScope(BaselineAssembler* assembler)
      : assembler_(assembler),
        prev_scope_(assembler->scratch_register_scope_),
        wrapped_scope_(assembler->masm()) {
    if (!assembler_->scratch_register_scope_) {
      // If we haven't opened a scratch scope yet, for the first one add a
      // couple of extra registers.
      DCHECK(wrapped_scope_.CanAcquire());
      wrapped_scope_.Include(r8, r9);
      wrapped_scope_.Include(kInterpreterBytecodeOffsetRegister);
    }
    assembler_->scratch_register_scope_ = this;
  }
  // Pops this scope: the enclosing scope becomes active again.
  ~ScratchRegisterScope() { assembler_->scratch_register_scope_ = prev_scope_; }

  // Acquires a scratch register from the wrapped MacroAssembler scope.
  Register AcquireScratch() { return wrapped_scope_.Acquire(); }

 private:
  BaselineAssembler* assembler_;
  ScratchRegisterScope* prev_scope_;   // Enclosing scope (nullptr if outermost).
  UseScratchRegisterScope wrapped_scope_;
};
40
41// TODO(v8:11429,leszeks): Unify condition names in the MacroAssembler.
// TODO(v8:11429,leszeks): Unify condition names in the MacroAssembler.
// Architecture-neutral condition names used by the baseline compiler. Each
// enumerator simply re-encodes the corresponding arm condition code, so
// AsMasmCondition() below can convert by a plain cast.
enum class Condition : uint32_t {
  kEqual = static_cast<uint32_t>(eq),
  kNotEqual = static_cast<uint32_t>(ne),

  // Signed comparisons.
  kLessThan = static_cast<uint32_t>(lt),
  kGreaterThan = static_cast<uint32_t>(gt),
  kLessThanEqual = static_cast<uint32_t>(le),
  kGreaterThanEqual = static_cast<uint32_t>(ge),

  // Unsigned comparisons.
  kUnsignedLessThan = static_cast<uint32_t>(lo),
  kUnsignedGreaterThan = static_cast<uint32_t>(hi),
  kUnsignedLessThanEqual = static_cast<uint32_t>(ls),
  kUnsignedGreaterThanEqual = static_cast<uint32_t>(hs),

  kOverflow = static_cast<uint32_t>(vs),
  kNoOverflow = static_cast<uint32_t>(vc),

  // Zero tests share the encodings of the equality tests.
  kZero = static_cast<uint32_t>(eq),
  kNotZero = static_cast<uint32_t>(ne),
};
62
// Converts a baseline Condition to the MacroAssembler's condition type.
// This is a plain cast: on arm each internal::Condition value is the encoded
// condition-code bit field, and the enum above was defined to match it 1:1.
inline internal::Condition AsMasmCondition(Condition cond) {
  STATIC_ASSERT(sizeof(internal::Condition) == sizeof(Condition));
  return static_cast<internal::Condition>(cond);
}
69
70namespace detail {
71
72#ifdef DEBUG
// Debug-only aliasing check: returns true if writing |target| would clobber
// a register that |op| reads (its base rn or index rm).
inline bool Clobbers(Register target, MemOperand op) {
  return op.rn() == target || op.rm() == target;
}
76#endif
77
78}  // namespace detail
79
80#define __ masm_->
81
// Returns an fp-relative MemOperand addressing |interpreter_register|'s slot
// in the baseline frame's register file.
MemOperand BaselineAssembler::RegisterFrameOperand(
    interpreter::Register interpreter_register) {
  return MemOperand(fp, interpreter_register.ToOperand() * kSystemPointerSize);
}
86void BaselineAssembler::RegisterFrameAddress(
87    interpreter::Register interpreter_register, Register rscratch) {
88  return __ add(rscratch, fp,
89                Operand(interpreter_register.ToOperand() * kSystemPointerSize));
90}
// Returns an fp-relative MemOperand addressing the baseline frame's feedback
// vector slot.
MemOperand BaselineAssembler::FeedbackVectorOperand() {
  return MemOperand(fp, BaselineFrameConstants::kFeedbackVectorFromFp);
}
94
// Binds |label| to the current pc.
void BaselineAssembler::Bind(Label* label) { __ bind(label); }
// Arm has no dedicated jump-target marker instruction, so this is identical
// to Bind.
void BaselineAssembler::BindWithoutJumpTarget(Label* label) { __ bind(label); }

void BaselineAssembler::JumpTarget() {
  // NOP on arm.
}

// Unconditional branch to |target|. The distance hint is ignored on arm.
void BaselineAssembler::Jump(Label* target, Label::Distance distance) {
  __ b(target);
}

// Branches to |target| if |value| is the root-table entry |index|.
void BaselineAssembler::JumpIfRoot(Register value, RootIndex index,
                                   Label* target, Label::Distance) {
  __ JumpIfRoot(value, index, target);
}

// Branches to |target| if |value| is not the root-table entry |index|.
void BaselineAssembler::JumpIfNotRoot(Register value, RootIndex index,
                                      Label* target, Label::Distance) {
  __ JumpIfNotRoot(value, index, target);
}

// Branches to |target| if |value| is a Smi (tagged small integer).
void BaselineAssembler::JumpIfSmi(Register value, Label* target,
                                  Label::Distance) {
  __ JumpIfSmi(value, target);
}

// Compares |left| against the immediate |right| and branches to |target| if
// |cc| holds.
void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
                                        Label* target,
                                        Label::Distance distance) {
  JumpIf(cc, left, Operand(right), target, distance);
}

// Branches to |target| if |value| is not a Smi.
void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
                                     Label::Distance) {
  __ JumpIfNotSmi(value, target);
}
131
132void BaselineAssembler::CallBuiltin(Builtin builtin) {
133  //  __ CallBuiltin(static_cast<int>(builtin));
134  ASM_CODE_COMMENT_STRING(masm_,
135                          __ CommentForOffHeapTrampoline("call", builtin));
136  ScratchRegisterScope temps(this);
137  Register temp = temps.AcquireScratch();
138  __ LoadEntryFromBuiltin(builtin, temp);
139  __ Call(temp);
140}
141
// Tail-calls |builtin|: same entry-point load as CallBuiltin, but ends with
// a jump instead of a call, so the builtin returns to our caller. Clobbers
// one scratch register.
void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
  ASM_CODE_COMMENT_STRING(masm_,
                          __ CommentForOffHeapTrampoline("tail call", builtin));
  ScratchRegisterScope temps(this);
  Register temp = temps.AcquireScratch();
  __ LoadEntryFromBuiltin(builtin, temp);
  __ Jump(temp);
}
150
// ANDs |value| with |mask| (flags only, via tst) and branches to |target| if
// |cc| holds on the result. Typically used with kZero/kNotZero.
void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
                                      Label* target, Label::Distance) {
  __ tst(value, Operand(mask));
  __ b(AsMasmCondition(cc), target);
}
156
157void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
158                               Label* target, Label::Distance) {
159  __ cmp(lhs, Operand(rhs));
160  __ b(AsMasmCondition(cc), target);
161}
// Loads |object|'s map into |map| (output, clobbered), compares the map's
// instance type against |instance_type|, and branches to |target| if |cc|
// holds. Clobbers one scratch register for the type.
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
                                         InstanceType instance_type,
                                         Register map, Label* target,
                                         Label::Distance) {
  ScratchRegisterScope temps(this);
  Register type = temps.AcquireScratch();
  __ LoadMap(map, object);
  __ ldrh(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
  JumpIf(cc, type, Operand(instance_type), target);
}
// Like JumpIfObjectType, but |map| already holds a Map. In debug builds,
// verifies that |map| is indeed a Map object before reading its instance
// type. Clobbers one scratch register.
void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                           InstanceType instance_type,
                                           Label* target, Label::Distance) {
  ScratchRegisterScope temps(this);
  Register type = temps.AcquireScratch();
  if (FLAG_debug_code) {
    __ AssertNotSmi(map);
    __ CompareObjectType(map, type, type, MAP_TYPE);
    __ Assert(eq, AbortReason::kUnexpectedValue);
  }
  __ ldrh(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
  JumpIf(cc, type, Operand(instance_type), target);
}
// Compares |value| against the pointer loaded from |operand| and branches to
// |target| if |cc| holds. Clobbers one scratch register.
void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
                                      MemOperand operand, Label* target,
                                      Label::Distance) {
  ScratchRegisterScope temps(this);
  Register tmp = temps.AcquireScratch();
  __ ldr(tmp, operand);
  JumpIf(cc, value, Operand(tmp), target);
}
// Compares the tagged value in |value| against the Smi |smi| and branches if
// |cc| holds. Debug builds assert |value| actually holds a Smi.
void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
                                  Label* target, Label::Distance) {
  __ AssertSmi(value);
  JumpIf(cc, value, Operand(smi), target);
}
// Compares two registers that must both hold Smis (asserted in debug builds)
// and branches if |cc| holds on the tagged representations.
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
                                  Label* target, Label::Distance) {
  __ AssertSmi(lhs);
  __ AssertSmi(rhs);
  JumpIf(cc, lhs, Operand(rhs), target);
}
// Compares |value| against the tagged value loaded from |operand| and
// branches if |cc| holds. Clobbers one scratch register.
void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
                                     MemOperand operand, Label* target,
                                     Label::Distance) {
  ScratchRegisterScope temps(this);
  Register tmp = temps.AcquireScratch();
  __ ldr(tmp, operand);
  JumpIf(cc, value, Operand(tmp), target);
}
// Same as above with the operands swapped: memory value on the left-hand
// side of the comparison (matters for the ordered conditions).
void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
                                     Register value, Label* target,
                                     Label::Distance) {
  ScratchRegisterScope temps(this);
  Register tmp = temps.AcquireScratch();
  __ ldr(tmp, operand);
  JumpIf(cc, tmp, Operand(value), target);
}
// Compares |value| against the immediate |byte| and branches if |cc| holds.
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
                                   Label* target, Label::Distance) {
  JumpIf(cc, value, Operand(byte), target);
}
224
// Stores |source| into the frame slot of interpreter register |output|.
void BaselineAssembler::Move(interpreter::Register output, Register source) {
  Move(RegisterFrameOperand(output), source);
}
// Loads the raw bits of the TaggedIndex |value| into |output|.
void BaselineAssembler::Move(Register output, TaggedIndex value) {
  __ mov(output, Operand(value.ptr()));
}
// Stores |source| to memory at |output|.
void BaselineAssembler::Move(MemOperand output, Register source) {
  __ str(source, output);
}
// Loads an external reference; Move32BitImmediate handles the full 32-bit
// immediate (possibly via the constant pool).
void BaselineAssembler::Move(Register output, ExternalReference reference) {
  __ Move32BitImmediate(output, Operand(reference));
}
// Loads a heap-object handle as a 32-bit immediate.
void BaselineAssembler::Move(Register output, Handle<HeapObject> value) {
  __ Move32BitImmediate(output, Operand(value));
}
// Loads the 32-bit immediate |value| into |output|.
void BaselineAssembler::Move(Register output, int32_t value) {
  __ mov(output, Operand(value));
}
// Register-to-register moves; on arm tagged values are plain words, so both
// the maybe-Smi and Smi variants are simple movs.
void BaselineAssembler::MoveMaybeSmi(Register output, Register source) {
  __ mov(output, source);
}
void BaselineAssembler::MoveSmi(Register output, Register source) {
  __ mov(output, source);
}
249
250namespace detail {
251
// Materializes |arg| into a register for pushing: the generic overload moves
// the value into a freshly acquired scratch register; the Register overload
// is the identity (no scratch needed).
template <typename Arg>
inline Register ToRegister(BaselineAssembler* basm,
                           BaselineAssembler::ScratchRegisterScope* scope,
                           Arg arg) {
  Register reg = scope->AcquireScratch();
  basm->Move(reg, arg);
  return reg;
}
inline Register ToRegister(BaselineAssembler* basm,
                           BaselineAssembler::ScratchRegisterScope* scope,
                           Register reg) {
  return reg;
}
265
// Variadic helper that pushes an arbitrary mix of registers, immediates, and
// interpreter registers/lists one at a time, returning the number of slots
// pushed. PushReverse pushes in reverse argument order (used for arguments
// that must end up lowest-address-first on the stack).
template <typename... Args>
struct PushAllHelper;
// Base case: nothing to push.
template <>
struct PushAllHelper<> {
  static int Push(BaselineAssembler* basm) { return 0; }
  static int PushReverse(BaselineAssembler* basm) { return 0; }
};
// TODO(ishell): try to pack sequence of pushes into one instruction by
// looking at regiser codes. For example, Push(r1, r2, r5, r0, r3, r4)
// could be generated as two pushes: Push(r1, r2, r5) and Push(r0, r3, r4).
template <typename Arg>
struct PushAllHelper<Arg> {
  static int Push(BaselineAssembler* basm, Arg arg) {
    // The scratch scope lives only for this single push, so the scratch
    // register (if ToRegister needed one) is released immediately after.
    BaselineAssembler::ScratchRegisterScope scope(basm);
    basm->masm()->Push(ToRegister(basm, &scope, arg));
    return 1;
  }
  static int PushReverse(BaselineAssembler* basm, Arg arg) {
    // A single argument is its own reverse.
    return Push(basm, arg);
  }
};
// TODO(ishell): try to pack sequence of pushes into one instruction by
// looking at regiser codes. For example, Push(r1, r2, r5, r0, r3, r4)
// could be generated as two pushes: Push(r1, r2, r5) and Push(r0, r3, r4).
template <typename Arg, typename... Args>
struct PushAllHelper<Arg, Args...> {
  static int Push(BaselineAssembler* basm, Arg arg, Args... args) {
    PushAllHelper<Arg>::Push(basm, arg);
    return 1 + PushAllHelper<Args...>::Push(basm, args...);
  }
  static int PushReverse(BaselineAssembler* basm, Arg arg, Args... args) {
    // Push the tail first, then this argument, to reverse the overall order.
    int nargs = PushAllHelper<Args...>::PushReverse(basm, args...);
    PushAllHelper<Arg>::Push(basm, arg);
    return nargs + 1;
  }
};
// A RegisterList expands to one push per interpreter register it contains.
template <>
struct PushAllHelper<interpreter::RegisterList> {
  static int Push(BaselineAssembler* basm, interpreter::RegisterList list) {
    for (int reg_index = 0; reg_index < list.register_count(); ++reg_index) {
      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
    }
    return list.register_count();
  }
  static int PushReverse(BaselineAssembler* basm,
                         interpreter::RegisterList list) {
    for (int reg_index = list.register_count() - 1; reg_index >= 0;
         --reg_index) {
      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
    }
    return list.register_count();
  }
};
319
// Variadic helper that pops into a sequence of registers, one at a time, in
// argument order.
template <typename... T>
struct PopAllHelper;
// Base case: nothing to pop.
template <>
struct PopAllHelper<> {
  static void Pop(BaselineAssembler* basm) {}
};
// TODO(ishell): try to pack sequence of pops into one instruction by
// looking at regiser codes. For example, Pop(r1, r2, r5, r0, r3, r4)
// could be generated as two pops: Pop(r1, r2, r5) and Pop(r0, r3, r4).
template <>
struct PopAllHelper<Register> {
  static void Pop(BaselineAssembler* basm, Register reg) {
    basm->masm()->Pop(reg);
  }
};
template <typename... T>
struct PopAllHelper<Register, T...> {
  static void Pop(BaselineAssembler* basm, Register reg, T... tail) {
    PopAllHelper<Register>::Pop(basm, reg);
    PopAllHelper<T...>::Pop(basm, tail...);
  }
};
342
343}  // namespace detail
344
// Pushes |vals| in argument order; returns the number of stack slots pushed.
template <typename... T>
int BaselineAssembler::Push(T... vals) {
  return detail::PushAllHelper<T...>::Push(this, vals...);
}

// Pushes |vals| in reverse argument order.
template <typename... T>
void BaselineAssembler::PushReverse(T... vals) {
  detail::PushAllHelper<T...>::PushReverse(this, vals...);
}

// Pops into |registers| in argument order.
template <typename... T>
void BaselineAssembler::Pop(T... registers) {
  detail::PopAllHelper<T...>::Pop(this, registers...);
}
359
// Field loads. On arm (no pointer compression) all tagged fields are plain
// machine words, so the three tagged variants are identical ldr loads; they
// exist to mirror the pointer-compression ports' distinct signatures.
void BaselineAssembler::LoadTaggedPointerField(Register output, Register source,
                                               int offset) {
  __ ldr(output, FieldMemOperand(source, offset));
}

void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
                                              int offset) {
  __ ldr(output, FieldMemOperand(source, offset));
}

void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
                                           int offset) {
  __ ldr(output, FieldMemOperand(source, offset));
}

// Zero-extending 16-bit field load.
void BaselineAssembler::LoadWord16FieldZeroExtend(Register output,
                                                  Register source, int offset) {
  __ ldrh(output, FieldMemOperand(source, offset));
}

// Zero-extending 8-bit field load.
void BaselineAssembler::LoadWord8Field(Register output, Register source,
                                       int offset) {
  __ ldrb(output, FieldMemOperand(source, offset));
}
384
// Stores the Smi constant |value| into the field at |offset| of |target|.
// Smis never need a write barrier. Clobbers one scratch register.
void BaselineAssembler::StoreTaggedSignedField(Register target, int offset,
                                               Smi value) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope temps(this);
  Register tmp = temps.AcquireScratch();
  __ mov(tmp, Operand(value));
  __ str(tmp, FieldMemOperand(target, offset));
}
393
// Stores |value| into the field at |offset| of |target| and emits the
// generational/incremental-marking write barrier. |target| and |value| must
// be distinct registers (checked in debug builds).
void BaselineAssembler::StoreTaggedFieldWithWriteBarrier(Register target,
                                                         int offset,
                                                         Register value) {
  ASM_CODE_COMMENT(masm_);
  DCHECK(!AreAliased(target, value));
  __ str(value, FieldMemOperand(target, offset));
  // lr has not been saved by baseline code at this point.
  __ RecordWriteField(target, offset, value, kLRHasNotBeenSaved,
                      SaveFPRegsMode::kIgnore);
}
403
// Stores |value| into the field at |offset| of |target| without a write
// barrier. Caller must guarantee the barrier is unnecessary (e.g. |value| is
// a Smi or |target| is a young object).
void BaselineAssembler::StoreTaggedFieldNoWriteBarrier(Register target,
                                                       int offset,
                                                       Register value) {
  __ str(value, FieldMemOperand(target, offset));
}
409
// Adds the constant |weight| (must be negative when a skip label is given)
// to the function's interrupt budget stored in its FeedbackCell, writes the
// new budget back, and branches to |skip_interrupt_label| if the budget has
// not been exhausted (result still >= 0). Clobbers two scratch registers.
void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    int32_t weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFunction(feedback_cell);
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);

  Register interrupt_budget = scratch_scope.AcquireScratch();
  __ ldr(interrupt_budget,
         FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  // Remember to set flags as part of the add!
  __ add(interrupt_budget, interrupt_budget, Operand(weight), SetCC);
  __ str(interrupt_budget,
         FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  if (skip_interrupt_label) {
    // Use compare flags set by add
    DCHECK_LT(weight, 0);
    __ b(ge, skip_interrupt_label);
  }
}
432
// Register-weight variant of the overload above: adds |weight| to the
// interrupt budget in the function's FeedbackCell and branches to
// |skip_interrupt_label| while the budget stays >= 0. NOTE(review): unlike
// the int32_t overload there is no sign check here — presumably callers only
// pass negative weights; confirm against call sites. Clobbers two scratch
// registers.
void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    Register weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFunction(feedback_cell);
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);

  Register interrupt_budget = scratch_scope.AcquireScratch();
  __ ldr(interrupt_budget,
         FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  // Remember to set flags as part of the add!
  __ add(interrupt_budget, interrupt_budget, weight, SetCC);
  __ str(interrupt_budget,
         FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  // Branch on the flags set by the add above.
  if (skip_interrupt_label) __ b(ge, skip_interrupt_label);
}
451
// Adds the Smi constant |rhs| to the tagged value in |lhs| in place. On arm
// Smi arithmetic works directly on the tagged word.
void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
  __ add(lhs, lhs, Operand(rhs));
}

// output = lhs & rhs (32-bit bitwise AND with an immediate).
void BaselineAssembler::Word32And(Register output, Register lhs, int rhs) {
  __ and_(output, lhs, Operand(rhs));
}
459
// Emits a pc-relative jump table: dispatches to labels[reg - case_value_base]
// if that index is in [0, num_labels), otherwise falls through. NOTE: |reg|
// is clobbered (rebased in place) when case_value_base != 0.
void BaselineAssembler::Switch(Register reg, int case_value_base,
                               Label** labels, int num_labels) {
  ASM_CODE_COMMENT(masm_);
  Label fallthrough;
  if (case_value_base != 0) {
    __ sub(reg, reg, Operand(case_value_base));
  }

  // Mostly copied from code-generator-arm.cc
  ScratchRegisterScope scope(this);
  JumpIf(Condition::kUnsignedGreaterThanEqual, reg, Operand(num_labels),
         &fallthrough);
  // Ensure to emit the constant pool first if necessary.
  __ CheckConstPool(true, true);
  // Keep the constant pool from being emitted inside the jump table below,
  // which would corrupt the pc-relative arithmetic.
  __ BlockConstPoolFor(num_labels);
  // Each table entry is one 4-byte branch instruction.
  int entry_size_log2 = 2;
  // Reading pc on arm yields the address of the current instruction + 8,
  // i.e. the start of the branch table (skipping the `b &fallthrough` below),
  // so pc + reg*4 lands on the reg-th table entry. The `lo` condition reuses
  // the flags from the range check above.
  __ add(pc, pc, Operand(reg, LSL, entry_size_log2), LeaveCC, lo);
  __ b(&fallthrough);
  for (int i = 0; i < num_labels; ++i) {
    __ b(labels[i]);
  }
  __ bind(&fallthrough);
}
483
484#undef __
485
486#define __ basm.
487
// Emits the epilogue for a baseline function: updates the interrupt budget
// (calling the budget-interrupt runtime function if it was exhausted), then
// tears down the frame and returns, dropping max(formal, actual) arguments
// plus the receiver.
void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
  ASM_CODE_COMMENT(masm);
  BaselineAssembler basm(masm);

  Register weight = BaselineLeaveFrameDescriptor::WeightRegister();
  Register params_size = BaselineLeaveFrameDescriptor::ParamsSizeRegister();

  {
    ASM_CODE_COMMENT_STRING(masm, "Update Interrupt Budget");

    Label skip_interrupt_label;
    __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, &skip_interrupt_label);
    {
      // Budget exhausted: call Runtime::kBytecodeBudgetInterrupt. Save
      // params_size (as a Smi) and the accumulator across the call.
      __ masm()->SmiTag(params_size);
      __ Push(params_size, kInterpreterAccumulatorRegister);

      __ LoadContext(kContextRegister);
      __ LoadFunction(kJSFunctionRegister);
      __ Push(kJSFunctionRegister);
      __ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1);

      __ Pop(kInterpreterAccumulatorRegister, params_size);
      __ masm()->SmiUntag(params_size);
    }

    __ Bind(&skip_interrupt_label);
  }

  BaselineAssembler::ScratchRegisterScope temps(&basm);
  Register actual_params_size = temps.AcquireScratch();
  // Compute the size of the actual parameters + receiver (in bytes).
  __ Move(actual_params_size,
          MemOperand(fp, StandardFrameConstants::kArgCOffset));

  // If actual is bigger than formal, then we should use it to free up the stack
  // arguments.
  Label corrected_args_count;
  __ JumpIf(Condition::kGreaterThanEqual, params_size,
            Operand(actual_params_size), &corrected_args_count);
  __ masm()->mov(params_size, actual_params_size);
  __ Bind(&corrected_args_count);

  // Leave the frame (also dropping the register file).
  __ masm()->LeaveFrame(StackFrame::BASELINE);

  // Drop receiver + arguments.
  __ masm()->DropArguments(params_size, TurboAssembler::kCountIsInteger,
                           TurboAssembler::kCountIncludesReceiver);
  __ masm()->Ret();
}
538
539#undef __
540
// Debug check: aborts with kUnexpectedValue unless |reg| holds the same
// value as the interpreter accumulator register.
inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
    Register reg) {
  assembler_->masm()->cmp(reg, kInterpreterAccumulatorRegister);
  assembler_->masm()->Assert(eq, AbortReason::kUnexpectedValue);
}
546
547}  // namespace baseline
548}  // namespace internal
549}  // namespace v8
550
551#endif  // V8_BASELINE_ARM_BASELINE_ASSEMBLER_ARM_INL_H_
552