1 // Copyright 2021 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_BASELINE_ARM_BASELINE_ASSEMBLER_ARM_INL_H_
6 #define V8_BASELINE_ARM_BASELINE_ASSEMBLER_ARM_INL_H_
7 
8 #include "src/baseline/baseline-assembler.h"
9 #include "src/codegen/arm/assembler-arm-inl.h"
10 #include "src/codegen/interface-descriptors.h"
11 
12 namespace v8 {
13 namespace internal {
14 namespace baseline {
15 
// RAII scope that hands out scratch registers to baseline code generation.
// Scopes form a stack linked through prev_scope_; the innermost open scope
// is always reachable via assembler->scratch_register_scope_. Each scope
// wraps the MacroAssembler's UseScratchRegisterScope, which actually owns
// the register pool.
class BaselineAssembler::ScratchRegisterScope {
 public:
  explicit ScratchRegisterScope(BaselineAssembler* assembler)
      : assembler_(assembler),
        prev_scope_(assembler->scratch_register_scope_),
        wrapped_scope_(assembler->masm()) {
    if (!assembler_->scratch_register_scope_) {
      // If we haven't opened a scratch scope yet, for the first one add a
      // couple of extra registers.
      DCHECK(wrapped_scope_.CanAcquire());
      wrapped_scope_.Include(r8, r9);
      wrapped_scope_.Include(kInterpreterBytecodeOffsetRegister);
    }
    assembler_->scratch_register_scope_ = this;
  }
  // Pops this scope, re-exposing the enclosing one (if any).
  ~ScratchRegisterScope() { assembler_->scratch_register_scope_ = prev_scope_; }

  // Acquires a scratch register from the wrapped MacroAssembler scope; it is
  // returned to the pool when this scope is destroyed.
  Register AcquireScratch() { return wrapped_scope_.Acquire(); }

 private:
  BaselineAssembler* assembler_;
  ScratchRegisterScope* prev_scope_;  // Enclosing scope, or null if outermost.
  UseScratchRegisterScope wrapped_scope_;
};
40 
// TODO(v8:11429,leszeks): Unify condition names in the MacroAssembler.
// Platform-independent condition names for baseline code, mapped 1:1 onto
// the ARM condition encodings so AsMasmCondition() below can reinterpret a
// value directly as an internal::Condition.
enum class Condition : uint32_t {
  kEqual = static_cast<uint32_t>(eq),
  kNotEqual = static_cast<uint32_t>(ne),

  kLessThan = static_cast<uint32_t>(lt),
  kGreaterThan = static_cast<uint32_t>(gt),
  kLessThanEqual = static_cast<uint32_t>(le),
  kGreaterThanEqual = static_cast<uint32_t>(ge),

  kUnsignedLessThan = static_cast<uint32_t>(lo),
  kUnsignedGreaterThan = static_cast<uint32_t>(hi),
  kUnsignedLessThanEqual = static_cast<uint32_t>(ls),
  kUnsignedGreaterThanEqual = static_cast<uint32_t>(hs),

  kOverflow = static_cast<uint32_t>(vs),
  kNoOverflow = static_cast<uint32_t>(vc),

  // Zero tests reuse the equality encodings (cmp against 0 sets Z).
  kZero = static_cast<uint32_t>(eq),
  kNotZero = static_cast<uint32_t>(ne),
};
62 
// Converts a baseline Condition to the MacroAssembler's condition type.
// This matters on arm: each internal::Condition value is an encoded
// condition bit field, so the enum values above must match it exactly and
// the conversion is a pure reinterpretation.
inline internal::Condition AsMasmCondition(Condition cond) {
  STATIC_ASSERT(sizeof(internal::Condition) == sizeof(Condition));
  return static_cast<internal::Condition>(cond);
}
69 
namespace detail {

#ifdef DEBUG
// Debug-only check: does writing |target| invalidate the address computed
// by |op| (i.e. is |target| one of the operand's base/index registers)?
inline bool Clobbers(Register target, MemOperand op) {
  return target == op.rn() || target == op.rm();
}
#endif

}  // namespace detail
79 
80 #define __ masm_->
81 
RegisterFrameOperand( interpreter::Register interpreter_register)82 MemOperand BaselineAssembler::RegisterFrameOperand(
83     interpreter::Register interpreter_register) {
84   return MemOperand(fp, interpreter_register.ToOperand() * kSystemPointerSize);
85 }
RegisterFrameAddress( interpreter::Register interpreter_register, Register rscratch)86 void BaselineAssembler::RegisterFrameAddress(
87     interpreter::Register interpreter_register, Register rscratch) {
88   return __ add(rscratch, fp,
89                 Operand(interpreter_register.ToOperand() * kSystemPointerSize));
90 }
// Returns the baseline-frame slot that caches the feedback vector.
MemOperand BaselineAssembler::FeedbackVectorOperand() {
  return MemOperand(fp, BaselineFrameConstants::kFeedbackVectorFromFp);
}
94 
// Binds |label| at the current program position.
void BaselineAssembler::Bind(Label* label) { __ bind(label); }
// Same as Bind on arm: no jump-target marker instruction is needed
// (see JumpTarget() below, which is a nop on this architecture).
void BaselineAssembler::BindWithoutJumpTarget(Label* label) { __ bind(label); }
97 
// Marks a position as a valid indirect-jump target. Arm needs no marker
// instruction (unlike e.g. arm64's BTI), so this emits nothing.
void BaselineAssembler::JumpTarget() {
  // NOP on arm.
}
101 
// Unconditional branch to |target|; the distance hint is ignored on arm.
void BaselineAssembler::Jump(Label* target, Label::Distance distance) {
  __ b(target);
}
105 
// Branches to |target| if |value| equals the root table entry |index|.
void BaselineAssembler::JumpIfRoot(Register value, RootIndex index,
                                   Label* target, Label::Distance) {
  __ JumpIfRoot(value, index, target);
}
110 
// Branches to |target| if |value| differs from the root table entry |index|.
void BaselineAssembler::JumpIfNotRoot(Register value, RootIndex index,
                                      Label* target, Label::Distance) {
  __ JumpIfNotRoot(value, index, target);
}
115 
// Branches to |target| if |value| is a Smi (tag check only).
void BaselineAssembler::JumpIfSmi(Register value, Label* target,
                                  Label::Distance) {
  __ JumpIfSmi(value, target);
}
120 
JumpIfImmediate(Condition cc, Register left, int right, Label* target, Label::Distance distance)121 void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
122                                         Label* target,
123                                         Label::Distance distance) {
124   JumpIf(cc, left, Operand(right), target, distance);
125 }
126 
// Branches to |target| if |value| is not a Smi.
void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
                                     Label::Distance) {
  __ JumpIfNotSmi(value, target);
}
131 
CallBuiltin(Builtin builtin)132 void BaselineAssembler::CallBuiltin(Builtin builtin) {
133   //  __ CallBuiltin(static_cast<int>(builtin));
134   ASM_CODE_COMMENT_STRING(masm_,
135                           __ CommentForOffHeapTrampoline("call", builtin));
136   ScratchRegisterScope temps(this);
137   Register temp = temps.AcquireScratch();
138   __ LoadEntryFromBuiltin(builtin, temp);
139   __ Call(temp);
140 }
141 
// Tail-calls |builtin|: loads its off-heap entry point into a scratch
// register and jumps to it without linking, so the builtin returns directly
// to our caller.
void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
  ASM_CODE_COMMENT_STRING(masm_,
                          __ CommentForOffHeapTrampoline("tail call", builtin));
  ScratchRegisterScope temps(this);
  Register temp = temps.AcquireScratch();
  __ LoadEntryFromBuiltin(builtin, temp);
  __ Jump(temp);
}
150 
// Sets flags from |value| & |mask| (tst) and branches to |target| on |cc|.
void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
                                      Label* target, Label::Distance) {
  __ tst(value, Operand(mask));
  __ b(AsMasmCondition(cc), target);
}
156 
JumpIf(Condition cc, Register lhs, const Operand& rhs, Label* target, Label::Distance)157 void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
158                                Label* target, Label::Distance) {
159   __ cmp(lhs, Operand(rhs));
160   __ b(AsMasmCondition(cc), target);
161 }
// Loads |object|'s map into |map| (clobbering it), reads the instance type
// from the map, and branches to |target| when it compares |cc| against
// |instance_type|.
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
                                         InstanceType instance_type,
                                         Register map, Label* target,
                                         Label::Distance) {
  ScratchRegisterScope temps(this);
  Register type = temps.AcquireScratch();
  __ LoadMap(map, object);
  __ ldrh(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
  JumpIf(cc, type, Operand(instance_type), target);
}
// Like JumpIfObjectType, but |map| already holds a Map; reads its instance
// type and branches to |target| when it compares |cc| against
// |instance_type|. In debug builds, verifies |map| really is a Map.
void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                           InstanceType instance_type,
                                           Label* target, Label::Distance) {
  ScratchRegisterScope temps(this);
  Register type = temps.AcquireScratch();
  if (FLAG_debug_code) {
    __ AssertNotSmi(map);
    __ CompareObjectType(map, type, type, MAP_TYPE);
    __ Assert(eq, AbortReason::kUnexpectedValue);
  }
  __ ldrh(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
  JumpIf(cc, type, Operand(instance_type), target);
}
// Loads the word at |operand| and branches to |target| when |value|
// compares |cc| against it.
void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
                                      MemOperand operand, Label* target,
                                      Label::Distance) {
  ScratchRegisterScope temps(this);
  Register tmp = temps.AcquireScratch();
  __ ldr(tmp, operand);
  JumpIf(cc, value, Operand(tmp), target);
}
// Compares |value| (asserted to be a Smi in debug builds) against the
// constant |smi| and branches to |target| on |cc|.
void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
                                  Label* target, Label::Distance) {
  __ AssertSmi(value);
  JumpIf(cc, value, Operand(smi), target);
}
// Compares two registers (both asserted to hold Smis in debug builds) and
// branches to |target| on |cc|.
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
                                  Label* target, Label::Distance) {
  __ AssertSmi(lhs);
  __ AssertSmi(rhs);
  JumpIf(cc, lhs, Operand(rhs), target);
}
// Loads the tagged word at |operand| and branches to |target| when |value|
// compares |cc| against it (register on the left-hand side).
void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
                                     MemOperand operand, Label* target,
                                     Label::Distance) {
  ScratchRegisterScope temps(this);
  Register tmp = temps.AcquireScratch();
  __ ldr(tmp, operand);
  JumpIf(cc, value, Operand(tmp), target);
}
// Loads the tagged word at |operand| and branches to |target| when it
// compares |cc| against |value| (memory value on the left-hand side —
// note the operand order differs from the overload above).
void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
                                     Register value, Label* target,
                                     Label::Distance) {
  ScratchRegisterScope temps(this);
  Register tmp = temps.AcquireScratch();
  __ ldr(tmp, operand);
  JumpIf(cc, tmp, Operand(value), target);
}
// Compares |value| against the byte constant |byte| and branches on |cc|.
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
                                   Label* target, Label::Distance) {
  JumpIf(cc, value, Operand(byte), target);
}
224 
// Stores |source| into the frame slot backing interpreter register |output|.
void BaselineAssembler::Move(interpreter::Register output, Register source) {
  Move(RegisterFrameOperand(output), source);
}
// Loads the raw pointer value of a TaggedIndex into |output|.
void BaselineAssembler::Move(Register output, TaggedIndex value) {
  __ mov(output, Operand(value.ptr()));
}
// Stores |source| to memory at |output|.
void BaselineAssembler::Move(MemOperand output, Register source) {
  __ str(source, output);
}
// Loads an external reference as a 32-bit immediate into |output|.
void BaselineAssembler::Move(Register output, ExternalReference reference) {
  __ Move32BitImmediate(output, Operand(reference));
}
// Loads a heap-object handle as a 32-bit immediate into |output|.
void BaselineAssembler::Move(Register output, Handle<HeapObject> value) {
  __ Move32BitImmediate(output, Operand(value));
}
// Loads the integer constant |value| into |output|.
void BaselineAssembler::Move(Register output, int32_t value) {
  __ mov(output, Operand(value));
}
// Register-to-register move; on arm a possibly-Smi value needs no special
// handling, so this is a plain mov.
void BaselineAssembler::MoveMaybeSmi(Register output, Register source) {
  __ mov(output, source);
}
// Register-to-register move of a Smi value; a plain mov on arm.
void BaselineAssembler::MoveSmi(Register output, Register source) {
  __ mov(output, source);
}
249 
250 namespace detail {
251 
// Materializes an arbitrary pushable value |arg| into a scratch register
// acquired from |scope|, using the assembler's Move overloads.
template <typename Arg>
inline Register ToRegister(BaselineAssembler* basm,
                           BaselineAssembler::ScratchRegisterScope* scope,
                           Arg arg) {
  Register reg = scope->AcquireScratch();
  basm->Move(reg, arg);
  return reg;
}
// Overload for values already in a register: passes the register through
// untouched (no scratch register is consumed).
inline Register ToRegister(BaselineAssembler* basm,
                           BaselineAssembler::ScratchRegisterScope* scope,
                           Register reg) {
  return reg;
}
265 
// Recursive helper behind BaselineAssembler::Push/PushReverse. Each
// specialization returns the number of stack slots it pushed.
template <typename... Args>
struct PushAllHelper;
// Base case: an empty pack pushes nothing.
template <>
struct PushAllHelper<> {
  static int Push(BaselineAssembler* basm) { return 0; }
  static int PushReverse(BaselineAssembler* basm) { return 0; }
};
// TODO(ishell): try to pack sequence of pushes into one instruction by
// looking at register codes. For example, Push(r1, r2, r5, r0, r3, r4)
// could be generated as two pushes: Push(r1, r2, r5) and Push(r0, r3, r4).
template <typename Arg>
struct PushAllHelper<Arg> {
  // Pushes one value, materializing it into a scratch register first if it
  // is not already a Register; always pushes exactly one slot.
  static int Push(BaselineAssembler* basm, Arg arg) {
    BaselineAssembler::ScratchRegisterScope scope(basm);
    basm->masm()->Push(ToRegister(basm, &scope, arg));
    return 1;
  }
  // A single value is its own reverse.
  static int PushReverse(BaselineAssembler* basm, Arg arg) {
    return Push(basm, arg);
  }
};
// TODO(ishell): try to pack sequence of pushes into one instruction by
// looking at register codes. For example, Push(r1, r2, r5, r0, r3, r4)
// could be generated as two pushes: Push(r1, r2, r5) and Push(r0, r3, r4).
template <typename Arg, typename... Args>
struct PushAllHelper<Arg, Args...> {
  // Pushes |arg| then the rest, left to right.
  static int Push(BaselineAssembler* basm, Arg arg, Args... args) {
    PushAllHelper<Arg>::Push(basm, arg);
    return 1 + PushAllHelper<Args...>::Push(basm, args...);
  }
  // Pushes the tail (reversed) first, then |arg|, yielding right-to-left
  // order overall.
  static int PushReverse(BaselineAssembler* basm, Arg arg, Args... args) {
    int nargs = PushAllHelper<Args...>::PushReverse(basm, args...);
    PushAllHelper<Arg>::Push(basm, arg);
    return nargs + 1;
  }
};
// Specialization for interpreter register lists: pushes each register's
// frame slot value, one slot per list entry.
template <>
struct PushAllHelper<interpreter::RegisterList> {
  static int Push(BaselineAssembler* basm, interpreter::RegisterList list) {
    for (int reg_index = 0; reg_index < list.register_count(); ++reg_index) {
      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
    }
    return list.register_count();
  }
  // Same as Push but iterates the list back to front.
  static int PushReverse(BaselineAssembler* basm,
                         interpreter::RegisterList list) {
    for (int reg_index = list.register_count() - 1; reg_index >= 0;
         --reg_index) {
      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
    }
    return list.register_count();
  }
};
319 
// Recursive helper behind BaselineAssembler::Pop.
template <typename... T>
struct PopAllHelper;
// Base case: an empty pack pops nothing.
template <>
struct PopAllHelper<> {
  static void Pop(BaselineAssembler* basm) {}
};
// TODO(ishell): try to pack sequence of pops into one instruction by
// looking at register codes. For example, Pop(r1, r2, r5, r0, r3, r4)
// could be generated as two pops: Pop(r1, r2, r5) and Pop(r0, r3, r4).
template <>
struct PopAllHelper<Register> {
  // Pops one stack slot into |reg|.
  static void Pop(BaselineAssembler* basm, Register reg) {
    basm->masm()->Pop(reg);
  }
};
// Pops into |reg| first, then into the remaining registers left to right.
template <typename... T>
struct PopAllHelper<Register, T...> {
  static void Pop(BaselineAssembler* basm, Register reg, T... tail) {
    PopAllHelper<Register>::Pop(basm, reg);
    PopAllHelper<T...>::Pop(basm, tail...);
  }
};
342 
343 }  // namespace detail
344 
// Pushes |vals| left to right; returns the number of slots pushed.
template <typename... T>
int BaselineAssembler::Push(T... vals) {
  return detail::PushAllHelper<T...>::Push(this, vals...);
}
349 
// Pushes |vals| right to left (reverse argument order).
template <typename... T>
void BaselineAssembler::PushReverse(T... vals) {
  detail::PushAllHelper<T...>::PushReverse(this, vals...);
}
354 
// Pops one slot into each of |registers|, left to right.
template <typename... T>
void BaselineAssembler::Pop(T... registers) {
  detail::PopAllHelper<T...>::Pop(this, registers...);
}
359 
// Loads a tagged pointer field; tagged values are full machine words on
// arm (no pointer compression), so a plain ldr suffices.
void BaselineAssembler::LoadTaggedPointerField(Register output, Register source,
                                               int offset) {
  __ ldr(output, FieldMemOperand(source, offset));
}
364 
// Loads a tagged Smi field (full-word load on arm).
void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
                                              int offset) {
  __ ldr(output, FieldMemOperand(source, offset));
}
369 
// Loads a tagged field that may hold either a Smi or a heap object.
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
                                           int offset) {
  __ ldr(output, FieldMemOperand(source, offset));
}
374 
// Loads a 16-bit field zero-extended to 32 bits (ldrh zero-extends).
void BaselineAssembler::LoadWord16FieldZeroExtend(Register output,
                                                  Register source, int offset) {
  __ ldrh(output, FieldMemOperand(source, offset));
}
379 
// Loads an 8-bit field zero-extended to 32 bits (ldrb zero-extends).
void BaselineAssembler::LoadWord8Field(Register output, Register source,
                                       int offset) {
  __ ldrb(output, FieldMemOperand(source, offset));
}
384 
// Stores the Smi constant |value| into a field of |target|. Smis are not
// heap pointers, so no write barrier is needed.
void BaselineAssembler::StoreTaggedSignedField(Register target, int offset,
                                               Smi value) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope temps(this);
  Register tmp = temps.AcquireScratch();
  __ mov(tmp, Operand(value));
  __ str(tmp, FieldMemOperand(target, offset));
}
393 
// Stores |value| into a field of |target| and records the write for the
// GC write barrier. |target| and |value| must be distinct registers.
void BaselineAssembler::StoreTaggedFieldWithWriteBarrier(Register target,
                                                         int offset,
                                                         Register value) {
  ASM_CODE_COMMENT(masm_);
  DCHECK(!AreAliased(target, value));
  __ str(value, FieldMemOperand(target, offset));
  // lr has not been spilled in baseline code, so tell the barrier to save it.
  __ RecordWriteField(target, offset, value, kLRHasNotBeenSaved,
                      SaveFPRegsMode::kIgnore);
}
403 
// Stores |value| into a field of |target| without a write barrier; the
// caller is responsible for guaranteeing no barrier is required.
void BaselineAssembler::StoreTaggedFieldNoWriteBarrier(Register target,
                                                       int offset,
                                                       Register value) {
  __ str(value, FieldMemOperand(target, offset));
}
409 
// Adds the constant |weight| to the closure's feedback-cell interrupt
// budget. When |skip_interrupt_label| is non-null, |weight| must be
// negative and the code branches there if the budget stayed >= 0 (no
// interrupt due), using the flags set by the add.
void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    int32_t weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFunction(feedback_cell);
  // Reuse the register: replace the JSFunction with its FeedbackCell.
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);

  Register interrupt_budget = scratch_scope.AcquireScratch();
  __ ldr(interrupt_budget,
         FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  // Remember to set flags as part of the add!
  __ add(interrupt_budget, interrupt_budget, Operand(weight), SetCC);
  __ str(interrupt_budget,
         FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  if (skip_interrupt_label) {
    // Use compare flags set by add
    DCHECK_LT(weight, 0);
    __ b(ge, skip_interrupt_label);
  }
}
432 
// Register-weight variant of the above: adds |weight| to the feedback
// cell's interrupt budget and, if |skip_interrupt_label| is non-null,
// branches there when the result stayed >= 0 (flags from the add).
void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    Register weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFunction(feedback_cell);
  // Reuse the register: replace the JSFunction with its FeedbackCell.
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);

  Register interrupt_budget = scratch_scope.AcquireScratch();
  __ ldr(interrupt_budget,
         FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  // Remember to set flags as part of the add!
  __ add(interrupt_budget, interrupt_budget, weight, SetCC);
  __ str(interrupt_budget,
         FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  if (skip_interrupt_label) __ b(ge, skip_interrupt_label);
}
451 
// Adds the Smi constant |rhs| to |lhs| in place.
void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
  __ add(lhs, lhs, Operand(rhs));
}
455 
// output = lhs & rhs (32-bit bitwise and with an immediate).
void BaselineAssembler::Word32And(Register output, Register lhs, int rhs) {
  __ and_(output, lhs, Operand(rhs));
}
459 
// Emits a pc-relative jump table dispatching on |reg| - |case_value_base|
// into |labels|. Values outside [0, num_labels) fall through. Note: |reg|
// is clobbered by the bias subtraction.
void BaselineAssembler::Switch(Register reg, int case_value_base,
                               Label** labels, int num_labels) {
  ASM_CODE_COMMENT(masm_);
  Label fallthrough;
  if (case_value_base != 0) {
    __ sub(reg, reg, Operand(case_value_base));
  }

  // Mostly copied from code-generator-arm.cc
  ScratchRegisterScope scope(this);
  JumpIf(Condition::kUnsignedGreaterThanEqual, reg, Operand(num_labels),
         &fallthrough);
  // Ensure to emit the constant pool first if necessary.
  __ CheckConstPool(true, true);
  // Keep the constant pool from being emitted inside the jump table.
  __ BlockConstPoolFor(num_labels);
  int entry_size_log2 = 2;  // Each table entry is a single 4-byte branch.
  // Reading pc on arm yields the current instruction's address + 8, which
  // lands past the unconditional b below, i.e. at the start of the table.
  // The add is predicated on lo, reusing the flags of the range check above.
  __ add(pc, pc, Operand(reg, LSL, entry_size_log2), LeaveCC, lo);
  __ b(&fallthrough);
  for (int i = 0; i < num_labels; ++i) {
    __ b(labels[i]);
  }
  __ bind(&fallthrough);
}
483 
484 #undef __
485 
486 #define __ basm.
487 
// Emits the baseline function epilogue: updates the interrupt budget
// (calling the runtime if it is exhausted), leaves the baseline frame, and
// drops the receiver plus arguments. Uses the BaselineLeaveFrame
// descriptor's registers for the weight and the formal parameter count.
void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
  ASM_CODE_COMMENT(masm);
  BaselineAssembler basm(masm);

  Register weight = BaselineLeaveFrameDescriptor::WeightRegister();
  Register params_size = BaselineLeaveFrameDescriptor::ParamsSizeRegister();

  {
    ASM_CODE_COMMENT_STRING(masm, "Update Interrupt Budget");

    Label skip_interrupt_label;
    __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, &skip_interrupt_label);
    {
      // Budget exhausted: call the runtime. Preserve the (Smi-tagged)
      // params size and the accumulator across the call.
      __ masm()->SmiTag(params_size);
      __ Push(params_size, kInterpreterAccumulatorRegister);

      __ LoadContext(kContextRegister);
      __ LoadFunction(kJSFunctionRegister);
      __ Push(kJSFunctionRegister);
      __ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1);

      __ Pop(kInterpreterAccumulatorRegister, params_size);
      __ masm()->SmiUntag(params_size);
    }

    __ Bind(&skip_interrupt_label);
  }

  BaselineAssembler::ScratchRegisterScope temps(&basm);
  Register actual_params_size = temps.AcquireScratch();
  // Compute the size of the actual parameters + receiver (in bytes).
  __ Move(actual_params_size,
          MemOperand(fp, StandardFrameConstants::kArgCOffset));

  // If actual is bigger than formal, then we should use it to free up the stack
  // arguments.
  Label corrected_args_count;
  __ JumpIf(Condition::kGreaterThanEqual, params_size,
            Operand(actual_params_size), &corrected_args_count);
  __ masm()->mov(params_size, actual_params_size);
  __ Bind(&corrected_args_count);

  // Leave the frame (also dropping the register file).
  __ masm()->LeaveFrame(StackFrame::BASELINE);

  // Drop receiver + arguments.
  __ masm()->DropArguments(params_size, TurboAssembler::kCountIsInteger,
                           TurboAssembler::kCountIncludesReceiver);
  __ masm()->Ret();
}
538 
539 #undef __
540 
// Debug helper: aborts (in debug builds) if |reg| does not hold the same
// value as the interpreter accumulator register.
inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
    Register reg) {
  assembler_->masm()->cmp(reg, kInterpreterAccumulatorRegister);
  assembler_->masm()->Assert(eq, AbortReason::kUnexpectedValue);
}
546 
547 }  // namespace baseline
548 }  // namespace internal
549 }  // namespace v8
550 
551 #endif  // V8_BASELINE_ARM_BASELINE_ASSEMBLER_ARM_INL_H_
552