// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_BASELINE_X64_BASELINE_COMPILER_X64_INL_H_
#define V8_BASELINE_X64_BASELINE_COMPILER_X64_INL_H_

#include "src/base/macros.h"
#include "src/baseline/baseline-compiler.h"
#include "src/codegen/interface-descriptors.h"

namespace v8 {
namespace internal {
namespace baseline {

#define __ basm_.
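// Shorthand: `__ Foo(...)` below expands to `basm_.Foo(...)`.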

void BaselineCompiler::Prologue() {
  ASM_CODE_COMMENT(&masm_);
  DCHECK_EQ(kJSFunctionRegister, kJavaScriptCallTargetRegister);
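  // The frame has to be large enough for the bytecode's register file plus the
  // arguments of the largest outgoing call.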
  int max_frame_size =
      bytecode_->frame_size() + max_call_args_ * kSystemPointerSize;
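  // The out-of-line prologue builtin sets up the baseline frame; it receives
  // max_frame_size so it can also perform the stack check.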
  CallBuiltin<Builtin::kBaselineOutOfLinePrologue>(
      kContextRegister, kJSFunctionRegister, kJavaScriptCallArgCountRegister,
      max_frame_size, kJavaScriptCallNewTargetRegister, bytecode_);

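  // Fill the register file portion of the new frame.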
  PrologueFillFrame();
}

void BaselineCompiler::PrologueFillFrame() {
  ASM_CODE_COMMENT(&masm_);
  // Inlined register frame fill
  interpreter::Register new_target_or_generator_register =
      bytecode_->incoming_new_target_or_generator_register();
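  // The accumulator is expected to hold the undefined value here; it is used
  // as the fill value for the register file below.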
  if (FLAG_debug_code) {
    __ masm()->Cmp(kInterpreterAccumulatorRegister,
                   handle(ReadOnlyRoots(local_isolate_).undefined_value(),
                          local_isolate_));
    __ masm()->Assert(equal, AbortReason::kUnexpectedValue);
  }
  int register_count = bytecode_->register_count();
  // Unroll factor for the register-fill loop below.
  const int kLoopUnrollSize = 8;
  const int new_target_index = new_target_or_generator_register.index();
  const bool has_new_target = new_target_index != kMaxInt;
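  // An index of kMaxInt means there is no incoming new_target or generator
  // register. Otherwise, fill the slots up to it, push the new_target value
  // into its slot, and fill the remaining registers below.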
  if (has_new_target) {
    DCHECK_LE(new_target_index, register_count);
    for (int i = 0; i < new_target_index; i++) {
      __ Push(kInterpreterAccumulatorRegister);
    }
    // Push new_target_or_generator.
    __ Push(kJavaScriptCallNewTargetRegister);
    register_count -= new_target_index + 1;
  }
  if (register_count < 2 * kLoopUnrollSize) {
    // If the frame is small enough, just unroll the frame fill completely.
    for (int i = 0; i < register_count; ++i) {
      __ Push(kInterpreterAccumulatorRegister);
    }
  } else {
    // Extract the first few registers to round to the unroll size.
    int first_registers = register_count % kLoopUnrollSize;
    for (int i = 0; i < first_registers; ++i) {
      __ Push(kInterpreterAccumulatorRegister);
    }
    BaselineAssembler::ScratchRegisterScope scope(&basm_);
    Register scratch = scope.AcquireScratch();
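    // The scratch register counts the remaining chunks of kLoopUnrollSize
    // pushes.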
    __ Move(scratch, register_count / kLoopUnrollSize);
    // We enter the loop unconditionally, so make sure we need to loop at least
    // once.
    DCHECK_GT(register_count / kLoopUnrollSize, 0);
    Label loop;
    __ Bind(&loop);
    for (int i = 0; i < kLoopUnrollSize; ++i) {
      __ Push(kInterpreterAccumulatorRegister);
    }
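    // Decrement the chunk counter and loop back while it is still positive.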
    __ masm()->decl(scratch);
    __ masm()->j(greater, &loop);
  }
}

void BaselineCompiler::VerifyFrameSize() {
  ASM_CODE_COMMENT(&masm_);
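  // The stack pointer plus the fixed frame size and the bytecode's frame size
  // should point exactly at the frame pointer.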
  __ Move(kScratchRegister, rsp);
  __ masm()->addq(kScratchRegister,
                  Immediate(InterpreterFrameConstants::kFixedFrameSizeFromFp +
                            bytecode_->frame_size()));
  __ masm()->cmpq(kScratchRegister, rbp);
  __ masm()->Assert(equal, AbortReason::kUnexpectedStackPointer);
}

#undef __

}  // namespace baseline
}  // namespace internal
}  // namespace v8

#endif  // V8_BASELINE_X64_BASELINE_COMPILER_X64_INL_H_