Lines Matching defs:const
84 StackTransferRecipe(const StackTransferRecipe&) = delete;
85 StackTransferRecipe& operator=(const StackTransferRecipe&) = delete;
97 V8_INLINE void TransferStackSlot(const VarState& dst, const VarState& src) {
124 const LiftoffAssembler::VarState& src,
141 const LiftoffAssembler::VarState& src,
268 LiftoffAssembler* const asm_;
403 const VarState* source, VarState* target, uint32_t count,
409 for (const VarState* source_end = source + count; source < source_end;
447 void LiftoffAssembler::CacheState::InitMerge(const CacheState& source,
470 const VarState* source_begin = source.stack_state.data();
527 void LiftoffAssembler::CacheState::Steal(const CacheState& source) {
532 void LiftoffAssembler::CacheState::Split(const CacheState& source) {
538 int GetSafepointIndexForStackSlot(const VarState& slot) {
554 for (const auto& slot : stack_state) {
572 for (const auto& slot : stack_state) {
582 for (const auto& slot : stack_state) {
596 int LiftoffAssembler::GetTotalFrameSlotCountForGC() const {
740 bool SlotInterference(const VarState& a, const VarState& b) {
746 bool SlotInterference(const VarState& a, base::Vector<const VarState> v) {
747 return std::any_of(v.begin(), v.end(), [&a](const VarState& b) {
755 const CacheState& source) {
935 void PrepareStackTransfers(const ValueKindSig* sig,
937 const VarState* slots,
948 const uint32_t param = i - 1;
950 const bool is_gp_pair = kNeedI64RegPair && kind == kI64;
951 const int num_lowered_params = is_gp_pair ? 2 : 1;
952 const VarState& slot = slots[param];
953 const uint32_t stack_offset = slot.offset();
957 const RegPairHalf half =
987 const ValueKindSig* sig, compiler::CallDescriptor* call_descriptor,
1006 void LiftoffAssembler::PrepareCall(const ValueKindSig* sig,
1085 void LiftoffAssembler::FinishCall(const ValueKindSig* sig,
1090 const bool needs_gp_pair = needs_gp_reg_pair(return_kind);
1091 const int num_lowered_params = 1 + needs_gp_pair;
1092 const ValueKind lowered_kind = needs_gp_pair ? kI32 : return_kind;
1093 const RegClass rc = reg_class_for(lowered_kind);
1149 base::Vector<const ParallelRegisterMoveTuple> tuples) {
1158 const FunctionSig* sig, compiler::CallDescriptor* descriptor) {
1242 bool LiftoffAssembler::ValidateCacheState() const {
1245 for (const VarState& var : cache_state_.stack_state) {
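The matches above surface a few recurring const-correctness idioms. Judging by identifiers such as LiftoffAssembler, VarState, and CacheState, these lines appear to come from V8's Liftoff baseline Wasm compiler, though the listing itself does not name the file. Below is a minimal, self-contained C++ sketch of those idioms using hypothetical stand-in types, not the real V8 classes: a non-copyable helper with deleted copy operations (cf. lines 84-85), const-reference parameters (line 97), a const-qualified member function (line 1242), and const references in range-for loops (lines 554, 572, 582).

#include <vector>

// Hypothetical stand-in for V8's VarState; not the real type.
struct VarState {
  int offset = 0;
};

// Deleted copy constructor and copy assignment (cf. lines 84-85) make the
// type non-copyable -- the usual guard for a helper that records pending
// work against a single assembler instance and must not be duplicated.
class TransferRecipe {
 public:
  TransferRecipe() = default;
  TransferRecipe(const TransferRecipe&) = delete;
  TransferRecipe& operator=(const TransferRecipe&) = delete;

  // Read-only inputs are taken by const reference (cf. line 97).
  void TransferStackSlot(const VarState& dst, const VarState& src) {
    pending_.push_back(src.offset - dst.offset);  // placeholder logic
  }

 private:
  std::vector<int> pending_;
};

// A const member function (cf. line 1242) promises not to mutate state,
// and iteration binds const references (cf. lines 554/572/582).
class CacheState {
 public:
  bool Validate() const {
    for (const VarState& slot : stack_state) {
      if (slot.offset < 0) return false;
    }
    return true;
  }

  std::vector<VarState> stack_state;
};

int main() {
  CacheState state;
  state.stack_state.push_back(VarState{8});
  TransferRecipe recipe;
  recipe.TransferStackSlot(state.stack_state[0], state.stack_state[0]);
  return state.Validate() ? 0 : 1;
}

The deleted-copy pattern is what the "defs:const" query picks up at lines 84-85; the remaining matches are mostly const parameters, const locals, and const-qualified accessors, which this sketch mirrors in miniature.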