Lines Matching refs:gp
676 reg = LiftoffRegister::ForPair(reg.gp(), reg2.gp());
689 Register limit_address = __ GetUnusedRegister(kGpReg, {}).gp();
759 __ emit_i32_subi_jump_negative(budget_reg.gp(), budget_used,
839 tmp.gp(), kWasmInstanceRegister,
841 __ LoadTaggedPointer(tmp.gp(), tmp.gp(), no_reg,
851 LOAD_INSTANCE_FIELD(tmp.gp(), TieringBudgetArray, kSystemPointerSize,
855 __ Load(tmp, tmp.gp(), no_reg, offset, LoadType::kI32Load, pinned);
885 Register null_ref_reg = __ GetUnusedRegister(kGpReg, {}).gp();
1083 Register flag = __ GetUnusedRegister(kGpReg, {}).gp();
1121 __ Load(max_steps, max_steps_addr.gp(), no_reg, 0, LoadType::kI32Load,
1124 __ emit_i32_cond_jumpi(kUnequal, &cont, max_steps.gp(), 0);
1128 __ emit_i32_subi(max_steps.gp(), max_steps.gp(), 1);
1129 __ Store(max_steps_addr.gp(), no_reg, 0, max_steps, StoreType::kI32Store,
1206 LoadExceptionSymbol(tag_symbol_reg.gp(), pinned, root_index);
1209 LOAD_TAGGED_PTR_INSTANCE_FIELD(context_reg.gp(), NativeContext, pinned);
1252 Register imm_tag = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
1260 __ emit_cond_jump(kEqual, &caught, kI32, imm_tag, caught_tag.gp());
1342 Register value = __ PopToRegister().gp();
1355 Register lhs = __ PopToRegister().gp();
1360 Register rhs = __ PopToRegister().gp();
1372 Register lhs = __ PopToRegister(LiftoffRegList{rhs}).gp();
1526 operator Register() { return reg.gp(); }
1615 __ emit_cond_jump(kEqual, trap, kI32, ret_reg.gp());
1732 if (__ emit_i32_popcnt(dst.gp(), src.gp())) return;
1757 LoadNullValue(null.gp(), pinned);
1762 dst.gp(), ref, null);
1846 __ emit_cond_jump(kEqual, trap_by_zero, kI32, ret.gp(), tmp.gp());
1849 __ emit_cond_jump(kEqual, trap_unrepresentable, kI32, ret.gp(), tmp.gp());
1874 amount.is_gp_pair() ? amount.low_gp() : amount.gp()); \
2062 __ emit_i32_divs(dst.gp(), lhs.gp(), rhs.gp(), div_by_zero,
2071 __ emit_i32_divu(dst.gp(), lhs.gp(), rhs.gp(), div_by_zero);
2079 __ emit_i32_rems(dst.gp(), lhs.gp(), rhs.gp(), rem_by_zero);
2087 __ emit_i32_remu(dst.gp(), lhs.gp(), rhs.gp(), rem_by_zero);
2184 LoadNullValue(null.gp(), {});
2200 MaybeEmitNullCheck(decoder, obj.gp(), pinned, arg.type);
2213 __ AllocateStackSlot(info.gp(), sizeof(int64_t));
2223 __ StoreTaggedPointer(info.gp(), no_reg, 0, return_reg, pinned);
2225 __ Store(info.gp(), no_reg, 0, return_reg,
2234 if (info.gp() != param_reg) {
2235 __ Move(param_reg, info.gp(), kPointerKind);
2253 LOAD_INSTANCE_FIELD(array.gp(), TieringBudgetArray, kSystemPointerSize,
2258 __ Store(array.gp(), no_reg, offset, budget, StoreType::kI32Store, pinned);
2357 Register addr = pinned->set(__ GetUnusedRegister(kGpReg, {})).gp();
2375 pinned->set(__ GetUnusedRegister(kGpReg, *pinned)).gp();
2388 pinned->set(__ GetUnusedRegister(kGpReg, *pinned)).gp();
2424 pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
2427 Register value = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
2467 pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
2559 LoadNullValue(null.gp(), pinned);
2560 __ emit_cond_jump(kUnequal, trap_label, kOptRef, obj.gp(), null.gp());
2571 Register condition = pinned.set(__ PopToRegister()).gp();
2697 __ emit_cond_jump(kUnsignedGreaterEqual, &upper_half, kI32, value.gp(),
2698 tmp.gp());
2721 __ emit_cond_jump(kUnsignedGreaterEqual, &case_default, kI32, value.gp(),
2722 tmp.gp());
2808 kNeedI64RegPair && index.is_gp_pair() ? index.low_gp() : index.gp();
2854 LOAD_INSTANCE_FIELD(mem_size.gp(), MemorySize, kSystemPointerSize, pinned);
2863 end_offset_reg.gp(), mem_size.gp());
2869 __ emit_ptrsize_sub(effective_size_reg.gp(), mem_size.gp(),
2870 end_offset_reg.gp());
2873 index_ptrsize, effective_size_reg.gp());
2883 Register address = __ GetUnusedRegister(kGpReg, pinned).gp();
2918 __ emit_i32_add(effective_offset.gp(), effective_offset.gp(), index);
2924 __ AllocateStackSlot(info.gp(), sizeof(MemoryTracingInfo));
2937 info.gp(), no_reg, offsetof(MemoryTracingInfo, offset), data,
2941 __ Store(info.gp(), no_reg, offsetof(MemoryTracingInfo, is_store), data,
2944 __ Store(info.gp(), no_reg, offsetof(MemoryTracingInfo, mem_rep), data,
2951 if (info.gp() != param_reg) {
2952 __ Move(param_reg, info.gp(), kPointerKind);
2984 memory_start = __ GetUnusedRegister(kGpReg, pinned).gp();
3203 Register mem_size = __ GetUnusedRegister(kGpReg, {}).gp();
3212 result = LiftoffRegister::ForPair(mem_size, high_word.gp());
3237 __ emit_cond_jump(kUnequal /* neq */, &done, kI32, high_word.gp());
3247 if (input.gp() != param_reg) __ Move(param_reg, input.gp(), kI32);
3253 if (kReturnRegister0 != result.gp()) {
3254 __ Move(result.gp(), kReturnRegister0, kI32);
3404 Register null = __ GetUnusedRegister(kGpReg, pinned).gp();
3406 __ emit_cond_jump(kUnequal, &cont_false, ref_object.type.kind(), ref.gp(),
3429 Register null = __ GetUnusedRegister(kGpReg, pinned).gp();
3431 __ emit_cond_jump(kEqual, &cont_false, ref_object.type.kind(), ref.gp(),
4167 __ emit_i32_andi(tmp_reg.gp(), value, 0xffff);
4168 ToSmi(tmp_reg.gp());
4176 __ emit_i32_shri(tmp_reg.gp(), value, 16);
4177 ToSmi(tmp_reg.gp());
4192 Store32BitExceptionValue(values_array, index_in_array, value.gp(),
4195 Store32BitExceptionValue(values_array, index_in_array, value.gp(),
4204 dst, values_array.gp(),
4213 __ emit_i32_shli(upper.gp(), upper.gp(), 16);
4215 __ emit_i32_or(dst, upper.gp(), dst);
4245 Store32BitExceptionValue(values_array, index_in_array, value.gp(),
4252 Store32BitExceptionValue(values_array, index_in_array, gp_reg.gp(),
4272 Store32BitExceptionValue(values_array, index_in_array, tmp_reg.gp(),
4302 Load32BitExceptionValue(value.gp(), values_array, index, pinned);
4307 Load32BitExceptionValue(tmp_reg.gp(), values_array, index, pinned);
4327 Load32BitExceptionValue(tmp_reg.gp(), values_array, index, pinned);
4330 Load32BitExceptionValue(tmp_reg.gp(), values_array, index, pinned);
4339 value.gp(), values_array.gp(), no_reg,
4420 // The FixedArray for the exception values is now in the first gp return
4433 StoreExceptionValue(type, values_array.gp(), &index, pinned);
4441 LOAD_TAGGED_PTR_INSTANCE_FIELD(exception_tag.gp(), TagsTable, pinned);
4443 exception_tag.gp(), exception_tag.gp(), no_reg,
4562 Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
4645 ? pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp()
4691 ? pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp()
4856 __ emit_u32_to_uintptr(intptr_reg.gp(), reg.gp());
4875 ? __ GetUnusedRegister(kGpReg, *pinned).gp()
4876 : __ GetUnusedRegister(kGpReg, {reg.high()}, *pinned).gp();
4900 instance = __ GetUnusedRegister(kGpReg, pinned).gp();
4928 __ emit_cond_jump(kEqual, trap_label, kI32, result.gp());
4935 pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
4948 __ Store(seg_size_array, seg_index.gp(), 0, null_reg, StoreType::kI32Store,
4966 instance = __ GetUnusedRegister(kGpReg, pinned).gp();
4987 __ emit_cond_jump(kEqual, trap_label, kI32, result.gp());
5003 instance = __ GetUnusedRegister(kGpReg, pinned).gp();
5024 __ emit_cond_jump(kEqual, trap_label, kI32, result.gp());
5066 pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
5078 __ Store(dropped_elem_segments, seg_index.gp(), 0, one_reg,
5144 Register tables = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
5213 StoreObjectField(obj.gp(), no_reg, offset, value, pinned, field_kind);
5245 MaybeEmitNullCheck(decoder, obj.gp(), pinned, struct_obj.type);
5248 LoadObjectField(value, obj.gp(), no_reg, offset, field_kind, is_signed,
5262 MaybeEmitNullCheck(decoder, obj.gp(), pinned, struct_obj.type);
5263 StoreObjectField(obj.gp(), no_reg, offset, value, pinned, field_kind);
5274 __ emit_i32_cond_jumpi(kUnsignedGreaterThan, trap_label, length.gp(),
5314 __ emit_i32_shli(end_offset.gp(), length.gp(),
5317 __ emit_i32_add(end_offset.gp(), end_offset.gp(), offset.gp());
5320 __ emit_cond_jump(kUnsignedGreaterEqual, &done, kI32, offset.gp(),
5321 end_offset.gp());
5322 StoreObjectField(obj.gp(), offset.gp(), 0, value, pinned, elem_kind);
5323 __ emit_i32_addi(offset.gp(), offset.gp(), elem_size);
5354 MaybeEmitNullCheck(decoder, array.gp(), pinned, array_obj.type);
5360 __ emit_i32_shli(index.gp(), index.gp(), elem_size_shift);
5364 LoadObjectField(value, array.gp(), index.gp(),
5379 MaybeEmitNullCheck(decoder, array.gp(), pinned, array_obj.type);
5384 __ emit_i32_shli(index.gp(), index.gp(), elem_size_shift);
5386 StoreObjectField(array.gp(), index.gp(),
5394 MaybeEmitNullCheck(decoder, obj.gp(), pinned, array_obj.type);
5397 LoadObjectField(len, obj.gp(), no_reg, kLengthOffset, kI32, false, pinned);
5462 StoreObjectField(array.gp(), offset_reg.gp(),
5499 __ emit_cond_jump(kEqual, trap_label_array_too_large, kRef, result.gp(),
5500 error_smi.gp());
5505 result.gp(), error_smi.gp());
5518 __ emit_i32_shli(dst.gp(), src.gp(), kSmiTagSize);
5530 __ emit_i32_sari(dst.gp(), src.gp(), kSmiTagSize);
5542 __ emit_i32_shri(dst.gp(), src.gp(), kSmiTagSize);
5552 LOAD_TAGGED_PTR_INSTANCE_FIELD(rtt.gp(), ManagedObjectMaps, {});
5554 rtt.gp(), rtt.gp(), no_reg,
5583 LoadNullValue(tmp1.gp(), pinned);
5585 obj.type.kind(), obj_reg.gp(), tmp1.gp());
5588 __ LoadMap(tmp1.gp(), obj_reg.gp());
5593 __ emit_cond_jump(kEqual, &match, rtt.type.kind(), tmp1.gp(), rtt_reg.gp());
5600 __ LoadTaggedPointer(tmp1.gp(), tmp1.gp(), no_reg, kTypeInfoOffset, pinned);
5604 __ LoadTaggedPointer(tmp1.gp(), tmp1.gp(), no_reg, kSuperTypesOffset,
5611 __ LoadFixedArrayLengthAsInt32(list_length, tmp1.gp(), pinned);
5612 __ emit_i32_cond_jumpi(kUnsignedLessEqual, no_match, list_length.gp(),
5617 tmp1.gp(), tmp1.gp(), no_reg,
5619 __ emit_cond_jump(kUnequal, no_match, rtt.type.kind(), tmp1.gp(),
5620 rtt_reg.gp());
5634 result.gp());
5709 EmitDataRefCheck(registers.map_reg.gp(), no_match, registers.tmp_reg,
5718 __ Load(registers.map_reg, registers.map_reg.gp(), no_reg,
5721 __ emit_i32_cond_jumpi(kUnequal, no_match, registers.map_reg.gp(),
5730 __ Load(registers.map_reg, registers.map_reg.gp(), no_reg,
5733 __ emit_i32_cond_jumpi(kUnequal, no_match, registers.map_reg.gp(),
5742 __ emit_smi_check(obj_reg.gp(), no_match, LiftoffAssembler::kJumpOnNotSmi);
5757 (this->*type_checker)(object, &no_match, pinned, result.gp());
5943 Register tmp = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
5944 Register target = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
6013 Register index = __ PopToModifiableRegister().gp();
6017 Register table = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
6018 Register tmp_const = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
6019 Register scratch = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
6023 pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
6191 target_reg = LiftoffRegister(kReturnRegister0).gp();
6192 instance_reg = LiftoffRegister(kReturnRegister1).gp();
6208 MaybeEmitNullCheck(decoder, func_ref.gp(), pinned, func_ref_type);
6216 instance.gp(), func_ref.gp(), no_reg,
6221 LOAD_INSTANCE_FIELD(temp.gp(), IsolateRoot, kSystemPointerSize, pinned);
6222 __ LoadExternalPointer(target.gp(), func_ref.gp(),
6224 kForeignForeignAddressTag, temp.gp());
6226 __ Load(target, func_ref.gp(), no_reg,
6236 __ emit_cond_jump(kUnequal, &perform_call, kRef, target.gp(),
6237 null_address.gp());
6240 target.gp(), func_ref.gp(), no_reg,
6244 __ LoadCodeDataContainerEntry(target.gp(), target.gp());
6246 __ emit_ptrsize_addi(target.gp(), target.gp(),
6254 target_reg = target.gp();
6255 instance_reg = instance.gp();
6293 LoadNullValue(null.gp(), pinned);
6295 null.gp());
6306 __ Load(length, array.gp(), no_reg, kLengthOffset, LoadType::kI32Load,
6309 index.gp(), length.gp());
6321 __ LoadTaggedPointer(dst.gp(), src, offset_reg, offset, pinned);
6359 return LoadNullValue(reg.gp(), pinned);
6386 LoadNullValue(map_reg.gp(), pinned);
6387 __ emit_cond_jump(kEqual, no_match, kOptRef, obj_reg.gp(), map_reg.gp());
6390 __ emit_smi_check(obj_reg.gp(), no_match, LiftoffAssembler::kJumpOnSmi);
6392 __ LoadMap(map_reg.gp(), obj_reg.gp());
6405 __ emit_i32_subi(tmp.gp(), tmp.gp(), FIRST_WASM_OBJECT_TYPE);
6406 __ emit_i32_cond_jumpi(kUnsignedGreaterThan, not_data_ref, tmp.gp(),
6432 __ emit_set_if_nan(nondeterminism_addr.gp(), src.fp(), kind);
6445 __ emit_s128_set_if_nan(nondeterminism_addr.gp(), dst, tmp_gp.gp(),
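
Every match above unwraps a LiftoffRegister into a plain machine register before handing it to an assembler helper: gp() for a general-purpose register, fp() for a floating-point register, and low_gp()/high_gp() for an i64 register pair on 32-bit targets (see the "amount.is_gp_pair() ? amount.low_gp() : amount.gp()" and "ForPair(...)" matches). As rough orientation only, the sketch below models that accessor pattern in a self-contained way; the class layout, register types, and the main() driver are hypothetical stand-ins, not V8's actual implementation.

```cpp
// Simplified, self-contained model of the accessor pattern seen in the
// matches above. The names mirror V8's LiftoffRegister API (gp(), fp(),
// is_gp_pair(), low_gp(), high_gp(), ForPair), but everything here is a
// hypothetical illustration, not the real code in src/wasm/baseline/.
#include <cassert>
#include <cstdio>

// Stand-ins for the architecture register types (hypothetical).
struct Register       { int code; };
struct DoubleRegister { int code; };

enum class RegClass { kGpReg, kFpReg, kGpRegPair };

class LiftoffRegister {
 public:
  static LiftoffRegister ForGp(Register r) {
    return LiftoffRegister(RegClass::kGpReg, r.code, -1);
  }
  static LiftoffRegister ForFp(DoubleRegister r) {
    return LiftoffRegister(RegClass::kFpReg, r.code, -1);
  }
  // Pairs two GP registers, e.g. to hold an i64 on a 32-bit target.
  static LiftoffRegister ForPair(Register low, Register high) {
    return LiftoffRegister(RegClass::kGpRegPair, low.code, high.code);
  }

  bool is_gp_pair() const { return rc_ == RegClass::kGpRegPair; }

  // gp() is only meaningful for a plain GP register; callers that might see
  // an i64 pair first check is_gp_pair() and use low_gp()/high_gp() instead.
  Register gp() const {
    assert(rc_ == RegClass::kGpReg);
    return Register{low_code_};
  }
  Register low_gp() const {
    assert(is_gp_pair());
    return Register{low_code_};
  }
  Register high_gp() const {
    assert(is_gp_pair());
    return Register{high_code_};
  }
  DoubleRegister fp() const {
    assert(rc_ == RegClass::kFpReg);
    return DoubleRegister{low_code_};
  }

 private:
  LiftoffRegister(RegClass rc, int low, int high)
      : rc_(rc), low_code_(low), high_code_(high) {}
  RegClass rc_;
  int low_code_;
  int high_code_;
};

int main() {
  LiftoffRegister scratch = LiftoffRegister::ForGp(Register{3});
  LiftoffRegister wide = LiftoffRegister::ForPair(Register{4}, Register{5});

  // Mirrors the common shape of the matches: unwrap to a plain Register
  // before passing it to an emit_* style helper.
  std::printf("scratch gp: r%d\n", scratch.gp().code);
  std::printf("wide pair:  r%d:r%d\n",
              wide.is_gp_pair() ? wide.low_gp().code : wide.gp().code,
              wide.high_gp().code);
  return 0;
}
```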