Lines matching refs:addr: references to the SVEMemOperand parameter addr in VIXL's AArch64 SVE assembler. Each entry below is prefixed with its line number in the source file.
50 void Assembler::adr(const ZRegister& zd, const SVEMemOperand& addr) {
52 VIXL_ASSERT(addr.IsVectorPlusVector());
54 AreSameLaneSize(zd, addr.GetVectorBase(), addr.GetVectorOffset()));
59 int shift_amount = addr.GetShiftAmount();
64 SVEOffsetModifier mod = addr.GetOffsetModifier();
82 Emit(op | msz | Rd(zd) | Rn(addr.GetVectorBase()) |
83 Rm(addr.GetVectorOffset()));
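The adr lines above (50-83) encode SVE's vector ADR, which only accepts a vector-plus-vector operand with matching lane sizes and an optional left shift of the offset elements. A minimal usage sketch, assuming masm names a vixl::aarch64::Assembler and that SVEMemOperand has a vector-plus-vector constructor of the shape shown (spellings are assumptions, not taken from the listing):

    // adr z0.d, [z1.d, z2.d]          -- element-wise z1.d + z2.d
    masm.adr(z0.VnD(), SVEMemOperand(z1.VnD(), z2.VnD()));
    // adr z0.d, [z1.d, z2.d, lsl #2]  -- offset elements scaled by 4
    masm.adr(z0.VnD(), SVEMemOperand(z1.VnD(), z2.VnD(), SVE_LSL, 2));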
3822 const SVEMemOperand& addr,
3825 VIXL_ASSERT(addr.IsContiguous());
3827 Instr mem_op = SVEMemOperandHelper(msize_in_bytes_log2, 1, addr);
3836 const SVEMemOperand& addr,
3843 Instr mem_op = SVEMemOperandHelper(msize_in_bytes_log2, num_regs, addr);
3850 const SVEMemOperand& addr,
3859 if (addr.IsScatterGather()) {
3865 addr,
3873 if (addr.IsScalarPlusImmediate()) {
3875 } else if (addr.IsScalarPlusScalar()) {
3877 VIXL_ASSERT(!addr.GetScalarOffset().IsZero());
3882 SVELdSt1Helper(msize_in_bytes_log2, zt, pg, addr, is_signed, op);
3888 const SVEMemOperand& addr,
3897 if (addr.IsScatterGather()) {
3903 addr,
3910 if (addr.IsPlainScalar()) {
3913 SVEMemOperand addr_scalar_plus_scalar(addr.GetScalarBase(), xzr);
3925 if (addr.IsScalarPlusScalar()) {
3930 SVELdSt1Helper(msize_in_bytes_log2, zt, pg, addr, is_signed, op);
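The helper fragments from 3822 to 3930 all follow the same routing: a scatter-gather operand goes to the gather/scatter encoder, otherwise the contiguous encoding is chosen from the scalar-plus-immediate or scalar-plus-scalar form (a zero scalar offset register is rejected). A simplified restatement of that dispatch; the Encode* names are invented for illustration:

    // Hypothetical sketch of the routing performed by the SVELd1/SVELdff1 helpers.
    if (addr.IsScatterGather()) {
      EncodeGather(zt, pg, addr);          // vector base, or vector offset
    } else if (addr.IsScalarPlusImmediate()) {
      EncodeContiguousImm(zt, pg, addr);   // [xn{, #imm, mul vl}]
    } else if (addr.IsScalarPlusScalar()) {
      EncodeContiguousReg(zt, pg, addr);   // [xn, xm{, lsl #shift}], xm != xzr
    }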
3936 const SVEMemOperand& addr,
3940 VIXL_ASSERT(addr.IsScatterGather());
3946 if (addr.IsVectorPlusImmediate()) {
3947 VIXL_ASSERT(AreSameLaneSize(zt, addr.GetVectorBase()));
3962 VIXL_ASSERT(addr.IsScalarPlusVector());
3963 VIXL_ASSERT(AreSameLaneSize(zt, addr.GetVectorOffset()));
3964 SVEOffsetModifier mod = addr.GetOffsetModifier();
3967 unsigned shift_amount = addr.GetShiftAmount();
4008 unsigned shift_amount = addr.GetShiftAmount();
4034 Instr mem_op = SVEMemOperandHelper(msize_in_bytes_log2, 1, addr, is_load);
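SVEScatterGatherHelper (3936-4034) handles the two gather shapes: a vector base plus immediate, where the transfer register and the base must share a lane size, and a scalar base plus a vector offset with an optional uxtw/sxtw/lsl modifier and shift. Illustrative operands, with masm an assumed Assembler instance and the SVEMemOperand constructors assumed:

    // ld1w { z0.s }, p0/z, [z1.s, #4]          -- vector base plus immediate
    masm.ld1w(z0.VnS(), p0.Zeroing(), SVEMemOperand(z1.VnS(), 4));
    // ld1d { z0.d }, p0/z, [x0, z1.d, lsl #3]  -- scalar base plus scaled vector offset
    masm.ld1d(z0.VnD(), p0.Zeroing(), SVEMemOperand(x0, z1.VnD(), SVE_LSL, 3));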
4044 const SVEMemOperand& addr) {
4045 if (addr.IsScalarPlusScalar()) {
4047 VIXL_ASSERT(!addr.GetScalarOffset().IsZero());
4051 if (addr.IsScalarPlusImmediate()) {
4053 } else if (addr.IsScalarPlusScalar()) {
4059 SVELdSt234Helper(num_regs, zt1, pg, addr, op);
4067 const SVEMemOperand& addr) { \
4069 SVELd1Helper(k##LANE_SIZE##RegSizeInBytesLog2, zt, pg, addr, false); \
4075 const SVEMemOperand& addr) { \
4081 SVELd234Helper(2, zt1, pg, addr); \
4088 const SVEMemOperand& addr) { \
4094 SVELd234Helper(3, zt1, pg, addr); \
4102 const SVEMemOperand& addr) { \
4108 SVELd234Helper(4, zt1, pg, addr); \
4119 const SVEMemOperand& addr) { \
4121 SVELd1Helper(k##LANE_SIZE##RegSizeInBytesLog2, zt, pg, addr, true); \
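The ld2/ld3/ld4 wrappers (4067-4111) expand through SVELd234Helper, which takes the first register of a consecutive tuple and accepts either a VL-scaled scalar-plus-immediate operand or a scalar-plus-scalar operand whose index register is not xzr. A hedged sketch; masm and the constructor spellings are assumptions:

    // ld2b { z0.b, z1.b }, p0/z, [x0, #2, mul vl]
    masm.ld2b(z0.VnB(), z1.VnB(), p0.Zeroing(), SVEMemOperand(x0, 2, SVE_MUL_VL));
    // ld3h { z4.h, z5.h, z6.h }, p1/z, [x0, x1, lsl #1]
    masm.ld3h(z4.VnH(), z5.VnH(), z6.VnH(), p1.Zeroing(), SVEMemOperand(x0, x1, SVE_LSL, 1));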
4130 const SVEMemOperand& addr,
4132 VIXL_ASSERT(addr.IsScalarPlusImmediate());
4140 int64_t imm = addr.GetImmediateOffset();
4147 Emit(SVELoadAndBroadcastElementFixed | dtype | RnSP(addr.GetScalarBase()) |
4158 const SVEMemOperand& addr) {
4161 SVELd1BroadcastHelper(kBRegSizeInBytesLog2, zt, pg, addr, false);
4170 const SVEMemOperand& addr) {
4173 SVELd1BroadcastHelper(kHRegSizeInBytesLog2, zt, pg, addr, false);
4181 const SVEMemOperand& addr) {
4184 SVELd1BroadcastHelper(kSRegSizeInBytesLog2, zt, pg, addr, false);
4189 const SVEMemOperand& addr) {
4192 SVELd1BroadcastHelper(kDRegSizeInBytesLog2, zt, pg, addr, false);
4201 const SVEMemOperand& addr) {
4204 SVELd1BroadcastHelper(kBRegSizeInBytesLog2, zt, pg, addr, true);
4212 const SVEMemOperand& addr) {
4215 SVELd1BroadcastHelper(kHRegSizeInBytesLog2, zt, pg, addr, true);
4220 const SVEMemOperand& addr) {
4223 SVELd1BroadcastHelper(kWRegSizeInBytesLog2, zt, pg, addr, true);
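The ld1r* entries (4130-4223) are SVE's load-and-broadcast forms: SVELd1BroadcastHelper insists on a scalar-plus-immediate operand, reads one element at the base plus a small element-scaled unsigned offset, and replicates it to every active lane; the ld1rsb/ld1rsh/ld1rsw variants sign-extend into wider lanes. For example (masm and constructors assumed):

    // ld1rw { z0.s }, p0/z, [x0, #8]   -- broadcast the word at x0 + 8
    masm.ld1rw(z0.VnS(), p0.Zeroing(), SVEMemOperand(x0, 8));
    // ld1rsh { z1.d }, p0/z, [x0, #2]  -- sign-extend one halfword into every .d lane
    masm.ld1rsh(z1.VnD(), p0.Zeroing(), SVEMemOperand(x0, 2));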
4226 void Assembler::ldr(const CPURegister& rt, const SVEMemOperand& addr) {
4231 VIXL_ASSERT(addr.IsPlainScalar() ||
4232 (addr.IsScalarPlusImmediate() &&
4233 (addr.GetOffsetModifier() == SVE_MUL_VL)));
4234 int64_t imm9 = addr.GetImmediateOffset();
4243 Emit(op | Rt(rt) | RnSP(addr.GetScalarBase()) | imm9h | imm9l);
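This ldr (4226-4243) is the unpredicated whole-register load for Z and P registers; the operand must be a plain scalar base or a scalar plus a 9-bit signed MUL VL immediate, which the encoder splits across the imm9h and imm9l fields. For example (masm assumed):

    // ldr z3, [x0, #-2, mul vl]
    masm.ldr(z3, SVEMemOperand(x0, -2, SVE_MUL_VL));
    // ldr p2, [sp, #1, mul vl]
    masm.ldr(p2, SVEMemOperand(sp, 1, SVE_MUL_VL));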
4496 const SVEMemOperand& addr,
4498 VIXL_ASSERT(addr.IsVectorPlusImmediate());
4499 ZRegister zn = addr.GetVectorBase();
4525 int64_t imm5 = addr.GetImmediateOffset();
4533 const SVEMemOperand& addr,
4535 VIXL_ASSERT(addr.IsScalarPlusImmediate());
4536 int64_t imm6 = addr.GetImmediateOffset();
4558 RnSP(addr.GetScalarBase()) | ImmField<21, 16>(imm6));
4564 const SVEMemOperand& addr,
4566 VIXL_ASSERT(addr.IsScalarPlusScalar());
4571 VIXL_ASSERT(addr.GetOffsetModifier() == NO_SVE_OFFSET_MODIFIER);
4575 VIXL_ASSERT(addr.GetOffsetModifier() == SVE_LSL);
4576 VIXL_ASSERT(addr.GetShiftAmount() == kHRegSizeInBytesLog2);
4580 VIXL_ASSERT(addr.GetOffsetModifier() == SVE_LSL);
4581 VIXL_ASSERT(addr.GetShiftAmount() == kSRegSizeInBytesLog2);
4585 VIXL_ASSERT(addr.GetOffsetModifier() == SVE_LSL);
4586 VIXL_ASSERT(addr.GetShiftAmount() == kDRegSizeInBytesLog2);
4594 VIXL_ASSERT(!addr.GetScalarOffset().IsZero());
4596 RnSP(addr.GetScalarBase()) | Rm(addr.GetScalarOffset()));
4602 const SVEMemOperand& addr,
4604 VIXL_ASSERT(addr.IsScalarPlusVector());
4605 ZRegister zm = addr.GetVectorOffset();
4606 SVEOffsetModifier mod = addr.GetOffsetModifier();
4612 VIXL_ASSERT(addr.GetShiftAmount() == kBRegSizeInBytesLog2);
4615 VIXL_ASSERT(addr.GetShiftAmount() == kHRegSizeInBytesLog2);
4618 VIXL_ASSERT(addr.GetShiftAmount() == kSRegSizeInBytesLog2);
4621 VIXL_ASSERT(addr.GetShiftAmount() == kDRegSizeInBytesLog2);
4686 RnSP(addr.GetScalarBase()) | Rm(zm));
4691 const SVEMemOperand& addr,
4693 if (addr.IsVectorPlusImmediate()) {
4696 SVEGatherPrefetchVectorPlusImmediateHelper(prfop, pg, addr, prefetch_size);
4698 } else if (addr.IsScalarPlusImmediate()) {
4701 SVEGatherPrefetchScalarPlusImmediateHelper(prfop, pg, addr, prefetch_size);
4703 } else if (addr.IsScalarPlusVector()) {
4706 SVEContiguousPrefetchScalarPlusVectorHelper(prfop, pg, addr, prefetch_size);
4708 } else if (addr.IsScalarPlusScalar()) {
4711 SVEContiguousPrefetchScalarPlusScalarHelper(prfop, pg, addr, prefetch_size);
4720 const SVEMemOperand& addr) {
4722 SVEPrefetchHelper(prfop, pg, addr, kBRegSize);
4727 const SVEMemOperand& addr) {
4729 SVEPrefetchHelper(prfop, pg, addr, kDRegSize);
4734 const SVEMemOperand& addr) {
4736 SVEPrefetchHelper(prfop, pg, addr, kHRegSize);
4741 const SVEMemOperand& addr) {
4743 SVEPrefetchHelper(prfop, pg, addr, kSRegSize);
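prfb, prfd, prfh and prfw (4691-4743) share SVEPrefetchHelper, which routes all four operand shapes shown in the dispatch above: vector-plus-immediate, scalar-plus-immediate, scalar-plus-vector and scalar-plus-scalar. Two illustrative forms; masm, the prefetch-operation constant names and the constructors are assumptions:

    // prfw pldl1keep, p0, [x0, #3, mul vl]
    masm.prfw(PLDL1KEEP, p0, SVEMemOperand(x0, 3, SVE_MUL_VL));
    // prfd pstl2strm, p1, [x0, z1.d, lsl #3]
    masm.prfd(PSTL2STRM, p1, SVEMemOperand(x0, z1.VnD(), SVE_LSL, 3));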
4748 const SVEMemOperand& addr,
4753 VIXL_ASSERT(addr.IsScalarPlusScalar() || addr.IsScalarPlusImmediate());
4756 if (addr.IsScalarPlusScalar()) {
4757 op = regoffset_op | Rm(addr.GetScalarOffset());
4759 int64_t imm = addr.GetImmediateOffset();
4763 Emit(op | Rt(zt) | PgLow8(pg) | RnSP(addr.GetScalarBase()));
4768 const SVEMemOperand& addr,
4772 VIXL_ASSERT(addr.IsVectorPlusScalar());
4773 ZRegister zn = addr.GetVectorBase();
4784 SVEMemOperandHelper(msize_bytes_log2, 1, addr, true));
4789 const SVEMemOperand& addr,
4792 VIXL_ASSERT(addr.IsVectorPlusScalar());
4793 ZRegister zn = addr.GetVectorBase();
4801 SVEMemOperandHelper(msize_bytes_log2, 1, addr, true));
4817 const SVEMemOperand& addr) { \
4820 VIXL_ASSERT(addr.IsScalarPlusImmediate() || addr.IsEquivalentToLSL(SH)); \
4822 SVELd1St1ScaImmHelper(zt, pg, addr, SCA, IMM, BYTES); \
4831 const SVEMemOperand& addr) { \
4833 SVELdff1Helper(k##LANE_SIZE##RegSizeInBytesLog2, zt, pg, addr, false); \
4840 const SVEMemOperand& addr) { \
4842 SVELdff1Helper(k##LANE_SIZE##RegSizeInBytesLog2, zt, pg, addr, true); \
4848 const SVEMemOperand& addr) {
4850 VIXL_ASSERT(addr.IsPlainRegister() ||
4851 (addr.IsScalarPlusImmediate() &&
4852 (addr.GetOffsetModifier() == SVE_MUL_VL)));
4857 addr,
4864 const SVEMemOperand& addr) {
4866 VIXL_ASSERT(addr.IsPlainRegister() ||
4867 (addr.IsScalarPlusImmediate() &&
4868 (addr.GetOffsetModifier() == SVE_MUL_VL)));
4873 addr,
4880 const SVEMemOperand& addr) {
4882 VIXL_ASSERT(addr.IsPlainRegister() ||
4883 (addr.IsScalarPlusImmediate() &&
4884 (addr.GetOffsetModifier() == SVE_MUL_VL)));
4889 addr,
4896 const SVEMemOperand& addr) {
4898 VIXL_ASSERT(addr.IsPlainRegister() ||
4899 (addr.IsScalarPlusImmediate() &&
4900 (addr.GetOffsetModifier() == SVE_MUL_VL)));
4905 addr,
4912 const SVEMemOperand& addr) {
4914 VIXL_ASSERT(addr.IsPlainRegister() ||
4915 (addr.IsScalarPlusImmediate() &&
4916 (addr.GetOffsetModifier() == SVE_MUL_VL)));
4921 addr,
4928 const SVEMemOperand& addr) {
4930 VIXL_ASSERT(addr.IsPlainRegister() ||
4931 (addr.IsScalarPlusImmediate() &&
4932 (addr.GetOffsetModifier() == SVE_MUL_VL)));
4937 addr,
4944 const SVEMemOperand& addr) {
4946 VIXL_ASSERT(addr.IsPlainRegister() ||
4947 (addr.IsScalarPlusImmediate() &&
4948 (addr.GetOffsetModifier() == SVE_MUL_VL)));
4953 addr,
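The seven functions from 4848 to 4953 repeat the same restriction, a plain register base or a scalar-plus-immediate operand with the SVE_MUL_VL modifier, which matches the non-faulting ldnf1* family: those loads have no register-offset form. A sketch, assuming these are the ldnf1 entry points (masm and constructors assumed):

    // ldnf1b { z0.b }, p0/z, [x0, #5, mul vl]  -- non-faulting, VL-scaled immediate
    masm.ldnf1b(z0.VnB(), p0.Zeroing(), SVEMemOperand(x0, 5, SVE_MUL_VL));
    // ldnf1sw { z1.d }, p0/z, [x0]             -- plain base, sign-extend words to .d
    masm.ldnf1sw(z1.VnD(), p0.Zeroing(), SVEMemOperand(x0));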
4960 const SVEMemOperand& addr) {
4961 VIXL_ASSERT(addr.IsPlainScalar() ||
4962 (addr.IsScalarPlusImmediate() && addr.IsMulVl()) ||
4963 (addr.IsScalarPlusScalar() && addr.IsEquivalentToLSL(0)) ||
4964 (addr.IsVectorPlusScalar() && CPUHas(CPUFeatures::kSVE2)));
4965 if (addr.IsVectorPlusScalar()) {
4966 SVELd1VecScaHelper(zt, pg, addr, 0, /* is_signed = */ false);
4970 addr,
4978 const SVEMemOperand& addr) {
4979 VIXL_ASSERT(addr.IsPlainScalar() ||
4980 (addr.IsScalarPlusImmediate() && addr.IsMulVl()) ||
4981 (addr.IsScalarPlusScalar() && addr.IsEquivalentToLSL(3)) ||
4982 (addr.IsVectorPlusScalar() && CPUHas(CPUFeatures::kSVE2)));
4983 if (addr.IsVectorPlusScalar()) {
4984 SVELd1VecScaHelper(zt, pg, addr, 3, /* is_signed = */ false);
4988 addr,
4996 const SVEMemOperand& addr) {
4997 VIXL_ASSERT(addr.IsPlainScalar() ||
4998 (addr.IsScalarPlusImmediate() && addr.IsMulVl()) ||
4999 (addr.IsScalarPlusScalar() && addr.IsEquivalentToLSL(1)) ||
5000 (addr.IsVectorPlusScalar() && CPUHas(CPUFeatures::kSVE2)));
5001 if (addr.IsVectorPlusScalar()) {
5002 SVELd1VecScaHelper(zt, pg, addr, 1, /* is_signed = */ false);
5006 addr,
5014 const SVEMemOperand& addr) {
5015 VIXL_ASSERT(addr.IsPlainScalar() ||
5016 (addr.IsScalarPlusImmediate() && addr.IsMulVl()) ||
5017 (addr.IsScalarPlusScalar() && addr.IsEquivalentToLSL(2)) ||
5018 (addr.IsVectorPlusScalar() && CPUHas(CPUFeatures::kSVE2)));
5019 if (addr.IsVectorPlusScalar()) {
5020 SVELd1VecScaHelper(zt, pg, addr, 2, /* is_signed = */ false);
5024 addr,
5032 const SVEMemOperand& addr) {
5033 VIXL_ASSERT(addr.IsVectorPlusScalar() && CPUHas(CPUFeatures::kSVE2));
5034 SVELd1VecScaHelper(zt, pg, addr, 0, /* is_signed = */ true);
5039 const SVEMemOperand& addr) {
5040 VIXL_ASSERT(addr.IsVectorPlusScalar() && CPUHas(CPUFeatures::kSVE2));
5041 SVELd1VecScaHelper(zt, pg, addr, 1, /* is_signed = */ true);
5046 const SVEMemOperand& addr) {
5047 VIXL_ASSERT(addr.IsVectorPlusScalar() && CPUHas(CPUFeatures::kSVE2));
5048 SVELd1VecScaHelper(zt, pg, addr, 2, /* is_signed = */ true);
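The non-temporal loads ldnt1b/d/h/w (4960-5024) accept the contiguous forms (plain base, MUL VL immediate, or an LSL-scaled index register) and, when SVE2 is available, the vector-plus-scalar gather form; the signed variants at 5032-5048 exist only in that SVE2 gather form. For example (masm and constructors assumed):

    // ldnt1w { z0.s }, p0/z, [x0, x1, lsl #2]  -- contiguous non-temporal
    masm.ldnt1w(z0.VnS(), p0.Zeroing(), SVEMemOperand(x0, x1, SVE_LSL, 2));
    // ldnt1sh { z2.d }, p0/z, [z3.d, x1]       -- SVE2 non-temporal gather
    masm.ldnt1sh(z2.VnD(), p0.Zeroing(), SVEMemOperand(z3.VnD(), x1));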
5053 const SVEMemOperand& addr,
5058 if (addr.IsScalarPlusImmediate()) {
5059 VIXL_ASSERT((addr.GetImmediateOffset() == 0) || addr.IsMulVl());
5060 int64_t imm = addr.GetImmediateOffset();
5062 op = RnSP(addr.GetScalarBase()) | ImmField<19, 16>(imm / num_regs);
5064 } else if (addr.IsScalarPlusScalar()) {
5065 VIXL_ASSERT(addr.GetScalarOffset().IsZero() ||
5066 addr.IsEquivalentToLSL(msize_in_bytes_log2));
5067 op = RnSP(addr.GetScalarBase()) | Rm(addr.GetScalarOffset());
5069 } else if (addr.IsVectorPlusImmediate()) {
5070 ZRegister zn = addr.GetVectorBase();
5071 uint64_t imm = addr.GetImmediateOffset();
5076 } else if (addr.IsVectorPlusScalar()) {
5077 VIXL_ASSERT(addr.GetOffsetModifier() == NO_SVE_OFFSET_MODIFIER);
5078 VIXL_ASSERT(addr.GetShiftAmount() == 0);
5079 ZRegister zn = addr.GetVectorBase();
5081 Register xm = addr.GetScalarOffset();
5083 } else if (addr.IsScalarPlusVector()) {
5086 Register xn = addr.GetScalarBase();
5087 ZRegister zm = addr.GetVectorOffset();
5088 SVEOffsetModifier mod = addr.GetOffsetModifier();
5096 VIXL_ASSERT(addr.GetShiftAmount() == msize_in_bytes_log2);
5100 VIXL_ASSERT(addr.GetShiftAmount() == 0);
5108 VIXL_ASSERT((addr.GetShiftAmount() == 0) ||
5109 (addr.GetShiftAmount() == msize_in_bytes_log2));
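SVEMemOperandHelper (5053-5109) is the one place that converts every operand shape into raw instruction fields; its five branches correspond to the SVE addressing forms roughly as follows (a descriptive summary of the fragments above, not the encoder itself):

    // scalar + immediate : [xn{, #imm, mul vl}]        imm encoded as imm / num_regs
    // scalar + scalar    : [xn, xm{, lsl #msize_log2}]
    // vector + immediate : [zn.s|zn.d{, #imm}]
    // vector + scalar    : [zn.s|zn.d, xm]             no modifier, no shift (the SVE2 forms above)
    // scalar + vector    : [xn, zm.s|zm.d{, mod}]      shift of 0 or msize_log2, checked against mod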
5126 const SVEMemOperand& addr) {
5127 if (addr.IsScalarPlusScalar()) {
5129 VIXL_ASSERT(!addr.GetScalarOffset().IsZero());
5132 if (addr.IsScatterGather()) {
5139 addr,
5147 if (addr.IsScalarPlusImmediate()) {
5149 } else if (addr.IsScalarPlusScalar()) {
5155 SVELdSt1Helper(msize_in_bytes_log2, zt, pg, addr, false, op);
5161 const SVEMemOperand& addr) {
5162 if (addr.IsScalarPlusScalar()) {
5164 VIXL_ASSERT(!addr.GetScalarOffset().IsZero());
5168 if (addr.IsScalarPlusImmediate()) {
5170 } else if (addr.IsScalarPlusScalar()) {
5176 SVELdSt234Helper(num_regs, zt1, pg, addr, op);
5182 const SVEMemOperand& addr) { \
5184 SVESt1Helper(k##LANE_SIZE##RegSizeInBytesLog2, zt, pg, addr); \
5190 const SVEMemOperand& addr) { \
5196 SVESt234Helper(2, zt1, pg, addr); \
5203 const SVEMemOperand& addr) { \
5209 SVESt234Helper(3, zt1, pg, addr); \
5217 const SVEMemOperand& addr) { \
5223 SVESt234Helper(4, zt1, pg, addr); \
5233 const SVEMemOperand& addr) {
5234 VIXL_ASSERT(addr.IsPlainScalar() ||
5235 (addr.IsScalarPlusImmediate() && addr.IsMulVl()) ||
5236 (addr.IsScalarPlusScalar() && addr.IsEquivalentToLSL(0)) ||
5237 (addr.IsVectorPlusScalar() && CPUHas(CPUFeatures::kSVE2)));
5238 if (addr.IsVectorPlusScalar()) {
5239 SVESt1VecScaHelper(zt, pg, addr, 0);
5243 addr,
5251 const SVEMemOperand& addr) {
5252 VIXL_ASSERT(addr.IsPlainScalar() ||
5253 (addr.IsScalarPlusImmediate() && addr.IsMulVl()) ||
5254 (addr.IsScalarPlusScalar() && addr.IsEquivalentToLSL(3)) ||
5255 (addr.IsVectorPlusScalar() && CPUHas(CPUFeatures::kSVE2)));
5256 if (addr.IsVectorPlusScalar()) {
5257 SVESt1VecScaHelper(zt, pg, addr, 3);
5261 addr,
5269 const SVEMemOperand& addr) {
5270 VIXL_ASSERT(addr.IsPlainScalar() ||
5271 (addr.IsScalarPlusImmediate() && addr.IsMulVl()) ||
5272 (addr.IsScalarPlusScalar() && addr.IsEquivalentToLSL(1)) ||
5273 (addr.IsVectorPlusScalar() && CPUHas(CPUFeatures::kSVE2)));
5274 if (addr.IsVectorPlusScalar()) {
5275 SVESt1VecScaHelper(zt, pg, addr, 1);
5279 addr,
5287 const SVEMemOperand& addr) {
5288 VIXL_ASSERT(addr.IsPlainScalar() ||
5289 (addr.IsScalarPlusImmediate() && addr.IsMulVl()) ||
5290 (addr.IsScalarPlusScalar() && addr.IsEquivalentToLSL(2)) ||
5291 (addr.IsVectorPlusScalar() && CPUHas(CPUFeatures::kSVE2)));
5292 if (addr.IsVectorPlusScalar()) {
5293 SVESt1VecScaHelper(zt, pg, addr, 2);
5297 addr,
5303 void Assembler::str(const CPURegister& rt, const SVEMemOperand& addr) {
5308 VIXL_ASSERT(addr.IsPlainScalar() ||
5309 (addr.IsScalarPlusImmediate() &&
5310 (addr.GetOffsetModifier() == SVE_MUL_VL)));
5311 int64_t imm9 = addr.GetImmediateOffset();
5320 Emit(op | Rt(rt) | RnSP(addr.GetScalarBase()) | imm9h | imm9l);
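str (5303-5320) mirrors the ldr entry at 4226: an unpredicated whole Z or P register store with the same plain-scalar or 9-bit MUL VL scalar-plus-immediate operand, split into imm9h and imm9l. For example (masm assumed):

    // str z7, [x0, #255, mul vl]
    masm.str(z7, SVEMemOperand(x0, 255, SVE_MUL_VL));
    // str p5, [x0]
    masm.str(p5, SVEMemOperand(x0));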