Lines Matching refs:addr
305 const SVEMemOperand& addr,
308 VIXL_ASSERT(!addr.IsScatterGather());
312 VIXL_ASSERT(!addr.IsMulVl() || (vl_divisor_log2 >= 0));
317 SVEOffsetModifier mod = addr.GetOffsetModifier();
318 Register base = addr.GetScalarBase();
320 if (addr.IsEquivalentToScalar()) {
326 } else if (addr.IsScalarPlusImmediate()) {
330 int64_t offset = addr.GetImmediateOffset();
332 if (addr.IsMulVl()) {
343 } else if (addr.IsScalarPlusScalar()) {
347 Register offset = addr.GetScalarOffset();
350 Add(xd, base, Operand(offset, LSL, addr.GetShiftAmount()));
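The matches above (source lines 305-350) come from the helper that lowers an SVEMemOperand into a plain scalar address. Three arms are visible: a base-only operand collapses to a Mov, a scalar-plus-immediate offset is added directly (after being scaled by the runtime vector length when the MUL VL modifier is present, which is why vl_divisor_log2 is asserted at 312), and a scalar-plus-scalar operand becomes an Add with the recorded LSL amount. A minimal sketch of that dispatch, with the MUL VL scaling elided and the function renamed to mark it as an illustration rather than the VIXL implementation:

  #include "aarch64/macro-assembler-aarch64.h"

  using namespace vixl::aarch64;

  // Illustration only: mirrors the dispatch visible in the matches.
  void CalculateSveAddressSketch(MacroAssembler* masm,
                                 const Register& xd,
                                 const SVEMemOperand& addr) {
    VIXL_ASSERT(!addr.IsScatterGather());
    Register base = addr.GetScalarBase();
    if (addr.IsEquivalentToScalar()) {
      // The operand folds to the base register alone, e.g. [x0, #0].
      masm->Mov(xd, base);
    } else if (addr.IsScalarPlusImmediate()) {
      // [xn, #imm]; for [xn, #imm, mul vl] the real helper first
      // multiplies the offset by the vector length (elided here).
      masm->Add(xd, base, addr.GetImmediateOffset());
    } else if (addr.IsScalarPlusScalar()) {
      // [xn, xm, lsl #shift].
      masm->Add(xd, base, Operand(addr.GetScalarOffset(),
                                  LSL,
                                  addr.GetShiftAmount()));
    }
  }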
1113 const SVEMemOperand& addr,
1116 VIXL_ASSERT(addr.IsScalarPlusImmediate());
1117 int64_t imm = addr.GetImmediateOffset();
1120 (this->*fn)(zt, pg, addr);
1124 CalculateSVEAddress(scratch, addr, zt);
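Source lines 1113-1124 show the broadcast-load helper: when the scalar-plus-immediate offset fits the instruction's encoding, the raw instruction is issued through the member-function pointer fn; otherwise the address is resolved into a scratch register via CalculateSVEAddress and the access is retried with a zero offset. From the caller's side this means the Ld1r* macro-instructions accept any immediate. A usage sketch, assuming an SVE-enabled target (function name illustrative):

  #include "aarch64/macro-assembler-aarch64.h"

  using namespace vixl::aarch64;

  void BroadcastLoadExample(MacroAssembler* masm) {
    // Fits ld1rb's 6-bit unsigned immediate [0, 63]: encoded directly.
    masm->Ld1rb(z0.VnB(), p0.Zeroing(), SVEMemOperand(x0, 42));
    // Out of range: the helper computes x0 + 1000 into a scratch
    // register and issues ld1rb with a zero offset instead.
    masm->Ld1rb(z1.VnB(), p0.Zeroing(), SVEMemOperand(x0, 1000));
  }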
1131 const SVEMemOperand& addr,
1136 if (addr.IsPlainScalar() ||
1137 (addr.IsScalarPlusImmediate() && IsInt9(addr.GetImmediateOffset()) &&
1138 addr.IsMulVl())) {
1140 (this->*fn)(rt, addr);
1144 if (addr.IsEquivalentToScalar()) {
1146 (this->*fn)(rt, SVEMemOperand(addr.GetScalarBase()));
1152 CalculateSVEAddress(scratch, addr, rt);
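Source lines 1131-1152 are the same pattern for the unpredicated register forms (LDR/STR of Z and P registers, hence the plain rt rather than zt and pg): a base-only operand, or a MUL VL immediate fitting the signed 9-bit field, is encoded directly; an operand that merely folds to the base is rewritten as one; anything else takes the scratch-register fallback. For example:

  #include "aarch64/macro-assembler-aarch64.h"

  using namespace vixl::aarch64;

  void RegisterLoadExample(MacroAssembler* masm) {
    // imm9 in [-256, 255] with MUL VL: encoded directly.
    masm->Ldr(z0, SVEMemOperand(x0, 255, SVE_MUL_VL));
    // Out of imm9 range: falls back to address materialisation.
    masm->Ldr(z1, SVEMemOperand(x0, 300, SVE_MUL_VL));
  }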
1161 const SVEMemOperand& addr,
1170 if (addr.IsPlainScalar() ||
1171 (addr.IsScalarPlusImmediate() &&
1172 IsIntN(imm_bits, addr.GetImmediateOffset() / imm_divisor) &&
1173 ((addr.GetImmediateOffset() % imm_divisor) == 0) &&
1174 (addr.GetOffsetModifier() == supported_modifier))) {
1176 (this->*fn)(zt, pg, addr);
1180 if (addr.IsScalarPlusScalar() && !addr.GetScalarOffset().IsZero() &&
1181 addr.IsEquivalentToLSL(zt.GetLaneSizeInBytesLog2())) {
1183 (this->*fn)(zt, pg, addr);
1187 if (addr.IsEquivalentToScalar()) {
1189 (this->*fn)(zt, pg, SVEMemOperand(addr.GetScalarBase()));
1193 if (addr.IsMulVl() && (supported_modifier != SVE_MUL_VL) &&
1202 CalculateSVEAddress(scratch, addr, vl_divisor_log2);
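Source lines 1161-1202 show a more general variant, parameterised by the immediate field width (imm_bits), the scaling the encoding applies (imm_divisor) and the offset modifier the instruction accepts (supported_modifier). VIXL uses this shape for instructions such as the contiguous non-temporal forms (imm4, MUL VL) and the LD1RQ* quadword broadcasts (imm4 scaled by 16, no modifier). The checks map one-to-one: the offset must divide evenly by imm_divisor, the quotient must fit in imm_bits, and the modifier must match; a scalar-plus-scalar operand with the right LSL amount is also taken directly, and everything else falls back to CalculateSVEAddress. A usage sketch:

  #include "aarch64/macro-assembler-aarch64.h"

  using namespace vixl::aarch64;

  void QuadBroadcastExample(MacroAssembler* masm) {
    // Multiple of 16 in [-128, 112]: encoded directly.
    masm->Ld1rqb(z0.VnB(), p0.Zeroing(), SVEMemOperand(x0, 32));
    // Not a multiple of 16: materialised into a scratch register.
    masm->Ld1rqb(z1.VnB(), p0.Zeroing(), SVEMemOperand(x0, 17));
  }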
1211 const SVEMemOperand& addr,
1213 if (addr.IsPlainScalar() ||
1214 (addr.IsScalarPlusScalar() && !addr.GetScalarOffset().IsZero() &&
1215 addr.IsEquivalentToLSL(msize_in_bytes_log2)) ||
1216 (addr.IsScalarPlusImmediate() && IsInt4(addr.GetImmediateOffset()) &&
1217 addr.IsMulVl())) {
1219 (this->*fn)(zt, pg, addr);
1223 if (addr.IsEquivalentToScalar()) {
1225 (this->*fn)(zt, pg, SVEMemOperand(addr.GetScalarBase()));
1229 if (addr.IsVectorPlusImmediate()) {
1230 uint64_t offset = addr.GetImmediateOffset();
1234 (this->*fn)(zt, pg, addr);
1239 if (addr.IsScalarPlusVector()) {
1240 VIXL_ASSERT(addr.IsScatterGather());
1242 (this->*fn)(zt, pg, addr);
1247 if (addr.IsScatterGather()) {
1253 if (addr.IsVectorPlusImmediate()) {
1257 Mov(scratch, addr.GetImmediateOffset());
1261 (this->*fn)(zt, pg, SVEMemOperand(scratch, addr.GetVectorBase(), om));
1272 CalculateSVEAddress(scratch, addr, vl_divisor_log2);
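Source lines 1211-1272 are the workhorse for the LD1/ST1 family. The fast paths cover, in order: a plain scalar base; scalar-plus-scalar with an LSL amount matching the access size; scalar-plus-immediate in the signed 4-bit MUL VL range (the "vnum" in [-8, 7]); vector-plus-immediate gathers whose offset fits the scaled immediate field; and scalar-plus-vector gathers, which are always encodable. The remaining gather case, a vector base with an unencodable immediate, is rewritten at 1253-1261 by moving the immediate into a scratch register and reissuing the access as scalar-plus-vector (preserving the offset modifier om); unencodable contiguous operands take the usual CalculateSVEAddress fallback. For example:

  #include "aarch64/macro-assembler-aarch64.h"

  using namespace vixl::aarch64;

  void Load1Example(MacroAssembler* masm) {
    // Contiguous, vnum in [-8, 7]: encoded directly.
    masm->Ld1d(z0.VnD(), p0.Zeroing(), SVEMemOperand(x0, -8, SVE_MUL_VL));
    // Gather, vector base plus immediate (multiple of 8 in [0, 248]
    // for ld1d): encoded directly.
    masm->Ld1d(z1.VnD(), p0.Zeroing(), SVEMemOperand(z2.VnD(), 248));
    // Gather with an unencodable immediate: the helper moves 256 into
    // a scratch register and reissues this as scalar-plus-vector.
    masm->Ld1d(z3.VnD(), p0.Zeroing(), SVEMemOperand(z2.VnD(), 256));
  }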
1282 const SVEMemOperand& addr,
1284 if (addr.IsScatterGather()) {
1286 SVELoadStore1Helper(msize_in_bytes_log2, zt, pg, addr, fn);
1295 if (addr.IsPlainScalar() || (addr.IsScalarPlusScalar() &&
1296 addr.IsEquivalentToLSL(msize_in_bytes_log2))) {
1298 (this->*fn)(zt, pg, addr);
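Source lines 1282-1298 handle the first-fault loads: gather forms are routed back through SVELoadStore1Helper (line 1286), and the contiguous forms accept only a plain scalar base or a scalar-plus-scalar operand whose LSL amount matches the access size, which are the only addressing modes contiguous LDFF1 encodes (it has no immediate-offset form). A usage sketch:

  #include "aarch64/macro-assembler-aarch64.h"

  using namespace vixl::aarch64;

  void FirstFaultExample(MacroAssembler* masm) {
    // Contiguous first-fault load: scalar plus scalar, with the LSL
    // amount matching the access size (ldff1h, so LSL #1).
    masm->Ldff1h(z0.VnH(), p0.Zeroing(), SVEMemOperand(x0, x1, LSL, 1));
    // Byte form: no shift.
    masm->Ldff1b(z1.VnB(), p0.Zeroing(), SVEMemOperand(x0, x1));
  }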
1307 const SVEMemOperand& addr) {
1312 addr,
1318 const SVEMemOperand& addr) {
1323 addr,
1329 const SVEMemOperand& addr) {
1334 addr,
1340 const SVEMemOperand& addr) {
1345 addr,
1351 const SVEMemOperand& addr) {
1356 addr,
1362 const SVEMemOperand& addr) {
1367 addr,
1373 const SVEMemOperand& addr) {
1378 addr,
1384 const SVEMemOperand& addr) {
1389 addr,
1395 const SVEMemOperand& addr) {
1400 addr,
1406 const SVEMemOperand& addr) {
1411 addr,
1417 const SVEMemOperand& addr) {
1422 addr,
1428 const SVEMemOperand& addr) {
1433 addr,
1439 const SVEMemOperand& addr) {
1444 addr,
1450 const SVEMemOperand& addr) {
1455 addr,
1461 const SVEMemOperand& addr) {
1466 addr,
1472 const SVEMemOperand& addr) {
1477 addr,
1483 const SVEMemOperand& addr) {
1488 addr,
1494 const SVEMemOperand& addr) {
1499 addr,
1509 const SVEMemOperand& addr) { \
1513 addr, \
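Source lines 1307-1513 are the thin public wrappers (Ld1b, Ld1h, Ld1sb, St1b, and so on, plus the macro-generated list at 1509, hence the backslash continuations) that all funnel into SVELoadStore1Helper with their access size and a pointer to the corresponding raw Assembler instruction. The practical upshot is that every member of the family legalises operands the same way:

  #include "aarch64/macro-assembler-aarch64.h"

  using namespace vixl::aarch64;

  void WrapperExample(MacroAssembler* masm) {
    // All of these share the same legalisation, so an out-of-range
    // vnum (9 > 7) triggers the same fallback across the family.
    masm->Ld1b(z0.VnB(), p0.Zeroing(), SVEMemOperand(x0, 9, SVE_MUL_VL));
    masm->Ld1sh(z1.VnS(), p0.Zeroing(), SVEMemOperand(x0, 9, SVE_MUL_VL));
    masm->St1w(z2.VnS(), p1, SVEMemOperand(x0, 9, SVE_MUL_VL));
  }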
1528 const SVEMemOperand& addr) {
1530 if (addr.IsVectorPlusScalar()) {
1532 ldnt1b(zt, pg, addr);
1536 addr,
1546 const SVEMemOperand& addr) {
1548 if (addr.IsVectorPlusScalar()) {
1550 ldnt1d(zt, pg, addr);
1554 addr,
1564 const SVEMemOperand& addr) {
1566 if (addr.IsVectorPlusScalar()) {
1568 ldnt1h(zt, pg, addr);
1572 addr,
1582 const SVEMemOperand& addr) {
1584 if (addr.IsVectorPlusScalar()) {
1586 ldnt1w(zt, pg, addr);
1590 addr,
1600 const SVEMemOperand& addr) {
1602 if (addr.IsVectorPlusScalar()) {
1604 stnt1b(zt, pg, addr);
1608 addr,
1617 const SVEMemOperand& addr) {
1619 if (addr.IsVectorPlusScalar()) {
1621 stnt1d(zt, pg, addr);
1625 addr,
1634 const SVEMemOperand& addr) {
1636 if (addr.IsVectorPlusScalar()) {
1638 stnt1h(zt, pg, addr);
1642 addr,
1651 const SVEMemOperand& addr) {
1653 if (addr.IsVectorPlusScalar()) {
1655 stnt1w(zt, pg, addr);
1659 addr,
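Source lines 1528-1659 are the non-temporal wrappers. Each first checks for the vector-plus-scalar gather/scatter form ([Zn.S/D{, Xm}], an SVE2 addressing mode) and issues the raw instruction directly; everything else defers to the parameterised helper above with the contiguous imm4 MUL VL encoding. A usage sketch, assuming SVE2 is available and that SVEMemOperand supports the vector-plus-scalar constructor implied by IsVectorPlusScalar():

  #include "aarch64/macro-assembler-aarch64.h"

  using namespace vixl::aarch64;

  void NonTemporalExample(MacroAssembler* masm) {
    // Contiguous non-temporal load, imm4 MUL VL: generic helper path.
    masm->Ldnt1b(z0.VnB(), p0.Zeroing(), SVEMemOperand(x0, 7, SVE_MUL_VL));
    // SVE2 gather form, vector base plus scalar offset: issued as-is.
    masm->Ldnt1d(z1.VnD(), p0.Zeroing(), SVEMemOperand(z2.VnD(), x1));
    // SVE2 scatter store counterpart.
    masm->Stnt1d(z1.VnD(), p1, SVEMemOperand(z2.VnD(), x1));
  }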