Lines Matching defs:dst

1106 const MemOperand& dst) {
1107 LoadStorePair(rt, rt2, dst, StorePairOpFor(rt, rt2));
1164 const MemOperand& dst) {
1165 LoadStorePairNonTemporal(rt, rt2, dst, StorePairNonTemporalOpFor(rt, rt2));
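
The two matches above are the bodies of Assembler::stp and Assembler::stnp: both forward to a shared pair-store helper, with the opcode derived from the register operands (StorePairOpFor / StorePairNonTemporalOpFor). A minimal usage sketch, assuming VIXL's public aarch64 API; the function name EmitPairStores is illustrative:

#include "aarch64/macro-assembler-aarch64.h"
using namespace vixl::aarch64;

// Emit a store-pair and a non-temporal store-pair; the encoding helper
// picks the X-register opcodes here because the operands are X registers.
void EmitPairStores(Assembler* assm) {
  assm->stp(x0, x1, MemOperand(sp, -16, PreIndex));  // push two registers
  assm->stnp(x2, x3, MemOperand(x4, 32));            // non-temporal cache hint
}
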
1199 const MemOperand& dst,
1203 LoadStore(rt, dst, STRB_w, option);
1226 const MemOperand& dst,
1230 LoadStore(rt, dst, STRH_w, option);
1253 const MemOperand& dst,
1257 LoadStore(rt, dst, StoreOpFor(rt), option);
1281 const MemOperand& dst,
1285 LoadStore(rt, dst, STRB_w, option);
1308 const MemOperand& dst,
1312 LoadStore(rt, dst, STRH_w, option);
1335 const MemOperand& dst,
1339 LoadStore(rt, dst, StoreOpFor(rt), option);
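
The six matches from 1199 to 1339 are the byte, half-word and full-register store entry points (the strb/strh/str family and, judging by the repeated pattern, their unscaled stur* counterparts); each takes a LoadStoreScalingOption and funnels into the generic LoadStore emitter, with StoreOpFor selecting the width-appropriate opcode for the full-register form. A sketch of the calling side, under the same VIXL API assumption (EmitScalarStores is an illustrative name):

#include "aarch64/macro-assembler-aarch64.h"
using namespace vixl::aarch64;

void EmitScalarStores(Assembler* assm) {
  assm->strb(w0, MemOperand(x1, 1));   // 8-bit store, scaled offset
  assm->strh(w0, MemOperand(x1, 2));   // 16-bit store
  assm->str(x0, MemOperand(x1, 8));    // 64-bit store; opcode via StoreOpFor
  assm->stur(x0, MemOperand(x1, -3));  // unscaled, signed 9-bit byte offset
}
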
1405 const MemOperand& dst) {
1406 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1407 Emit(STXRB_w | Rs(rs) | Rt(rt) | Rt2_mask | RnSP(dst.GetBaseRegister()));
1413 const MemOperand& dst) {
1414 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1415 Emit(STXRH_w | Rs(rs) | Rt(rt) | Rt2_mask | RnSP(dst.GetBaseRegister()));
1421 const MemOperand& dst) {
1422 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1424 Emit(op | Rs(rs) | Rt(rt) | Rt2_mask | RnSP(dst.GetBaseRegister()));
1450 const MemOperand& dst) {
1452 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1454 Emit(op | Rs(rs) | Rt(rt) | Rt2(rt2) | RnSP(dst.GetBaseRegister()));
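
The 1405-1454 matches are the store-exclusive family (stxrb, stxrh, stxr, and the pair form stxp). Each asserts that the destination is a plain base register with zero offset, then packs the fields directly: Rs is the status register, Rt (plus Rt2 for the pair form) the data, and Rn the base. A sketch of the calling side, assuming the VIXL API (EmitStoreExclusive is an illustrative name):

#include "aarch64/macro-assembler-aarch64.h"
using namespace vixl::aarch64;

// Store-exclusive: the status register (w2 here) receives 0 if the
// store succeeded, 1 if the exclusive monitor was lost.
void EmitStoreExclusive(Assembler* assm) {
  assm->stxrb(w2, w0, MemOperand(x1));
  assm->stxr(w2, x0, MemOperand(x1));
  assm->stxp(w2, x0, x3, MemOperand(x1));  // pair form also encodes Rt2
}
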
1470 const MemOperand& dst) {
1471 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1472 Emit(STLXRB_w | Rs(rs) | Rt(rt) | Rt2_mask | RnSP(dst.GetBaseRegister()));
1478 const MemOperand& dst) {
1479 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1480 Emit(STLXRH_w | Rs(rs) | Rt(rt) | Rt2_mask | RnSP(dst.GetBaseRegister()));
1486 const MemOperand& dst) {
1487 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1489 Emit(op | Rs(rs) | Rt(rt) | Rt2_mask | RnSP(dst.GetBaseRegister()));
1515 const MemOperand& dst) {
1517 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1519 Emit(op | Rs(rs) | Rt(rt) | Rt2(rt2) | RnSP(dst.GetBaseRegister()));
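
The 1470-1519 matches repeat the same shape for the store-release-exclusive variants (stlxrb, stlxrh, stlxr, stlxp), which add release ordering to the exclusive store. They are typically paired with a load-acquire-exclusive in an LL/SC retry loop; a minimal sketch, again assuming the VIXL API (EmitAtomicIncrement is illustrative):

#include "aarch64/macro-assembler-aarch64.h"
using namespace vixl::aarch64;

// Classic LL/SC read-modify-write: load-acquire exclusive, modify,
// store-release exclusive, retry while the status register is non-zero.
void EmitAtomicIncrement(Assembler* assm) {
  Label retry;
  assm->bind(&retry);
  assm->ldaxr(x0, MemOperand(x1));
  assm->add(x0, x0, 1);
  assm->stlxr(w2, x0, MemOperand(x1));
  assm->cbnz(w2, &retry);
}
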
1533 void Assembler::stlrb(const Register& rt, const MemOperand& dst) {
1534 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1535 Emit(STLRB_w | Rs_mask | Rt(rt) | Rt2_mask | RnSP(dst.GetBaseRegister()));
1538 void Assembler::stlurb(const Register& rt, const MemOperand& dst) {
1540 VIXL_ASSERT(dst.IsImmediateOffset() && IsImmLSUnscaled(dst.GetOffset()));
1542 Instr base = RnSP(dst.GetBaseRegister());
1543 int64_t offset = dst.GetOffset();
1548 void Assembler::stlrh(const Register& rt, const MemOperand& dst) {
1549 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1550 Emit(STLRH_w | Rs_mask | Rt(rt) | Rt2_mask | RnSP(dst.GetBaseRegister()));
1553 void Assembler::stlurh(const Register& rt, const MemOperand& dst) {
1555 VIXL_ASSERT(dst.IsImmediateOffset() && IsImmLSUnscaled(dst.GetOffset()));
1557 Instr base = RnSP(dst.GetBaseRegister());
1558 int64_t offset = dst.GetOffset();
1563 void Assembler::stlr(const Register& rt, const MemOperand& dst) {
1564 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1566 Emit(op | Rs_mask | Rt(rt) | Rt2_mask | RnSP(dst.GetBaseRegister()));
1569 void Assembler::stlur(const Register& rt, const MemOperand& dst) {
1571 VIXL_ASSERT(dst.IsImmediateOffset() && IsImmLSUnscaled(dst.GetOffset()));
1573 Instr base = RnSP(dst.GetBaseRegister());
1574 int64_t offset = dst.GetOffset();
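
The 1533-1574 matches interleave the store-release instructions (stlrb, stlrh, stlr), which require a zero offset and set the unused Rs/Rt2 fields to all ones, with their unscaled-offset counterparts (stlurb, stlurh, stlur), which accept any signed 9-bit byte offset (IsImmLSUnscaled) and encode base and offset separately; the unscaled forms are ARMv8.4 FEAT_LRCPC2 additions. A sketch of the calling side, assuming the assembler was created with the matching CPU features enabled:

#include "aarch64/macro-assembler-aarch64.h"
using namespace vixl::aarch64;

void EmitStoreRelease(Assembler* assm) {
  assm->stlrb(w0, MemOperand(x1));      // release store, zero offset only
  assm->stlr(x0, MemOperand(x1));
  assm->stlur(x0, MemOperand(x1, -8));  // unscaled offset in [-256, 255]
}
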
1599 void Assembler::stllrb(const Register& rt, const MemOperand& dst) {
1601 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1602 Emit(STLLRB | Rs_mask | Rt(rt) | Rt2_mask | RnSP(dst.GetBaseRegister()));
1606 void Assembler::stllrh(const Register& rt, const MemOperand& dst) {
1608 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1609 Emit(STLLRH | Rs_mask | Rt(rt) | Rt2_mask | RnSP(dst.GetBaseRegister()));
1613 void Assembler::stllr(const Register& rt, const MemOperand& dst) {
1615 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1617 Emit(op | Rs_mask | Rt(rt) | Rt2_mask | RnSP(dst.GetBaseRegister()));
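
The 1599-1617 matches are the LORelease stores (stllrb, stllrh, stllr) from the ARMv8.1 Limited Ordering Regions extension (FEAT_LOR); they share the zero-offset constraint and field layout of the stlr* group. A sketch, with the same CPU-feature caveat as above:

#include "aarch64/macro-assembler-aarch64.h"
using namespace vixl::aarch64;

// LORelease stores order only against other accesses to the same
// Limited Ordering Region, a weaker guarantee than a full stlr.
void EmitLORelease(Assembler* assm) {
  assm->stllrb(w0, MemOperand(x1));
  assm->stllr(x0, MemOperand(x1));
}
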
2785 const MemOperand& dst) {
2790 LoadStoreStruct(vt, dst, NEON_ST2);
2797 const MemOperand& dst) {
2802 LoadStoreStructSingle(vt, lane, dst, NEONLoadStoreSingleStructStore2);
2809 const MemOperand& dst) {
2814 LoadStoreStruct(vt, dst, NEON_ST3);
2822 const MemOperand& dst) {
2827 LoadStoreStructSingle(vt, lane, dst, NEONLoadStoreSingleStructStore3);
2835 const MemOperand& dst) {
2840 LoadStoreStruct(vt, dst, NEON_ST4);
2849 const MemOperand& dst) {
2854 LoadStoreStructSingle(vt, lane, dst, NEONLoadStoreSingleStructStore4);
2911 void Assembler::st1(const VRegister& vt, int lane, const MemOperand& dst) {
2913 LoadStoreStructSingle(vt, lane, dst, NEONLoadStoreSingleStructStore1);
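
The 2785-2913 matches are the NEON structure stores: st2/st3/st4 each come in a whole-register form (LoadStoreStruct, interleaving the elements of two to four consecutive registers) and a single-lane form (LoadStoreStructSingle), and st1 adds a single-lane store of its own. A sketch of the three shapes, assuming the VIXL API (EmitStructStores is illustrative):

#include "aarch64/macro-assembler-aarch64.h"
using namespace vixl::aarch64;

void EmitStructStores(Assembler* assm) {
  // Whole-register ST2: interleaves v0 and v1 element by element.
  assm->st2(v0.V4S(), v1.V4S(), MemOperand(x0));
  // Single-lane ST3: stores lane 1 of each of v2, v3, v4.
  assm->st3(v2.V16B(), v3.V16B(), v4.V16B(), 1, MemOperand(x0));
  // Single-lane ST1 with post-index by the transfer size (2 bytes for H).
  assm->st1(v5.V8H(), 3, MemOperand(x0, 2, PostIndex));
}
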
5890 const Register& dst,
5894 int reg_size = dst.GetSizeInBits();
5896 if (IsImmMovz(imm, reg_size) && !dst.IsSP()) {
5900 assm->movz(dst, imm);
5903 } else if (IsImmMovn(imm, reg_size) && !dst.IsSP()) {
5907 assm->movn(dst, dst.Is64Bits() ? ~imm : (~imm & kWRegMask));
5912 VIXL_ASSERT(!dst.IsZero());
5914 assm->LogicalImmediate(dst,
5915 AppropriateZeroRegFor(dst),
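
The final matches (5890-5915) are from an immediate-move helper: if the value fits a single MOVZ (and the destination is not sp), emit movz; if its bitwise inverse fits, emit movn (with the inverse masked to 32 bits for W registers); otherwise fall back to a logical (ORR) immediate against the zero register of matching width (AppropriateZeroRegFor). From user code this selection is reached through the MacroAssembler's Mov; a sketch with immediates chosen to hit each case, assuming the standard VIXL API:

#include <cstdint>
#include "aarch64/macro-assembler-aarch64.h"
using namespace vixl::aarch64;

void EmitImmediates(MacroAssembler* masm) {
  masm->Mov(x0, UINT64_C(0xabcd0000));          // one shifted 16-bit chunk: MOVZ
  masm->Mov(x1, ~UINT64_C(0xabcd0000));         // inverse fits: MOVN
  masm->Mov(x2, UINT64_C(0x00ff00ff00ff00ff));  // bitmask immediate: ORR from xzr
}
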