Lines Matching refs:imm8
32 void Assembler::ResolveSVEImm8Shift(int* imm8, int* shift) {
36 if (IsInt8(*imm8)) {
38 } else if ((*imm8 % 256) == 0) {
39 *imm8 /= 256;
44 VIXL_ASSERT(IsInt8(*imm8));
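The matches at lines 32-44 sketch the signed immediate resolution: a value that already fits in a signed byte keeps shift 0, otherwise a multiple of 256 is divided down and given an implied LSL #8. Below is a minimal standalone restatement of that rule, assuming the guard on an unspecified shift that the matches do not show; the names are hypothetical, not VIXL's.

    // Sketch only (assumed context; not VIXL code): the signed rule at lines 36-44.
    #include <cassert>

    static bool FitsInSignedByte(int v) { return (v >= -128) && (v <= 127); }

    static void ResolveSignedImm8Shift(int* imm8, int* shift) {
      if (*shift < 0) {  // Assumed: the caller left the shift unspecified.
        if (FitsInSignedByte(*imm8)) {
          *shift = 0;
        } else if ((*imm8 % 256) == 0) {  // Multiple of 256: scale down, LSL #8.
          *imm8 /= 256;
          *shift = 8;
        }
      }
      assert(FitsInSignedByte(*imm8));
      assert((*shift == 0) || (*shift == 8));
    }

For example, (-0x7f00, -1) resolves to (-0x7f, 8), while (0x42, -1) stays at (0x42, 0).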
3560 int imm8,
3564 // size<23:22> | Pg<19:16> | M<14> | sh<13> | imm8<12:5> | Zd<4:0>
3569 ResolveSVEImm8Shift(&imm8, &shift);
3574 ImmField<12, 5>(imm8));
3580 // size<23:22> | Pg<19:16> | imm8<12:5> | Zd<4:0>
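The encoding comments at lines 3564 and 3580 place the byte immediate in imm8<12:5>, with the optional shift carried as the single sh<13> bit where the form accepts one. A tiny illustrative packer for just those two fields; it is not a VIXL helper, and the remaining fields (size, Pg, M, Zd) are left out.

    // Illustrative only: build the sh<13> and imm8<12:5> fields described above.
    #include <cstdint>

    static uint32_t PackShImm8(int imm8, int shift) {
      uint32_t sh = (shift == 8) ? 1u : 0u;               // sh<13>: LSL #8 flag.
      uint32_t imm = static_cast<uint32_t>(imm8) & 0xff;  // imm8<12:5>.
      return (sh << 13) | (imm << 5);
    }

PackShImm8(1, 8), i.e. the lane value 256, produces 0x2020: bit 13 set plus 0x01 in bits 12:5.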
3594 int imm8,
3599 if (IsUint8(imm8)) {
3601 } else if (IsUint16(imm8) && ((imm8 % 256) == 0)) {
3602 imm8 /= 256;
3607 VIXL_ASSERT(IsUint8(imm8));
3611 Emit(op | SVESize(zd) | Rd(zd) | shift_bit | ImmUnsignedField<12, 5>(imm8));
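Lines 3594-3611 are the unsigned counterpart shared by the add/sub immediate emitters: the value must fit in an unsigned byte, or be a 16-bit multiple of 256 that gets divided down with shift 8, and the resolved shift is then folded into bit 13 of the emitted word. A hedged restatement with hypothetical names, again assuming the unlisted guard that only derives the shift when it was left unspecified:

    // Sketch of the unsigned rule at lines 3599-3611 (assumed context; not VIXL code).
    #include <cassert>
    #include <cstdint>

    static uint32_t UnsignedImm8ShiftFields(int imm8, int shift) {
      if (shift < 0) {  // Assumed: derive the shift from the immediate.
        if ((imm8 >= 0) && (imm8 <= 0xff)) {
          shift = 0;
        } else if ((imm8 >= 0) && (imm8 <= 0xffff) && ((imm8 % 256) == 0)) {
          imm8 /= 256;
          shift = 8;
        }
      }
      assert((imm8 >= 0) && (imm8 <= 0xff));
      assert((shift == 0) || (shift == 8));
      uint32_t shift_bit = (shift > 0) ? (1u << 13) : 0u;       // sh<13>.
      return shift_bit | (static_cast<uint32_t>(imm8) << 5);    // imm8<12:5>.
    }

So 0x3000 resolves to imm8 0x30 with LSL #8, while 0x3001 satisfies neither branch and would trip the assertion.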
3616 int imm8,
3620 // size<23:22> | opc<18:16> = 000 | sh<13> | imm8<12:5> | Zdn<4:0>
3627 SVEIntAddSubtractImmUnpredicatedHelper(ADD_z_zi, zd, imm8, shift);
3630 void Assembler::dup(const ZRegister& zd, int imm8, int shift) {
3633 // size<23:22> | opc<18:17> = 00 | sh<13> | imm8<12:5> | Zd<4:0>
3637 ResolveSVEImm8Shift(&imm8, &shift);
3641 Emit(DUP_z_i | SVESize(zd) | Rd(zd) | shift_bit | ImmField<12, 5>(imm8));
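dup (lines 3630-3641) broadcasts the signed byte immediate, with the same derived shift, to every lane of the destination. A hedged usage sketch follows; the include path, the SVE-enabled Assembler, the register helpers (z0, VnH(), and so on) and the shift default are assumptions about the surrounding VIXL API rather than anything shown in the matches.

    // Hedged usage sketch, not from the matches above.
    #include "aarch64/assembler-aarch64.h"  // Assumed VIXL include path.

    using namespace vixl::aarch64;

    void DupExamples(Assembler* masm) {  // masm is assumed to have SVE enabled.
      masm->dup(z0.VnH(), 0x55);      // Each H lane becomes 0x0055 (shift 0).
      masm->dup(z1.VnS(), 0x7f, 8);   // Explicit LSL #8: each S lane gets 0x7f00.
      masm->dup(z2.VnS(), 0x7f00);    // Same value via the derived shift (line 3637),
                                      // assuming the shift parameter defaults to -1.
    }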
3647 // size<23:22> | opc<18:17> = 00 | o2<13> = 0 | imm8<12:5> | Zd<4:0>
3656 void Assembler::mul(const ZRegister& zd, const ZRegister& zn, int imm8) {
3659 // size<23:22> | opc<18:16> = 000 | o2<13> = 0 | imm8<12:5> | Zdn<4:0>
3666 Emit(MUL_z_zi | SVESize(zd) | Rd(zd) | ImmField<12, 5>(imm8));
3669 void Assembler::smax(const ZRegister& zd, const ZRegister& zn, int imm8) {
3672 // size<23:22> | opc<18:16> = 000 | o2<13> = 0 | imm8<12:5> | Zdn<4:0>
3679 Emit(SMAX_z_zi | SVESize(zd) | Rd(zd) | ImmField<12, 5>(imm8));
3682 void Assembler::smin(const ZRegister& zd, const ZRegister& zn, int imm8) {
3685 // size<23:22> | opc<18:16> = 010 | o2<13> = 0 | imm8<12:5> | Zdn<4:0>
3692 Emit(SMIN_z_zi | SVESize(zd) | Rd(zd) | ImmField<12, 5>(imm8));
3697 int imm8,
3701 // size<23:22> | opc<18:16> = 100 | sh<13> | imm8<12:5> | Zdn<4:0>
3708 SVEIntAddSubtractImmUnpredicatedHelper(SQADD_z_zi, zd, imm8, shift);
3713 int imm8,
3717 // size<23:22> | opc<18:16> = 110 | sh<13> | imm8<12:5> | Zdn<4:0>
3724 SVEIntAddSubtractImmUnpredicatedHelper(SQSUB_z_zi, zd, imm8, shift);
3729 int imm8,
3733 // size<23:22> | opc<18:16> = 001 | sh<13> | imm8<12:5> | Zdn<4:0>
3740 SVEIntAddSubtractImmUnpredicatedHelper(SUB_z_zi, zd, imm8, shift);
3745 int imm8,
3749 // size<23:22> | opc<18:16> = 011 | sh<13> | imm8<12:5> | Zdn<4:0>
3756 SVEIntAddSubtractImmUnpredicatedHelper(SUBR_z_zi, zd, imm8, shift);
3759 void Assembler::umax(const ZRegister& zd, const ZRegister& zn, int imm8) {
3762 // size<23:22> | opc<18:16> = 001 | o2<13> = 0 | imm8<12:5> | Zdn<4:0>
3769 Emit(UMAX_z_zi | SVESize(zd) | Rd(zd) | ImmUnsignedField<12, 5>(imm8));
3772 void Assembler::umin(const ZRegister& zd, const ZRegister& zn, int imm8) {
3775 // size<23:22> | opc<18:16> = 011 | o2<13> = 0 | imm8<12:5> | Zdn<4:0>
3782 Emit(UMIN_z_zi | SVESize(zd) | Rd(zd) | ImmUnsignedField<12, 5>(imm8));
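mul, smax, and smin (lines 3656-3692) encode a signed byte via ImmField, while umax and umin (lines 3759-3782) use ImmUnsignedField, so the accepted ranges differ: -128..127 against 0..255. A short hedged continuation of the sketch above, keeping destination and first source identical since these are destructive forms:

    // Continuing inside the same assumed setup as the dup sketch above.
    masm->mul(z0.VnH(), z0.VnH(), -128);   // Signed immediate: -128..127.
    masm->smax(z1.VnB(), z1.VnB(), -1);
    masm->smin(z2.VnS(), z2.VnS(), 127);
    masm->umax(z3.VnB(), z3.VnB(), 255);   // Unsigned immediate: 0..255.
    masm->umin(z4.VnD(), z4.VnD(), 0);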
3787 int imm8,
3791 // size<23:22> | opc<18:16> = 101 | sh<13> | imm8<12:5> | Zdn<4:0>
3798 SVEIntAddSubtractImmUnpredicatedHelper(UQADD_z_zi, zd, imm8, shift);
3803 int imm8,
3807 // size<23:22> | opc<18:16> = 111 | sh<13> | imm8<12:5> | Zdn<4:0>
3814 SVEIntAddSubtractImmUnpredicatedHelper(UQSUB_z_zi, zd, imm8, shift);
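The whole unpredicated add/sub immediate family (add, sub, subr, sqadd, sqsub, uqadd, uqsub, lines 3616-3814) funnels into SVEIntAddSubtractImmUnpredicatedHelper, so each form accepts either a plain unsigned byte or a multiple of 256 up to 0xff00. The continuation below assumes these emitters take (zd, zn, imm8) with an optional trailing shift, which the helper's parameters suggest but the matches do not show:

    // Continuing the same assumed setup; parameter lists are assumed as noted above.
    masm->add(z0.VnS(), z0.VnS(), 42);       // Plain byte immediate, shift 0.
    masm->sub(z1.VnH(), z1.VnH(), 0x2a00);   // 42 * 256: derived as imm8 42, LSL #8.
    masm->sqadd(z2.VnB(), z2.VnB(), 200);    // Saturating forms share the helper.
    masm->uqsub(z3.VnD(), z3.VnD(), 1, 8);   // Explicit LSL #8: subtracts 256.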
6537 int imm8,
6540 cpy(zd, pg, imm8, shift);
6589 void Assembler::mov(const ZRegister& zd, int imm8, int shift) {
6590 dup(zd, imm8, shift);
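Finally, the two mov aliases (lines 6537-6590) forward straight to cpy and dup, so the same imm8 and shift resolution applies to them. A last hedged continuation; only the imm8 parameter and the forwarding calls appear in the matches, so the predicated form's predicate operand is an assumption here.

    // Continuing the same assumed setup; p0.Merging() and its type are assumptions.
    masm->mov(z0.VnS(), -3);                // Line 6590: forwards to dup.
    masm->mov(z1.VnH(), p0.Merging(), 42);  // Line 6540: forwards to cpy.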