Macros referencing sf in the arm64 BPF JIT encoding header (arch/arm64/net/bpf_jit.h); the sf flag selects between the 64-bit and 32-bit form of each generated instruction.

#define A64_VARIANT(sf) \
	((sf) ? AARCH64_INSN_VARIANT_64BIT : AARCH64_INSN_VARIANT_32BIT)
/* Compare & branch (immediate) */
#define A64_COMP_BRANCH(sf, Rt, offset, type) \
	aarch64_insn_gen_comp_branch_imm(0, offset, Rt, A64_VARIANT(sf), \
		AARCH64_INSN_BRANCH_COMP_##type)
#define A64_CBZ(sf, Rt, imm19) A64_COMP_BRANCH(sf, Rt, (imm19) << 2, ZERO)
#define A64_CBNZ(sf, Rt, imm19) A64_COMP_BRANCH(sf, Rt, (imm19) << 2, NONZERO)
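
Throughout the JIT, these encodings are fed to an emit(insn, ctx) helper that appends each 32-bit word to the program image; emit(), struct jit_ctx, and the prg register below are assumptions taken from bpf_jit_comp.c, not part of this header. The tail-call sequence, for instance, bails out when the looked-up program pointer is NULL:

	/* sketch: skip the tail call if prg == NULL */
	emit(A64_CBZ(1, prg, jmp_offset), ctx);
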
/* Load/store exclusive */
#define A64_SIZE(sf) \
	((sf) ? AARCH64_INSN_SIZE_64 : AARCH64_INSN_SIZE_32)
#define A64_LSX(sf, Rt, Rn, Rs, type) \
	aarch64_insn_gen_load_store_ex(Rt, Rn, Rs, A64_SIZE(sf), \
				       AARCH64_INSN_LDST_##type)
/* Rt = [Rn]; (atomic) */
#define A64_LDXR(sf, Rt, Rn) \
	A64_LSX(sf, Rt, Rn, A64_ZR, LOAD_EX)
/* [Rn] = Rt; (atomic) Rs = [state] */
#define A64_STXR(sf, Rt, Rn, Rs) \
	A64_LSX(sf, Rt, Rn, Rs, STORE_EX)
/* LSE atomics */
#define A64_STADD(sf, Rn, Rs) \
	aarch64_insn_gen_stadd(Rn, Rs, A64_SIZE(sf))
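
The exclusive pair backs the JIT's lowering of BPF's atomic add (BPF_STX | BPF_XADD) as a load-exclusive/store-exclusive retry loop, with A64_STADD as the single-instruction alternative on CPUs with LSE atomics. A sketch of the LL/SC loop, assuming emit(), ctx, and the tmp/tmp2/tmp3 scratch registers from bpf_jit_comp.c:

	emit(A64_LDXR(isdw, tmp2, tmp), ctx);       /* tmp2 = [tmp] (exclusive) */
	emit(A64_ADD(isdw, tmp2, tmp2, src), ctx);  /* tmp2 += src */
	emit(A64_STXR(isdw, tmp2, tmp, tmp3), ctx); /* [tmp] = tmp2, tmp3 = status */
	jmp_offset = -3;
	emit(A64_CBNZ(0, tmp3, jmp_offset), ctx);   /* status != 0: lost the race, retry */
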
/* Add/subtract (immediate) */
#define A64_ADDSUB_IMM(sf, Rd, Rn, imm12, type) \
	aarch64_insn_gen_add_sub_imm(Rd, Rn, imm12, \
		A64_VARIANT(sf), AARCH64_INSN_ADSB_##type)
/* Rd = Rn OP imm12 */
#define A64_ADD_I(sf, Rd, Rn, imm12) A64_ADDSUB_IMM(sf, Rd, Rn, imm12, ADD)
#define A64_SUB_I(sf, Rd, Rn, imm12) A64_ADDSUB_IMM(sf, Rd, Rn, imm12, SUB)
#define A64_ADDS_I(sf, Rd, Rn, imm12) \
	A64_ADDSUB_IMM(sf, Rd, Rn, imm12, ADD_SETFLAGS)
#define A64_SUBS_I(sf, Rd, Rn, imm12) \
	A64_ADDSUB_IMM(sf, Rd, Rn, imm12, SUB_SETFLAGS)
/* Rn + imm12; set condition flags */
#define A64_CMN_I(sf, Rn, imm12) A64_ADDS_I(sf, A64_ZR, Rn, imm12)
/* Rn - imm12; set condition flags */
#define A64_CMP_I(sf, Rn, imm12) A64_SUBS_I(sf, A64_ZR, Rn, imm12)
/* Rd = Rn */
#define A64_MOV(sf, Rd, Rn) A64_ADD_I(sf, Rd, Rn, 0)
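
A64_MOV encodes a register move as ADD #0 rather than the ORR alias because the immediate form also accepts the stack pointer, and the compare macros discard their result by writing the zero register. For illustration, assuming emit()/ctx from bpf_jit_comp.c and the A64_FP/A64_SP register macros defined elsewhere in this header:

	emit(A64_MOV(1, A64_FP, A64_SP), ctx);  /* prologue: frame pointer = SP */
	emit(A64_CMP_I(is64, dst, 0), ctx);     /* flags = dst - 0, result to ZR */
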
/* Bitfield move */
#define A64_BITFIELD(sf, Rd, Rn, immr, imms, type) \
	aarch64_insn_gen_bitfield(Rd, Rn, immr, imms, \
		A64_VARIANT(sf), AARCH64_INSN_BITFIELD_MOVE_##type)
/* Signed, with sign replication to left and zeros to right */
#define A64_SBFM(sf, Rd, Rn, ir, is) A64_BITFIELD(sf, Rd, Rn, ir, is, SIGNED)
/* Unsigned, with zeros to left and right */
#define A64_UBFM(sf, Rd, Rn, ir, is) A64_BITFIELD(sf, Rd, Rn, ir, is, UNSIGNED)
/* Rd = Rn << shift */
#define A64_LSL(sf, Rd, Rn, shift) ({	\
	int sz = (sf) ? 64 : 32;	\
	A64_UBFM(sf, Rd, Rn, (unsigned)-(shift) % sz, sz - 1 - (shift)); \
})
/* Rd = Rn >> shift; unsigned */
#define A64_LSR(sf, Rd, Rn, shift) A64_UBFM(sf, Rd, Rn, shift, (sf) ? 63 : 31)
/* Rd = Rn >> shift; signed */
#define A64_ASR(sf, Rd, Rn, shift) A64_SBFM(sf, Rd, Rn, shift, (sf) ? 63 : 31)
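
The A64_LSL operands look odd but are exactly the architectural LSL alias of UBFM: rotate right by -shift modulo the register size, then keep the low sz - 1 - shift bits. A worked example:

	/* A64_LSL(1, Rd, Rn, 12):
	 *   sz = 64, immr = (unsigned)-12 % 64 = 52, imms = 64 - 1 - 12 = 51
	 *   => UBFM Xd, Xn, #52, #51, the canonical encoding of LSL Xd, Xn, #12
	 */
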
/* Zero extend */
#define A64_UXTH(sf, Rd, Rn) A64_UBFM(sf, Rd, Rn, 0, 15)
#define A64_UXTW(sf, Rd, Rn) A64_UBFM(sf, Rd, Rn, 0, 31)
/* Move wide (immediate) */
#define A64_MOVEW(sf, Rd, imm16, shift, type) \
	aarch64_insn_gen_movewide(Rd, imm16, shift, \
		A64_VARIANT(sf), AARCH64_INSN_MOVEWIDE_##type)
/* Rd = Zeros (for MOVZ);
 * Rd |= imm16 << shift (where shift is {0, 16, 32, 48});
 * Rd = ~Rd; (for MOVN); */
#define A64_MOVN(sf, Rd, imm16, shift) A64_MOVEW(sf, Rd, imm16, shift, INVERSE)
#define A64_MOVZ(sf, Rd, imm16, shift) A64_MOVEW(sf, Rd, imm16, shift, ZERO)
#define A64_MOVK(sf, Rd, imm16, shift) A64_MOVEW(sf, Rd, imm16, shift, KEEP)
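
An arbitrary 64-bit constant is built from these as one MOVZ plus up to three MOVKs, one per 16-bit chunk. A simplified sketch in the spirit of the JIT's emit_a64_mov_i64() helper (that name comes from bpf_jit_comp.c; the real version also skips all-zero chunks), assuming emit() and ctx from the same file:

	emit(A64_MOVZ(1, reg, val & 0xffff, 0), ctx);
	emit(A64_MOVK(1, reg, (val >> 16) & 0xffff, 16), ctx);
	emit(A64_MOVK(1, reg, (val >> 32) & 0xffff, 32), ctx);
	emit(A64_MOVK(1, reg, (val >> 48) & 0xffff, 48), ctx);
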
/* Add/subtract (shifted register) */
#define A64_ADDSUB_SREG(sf, Rd, Rn, Rm, type) \
	aarch64_insn_gen_add_sub_shifted_reg(Rd, Rn, Rm, 0, \
		A64_VARIANT(sf), AARCH64_INSN_ADSB_##type)
/* Rd = Rn OP Rm */
#define A64_ADD(sf, Rd, Rn, Rm)  A64_ADDSUB_SREG(sf, Rd, Rn, Rm, ADD)
#define A64_SUB(sf, Rd, Rn, Rm)  A64_ADDSUB_SREG(sf, Rd, Rn, Rm, SUB)
#define A64_SUBS(sf, Rd, Rn, Rm) A64_ADDSUB_SREG(sf, Rd, Rn, Rm, SUB_SETFLAGS)
/* Rd = -Rm */
#define A64_NEG(sf, Rd, Rm) A64_SUB(sf, Rd, A64_ZR, Rm)
/* Rn - Rm; set condition flags */
#define A64_CMP(sf, Rn, Rm) A64_SUBS(sf, A64_ZR, Rn, Rm)
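
These map one-to-one onto BPF's register-to-register ALU ops in the JIT's instruction-selection switch; a sketch, assuming emit(), ctx, and the is64/dst/src locals from bpf_jit_comp.c:

	emit(A64_ADD(is64, dst, dst, src), ctx);  /* BPF_ADD | BPF_X: dst += src */
	emit(A64_NEG(is64, dst, dst), ctx);       /* BPF_NEG: dst = -dst */
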
/* Data-processing (1 source) */
#define A64_DATA1(sf, Rd, Rn, type) aarch64_insn_gen_data1(Rd, Rn, \
	A64_VARIANT(sf), AARCH64_INSN_DATA1_##type)
/* Rd = BSWAPx(Rn) */
#define A64_REV16(sf, Rd, Rn) A64_DATA1(sf, Rd, Rn, REVERSE_16)
#define A64_REV32(sf, Rd, Rn) A64_DATA1(sf, Rd, Rn, REVERSE_32)
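
The reverses implement BPF_END byte swaps; where a swap leaves stale high bits, the JIT clears them with the zero-extend macros above. A sketch of the 16-bit case, assuming emit()/ctx from bpf_jit_comp.c:

	emit(A64_REV16(is64, dst, dst), ctx);  /* swap the low two bytes */
	emit(A64_UXTH(is64, dst, dst), ctx);   /* clear bits 63:16 left dirty by the swap */
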
/* Data-processing (2 source) */
/* Rd = Rn OP Rm */
#define A64_DATA2(sf, Rd, Rn, Rm, type) aarch64_insn_gen_data2(Rd, Rn, Rm, \
	A64_VARIANT(sf), AARCH64_INSN_DATA2_##type)
#define A64_UDIV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, UDIV)
#define A64_LSLV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, LSLV)
#define A64_LSRV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, LSRV)
#define A64_ASRV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, ASRV)
/* Data-processing (3 source) */
/* Rd = Ra + Rn * Rm */
#define A64_MADD(sf, Rd, Ra, Rn, Rm) aarch64_insn_gen_data3(Rd, Ra, Rn, Rm, \
	A64_VARIANT(sf), AARCH64_INSN_DATA3_MADD)
/* Rd = Ra - Rn * Rm */
#define A64_MSUB(sf, Rd, Ra, Rn, Rm) aarch64_insn_gen_data3(Rd, Ra, Rn, Rm, \
	A64_VARIANT(sf), AARCH64_INSN_DATA3_MSUB)
/* Rd = Rn * Rm */
#define A64_MUL(sf, Rd, Rn, Rm) A64_MADD(sf, Rd, A64_ZR, Rn, Rm)
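
AArch64 has no remainder instruction, so BPF_MOD becomes a divide followed by a multiply-subtract; a sketch, assuming emit(), ctx, and a scratch register tmp from bpf_jit_comp.c:

	emit(A64_UDIV(is64, tmp, dst, src), ctx);       /* tmp = dst / src */
	emit(A64_MSUB(is64, dst, dst, tmp, src), ctx);  /* dst = dst - tmp * src */
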
/* Logical (shifted register) */
#define A64_LOGIC_SREG(sf, Rd, Rn, Rm, type) \
	aarch64_insn_gen_logical_shifted_reg(Rd, Rn, Rm, 0, \
		A64_VARIANT(sf), AARCH64_INSN_LOGIC_##type)
/* Rd = Rn OP Rm */
#define A64_AND(sf, Rd, Rn, Rm)  A64_LOGIC_SREG(sf, Rd, Rn, Rm, AND)
#define A64_ORR(sf, Rd, Rn, Rm)  A64_LOGIC_SREG(sf, Rd, Rn, Rm, ORR)
#define A64_EOR(sf, Rd, Rn, Rm)  A64_LOGIC_SREG(sf, Rd, Rn, Rm, EOR)
#define A64_ANDS(sf, Rd, Rn, Rm) A64_LOGIC_SREG(sf, Rd, Rn, Rm, AND_SETFLAGS)
/* Rn & Rm; set condition flags */
#define A64_TST(sf, Rn, Rm) A64_ANDS(sf, A64_ZR, Rn, Rm)
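
A64_TST backs BPF_JSET: set the flags from Rn & Rm, then take a conditional branch on NE. A sketch, assuming emit()/ctx from bpf_jit_comp.c and the A64_B_()/A64_COND_NE conditional-branch macros defined elsewhere in this header:

	emit(A64_TST(is64, dst, src), ctx);          /* flags from dst & src */
	emit(A64_B_(A64_COND_NE, jmp_offset), ctx);  /* taken if any tested bit is set */
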
/* Logical (immediate) */
#define A64_LOGIC_IMM(sf, Rd, Rn, imm, type) ({ \
	u64 imm64 = (sf) ? (u64)imm : (u64)(u32)imm; \
	aarch64_insn_gen_logical_immediate(AARCH64_INSN_LOGIC_##type, \
		A64_VARIANT(sf), Rn, Rd, imm64); \
})
/* Rd = Rn OP imm */
#define A64_AND_I(sf, Rd, Rn, imm) A64_LOGIC_IMM(sf, Rd, Rn, imm, AND)
#define A64_ORR_I(sf, Rd, Rn, imm) A64_LOGIC_IMM(sf, Rd, Rn, imm, ORR)
#define A64_EOR_I(sf, Rd, Rn, imm) A64_LOGIC_IMM(sf, Rd, Rn, imm, EOR)
#define A64_ANDS_I(sf, Rd, Rn, imm) A64_LOGIC_IMM(sf, Rd, Rn, imm, AND_SETFLAGS)
/* Rn & imm; set condition flags */
#define A64_TST_I(sf, Rn, imm) A64_ANDS_I(sf, A64_ZR, Rn, imm)
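
Only bitmask-encodable immediates fit the logical-immediate form; for anything else aarch64_insn_gen_logical_immediate() yields AARCH64_BREAK_FAULT and the JIT falls back to loading the constant and using the register form. A sketch of that pattern, assuming emit(), emit_a64_mov_i(), ctx, and a scratch register tmp from bpf_jit_comp.c:

	a64_insn = A64_AND_I(is64, dst, dst, imm);
	if (a64_insn != AARCH64_BREAK_FAULT) {
		emit(a64_insn, ctx);                      /* imm was encodable */
	} else {
		emit_a64_mov_i(is64, tmp, imm, ctx);      /* materialize imm in tmp */
		emit(A64_AND(is64, dst, dst, tmp), ctx);
	}
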