Lines matching references to sf in arch/arm64/net/bpf_jit.h (the arm64 BPF JIT instruction encoders); sf selects the 64-bit (sf = 1) or 32-bit (sf = 0) variant of each instruction.

#define A64_VARIANT(sf) \
	((sf) ? AARCH64_INSN_VARIANT_64BIT : AARCH64_INSN_VARIANT_32BIT)

/* Compare & branch (immediate) */
#define A64_COMP_BRANCH(sf, Rt, offset, type) \
	aarch64_insn_gen_comp_branch_imm(0, offset, Rt, A64_VARIANT(sf), \
		AARCH64_INSN_BRANCH_COMP_##type)
#define A64_CBZ(sf, Rt, imm19) A64_COMP_BRANCH(sf, Rt, (imm19) << 2, ZERO)
#define A64_CBNZ(sf, Rt, imm19) A64_COMP_BRANCH(sf, Rt, (imm19) << 2, NONZERO)
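
/*
 * Usage sketch (illustrative, not part of the header; emit(), ctx and
 * the prg register are assumptions borrowed from bpf_jit_comp.c). The
 * imm19 operand counts instructions; the macros shift it left by 2 to
 * form the byte offset that aarch64_insn_gen_comp_branch_imm() expects.
 *
 *	emit(A64_CBZ(1, prg, 2), ctx);	// if (prg == 0) skip the next insn
 */
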
/* Load/store exclusive */
#define A64_SIZE(sf) \
	((sf) ? AARCH64_INSN_SIZE_64 : AARCH64_INSN_SIZE_32)
#define A64_LSX(sf, Rt, Rn, Rs, type) \
	aarch64_insn_gen_load_store_ex(Rt, Rn, Rs, A64_SIZE(sf), \
		AARCH64_INSN_LDST_##type)
/* Rt = [Rn]; (atomic) */
#define A64_LDXR(sf, Rt, Rn) \
	A64_LSX(sf, Rt, Rn, A64_ZR, LOAD_EX)
/* [Rn] = Rt; (atomic) Rs = [state] */
#define A64_STXR(sf, Rt, Rn, Rs) \
	A64_LSX(sf, Rt, Rn, Rs, STORE_EX)
/* [Rn] = Rt (store release); (atomic) Rs = [state] */
#define A64_STLXR(sf, Rt, Rn, Rs) \
	aarch64_insn_gen_load_store_ex(Rt, Rn, Rs, A64_SIZE(sf), \
		AARCH64_INSN_LDST_STORE_REL_EX)
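
/*
 * Illustrative sketch, modelled on the pre-LSE atomic-add path in
 * bpf_jit_comp.c (emit(), ctx and the tmp/src registers are assumed
 * from there): a classic load-exclusive/store-exclusive retry loop.
 * isdw is 1 for a 64-bit access, 0 for 32-bit; the status register is
 * always 32-bit, hence sf = 0 in the CBNZ.
 *
 *	emit(A64_LDXR(isdw, tmp2, tmp1), ctx);		// tmp2 = [tmp1]
 *	emit(A64_ADD(isdw, tmp2, tmp2, src), ctx);	// tmp2 += src
 *	emit(A64_STXR(isdw, tmp2, tmp1, tmp3), ctx);	// [tmp1] = tmp2, tmp3 = status
 *	emit(A64_CBNZ(0, tmp3, -3), ctx);		// retry if the store failed
 */
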
/*
 * LSE atomics
 *
 * ST{ADD,CLR,SET,EOR} is simply encoded as an alias for
 * LD{ADD,CLR,SET,EOR} with XZR as the destination register.
 */
#define A64_ST_OP(sf, Rn, Rs, op) \
	aarch64_insn_gen_atomic_ld_op(A64_ZR, Rn, Rs, \
		A64_SIZE(sf), AARCH64_INSN_MEM_ATOMIC_##op, \
		AARCH64_INSN_MEM_ORDER_NONE)
/* [Rn] <op>= Rs */
#define A64_STADD(sf, Rn, Rs) A64_ST_OP(sf, Rn, Rs, ADD)
#define A64_STCLR(sf, Rn, Rs) A64_ST_OP(sf, Rn, Rs, CLR)
#define A64_STEOR(sf, Rn, Rs) A64_ST_OP(sf, Rn, Rs, EOR)
#define A64_STSET(sf, Rn, Rs) A64_ST_OP(sf, Rn, Rs, SET)

#define A64_LD_OP_AL(sf, Rt, Rn, Rs, op) \
	aarch64_insn_gen_atomic_ld_op(Rt, Rn, Rs, \
		A64_SIZE(sf), AARCH64_INSN_MEM_ATOMIC_##op, \
		AARCH64_INSN_MEM_ORDER_ACQREL)
/* Rt = [Rn] (load acquire); [Rn] <op>= Rs (store release) */
#define A64_LDADDAL(sf, Rt, Rn, Rs) A64_LD_OP_AL(sf, Rt, Rn, Rs, ADD)
#define A64_LDCLRAL(sf, Rt, Rn, Rs) A64_LD_OP_AL(sf, Rt, Rn, Rs, CLR)
#define A64_LDEORAL(sf, Rt, Rn, Rs) A64_LD_OP_AL(sf, Rt, Rn, Rs, EOR)
#define A64_LDSETAL(sf, Rt, Rn, Rs) A64_LD_OP_AL(sf, Rt, Rn, Rs, SET)
/* Rt = [Rn] (load acquire); [Rn] = Rs (store release) */
#define A64_SWPAL(sf, Rt, Rn, Rs) A64_LD_OP_AL(sf, Rt, Rn, Rs, SWP)
/* Rs = CAS(Rn, Rs, Rt) (load acquire & store release) */
#define A64_CASAL(sf, Rt, Rn, Rs) \
	aarch64_insn_gen_cas(Rt, Rn, Rs, A64_SIZE(sf), \
		AARCH64_INSN_MEM_ORDER_ACQREL)
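
/*
 * Illustrative sketch (emit(), ctx and the registers are assumptions
 * in the style of bpf_jit_comp.c): with LSE these operations are
 * single instructions, e.g. an atomic add and an atomic exchange.
 *
 *	emit(A64_STADD(isdw, reg, src), ctx);		// [reg] += src
 *	emit(A64_SWPAL(isdw, src, reg, src), ctx);	// src = xchg([reg], src)
 */
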
/* Add/subtract (immediate) */
#define A64_ADDSUB_IMM(sf, Rd, Rn, imm12, type) \
	aarch64_insn_gen_add_sub_imm(Rd, Rn, imm12, \
		A64_VARIANT(sf), AARCH64_INSN_ADSB_##type)
/* Rd = Rn OP imm12 */
#define A64_ADD_I(sf, Rd, Rn, imm12) A64_ADDSUB_IMM(sf, Rd, Rn, imm12, ADD)
#define A64_SUB_I(sf, Rd, Rn, imm12) A64_ADDSUB_IMM(sf, Rd, Rn, imm12, SUB)
#define A64_ADDS_I(sf, Rd, Rn, imm12) \
	A64_ADDSUB_IMM(sf, Rd, Rn, imm12, ADD_SETFLAGS)
#define A64_SUBS_I(sf, Rd, Rn, imm12) \
	A64_ADDSUB_IMM(sf, Rd, Rn, imm12, SUB_SETFLAGS)
/* Rn + imm12; set condition flags */
#define A64_CMN_I(sf, Rn, imm12) A64_ADDS_I(sf, A64_ZR, Rn, imm12)
/* Rn - imm12; set condition flags */
#define A64_CMP_I(sf, Rn, imm12) A64_SUBS_I(sf, A64_ZR, Rn, imm12)
/* Rd = Rn */
#define A64_MOV(sf, Rd, Rn) A64_ADD_I(sf, Rd, Rn, 0)
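
/*
 * Illustrative sketch (emit(), ctx and stack_size are assumptions in
 * the style of bpf_jit_comp.c's prologue; A64_SP comes from the
 * register macros earlier in this header): immediate add/sub also
 * accepts the stack pointer, which is why A64_MOV above is encoded as
 * ADD #0 rather than an ORR-based move.
 *
 *	emit(A64_SUB_I(1, A64_SP, A64_SP, stack_size), ctx);	// sp -= stack_size
 *	emit(A64_ADD_I(1, A64_SP, A64_SP, stack_size), ctx);	// sp += stack_size
 */
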
/* Bitfield move */
#define A64_BITFIELD(sf, Rd, Rn, immr, imms, type) \
	aarch64_insn_gen_bitfield(Rd, Rn, immr, imms, \
		A64_VARIANT(sf), AARCH64_INSN_BITFIELD_MOVE_##type)
/* Signed, with sign replication to left and zeros to right */
#define A64_SBFM(sf, Rd, Rn, ir, is) A64_BITFIELD(sf, Rd, Rn, ir, is, SIGNED)
/* Unsigned, with zeros to left and right */
#define A64_UBFM(sf, Rd, Rn, ir, is) A64_BITFIELD(sf, Rd, Rn, ir, is, UNSIGNED)

/* Rd = Rn << shift */
#define A64_LSL(sf, Rd, Rn, shift) ({	\
	int sz = (sf) ? 64 : 32;	\
	A64_UBFM(sf, Rd, Rn, (unsigned)-(shift) % sz, sz - 1 - (shift)); \
})
/* Rd = Rn >> shift; logical */
#define A64_LSR(sf, Rd, Rn, shift) A64_UBFM(sf, Rd, Rn, shift, (sf) ? 63 : 31)
/* Rd = Rn >> shift; arithmetic (sign-extending) */
#define A64_ASR(sf, Rd, Rn, shift) A64_SBFM(sf, Rd, Rn, shift, (sf) ? 63 : 31)

/* Zero extend */
#define A64_UXTH(sf, Rd, Rn) A64_UBFM(sf, Rd, Rn, 0, 15)
#define A64_UXTW(sf, Rd, Rn) A64_UBFM(sf, Rd, Rn, 0, 31)
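
/*
 * Worked example for the UBFM encoding of LSL: with sf = 1 and
 * shift = 3, A64_LSL computes immr = (unsigned)-3 % 64 = 61 and
 * imms = 64 - 1 - 3 = 60, i.e. UBFM Rd, Rn, #61, #60, which is the
 * architectural alias of LSL Rd, Rn, #3.
 */
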
/* Move wide (immediate) */
#define A64_MOVEW(sf, Rd, imm16, shift, type) \
	aarch64_insn_gen_movewide(Rd, imm16, shift, \
		A64_VARIANT(sf), AARCH64_INSN_MOVEWIDE_##type)
/* Rd = Zeros (for MOVZ);
 * Rd |= imm16 << shift (where shift is {0, 16, 32, 48});
 * Rd = ~Rd; (for MOVN); */
#define A64_MOVN(sf, Rd, imm16, shift) A64_MOVEW(sf, Rd, imm16, shift, INVERSE)
#define A64_MOVZ(sf, Rd, imm16, shift) A64_MOVEW(sf, Rd, imm16, shift, ZERO)
#define A64_MOVK(sf, Rd, imm16, shift) A64_MOVEW(sf, Rd, imm16, shift, KEEP)
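
/*
 * Illustrative sketch of building a 64-bit immediate 16 bits at a
 * time, modelled on bpf_jit_comp.c's emit_a64_mov_i64() (emit() and
 * struct jit_ctx are assumptions from that file):
 *
 *	static void mov_i64(const int reg, const u64 val, struct jit_ctx *ctx)
 *	{
 *		u64 tmp = val;
 *		int shift = 0;
 *
 *		emit(A64_MOVZ(1, reg, tmp & 0xffff, shift), ctx);
 *		tmp >>= 16;
 *		shift += 16;
 *		while (tmp) {
 *			if (tmp & 0xffff)
 *				emit(A64_MOVK(1, reg, tmp & 0xffff, shift), ctx);
 *			tmp >>= 16;
 *			shift += 16;
 *		}
 *	}
 */
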
/* Add/subtract (shifted register) */
#define A64_ADDSUB_SREG(sf, Rd, Rn, Rm, type) \
	aarch64_insn_gen_add_sub_shifted_reg(Rd, Rn, Rm, 0, \
		A64_VARIANT(sf), AARCH64_INSN_ADSB_##type)
/* Rd = Rn OP Rm */
#define A64_ADD(sf, Rd, Rn, Rm)  A64_ADDSUB_SREG(sf, Rd, Rn, Rm, ADD)
#define A64_SUB(sf, Rd, Rn, Rm)  A64_ADDSUB_SREG(sf, Rd, Rn, Rm, SUB)
#define A64_SUBS(sf, Rd, Rn, Rm) A64_ADDSUB_SREG(sf, Rd, Rn, Rm, SUB_SETFLAGS)
/* Rd = -Rm */
#define A64_NEG(sf, Rd, Rm) A64_SUB(sf, Rd, A64_ZR, Rm)
/* Rn - Rm; set condition flags */
#define A64_CMP(sf, Rn, Rm) A64_SUBS(sf, A64_ZR, Rn, Rm)

/* Data-processing (1 source) */
#define A64_DATA1(sf, Rd, Rn, type) aarch64_insn_gen_data1(Rd, Rn, \
	A64_VARIANT(sf), AARCH64_INSN_DATA1_##type)
/* Rd = BSWAPx(Rn) */
#define A64_REV16(sf, Rd, Rn) A64_DATA1(sf, Rd, Rn, REVERSE_16)
#define A64_REV32(sf, Rd, Rn) A64_DATA1(sf, Rd, Rn, REVERSE_32)
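
/*
 * Illustrative sketch (emit(), ctx and dst are assumptions in the
 * style of bpf_jit_comp.c's BPF_END handling): REV16 only swaps bytes
 * within each halfword, so a 16-bit byte swap is followed by a
 * zero-extend to clear the upper bits.
 *
 *	emit(A64_REV16(is64, dst, dst), ctx);
 *	emit(A64_UXTH(is64, dst, dst), ctx);	// zero bits 16 and up
 */
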
/* Data-processing (2 source) */
/* Rd = Rn OP Rm */
#define A64_DATA2(sf, Rd, Rn, Rm, type) aarch64_insn_gen_data2(Rd, Rn, Rm, \
	A64_VARIANT(sf), AARCH64_INSN_DATA2_##type)
#define A64_UDIV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, UDIV)
#define A64_LSLV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, LSLV)
#define A64_LSRV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, LSRV)
#define A64_ASRV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, ASRV)

/* Data-processing (3 source) */
/* Rd = Ra + Rn * Rm */
#define A64_MADD(sf, Rd, Ra, Rn, Rm) aarch64_insn_gen_data3(Rd, Ra, Rn, Rm, \
	A64_VARIANT(sf), AARCH64_INSN_DATA3_MADD)
/* Rd = Ra - Rn * Rm */
#define A64_MSUB(sf, Rd, Ra, Rn, Rm) aarch64_insn_gen_data3(Rd, Ra, Rn, Rm, \
	A64_VARIANT(sf), AARCH64_INSN_DATA3_MSUB)
/* Rd = Rn * Rm */
#define A64_MUL(sf, Rd, Rn, Rm) A64_MADD(sf, Rd, A64_ZR, Rn, Rm)
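
/*
 * Illustrative sketch (emit(), ctx and the registers are assumptions
 * in the style of bpf_jit_comp.c): AArch64 has no modulo instruction,
 * so dst % src is computed as dst - (dst / src) * src with UDIV and
 * MSUB.
 *
 *	emit(A64_UDIV(is64, tmp, dst, src), ctx);	// tmp = dst / src
 *	emit(A64_MSUB(is64, dst, dst, tmp, src), ctx);	// dst = dst - tmp * src
 */
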
/* Logical (shifted register) */
#define A64_LOGIC_SREG(sf, Rd, Rn, Rm, type) \
	aarch64_insn_gen_logical_shifted_reg(Rd, Rn, Rm, 0, \
		A64_VARIANT(sf), AARCH64_INSN_LOGIC_##type)
/* Rd = Rn OP Rm */
#define A64_AND(sf, Rd, Rn, Rm)  A64_LOGIC_SREG(sf, Rd, Rn, Rm, AND)
#define A64_ORR(sf, Rd, Rn, Rm)  A64_LOGIC_SREG(sf, Rd, Rn, Rm, ORR)
#define A64_EOR(sf, Rd, Rn, Rm)  A64_LOGIC_SREG(sf, Rd, Rn, Rm, EOR)
#define A64_ANDS(sf, Rd, Rn, Rm) A64_LOGIC_SREG(sf, Rd, Rn, Rm, AND_SETFLAGS)
/* Rn & Rm; set condition flags */
#define A64_TST(sf, Rn, Rm) A64_ANDS(sf, A64_ZR, Rn, Rm)
/* Rd = ~Rm (alias of ORN with A64_ZR as Rn) */
#define A64_MVN(sf, Rd, Rm)  \
	A64_LOGIC_SREG(sf, Rd, A64_ZR, Rm, ORN)

/* Logical (immediate) */
#define A64_LOGIC_IMM(sf, Rd, Rn, imm, type) ({ \
	u64 imm64 = (sf) ? (u64)imm : (u64)(u32)imm; \
	aarch64_insn_gen_logical_immediate(AARCH64_INSN_LOGIC_##type, \
		A64_VARIANT(sf), Rn, Rd, imm64); \
})
/* Rd = Rn OP imm */
#define A64_AND_I(sf, Rd, Rn, imm) A64_LOGIC_IMM(sf, Rd, Rn, imm, AND)
#define A64_ORR_I(sf, Rd, Rn, imm) A64_LOGIC_IMM(sf, Rd, Rn, imm, ORR)
#define A64_EOR_I(sf, Rd, Rn, imm) A64_LOGIC_IMM(sf, Rd, Rn, imm, EOR)
#define A64_ANDS_I(sf, Rd, Rn, imm) A64_LOGIC_IMM(sf, Rd, Rn, imm, AND_SETFLAGS)
/* Rn & imm; set condition flags */
#define A64_TST_I(sf, Rn, imm) A64_ANDS_I(sf, A64_ZR, Rn, imm)
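
/*
 * Illustrative sketch (emit(), ctx, emit_a64_mov_i() and tmp are
 * assumptions modelled on bpf_jit_comp.c): not every constant is a
 * valid logical immediate, so a caller can check for the encoder's
 * AARCH64_BREAK_FAULT return and fall back to a register operand.
 *
 *	u32 insn = A64_AND_I(is64, dst, dst, imm);
 *
 *	if (insn != AARCH64_BREAK_FAULT) {
 *		emit(insn, ctx);
 *	} else {
 *		emit_a64_mov_i(is64, tmp, imm, ctx);
 *		emit(A64_AND(is64, dst, dst, tmp), ctx);
 *	}
 */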