Lines matching refs:imm

113 				  u32 insn, u64 imm)  in aarch64_insn_encode_immediate()  argument
124 immlo = (imm & ADR_IMM_LOMASK) << ADR_IMM_LOSHIFT; in aarch64_insn_encode_immediate()
125 imm >>= ADR_IMM_HILOSPLIT; in aarch64_insn_encode_immediate()
126 immhi = (imm & ADR_IMM_HIMASK) << ADR_IMM_HISHIFT; in aarch64_insn_encode_immediate()
127 imm = immlo | immhi; in aarch64_insn_encode_immediate()
141 insn |= (imm & mask) << shift; in aarch64_insn_encode_immediate()
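
The hits at 124-127 are the ADR/ADRP special case inside aarch64_insn_encode_immediate(): the 21-bit immediate is split into immlo (instruction bits 30:29) and immhi (bits 23:5) before the generic mask-and-shift at 141 ORs it into place. A minimal standalone sketch, assuming the kernel's ADR_IMM_* constants (HILOSPLIT = 2, a 2 MiB immediate range) and a hypothetical encode_adr_imm() name:

    #include <stdint.h>

    #define ADR_IMM_HILOSPLIT  2      /* low 2 bits go to immlo */
    #define ADR_IMM_LOMASK     ((1u << ADR_IMM_HILOSPLIT) - 1)
    #define ADR_IMM_HIMASK     (((1u << 21) >> ADR_IMM_HILOSPLIT) - 1)
    #define ADR_IMM_LOSHIFT    29     /* immlo field: insn bits 30:29 */
    #define ADR_IMM_HISHIFT    5      /* immhi field: insn bits 23:5 */

    /* hypothetical standalone version of the ADR branch of the switch */
    static uint32_t encode_adr_imm(uint32_t insn, uint64_t imm)
    {
        uint64_t immlo, immhi;
        uint32_t mask = (ADR_IMM_LOMASK << ADR_IMM_LOSHIFT) |
                        (ADR_IMM_HIMASK << ADR_IMM_HISHIFT);

        immlo = (imm & ADR_IMM_LOMASK) << ADR_IMM_LOSHIFT;   /* line 124 */
        imm >>= ADR_IMM_HILOSPLIT;                           /* line 125 */
        immhi = (imm & ADR_IMM_HIMASK) << ADR_IMM_HISHIFT;   /* line 126 */
        imm = immlo | immhi;                                 /* line 127 */

        insn &= ~mask;                     /* clear the old field bits */
        return insn | ((uint32_t)imm & mask);  /* line 141, shift == 0 */
    }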
409 unsigned int imm, in aarch64_insn_gen_load_store_imm() argument
422 if (imm & ~(BIT(12 + shift) - BIT(shift))) { in aarch64_insn_gen_load_store_imm()
423 pr_err("%s: invalid imm: %d\n", __func__, imm); in aarch64_insn_gen_load_store_imm()
427 imm >>= shift; in aarch64_insn_gen_load_store_imm()
448 return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_12, insn, imm); in aarch64_insn_gen_load_store_imm()
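
The check at 422 rejects any offset with bits outside [shift, 12 + shift): the unsigned-offset LDR/STR forms encode the offset scaled down by the access size, so it must be size-aligned and fit in 12 bits once scaled (line 427) before being handed to the IMM_12 encoder at 448. A hedged sketch of just that validate-and-scale step, with a hypothetical helper name:

    #include <stdint.h>

    /* hypothetical helper mirroring lines 422-427; shift = log2(access size) */
    static int ldst_scale_imm12(unsigned int imm, unsigned int shift,
                                uint32_t *imm12)
    {
        /* only bits [shift, 12 + shift) are representable */
        if (imm & ~(((1u << 12) - 1) << shift))
            return -1;          /* misaligned or out of range */

        *imm12 = imm >> shift;  /* scaled field, as passed at line 448 */
        return 0;
    }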
726 int imm, enum aarch64_insn_variant variant, in aarch64_insn_gen_add_sub_imm() argument
761 if (imm & ~(BIT(24) - 1)) in aarch64_insn_gen_add_sub_imm()
765 if (imm & ~(SZ_4K - 1)) { in aarch64_insn_gen_add_sub_imm()
767 if (imm & (SZ_4K - 1)) in aarch64_insn_gen_add_sub_imm()
770 imm >>= 12; in aarch64_insn_gen_add_sub_imm()
778 return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_12, insn, imm); in aarch64_insn_gen_add_sub_imm()
781 pr_err("%s: invalid immediate encoding %d\n", __func__, imm); in aarch64_insn_gen_add_sub_imm()
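
Lines 761-770 implement the ADD/SUB immediate rule: the field is 12 bits with an optional LSL #12, so anything above 24 bits is unencodable, and a value that uses the upper 12 bits must have a clear low 12 bits before being shifted down. A sketch of that legalisation, using a hypothetical helper and a plain int flag in place of the instruction's shift bit:

    /* hypothetical stand-in for the checks at lines 761-770 */
    static int add_sub_legalise_imm(int imm, int *lsl12)
    {
        *lsl12 = 0;

        if (imm & ~((1 << 24) - 1))     /* line 761: > 12 + 12 bits */
            return -1;

        if (imm & ~0xfff) {             /* line 765: top 12 bits in use */
            if (imm & 0xfff)            /* line 767: ... low bits too */
                return -1;              /* no single-instruction encoding */
            imm >>= 12;                 /* line 770 */
            *lsl12 = 1;                 /* caller sets the LSL #12 bit */
        }
        return imm;                     /* 0..0xfff, ready for IMM_12 */
    }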
841 int imm, int shift, in aarch64_insn_gen_movewide() argument
862 if (imm & ~(SZ_64K - 1)) { in aarch64_insn_gen_movewide()
863 pr_err("%s: invalid immediate encoding %d\n", __func__, imm); in aarch64_insn_gen_movewide()
892 return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_16, insn, imm); in aarch64_insn_gen_movewide()
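
The movewide check at 862 is simpler: MOVZ/MOVK/MOVN carry one 16-bit immediate (the hw shift selector lives elsewhere in the encoding), so line 892 hands the value straight to the IMM_16 encoder. Wider constants are materialised as a sequence of these instructions, one 16-bit chunk at a time; a hedged illustration of that chunking, independent of the kernel API:

    #include <stdint.h>

    /*
     * Hypothetical illustration: split a 64-bit constant into the four
     * 16-bit chunks a MOVZ + MOVK sequence would load (shift 0/16/32/48).
     */
    static void split_movewide_chunks(uint64_t val, uint16_t chunk[4])
    {
        for (int i = 0; i < 4; i++)
            chunk[i] = (uint16_t)(val >> (16 * i));  /* each passes 862's check */
    }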
1208 s32 imm; in aarch64_get_branch_offset() local
1211 imm = aarch64_insn_decode_immediate(AARCH64_INSN_IMM_26, insn); in aarch64_get_branch_offset()
1212 return (imm << 6) >> 4; in aarch64_get_branch_offset()
1217 imm = aarch64_insn_decode_immediate(AARCH64_INSN_IMM_19, insn); in aarch64_get_branch_offset()
1218 return (imm << 13) >> 11; in aarch64_get_branch_offset()
1222 imm = aarch64_insn_decode_immediate(AARCH64_INSN_IMM_14, insn); in aarch64_get_branch_offset()
1223 return (imm << 18) >> 16; in aarch64_get_branch_offset()
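
The three returns at 1212, 1218 and 1223 use the same idiom on an s32: shift the n-bit immediate up so its sign bit lands in bit 31, then arithmetic-shift back down two bits less, which both sign-extends and multiplies by 4 (branch immediates are word offsets). For imm26 that is (imm << 6) >> 4; the generalisation is a one-liner, sketched with a hypothetical helper (like the kernel, it relies on arithmetic right shift of signed values, which the C standard leaves to the implementation):

    #include <stdint.h>

    /* hypothetical generalisation of lines 1212/1218/1223 */
    static int32_t branch_imm_to_offset(int32_t imm, unsigned int bits)
    {
        /* sign bit of the field -> bit 31, then back down minus 2 */
        return (imm << (32 - bits)) >> (32 - bits - 2);
    }

    /* bits = 26: (imm << 6) >> 4   -- B/BL
     * bits = 19: (imm << 13) >> 11 -- CBZ/CBNZ, B.cond
     * bits = 14: (imm << 18) >> 16 -- TBZ/TBNZ
     */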
1309 static u32 aarch64_encode_immediate(u64 imm, in aarch64_encode_immediate() argument
1332 if (!imm || imm == mask || imm & ~mask) in aarch64_encode_immediate()
1342 if ((imm & emask) != ((imm >> tmp) & emask)) in aarch64_encode_immediate()
1353 imm &= mask; in aarch64_encode_immediate()
1356 ones = hweight64(imm); in aarch64_encode_immediate()
1367 if (range_of_ones(imm)) { in aarch64_encode_immediate()
1373 ror = __ffs64(imm); in aarch64_encode_immediate()
1382 imm |= ~mask; in aarch64_encode_immediate()
1383 if (!range_of_ones(~imm)) in aarch64_encode_immediate()
1390 ror = fls64(~imm); in aarch64_encode_immediate()
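
aarch64_encode_immediate() builds a bitmask-immediate encoding: 1332 rejects the two unencodable extremes (all-zeroes, all-ones) plus over-wide values, the loop around 1342 halves the element size while both halves of each element still match, 1356 counts the set bits, and 1367-1390 derive the rotation, switching to the complement when the run of ones wraps around the element. A self-contained sketch of the element-size search plus a contiguous-run test like the kernel's range_of_ones() (my reconstruction, using GCC/Clang builtins in place of __ffs64):

    #include <stdbool.h>
    #include <stdint.h>

    /* is val a single contiguous run of set bits? (all-ones is already
     * excluded by the check at 1332) */
    static bool range_of_ones(uint64_t val)
    {
        uint64_t t;

        if (!val)
            return false;
        /* shift out trailing zeroes; a run then looks like 2^n - 1 */
        t = val >> __builtin_ctzll(val);
        return ((t + 1) & t) == 0;
    }

    /* element-size search, as in the loop around line 1342 */
    static unsigned int logical_imm_esz(uint64_t imm, unsigned int esz)
    {
        for (unsigned int tmp = esz / 2; tmp >= 2; tmp /= 2) {
            uint64_t emask = ((uint64_t)1 << tmp) - 1;

            if ((imm & emask) != ((imm >> tmp) & emask))
                break;              /* halves differ: stop halving */
            esz = tmp;              /* pattern repeats at this size */
        }
        return esz;
    }

    /*
     * Rotation (1367-1390): if imm is one contiguous run, rotating by
     * __ffs64(imm) -- the trailing-zero count -- brings it down to bit 0.
     * Otherwise the run must wrap around the element: set the bits above
     * the element mask (1382) and demand that ~imm be contiguous (1383),
     * in which case fls64(~imm) gives the rotation (1390).
     */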
1409 u64 imm) in aarch64_insn_gen_logical_immediate() argument
1433 return aarch64_encode_immediate(imm, variant, insn); in aarch64_insn_gen_logical_immediate()
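
aarch64_insn_gen_logical_immediate() itself only validates the variant and operands before delegating at 1433. A worked example of the whole pipeline (my arithmetic, not from the listing; the imms construction follows the usual N:immr:imms bitmask-immediate scheme):

    /*
     * imm = 0x00ff00ff00ff00ff, 64-bit variant:
     *   - element-size search: both halves match at 32 and at 16, differ
     *     at 8, so esz = 16 (the repeating element is 0x00ff)
     *   - imm &= 0xffff  -> 0x00ff, ones = hweight64(imm) = 8
     *   - 0xff is a contiguous run -> ror = __ffs64(0xff) = 0
     *   - encoding: N = 0, immr = 0, imms = 0b100111
     *     (size-16 prefix "10" followed by ones - 1 = 7)
     * so AND/ORR/EOR with this constant needs just one instruction.
     */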