Lines Matching refs:shift
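
The hits below come from the arm64 kernel's AArch64 instruction encode/decode helpers (insn.c); the leading number on each line is the source line of the match, and the trailing "in ...()" names the enclosing function.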
27 int shift; in aarch64_get_imm_shift_mask() local
32 shift = 0; in aarch64_get_imm_shift_mask()
36 shift = 5; in aarch64_get_imm_shift_mask()
40 shift = 5; in aarch64_get_imm_shift_mask()
44 shift = 5; in aarch64_get_imm_shift_mask()
48 shift = 10; in aarch64_get_imm_shift_mask()
52 shift = 12; in aarch64_get_imm_shift_mask()
56 shift = 15; in aarch64_get_imm_shift_mask()
61 shift = 10; in aarch64_get_imm_shift_mask()
65 shift = 16; in aarch64_get_imm_shift_mask()
69 shift = 22; in aarch64_get_imm_shift_mask()
76 *shiftp = shift; in aarch64_get_imm_shift_mask()
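
The first group of hits (source lines 27-76) is aarch64_get_imm_shift_mask(), a lookup that maps an immediate-field type to a (mask, shift) pair: the width mask of the field and its bit position inside the 32-bit instruction word. A minimal standalone sketch of the same idea, reduced to two field types (the enum names are hypothetical stand-ins for the kernel's AARCH64_INSN_IMM_* constants):

    #include <errno.h>
    #include <stdint.h>

    #define BIT(n) (1u << (n))

    enum imm_type { IMM_26, IMM_12 };

    /* Map a field type to its width mask and its bit position in the word. */
    static int get_imm_shift_mask(enum imm_type type, uint32_t *maskp, int *shiftp)
    {
        switch (type) {
        case IMM_26:                /* e.g. the branch offset of B/BL */
            *maskp  = BIT(26) - 1;  /* 26-bit field ... */
            *shiftp = 0;            /* ... starting at bit 0 */
            return 0;
        case IMM_12:                /* e.g. the immediate of ADD/SUB (immediate) */
            *maskp  = BIT(12) - 1;  /* 12-bit field ... */
            *shiftp = 10;           /* ... occupying bits [21:10] */
            return 0;
        }
        return -EINVAL;             /* unknown field type */
    }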
91 int shift; in aarch64_insn_decode_immediate() local
95 shift = 0; in aarch64_insn_decode_immediate()
102 if (aarch64_get_imm_shift_mask(type, &mask, &shift) < 0) { in aarch64_insn_decode_immediate()
109 return (insn >> shift) & mask; in aarch64_insn_decode_immediate()
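
aarch64_insn_decode_immediate() (source lines 91-109) uses that pair to pull a field out of an instruction word: shift it down, mask the rest off. A sketch on a concrete word; the encoding below was worked out by hand for "add x0, x1, #0x2a" and is an assumed example, not taken from the kernel source:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t insn  = 0x9100a820;       /* assumed: add x0, x1, #0x2a */
        uint32_t mask  = (1u << 12) - 1;   /* imm12 width */
        int      shift = 10;               /* imm12 sits at bits [21:10] */

        /* Same expression as the kernel's "return (insn >> shift) & mask;" */
        printf("imm12 = %#x\n", (insn >> shift) & mask);  /* prints 0x2a */
        return 0;
    }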
116 int shift; in aarch64_insn_encode_immediate() local
123 shift = 0; in aarch64_insn_encode_immediate()
132 if (aarch64_get_imm_shift_mask(type, &mask, &shift) < 0) { in aarch64_insn_encode_immediate()
140 insn &= ~(mask << shift); in aarch64_insn_encode_immediate()
141 insn |= (imm & mask) << shift; in aarch64_insn_encode_immediate()
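
The encode side, aarch64_insn_encode_immediate() (source lines 116-141), is the inverse: the two statements at lines 140-141 clear the old field and OR in the new value. As a standalone helper:

    #include <stdint.h>

    /* Read-modify-write one field of an instruction word, mirroring
     * "insn &= ~(mask << shift); insn |= (imm & mask) << shift;". */
    static uint32_t encode_field(uint32_t insn, uint32_t mask, int shift,
                                 uint64_t imm)
    {
        insn &= ~(mask << shift);                 /* zero the target field */
        insn |= ((uint32_t)imm & mask) << shift;  /* truncate imm and insert */
        return insn;
    }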
149 int shift; in aarch64_insn_decode_register() local
154 shift = 0; in aarch64_insn_decode_register()
157 shift = 5; in aarch64_insn_decode_register()
161 shift = 10; in aarch64_insn_decode_register()
164 shift = 16; in aarch64_insn_decode_register()
172 return (insn >> shift) & GENMASK(4, 0); in aarch64_insn_decode_register()
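
aarch64_insn_decode_register() (source lines 149-172) is the register-operand version of the same extraction. Every register number is a 5-bit field (GENMASK(4, 0) is 0x1f), and only the position varies: Rd/Rt at bit 0, Rn at bit 5, Rt2/Ra at bit 10, Rm at bit 16. A sketch with the mask written out (the enum abbreviates the kernel's AARCH64_INSN_REGTYPE_* names):

    #include <stdint.h>

    enum regtype { REG_RD, REG_RN, REG_RT2, REG_RM };

    /* Extract a 5-bit register number from its slot in the word. */
    static uint32_t decode_register(enum regtype type, uint32_t insn)
    {
        int shift = 0;

        switch (type) {
        case REG_RD:  shift = 0;  break;  /* destination / transfer register */
        case REG_RN:  shift = 5;  break;  /* first source */
        case REG_RT2: shift = 10; break;  /* second transfer register / Ra */
        case REG_RM:  shift = 16; break;  /* second source */
        }

        return (insn >> shift) & 0x1f;    /* GENMASK(4, 0) */
    }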
179 int shift; in aarch64_insn_encode_register() local
192 shift = 0; in aarch64_insn_encode_register()
195 shift = 5; in aarch64_insn_encode_register()
199 shift = 10; in aarch64_insn_encode_register()
203 shift = 16; in aarch64_insn_encode_register()
211 insn &= ~(GENMASK(4, 0) << shift); in aarch64_insn_encode_register()
212 insn |= reg << shift; in aarch64_insn_encode_register()
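
The matching encoder (source lines 179-212) clears the 5-bit slot and ORs the register number in, exactly the immediate-field pattern again; the kernel validates reg before line 212, so it can skip the masking done defensively here:

    #include <stdint.h>

    /* Place a 5-bit register number at bit position `shift`. */
    static uint32_t encode_register(uint32_t insn, int shift, uint32_t reg)
    {
        insn &= ~(0x1fu << shift);      /* GENMASK(4, 0) << shift */
        insn |= (reg & 0x1f) << shift;  /* kernel omits the mask: reg pre-checked */
        return insn;
    }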
414 u32 shift; in aarch64_insn_gen_load_store_imm() local
421 shift = aarch64_insn_ldst_size[size]; in aarch64_insn_gen_load_store_imm()
422 if (imm & ~(BIT(12 + shift) - BIT(shift))) { in aarch64_insn_gen_load_store_imm()
427 imm >>= shift; in aarch64_insn_gen_load_store_imm()
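
In aarch64_insn_gen_load_store_imm() (source lines 414-427), shift is the log2 of the access size, looked up in aarch64_insn_ldst_size[] at line 421. The unsigned-offset LDR/STR form stores imm12 in units of the access size, so the test at line 422 rejects an offset that is misaligned (any of the low shift bits set) or out of range (any bit at or above 12 + shift set), and line 427 then scales the survivor down. The test as a predicate:

    #include <stdbool.h>
    #include <stdint.h>

    #define BIT(n) (1ull << (n))

    /* True iff `imm` is a multiple of the access size (1 << shift bytes)
     * and fits in 12 bits once scaled: BIT(12 + shift) - BIT(shift) has
     * exactly bits [shift + 11 : shift] set, so anything outside is bad. */
    static bool ldst_imm_ok(uint64_t imm, unsigned int shift)
    {
        return (imm & ~(BIT(12 + shift) - BIT(shift))) == 0;
    }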
481 int shift; in aarch64_insn_gen_load_store_pair() local
508 shift = 2; in aarch64_insn_gen_load_store_pair()
516 shift = 3; in aarch64_insn_gen_load_store_pair()
534 offset >> shift); in aarch64_insn_gen_load_store_pair()
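
aarch64_insn_gen_load_store_pair() (source lines 481-534) encodes LDP/STP's signed 7-bit offset, which is likewise scaled: shift = 2 for the 32-bit variant (offset in units of 4 bytes) and shift = 3 for 64-bit (units of 8), with line 534 passing offset >> shift into the IMM_7 field. A sketch of the scaling, assuming alignment and range were already checked:

    #include <stdint.h>

    /* Scale an LDP/STP byte offset down to the signed imm7 field, e.g.
     * "ldp x0, x1, [sp, #-16]" with shift = 3: -16 >> 3 == -2. */
    static uint32_t ldst_pair_imm7(int offset, int shift)
    {
        return (uint32_t)(offset >> shift) & 0x7f;  /* keep 7 bits, two's complement */
    }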
841 int imm, int shift, in aarch64_insn_gen_movewide() argument
869 if (shift != 0 && shift != 16) { in aarch64_insn_gen_movewide()
871 shift); in aarch64_insn_gen_movewide()
877 if (shift != 0 && shift != 16 && shift != 32 && shift != 48) { in aarch64_insn_gen_movewide()
879 shift); in aarch64_insn_gen_movewide()
888 insn |= (shift >> 4) << 21; in aarch64_insn_gen_movewide()
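
aarch64_insn_gen_movewide() (source lines 841-888) accepts only shifts that are multiples of 16: {0, 16} on the 32-bit form, {0, 16, 32, 48} on 64-bit. MOVZ/MOVK/MOVN store the shift as a 2-bit count of 16-bit half-words (the hw field at bits [22:21]), hence line 888's (shift >> 4) << 21:

    #include <stdint.h>

    /* Fold a move-wide shift into the hw field: shift >> 4 is shift / 16,
     * i.e. the half-word index, placed at bits [22:21] of the word. */
    static uint32_t movewide_set_hw(uint32_t insn, int shift)
    {
        return insn | ((uint32_t)(shift >> 4) << 21);
    }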
898 int shift, in aarch64_insn_gen_add_sub_shifted_reg() argument
924 if (shift & ~(SZ_32 - 1)) { in aarch64_insn_gen_add_sub_shifted_reg()
926 shift); in aarch64_insn_gen_add_sub_shifted_reg()
932 if (shift & ~(SZ_64 - 1)) { in aarch64_insn_gen_add_sub_shifted_reg()
934 shift); in aarch64_insn_gen_add_sub_shifted_reg()
950 return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_6, insn, shift); in aarch64_insn_gen_add_sub_shifted_reg()
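
For the shifted-register ADD/SUB forms (source lines 898-950), the shift amount lands in the 6-bit imm6 field but must stay below the register width; since SZ_32 and SZ_64 are 32 and 64 in the kernel headers, shift & ~(SZ_32 - 1) is nonzero exactly when shift >= 32, and likewise for 64. The range test as a predicate:

    #include <stdbool.h>

    /* True iff `shift` is legal for a register of `width` bits; width must
     * be a power of two (32 or 64 here), so ~(width - 1) flags every
     * shift >= width. */
    static bool shift_in_range(int shift, int width)
    {
        return (shift & ~(width - 1)) == 0;
    }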
1092 int shift, in aarch64_insn_gen_logical_shifted_reg() argument
1130 if (shift & ~(SZ_32 - 1)) { in aarch64_insn_gen_logical_shifted_reg()
1132 shift); in aarch64_insn_gen_logical_shifted_reg()
1138 if (shift & ~(SZ_64 - 1)) { in aarch64_insn_gen_logical_shifted_reg()
1140 shift); in aarch64_insn_gen_logical_shifted_reg()
1156 return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_6, insn, shift); in aarch64_insn_gen_logical_shifted_reg()
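
aarch64_insn_gen_logical_shifted_reg() (source lines 1092-1156) runs the identical width check, then hands the shift to aarch64_insn_encode_immediate() as an IMM_6 field, which per the mask/shift table at the top of the listing is 6 bits wide at bit position 10. Inlining that last step as a sketch:

    #include <stdint.h>

    /* Place the 6-bit shift amount into imm6 at bits [15:10]
     * (mask = BIT(6) - 1, shift = 10 in the IMM_6 table entry). */
    static uint32_t set_imm6(uint32_t insn, int amount)
    {
        uint32_t mask = (1u << 6) - 1;

        insn &= ~(mask << 10);
        insn |= ((uint32_t)amount & mask) << 10;
        return insn;
    }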