
Searched refs:tmp2 (Results 1 – 25 of 66) sorted by relevance

/arch/arm64/include/asm/
asm_pointer_auth.h
15 ldp \tmp2, \tmp3, [\tmp1, #PTRAUTH_KERNEL_KEY_APIA]
16 msr_s SYS_APIAKEYLO_EL1, \tmp2
55 ldp \tmp2, \tmp3, [\tmp1, #PTRAUTH_USER_KEY_APIA]
56 msr_s SYS_APIAKEYLO_EL1, \tmp2
63 mrs_s \tmp2, SYS_ID_AA64ISAR2_EL1
64 ubfx \tmp2, \tmp2, #ID_AA64ISAR2_EL1_APA3_SHIFT, #4
65 orr \tmp1, \tmp1, \tmp2
69 mrs \tmp2, sctlr_el1
70 orr \tmp2, \tmp2, \tmp1
71 msr sctlr_el1, \tmp2
[all …]
asm-uaccess.h
26 .macro __uaccess_ttbr0_enable, tmp1, tmp2
29 mrs \tmp2, ttbr1_el1
30 extr \tmp2, \tmp2, \tmp1, #48
31 ror \tmp2, \tmp2, #16
32 msr ttbr1_el1, \tmp2 // set the active ASID
37 .macro uaccess_ttbr0_disable, tmp1, tmp2
41 restore_irq \tmp2
45 .macro uaccess_ttbr0_enable, tmp1, tmp2, tmp3
48 __uaccess_ttbr0_enable \tmp1, \tmp2
53 .macro uaccess_ttbr0_disable, tmp1, tmp2
[all …]
assembler.h
422 dcache_line_size \tmp1, \tmp2
435 icache_line_size \tmp1, \tmp2
436 sub \tmp2, \tmp1, #1
437 bic \tmp2, \start, \tmp2
440 add \tmp2, \tmp2, \tmp1
441 cmp \tmp2, \end
456 offset_ttbr1 \tmp1, \tmp2
474 load_ttbr1 \page_table, \tmp, \tmp2
632 and \tmp1, \tmp1, \tmp2
634 cmp \tmp1, \tmp2
[all …]
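
The assembler.h hits above are the usual by-line cache maintenance loop: tmp1 holds the D-cache or I-cache line size, tmp2 is first masked down to the line containing \start (the sub/bic pair) and is then advanced one line at a time until it passes \end. A minimal C sketch of the same iteration follows; do_line() and op_by_line() are illustrative names, not kernel interfaces.

    #include <stdint.h>

    /* Illustrative stand-in for the per-line operation (clean, invalidate, ...). */
    static void do_line(uintptr_t line) { (void)line; }

    /* Visit every cache line overlapping [start, end); line_size is a power of two. */
    static void op_by_line(uintptr_t start, uintptr_t end, uintptr_t line_size)
    {
        uintptr_t addr = start & ~(line_size - 1);  /* bic: round down to a line boundary */

        while (addr < end) {
            do_line(addr);
            addr += line_size;                      /* add \tmp2, \tmp2, \tmp1 */
        }
    }
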
/arch/sparc/include/asm/
head_64.h
45 or %tmp2, %lo(__CHEETAH_ID), %tmp2;\
46 cmp %tmp1, %tmp2; \
54 or %tmp2, %lo(__JALAPENO_ID), %tmp2;\
55 cmp %tmp1, %tmp2; \
61 srlx %tmp1, (32 + 16), %tmp2; \
62 cmp %tmp2, CHEETAH_MANUF; \
65 srlx %tmp1, (32 + 16), %tmp2; \
66 cmp %tmp2, CHEETAH_PLUS_IMPL; \
72 srlx %tmp1, (32 + 16), %tmp2; \
73 cmp %tmp2, CHEETAH_MANUF; \
[all …]
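
The head_64.h hits come from the BRANCH_IF_* CPU-identification macros: the 64-bit %ver register is read into tmp1, a field is isolated into tmp2 with a shift, and the result is compared against a known manufacturer or implementation ID. A hedged C equivalent of one such check; ver, expected_manuf and manuf_matches() are placeholders, not the kernel's names.

    #include <stdint.h>

    /* Sketch of one BRANCH_IF_* test: isolate the manufacturer field of %ver
     * (bits 63:48) and compare it against an expected ID. */
    static int manuf_matches(uint64_t ver, uint64_t expected_manuf)
    {
        return (ver >> (32 + 16)) == expected_manuf;   /* srlx + cmp above */
    }
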
/arch/arm64/lib/
strlen.S
31 #define tmp2 x5 macro
97 orr tmp2, data1, REP8_7f
126 orr tmp2, tmp1, tmp3
132 orr tmp2, tmp1, tmp3
138 orr tmp2, data1, REP8_7f
155 orr tmp2, data1, REP8_7f
156 bic has_nul1, tmp1, tmp2
162 add tmp2, len, 8
164 csel len, len, tmp2, cc
171 orr tmp2, data1, REP8_7f
[all …]
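
The strlen.S hits are the word-at-a-time NUL test: tmp2 = data1 | REP8_7f is combined (bic) with a data1 - REP8_01 term in tmp1, and the result is non-zero exactly when the 8-byte word contains a zero byte. The same check as a short C sketch; has_zero_byte() is an illustrative name.

    #include <stdint.h>

    #define REP8_01 0x0101010101010101ULL
    #define REP8_7f 0x7f7f7f7f7f7f7f7fULL

    /* Non-zero iff the 64-bit word contains a zero byte; the lowest zero byte
     * ends up with its top bit set (tmp1 & ~tmp2, i.e. the bic above). */
    static uint64_t has_zero_byte(uint64_t data1)
    {
        uint64_t tmp1 = data1 - REP8_01;   /* a borrow runs through any zero byte */
        uint64_t tmp2 = data1 | REP8_7f;   /* per-byte top bit mirrors the data   */

        return tmp1 & ~tmp2;
    }
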
strnlen.S
39 tmp2 .req x9 label
75 orr tmp2, data1, #REP8_7f
78 bic has_nul1, tmp1, tmp2
107 CPU_BE( orr tmp2, data2, #REP8_7f )
108 CPU_BE( bic has_nul2, tmp1, tmp2 )
141 mov tmp2, #~0
143 CPU_BE( lsl tmp2, tmp2, tmp4 ) /* Shift (tmp1 & 63). */
145 CPU_LE( lsr tmp2, tmp2, tmp4 ) /* Shift (tmp1 & 63). */
149 orr data1, data1, tmp2
150 orr data2a, data2, tmp2
memset.S
34 tmp2 .req x4 label
72 neg tmp2, dst
73 ands tmp2, tmp2, #15
82 sub count, count, tmp2
83 add dst, dst, tmp2
169 neg tmp2, dst
170 ands tmp2, tmp2, zva_bits_x
173 sub tmp1, count, tmp2
189 subs tmp2, tmp2, #64
194 add dst, dst, tmp2
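
In memset.S, "neg tmp2, dst; ands tmp2, tmp2, #15" computes how many bytes must be stored before dst reaches 16-byte alignment, and the same idiom with zva_bits_x computes the distance to the next DC ZVA block boundary. The equivalent C expression, as a sketch (bytes_to_align() is an illustrative name):

    #include <stddef.h>
    #include <stdint.h>

    /* Bytes needed to bring ptr up to a power-of-two alignment:
     * (-ptr) & (align - 1), i.e. the neg + ands pair in the asm above. */
    static size_t bytes_to_align(const void *ptr, size_t align)
    {
        return (size_t)(-(uintptr_t)ptr) & (align - 1);
    }
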
strncmp.S
38 #define tmp2 x9 macro
80 orr tmp2, data1, #REP8_7f
139 orr tmp2, tmp3, #REP8_7f
140 bic has_nul, tmp1, tmp2
169 mov tmp2, #~0
170 LS_FW tmp2, tmp2, tmp3 /* Shift (count & 63). */
174 orr data1, data1, tmp2
175 orr data2, data2, tmp2
238 ldp tmp1, tmp2, [src2], #16
261 LS_FW data2, tmp2, offset
[all …]
copy_template.S
29 tmp2 .req x4 label
57 neg tmp2, src
58 ands tmp2, tmp2, #15 /* Bytes to reach alignment. */
60 sub count, count, tmp2
67 tbz tmp2, #0, 1f
71 tbz tmp2, #1, 2f
75 tbz tmp2, #2, 3f
79 tbz tmp2, #3, .LSrcAligned
csum.c
63 __uint128_t tmp1, tmp2, tmp3, tmp4; in do_csum() local
66 tmp2 = *(__uint128_t *)(ptr + 2); in do_csum()
75 tmp2 += (tmp2 >> 64) | (tmp2 << 64); in do_csum()
78 tmp1 = ((tmp1 >> 64) << 64) | (tmp2 >> 64); in do_csum()
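
The do_csum() hits accumulate 16 bytes per temporary into __uint128_t values and then fold each one back to 64 bits: adding the halves-swapped value to itself leaves lo + hi, including the carry between them, in the upper half, which line 78 then merges into tmp1. A standalone sketch of that fold (fold128() is an illustrative name, not the kernel function):

    #include <stdint.h>

    /* Fold a 128-bit accumulator into 64 bits with end-around carry, mirroring
     * "tmp2 += (tmp2 >> 64) | (tmp2 << 64)" above: after adding the swapped
     * halves, the upper 64 bits hold lo + hi + carry. */
    static uint64_t fold128(__uint128_t acc)
    {
        acc += (acc >> 64) | (acc << 64);
        return (uint64_t)(acc >> 64);
    }
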
/arch/arm/mach-at91/
pm_suspend.S
24 tmp2 .req r5 label
161 tst tmp1, tmp2
689 bic tmp2, tmp2, #AT91_PMC_PLL_UPDT_ID
695 bic tmp2, tmp2, #0xffffff00
700 bic tmp2, tmp2, #0xffffff
766 mov tmp3, tmp2
783 mov tmp3, tmp2
889 bic tmp2, tmp2, #AT91_PMC_MCR_V2_CSS
890 bic tmp2, tmp2, #AT91_PMC_MCR_V2_DIV
891 orr tmp2, tmp2, #AT91_PMC_MCR_V2_CSS_MAINCK
[all …]
/arch/powerpc/include/asm/book3s/32/
mmu-hash.h
99 .macro update_user_segments_by_4 tmp1 tmp2 tmp3 tmp4
100 uus_addi 1, \tmp2, \tmp1, 0x111
105 uus_mtsr 1, \tmp2
110 uus_addi 5, \tmp2, \tmp2, 0x444
115 uus_mtsr 5, \tmp2
120 uus_addi 9, \tmp2, \tmp2, 0x444
125 uus_mtsr 9, \tmp2
130 uus_addi 13, \tmp2, \tmp2, 0x444
135 uus_mtsr 13, \tmp2
/arch/m68k/lib/
checksum.c
40 unsigned long tmp1, tmp2; in csum_partial() local
118 "=&d" (tmp1), "=&d" (tmp2) in csum_partial()
139 unsigned long tmp1, tmp2; in csum_and_copy_from_user() local
261 "=&d" (tmp1), "=d" (tmp2) in csum_and_copy_from_user()
276 unsigned long tmp1, tmp2; in csum_partial_copy_nocheck() local
364 "=&d" (tmp1), "=&d" (tmp2) in csum_partial_copy_nocheck()
/arch/arm/include/asm/
tls.h
10 .macro switch_tls_none, base, tp, tpuser, tmp1, tmp2
13 .macro switch_tls_v6k, base, tp, tpuser, tmp1, tmp2
14 mrc p15, 0, \tmp2, c13, c0, 2 @ get the user r/w register
17 str \tmp2, [\base, #TI_TP_VALUE + 4] @ save it
20 .macro switch_tls_v6, base, tp, tpuser, tmp1, tmp2
28 mov \tmp2, #0xffff0fff
30 streq \tp, [\tmp2, #-15] @ set TLS value at 0xffff0ff0
37 .L1_\@: switch_tls_v6k \base, \tp, \tpuser, \tmp1, \tmp2
41 .macro switch_tls_software, base, tp, tpuser, tmp1, tmp2
uaccess-asm.h
131 .macro uaccess_entry, tsk, tmp0, tmp1, tmp2, disable
138 mov \tmp2, #DACR_UACCESS_DISABLE
139 mcr p15, 0, \tmp2, c3, c0, 0
143 bic \tmp2, \tmp0, #domain_mask(DOMAIN_KERNEL)
144 orr \tmp2, \tmp2, #domain_val(DOMAIN_KERNEL, DOMAIN_CLIENT)
145 mcr p15, 0, \tmp2, c3, c0, 0
/arch/arm/mach-tegra/
sleep.h
81 .macro check_cpu_part_num part_num, tmp1, tmp2
84 mov32 \tmp2, \part_num
85 cmp \tmp1, \tmp2
89 .macro exit_smp, tmp1, tmp2
95 check_cpu_part_num 0xc09, \tmp1, \tmp2
99 moveq \tmp2, #0xf
100 moveq \tmp2, \tmp2, lsl \tmp1
102 streq \tmp2, [\tmp1] @ invalidate SCU tags for CPU
/arch/alpha/kernel/
traps.c
380 long error, tmp1, tmp2, tmp3, tmp4; in do_entUna() local
403 : "=r"(error), "=&r"(tmp1), "=&r"(tmp2) in do_entUna()
407 una_reg(reg) = tmp1|tmp2; in do_entUna()
419 : "=r"(error), "=&r"(tmp1), "=&r"(tmp2) in do_entUna()
423 una_reg(reg) = (int)(tmp1|tmp2); in do_entUna()
439 una_reg(reg) = tmp1|tmp2; in do_entUna()
667 unsigned long tmp1, tmp2, tmp3, tmp4; in do_entUnaUser() local
730 *reg_addr = tmp1|tmp2; in do_entUnaUser()
762 alpha_write_fp_reg(reg, tmp1|tmp2); in do_entUnaUser()
778 *reg_addr = (int)(tmp1|tmp2); in do_entUnaUser()
[all …]
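
The do_entUna()/do_entUnaUser() hits are unaligned-access fixups: the faulting load is replayed as two aligned loads whose useful bytes land in tmp1 and tmp2, and the destination register gets tmp1|tmp2. A little-endian C sketch of the idea; the kernel does this with ldq_u/extql/extqh in inline asm, and load_unaligned_u64() here is only illustrative.

    #include <stdint.h>

    /* Read a 64-bit value at a possibly unaligned address using only aligned
     * loads, then combine the two pieces -- the una_reg(reg) = tmp1|tmp2 idea. */
    static uint64_t load_unaligned_u64(uintptr_t addr)
    {
        const uint64_t *base = (const uint64_t *)(addr & ~(uintptr_t)7);
        unsigned int shift = (unsigned int)(addr & 7) * 8;

        uint64_t tmp1 = base[0] >> shift;                     /* low bytes  */
        uint64_t tmp2 = shift ? base[1] << (64 - shift) : 0;  /* high bytes */

        return tmp1 | tmp2;
    }
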
/arch/csky/abiv2/
fpu.c
152 unsigned long tmp1, tmp2; in save_to_user_fp() local
158 tmp2 = mfcr("cr<2, 2>"); in save_to_user_fp()
161 user_fp->fesr = tmp2; in save_to_user_fp()
202 : "=a"(tmp1), "=a"(tmp2), "=a"(tmp3), in save_to_user_fp()
214 unsigned long tmp1, tmp2; in restore_from_user_fp() local
220 tmp2 = user_fp->fesr; in restore_from_user_fp()
223 mtcr("cr<2, 2>", tmp2); in restore_from_user_fp()
264 : "=a"(tmp1), "=a"(tmp2), "=a"(tmp3), in restore_from_user_fp()
/arch/arc/include/asm/
uaccess.h
143 unsigned long tmp1, tmp2, tmp3, tmp4; in raw_copy_from_user() local
219 "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4) in raw_copy_from_user()
243 "=r"(tmp1), "=r"(tmp2) in raw_copy_from_user()
358 "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4) in raw_copy_from_user()
371 unsigned long tmp1, tmp2, tmp3, tmp4; in raw_copy_to_user() local
442 "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4) in raw_copy_to_user()
466 "=r"(tmp1), "=r"(tmp2) in raw_copy_to_user()
581 "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4) in raw_copy_to_user()
/arch/arm/net/
bpf_jit_32.c
847 const s8 *tmp2 = bpf2a32[TMP_REG_2]; in emit_a32_alu_r64() local
1482 emit(ARM_AND_I(tmp2[0], tmp2[0], 0xff), ctx); in emit_rev16()
1483 emit(ARM_ORR_SI(rd, tmp2[0], tmp2[1], SRTYPE_LSL, 8), ctx); in emit_rev16()
1497 emit(ARM_ORR_SI(ARM_IP, tmp2[0], tmp2[1], SRTYPE_LSL, 24), ctx); in emit_rev32()
1500 emit(ARM_AND_I(tmp2[1], tmp2[1], 0xff), ctx); in emit_rev32()
1502 emit(ARM_AND_I(tmp2[0], tmp2[0], 0xff), ctx); in emit_rev32()
1503 emit(ARM_MOV_SI(tmp2[0], tmp2[0], SRTYPE_LSL, 8), ctx); in emit_rev32()
1504 emit(ARM_ORR_SI(tmp2[0], tmp2[0], tmp2[1], SRTYPE_LSL, 16), ctx); in emit_rev32()
1701 rt = tmp2[0]; in build_insn()
1979 rm = tmp2[0]; in build_insn()
[all …]
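
The bpf_jit_32.c hits come from emit_rev16() and emit_rev32(), where tmp2 holds the halves of a BPF register and a byte swap is assembled out of AND, shift and ORR instructions. The operation being synthesised, as a C sketch (not the JIT's exact instruction sequence; bswap32() is an illustrative name):

    #include <stdint.h>

    /* 32-bit byte reversal expressed with masks, shifts and ORs -- the result
     * emit_rev32() constructs instruction by instruction. */
    static uint32_t bswap32(uint32_t x)
    {
        return ((x & 0x000000ffu) << 24) |
               ((x & 0x0000ff00u) <<  8) |
               ((x & 0x00ff0000u) >>  8) |
               ((x & 0xff000000u) >> 24);
    }
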
/arch/loongarch/lib/
csum.c
61 __uint128_t tmp1, tmp2, tmp3, tmp4; in do_csum() local
64 tmp2 = *(__uint128_t *)(ptr + 2); in do_csum()
73 tmp2 += (tmp2 >> 64) | (tmp2 << 64); in do_csum()
76 tmp1 = ((tmp1 >> 64) << 64) | (tmp2 >> 64); in do_csum()
/arch/xtensa/lib/
umulsidi3.S
199 .macro mul_mulsi3_body dst, src1, src2, tmp1, tmp2
202 extui \tmp2, \src1, 0, 1
203 movnez \dst, \tmp1, \tmp2
206 extui \tmp2, \src1, 1, 1
207 movnez \dst, \tmp1, \tmp2
210 extui \tmp2, \src1, 2, 1
211 movnez \dst, \tmp1, \tmp2
214 extui \tmp2, \src1, 3, 1
215 movnez \dst, \tmp1, \tmp2
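
mul_mulsi3_body is a software-multiply fallback: each extui pulls one bit of src1 into tmp2 and movnez conditionally accepts the partial sum prepared in tmp1, which amounts to adding src2 << i for every set bit i of src1. The same result as a plain C loop (the macro unrolls this and keeps it branch-free); soft_mulsi3() is an illustrative name.

    #include <stdint.h>

    /* Shift-and-add multiply: add src2 << i into the result for every set
     * bit i of src1 -- what the unrolled extui/movnez sequence computes. */
    static uint32_t soft_mulsi3(uint32_t src1, uint32_t src2)
    {
        uint32_t dst = 0;

        for (unsigned int i = 0; i < 32; i++)
            if (src1 & (UINT32_C(1) << i))
                dst += src2 << i;

        return dst;
    }
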
/arch/mips/mm/
sc-ip22.c
102 unsigned long addr, tmp1, tmp2; in indy_sc_enable() local
128 : "=r" (tmp1), "=r" (tmp2), "=r" (addr)); in indy_sc_enable()
133 unsigned long tmp1, tmp2, tmp3; in indy_sc_disable() local
158 : "=r" (tmp1), "=r" (tmp2), "=r" (tmp3)); in indy_sc_disable()
/arch/arm/mach-davinci/
mux.c
65 unsigned tmp1, tmp2; in davinci_cfg_reg() local
74 tmp2 = (cfg->mode << cfg->mask_offset); in davinci_cfg_reg()
75 reg |= tmp2; in davinci_cfg_reg()
77 if (tmp1 != tmp2) in davinci_cfg_reg()
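
davinci_cfg_reg() is a read-modify-write of one pin-mux field: tmp1 captures the field's current value, tmp2 the requested one, and the register is only rewritten (and the change reported) when the two differ. A generic sketch of the pattern; set_mux_field() is an illustrative name and the plain volatile pointer stands in for the kernel's readl()/writel() accessors.

    #include <stdint.h>

    /* Update a register bit-field only if it actually changes, as
     * davinci_cfg_reg() does for each pin-mux entry. */
    static void set_mux_field(volatile uint32_t *reg_addr, uint32_t mask,
                              unsigned int shift, uint32_t mode)
    {
        uint32_t reg  = *reg_addr;
        uint32_t tmp1 = reg & (mask << shift);   /* current field value   */
        uint32_t tmp2 = mode << shift;           /* requested field value */

        if (tmp1 != tmp2) {
            reg &= ~(mask << shift);
            reg |= tmp2;
            *reg_addr = reg;
        }
    }
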
/arch/alpha/lib/
divide.S
59 #define tmp2 $4 macro
113 DIV_ONLY(stq tmp2,32($30))
139 2: DIV_ONLY(addq quotient,mask,tmp2)
143 DIV_ONLY(cmovne compare,tmp2,quotient)
152 DIV_ONLY(ldq tmp2,32($30))
