/arch/hexagon/mm/

copy_user_template.S
     19  p0 = cmp.gtu(bytes,#0)
     25  p1 = cmp.gtu(bytes,#15)
     32  loopcount = lsr(bytes,#3)
     58  p1 = cmp.gtu(bytes,#7)
     63  loopcount = lsr(bytes,#2)
     84  p1 = cmp.gtu(bytes,#3)
    107  p3=sp1loop0(.Loop1,bytes)
    125  p0 = cmp.gtu(bytes,#0)
    144  if (p0) bytes = add(bytes,#-1)
    156  if (p0) bytes = add(bytes,#-2)
    [all …]
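The predicates above implement a size-classed copy: compare `bytes` against 15/7/3 to pick the widest usable chunk, derive the hardware-loop count with a right shift, and finish the tail with narrower moves. A simplified C rendering of that decision tree (a sketch only: plain loops and `memcpy` stand in for Hexagon's predicated hardware loops, and the function name is invented):

```c
#include <string.h>

static void copy_bytes(unsigned char *dst, const unsigned char *src,
                       unsigned long bytes)
{
	unsigned long n;

	if (bytes > 15) {                       /* cmp.gtu(bytes,#15) */
		for (n = bytes >> 3; n; n--) {  /* loopcount = lsr(bytes,#3) */
			memcpy(dst, src, 8);    /* one doubleword per iteration */
			dst += 8;
			src += 8;
		}
		bytes &= 7;
	}
	if (bytes > 3) {                        /* cmp.gtu(bytes,#3) */
		memcpy(dst, src, 4);
		dst += 4; src += 4; bytes -= 4;
	}
	if (bytes > 1) {
		memcpy(dst, src, 2);
		dst += 2; src += 2;
		bytes -= 2;                     /* add(bytes,#-2) */
	}
	if (bytes)                              /* cmp.gtu(bytes,#0) */
		*dst = *src;
}
```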
|
/arch/powerpc/include/asm/

cacheflush.h
     73  unsigned long bytes = l1_dcache_bytes();   in flush_dcache_range() local
     74  void *addr = (void *)(start & ~(bytes - 1));   in flush_dcache_range()
     75  unsigned long size = stop - (unsigned long)addr + (bytes - 1);   in flush_dcache_range()
     81  for (i = 0; i < size >> shift; i++, addr += bytes)   in flush_dcache_range()
     95  unsigned long bytes = l1_dcache_bytes();   in clean_dcache_range() local
     96  void *addr = (void *)(start & ~(bytes - 1));   in clean_dcache_range()
     97  unsigned long size = stop - (unsigned long)addr + (bytes - 1);   in clean_dcache_range()
    100  for (i = 0; i < size >> shift; i++, addr += bytes)   in clean_dcache_range()
    114  unsigned long bytes = l1_dcache_bytes();   in invalidate_dcache_range() local
    115  void *addr = (void *)(start & ~(bytes - 1));   in invalidate_dcache_range()
    [all …]
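All three routines share one idiom: round `start` down to a cache-line boundary, pad the length so a partial final line still counts as a full line, then step through the range one line (`bytes`) at a time. A standalone sketch of that arithmetic, assuming a fixed 64-byte line and a hypothetical `dcbf_line()` in place of the real cache instruction:

```c
#define L1_DCACHE_BYTES 64UL   /* assumed; the kernel reads this per-CPU */
#define L1_DCACHE_SHIFT 6      /* log2 of the line size */

static void dcbf_line(char *addr)
{
	(void)addr;   /* stand-in for flushing one cache line (dcbf) */
}

static void flush_range(unsigned long start, unsigned long stop)
{
	unsigned long bytes = L1_DCACHE_BYTES;
	unsigned long shift = L1_DCACHE_SHIFT;
	char *addr = (char *)(start & ~(bytes - 1));   /* align down */
	unsigned long size = stop - (unsigned long)addr + (bytes - 1);
	unsigned long i;

	/* size >> shift is the number of lines covering [start, stop) */
	for (i = 0; i < size >> shift; i++, addr += bytes)
		dcbf_line(addr);
}
```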
|
xor_altivec.h
      6  void xor_altivec_2(unsigned long bytes, unsigned long * __restrict p1,
      8  void xor_altivec_3(unsigned long bytes, unsigned long * __restrict p1,
     11  void xor_altivec_4(unsigned long bytes, unsigned long * __restrict p1,
     15  void xor_altivec_5(unsigned long bytes, unsigned long * __restrict p1,
|
/arch/arm/include/asm/

xor.h
     50  unsigned int lines = bytes / sizeof(unsigned long) / 4;   in xor_arm4regs_2()
     72  unsigned int lines = bytes / sizeof(unsigned long) / 4;   in xor_arm4regs_3()
     96  unsigned int lines = bytes / sizeof(unsigned long) / 2;   in xor_arm4regs_4()
    118  unsigned int lines = bytes / sizeof(unsigned long) / 2;   in xor_arm4regs_5()
    160  xor_arm4regs_2(bytes, p1, p2);   in xor_neon_2()
    163  xor_block_neon_inner.do_2(bytes, p1, p2);   in xor_neon_2()
    174  xor_arm4regs_3(bytes, p1, p2, p3);   in xor_neon_3()
    177  xor_block_neon_inner.do_3(bytes, p1, p2, p3);   in xor_neon_3()
    189  xor_arm4regs_4(bytes, p1, p2, p3, p4);   in xor_neon_4()
    192  xor_block_neon_inner.do_4(bytes, p1, p2, p3, p4);   in xor_neon_4()
    [all …]
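The second half of this header is a run-time dispatch: in contexts where NEON must not be touched, fall back to the four-register ALU version; otherwise bracket the NEON inner routine with the begin/end calls that manage the FP state. A sketch of that shape (the inner-ops struct is abbreviated, and `may_use_simd()` paraphrases the real context check):

```c
#include <asm/neon.h>   /* kernel_neon_begin()/kernel_neon_end() */
#include <asm/simd.h>   /* may_use_simd() */

struct xor_inner_ops {
	void (*do_2)(unsigned long, unsigned long *, const unsigned long *);
};
extern struct xor_inner_ops xor_block_neon_inner;   /* NEON body, built separately */

/* ALU fallback: four unsigned longs per iteration, as the lines math implies.
 * As in the kernel, bytes is assumed a nonzero multiple of the block size. */
static void xor_4regs_2(unsigned long bytes, unsigned long *__restrict p1,
                        const unsigned long *__restrict p2)
{
	unsigned int lines = bytes / sizeof(unsigned long) / 4;

	do {
		p1[0] ^= p2[0];
		p1[1] ^= p2[1];
		p1[2] ^= p2[2];
		p1[3] ^= p2[3];
		p1 += 4;
		p2 += 4;
	} while (--lines);
}

static void xor_neon_2(unsigned long bytes, unsigned long *__restrict p1,
                       const unsigned long *__restrict p2)
{
	if (!may_use_simd()) {
		xor_4regs_2(bytes, p1, p2);
	} else {
		kernel_neon_begin();   /* claim the FP/NEON unit */
		xor_block_neon_inner.do_2(bytes, p1, p2);
		kernel_neon_end();
	}
}
```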
|
/arch/s390/lib/

xor.c
     14  static void xor_xc_2(unsigned long bytes, unsigned long * __restrict p1,   in xor_xc_2() argument
     31  : : "d" (bytes), "a" (p1), "a" (p2)   in xor_xc_2()
     35  static void xor_xc_3(unsigned long bytes, unsigned long * __restrict p1,   in xor_xc_3() argument
     57  : "+d" (bytes), "+a" (p1), "+a" (p2), "+a" (p3)   in xor_xc_3()
     61  static void xor_xc_4(unsigned long bytes, unsigned long * __restrict p1,   in xor_xc_4() argument
     88  : "+d" (bytes), "+a" (p1), "+a" (p2), "+a" (p3), "+a" (p4)   in xor_xc_4()
     92  static void xor_xc_5(unsigned long bytes, unsigned long * __restrict p1,   in xor_xc_5() argument
    125  : "+d" (bytes), "+a" (p1), "+a" (p2), "+a" (p3), "+a" (p4),   in xor_xc_5()
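The inline asm here drives the s390 XC (exclusive-or character) instruction, which XORs one memory operand into another up to 256 bytes per execution, which is why the constraint lists carry only the length and the buffer addresses. Behaviorally it reduces to the following C (a model of what the instruction does, not the asm itself; names are invented):

```c
#include <stddef.h>

/* One XC execution: dst[i] ^= src[i] for 1..256 bytes. */
static void xc_block(unsigned char *dst, const unsigned char *src, size_t len)
{
	for (size_t i = 0; i < len; i++)
		dst[i] ^= src[i];
}

/* xor_xc_2 equivalent: walk the buffers in XC-sized chunks. */
static void xor_2_model(unsigned long bytes, unsigned char *p1,
                        const unsigned char *p2)
{
	while (bytes) {
		size_t chunk = bytes > 256 ? 256 : bytes;

		xc_block(p1, p2, chunk);
		p1 += chunk;
		p2 += chunk;
		bytes -= chunk;
	}
}
```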
|
/arch/loongarch/include/asm/

xor_simd.h
      9  void xor_lsx_2(unsigned long bytes, unsigned long * __restrict p1,
     11  void xor_lsx_3(unsigned long bytes, unsigned long * __restrict p1,
     13  void xor_lsx_4(unsigned long bytes, unsigned long * __restrict p1,
     16  void xor_lsx_5(unsigned long bytes, unsigned long * __restrict p1,
     22  void xor_lasx_2(unsigned long bytes, unsigned long * __restrict p1,
     24  void xor_lasx_3(unsigned long bytes, unsigned long * __restrict p1,
     26  void xor_lasx_4(unsigned long bytes, unsigned long * __restrict p1,
     29  void xor_lasx_5(unsigned long bytes, unsigned long * __restrict p1,
|
/arch/loongarch/lib/

xor_simd.h
     13  void __xor_lsx_2(unsigned long bytes, unsigned long * __restrict p1,
     15  void __xor_lsx_3(unsigned long bytes, unsigned long * __restrict p1,
     17  void __xor_lsx_4(unsigned long bytes, unsigned long * __restrict p1,
     20  void __xor_lsx_5(unsigned long bytes, unsigned long * __restrict p1,
     26  void __xor_lasx_2(unsigned long bytes, unsigned long * __restrict p1,
     28  void __xor_lasx_3(unsigned long bytes, unsigned long * __restrict p1,
     30  void __xor_lasx_4(unsigned long bytes, unsigned long * __restrict p1,
     33  void __xor_lasx_5(unsigned long bytes, unsigned long * __restrict p1,
|
xor_simd_glue.c
     15  void xor_##flavor##_2(unsigned long bytes, unsigned long * __restrict p1, \
     19  __xor_##flavor##_2(bytes, p1, p2); \
     25  void xor_##flavor##_3(unsigned long bytes, unsigned long * __restrict p1, \
     30  __xor_##flavor##_3(bytes, p1, p2, p3); \
     36  void xor_##flavor##_4(unsigned long bytes, unsigned long * __restrict p1, \
     42  __xor_##flavor##_4(bytes, p1, p2, p3, p4); \
     48  void xor_##flavor##_5(unsigned long bytes, unsigned long * __restrict p1, \
     55  __xor_##flavor##_5(bytes, p1, p2, p3, p4, p5); \
|
xor_template.c
     16  void XOR_FUNC_NAME(2)(unsigned long bytes,
     20  unsigned long lines = bytes / LINE_WIDTH;
     35  void XOR_FUNC_NAME(3)(unsigned long bytes,
     40  unsigned long lines = bytes / LINE_WIDTH;
     57  void XOR_FUNC_NAME(4)(unsigned long bytes,
     63  unsigned long lines = bytes / LINE_WIDTH;
     83  void XOR_FUNC_NAME(5)(unsigned long bytes,
     90  unsigned long lines = bytes / LINE_WIDTH;
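xor_template.c is included once per SIMD flavor, with `LINE_WIDTH` and the `XOR_FUNC_NAME()` macro supplied by the includer; every generated function reduces `bytes` to a count of `LINE_WIDTH`-sized lines and handles one line per iteration. A generic C rendering of that loop (a sketch: scalar XORs stand in for the per-line vector load/xor/store, and the 32-byte width is an assumption):

```c
#define LINE_WIDTH 32UL   /* assumed: bytes handled per loop iteration */

/* bytes is assumed a nonzero multiple of LINE_WIDTH, as in the kernel callers. */
static void xor_generic_2(unsigned long bytes, unsigned long *__restrict v1,
                          const unsigned long *__restrict v2)
{
	unsigned long lines = bytes / LINE_WIDTH;
	unsigned long words = LINE_WIDTH / sizeof(unsigned long);

	do {
		/* the real template emits one vector load/xor/store here */
		for (unsigned long i = 0; i < words; i++)
			v1[i] ^= v2[i];
		v1 += words;
		v2 += words;
	} while (--lines);
}
```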
|
/arch/powerpc/lib/

xor_vmx_glue.c
     15  void xor_altivec_2(unsigned long bytes, unsigned long * __restrict p1,   in xor_altivec_2() argument
     20  __xor_altivec_2(bytes, p1, p2);   in xor_altivec_2()
     26  void xor_altivec_3(unsigned long bytes, unsigned long * __restrict p1,   in xor_altivec_3() argument
     32  __xor_altivec_3(bytes, p1, p2, p3);   in xor_altivec_3()
     38  void xor_altivec_4(unsigned long bytes, unsigned long * __restrict p1,   in xor_altivec_4() argument
     45  __xor_altivec_4(bytes, p1, p2, p3, p4);   in xor_altivec_4()
     51  void xor_altivec_5(unsigned long bytes, unsigned long * __restrict p1,   in xor_altivec_5() argument
     59  __xor_altivec_5(bytes, p1, p2, p3, p4, p5);   in xor_altivec_5()
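The glue wrapper exists so callers never manage the vector unit themselves: it keeps the task on one CPU, turns Altivec on for kernel use, runs the raw loop from xor_vmx.c, and tears the state back down. A sketch of the pattern reconstructed from the calls visible above (`enable_kernel_altivec()` and `preempt_disable()` are real powerpc/kernel APIs; the inner routine's prototype comes from xor_vmx.h):

```c
#include <linux/preempt.h>
#include <asm/switch_to.h>   /* enable_kernel_altivec() */

/* the raw VMX loop, declared in xor_vmx.h */
void __xor_altivec_2(unsigned long bytes, unsigned long *__restrict p1,
                     const unsigned long *__restrict p2);

void xor_altivec_2(unsigned long bytes, unsigned long *__restrict p1,
                   const unsigned long *__restrict p2)
{
	preempt_disable();          /* VMX state is per-CPU; don't migrate */
	enable_kernel_altivec();    /* make the vector unit usable in-kernel */
	__xor_altivec_2(bytes, p1, p2);
	disable_kernel_altivec();
	preempt_enable();
}
```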
|
pmem.c
     16  unsigned long bytes = l1_dcache_bytes();   in __clean_pmem_range() local
     17  void *addr = (void *)(start & ~(bytes - 1));   in __clean_pmem_range()
     18  unsigned long size = stop - (unsigned long)addr + (bytes - 1);   in __clean_pmem_range()
     21  for (i = 0; i < size >> shift; i++, addr += bytes)   in __clean_pmem_range()
     28  unsigned long bytes = l1_dcache_bytes();   in __flush_pmem_range() local
     29  void *addr = (void *)(start & ~(bytes - 1));   in __flush_pmem_range()
     30  unsigned long size = stop - (unsigned long)addr + (bytes - 1);   in __flush_pmem_range()
     33  for (i = 0; i < size >> shift; i++, addr += bytes)   in __flush_pmem_range()
|
xor_vmx.h
      9  void __xor_altivec_2(unsigned long bytes, unsigned long * __restrict p1,
     11  void __xor_altivec_3(unsigned long bytes, unsigned long * __restrict p1,
     14  void __xor_altivec_4(unsigned long bytes, unsigned long * __restrict p1,
     18  void __xor_altivec_5(unsigned long bytes, unsigned long * __restrict p1,
|
xor_vmx.c
     52  void __xor_altivec_2(unsigned long bytes,   in __xor_altivec_2() argument
     58  unsigned long lines = bytes / (sizeof(unative_t)) / 4;   in __xor_altivec_2()
     71  void __xor_altivec_3(unsigned long bytes,   in __xor_altivec_3() argument
     79  unsigned long lines = bytes / (sizeof(unative_t)) / 4;   in __xor_altivec_3()
     95  void __xor_altivec_4(unsigned long bytes,   in __xor_altivec_4() argument
    105  unsigned long lines = bytes / (sizeof(unative_t)) / 4;   in __xor_altivec_4()
    124  void __xor_altivec_5(unsigned long bytes,   in __xor_altivec_5() argument
    136  unsigned long lines = bytes / (sizeof(unative_t)) / 4;   in __xor_altivec_5()
|
/arch/arm64/include/asm/

xor.h
     19  xor_neon_2(unsigned long bytes, unsigned long * __restrict p1,   in xor_neon_2() argument
     23  xor_block_inner_neon.do_2(bytes, p1, p2);   in xor_neon_2()
     28  xor_neon_3(unsigned long bytes, unsigned long * __restrict p1,   in xor_neon_3() argument
     33  xor_block_inner_neon.do_3(bytes, p1, p2, p3);   in xor_neon_3()
     38  xor_neon_4(unsigned long bytes, unsigned long * __restrict p1,   in xor_neon_4() argument
     44  xor_block_inner_neon.do_4(bytes, p1, p2, p3, p4);   in xor_neon_4()
     49  xor_neon_5(unsigned long bytes, unsigned long * __restrict p1,   in xor_neon_5() argument
     56  xor_block_inner_neon.do_5(bytes, p1, p2, p3, p4, p5);   in xor_neon_5()
|
/arch/riscv/include/asm/

xor.h
     13  static void xor_vector_2(unsigned long bytes, unsigned long *__restrict p1,   in xor_vector_2() argument
     17  xor_regs_2_(bytes, p1, p2);   in xor_vector_2()
     21  static void xor_vector_3(unsigned long bytes, unsigned long *__restrict p1,   in xor_vector_3() argument
     26  xor_regs_3_(bytes, p1, p2, p3);   in xor_vector_3()
     30  static void xor_vector_4(unsigned long bytes, unsigned long *__restrict p1,   in xor_vector_4() argument
     36  xor_regs_4_(bytes, p1, p2, p3, p4);   in xor_vector_4()
     40  static void xor_vector_5(unsigned long bytes, unsigned long *__restrict p1,   in xor_vector_5() argument
     47  xor_regs_5_(bytes, p1, p2, p3, p4, p5);   in xor_vector_5()
|
/arch/alpha/lib/

ev6-memcpy.S
     68  ldq $1, 0($17)   # L : get 8 bytes
     87  ldq $6, 0($17)   # L0 : bytes 0..7
     91  ldq $4, 8($17)   # L : bytes 8..15
     92  ldq $5, 16($17)  # L : bytes 16..23
     96  ldq $3, 24($17)  # L : bytes 24..31
    102  stq $6, 0($16)   # L : bytes 0..7
    106  stq $4, 8($16)   # L : bytes 8..15
    107  stq $5, 16($16)  # L : bytes 16..23
    116  ldq $6, 0($17)   # L : bytes 0..7
    117  ldq $4, 8($17)   # L : bytes 8..15
    [all …]
|
/arch/sparc/include/asm/

xor_32.h
     16  sparc_2(unsigned long bytes, unsigned long * __restrict p1,   in sparc_2() argument
     19  int lines = bytes / (sizeof (long)) / 8;   in sparc_2()
     54  sparc_3(unsigned long bytes, unsigned long * __restrict p1,   in sparc_3() argument
     58  int lines = bytes / (sizeof (long)) / 8;   in sparc_3()
    106  sparc_4(unsigned long bytes, unsigned long * __restrict p1,   in sparc_4() argument
    111  int lines = bytes / (sizeof (long)) / 8;   in sparc_4()
    172  sparc_5(unsigned long bytes, unsigned long * __restrict p1,   in sparc_5() argument
    178  int lines = bytes / (sizeof (long)) / 8;   in sparc_5()
|
xor_64.h
     15  void xor_vis_2(unsigned long bytes, unsigned long * __restrict p1,
     17  void xor_vis_3(unsigned long bytes, unsigned long * __restrict p1,
     20  void xor_vis_4(unsigned long bytes, unsigned long * __restrict p1,
     24  void xor_vis_5(unsigned long bytes, unsigned long * __restrict p1,
     40  void xor_niagara_2(unsigned long bytes, unsigned long * __restrict p1,
     42  void xor_niagara_3(unsigned long bytes, unsigned long * __restrict p1,
     45  void xor_niagara_4(unsigned long bytes, unsigned long * __restrict p1,
     49  void xor_niagara_5(unsigned long bytes, unsigned long * __restrict p1,
|
/arch/alpha/include/asm/

xor.h
      9  xor_alpha_2(unsigned long bytes, unsigned long * __restrict p1,
     12  xor_alpha_3(unsigned long bytes, unsigned long * __restrict p1,
     16  xor_alpha_4(unsigned long bytes, unsigned long * __restrict p1,
     21  xor_alpha_5(unsigned long bytes, unsigned long * __restrict p1,
     28  xor_alpha_prefetch_2(unsigned long bytes, unsigned long * __restrict p1,
     31  xor_alpha_prefetch_3(unsigned long bytes, unsigned long * __restrict p1,
     35  xor_alpha_prefetch_4(unsigned long bytes, unsigned long * __restrict p1,
     40  xor_alpha_prefetch_5(unsigned long bytes, unsigned long * __restrict p1,
|
/arch/x86/include/asm/

insn.h
     19  insn_byte_t bytes[4];   member
     36  p->bytes[n] = v;   in insn_set_byte()
     45  insn_byte_t bytes[4];   member
     63  p->bytes[n] = v;   in insn_set_byte()
    178  return X86_REX2_M(insn->rex_prefix.bytes[1]);   in insn_rex2_m_bit()
    213  return X86_VEX_P(insn->vex_prefix.bytes[1]);   in insn_vex_p_bits()
    215  return X86_VEX_P(insn->vex_prefix.bytes[2]);   in insn_vex_p_bits()
    222  return X86_VEX_W(insn->vex_prefix.bytes[2]);   in insn_vex_w_bit()
    231  if (insn->prefixes.bytes[3])   in insn_last_prefix_id()
    280  …for (idx = 0; idx < ARRAY_SIZE(insn->prefixes.bytes) && (prefix = insn->prefixes.bytes[idx]) != 0;…
    [all …]
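In the decoder, each prefix/opcode group is a small field struct whose payload is a `bytes[4]` array; helpers either poke single bytes into it (`insn_set_byte()`) or scan it with a bounded loop, as the legacy-prefix walk at line 280 does. A trimmed illustration of both operations (the struct is reduced to what the listing shows; `count_prefixes()` is an invented name for the scan):

```c
typedef unsigned char insn_byte_t;

struct insn_field {
	insn_byte_t bytes[4];   /* raw bytes of this prefix/opcode group */
	unsigned char nbytes;
};

static void insn_set_byte(struct insn_field *p, unsigned char n, insn_byte_t v)
{
	p->bytes[n] = v;
}

/* Walk the prefix bytes until the zero terminator or the array end. */
static int count_prefixes(const struct insn_field *prefixes)
{
	int idx;

	for (idx = 0; idx < 4 && prefixes->bytes[idx] != 0; idx++)
		;
	return idx;
}
```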
|
xor_32.h
     24  xor_pII_mmx_2(unsigned long bytes, unsigned long * __restrict p1,   in xor_pII_mmx_2() argument
     27  unsigned long lines = bytes >> 7;   in xor_pII_mmx_2()
     68  xor_pII_mmx_3(unsigned long bytes, unsigned long * __restrict p1,   in xor_pII_mmx_3() argument
     72  unsigned long lines = bytes >> 7;   in xor_pII_mmx_3()
    118  xor_pII_mmx_4(unsigned long bytes, unsigned long * __restrict p1,   in xor_pII_mmx_4() argument
    123  unsigned long lines = bytes >> 7;   in xor_pII_mmx_4()
    181  unsigned long lines = bytes >> 7;   in xor_pII_mmx_5()
    261  unsigned long lines = bytes >> 6;   in xor_p5_mmx_2()
    310  unsigned long lines = bytes >> 6;   in xor_p5_mmx_3()
    369  unsigned long lines = bytes >> 6;   in xor_p5_mmx_4()
    [all …]
|
xor.h
     60  xor_sse_2(unsigned long bytes, unsigned long * __restrict p1,   in xor_sse_2() argument
     63  unsigned long lines = bytes >> 8;   in xor_sse_2()
    115  unsigned long lines = bytes >> 8;   in xor_sse_2_pf64()
    147  xor_sse_3(unsigned long bytes, unsigned long * __restrict p1,   in xor_sse_3() argument
    151  unsigned long lines = bytes >> 8;   in xor_sse_3()
    211  unsigned long lines = bytes >> 8;   in xor_sse_3_pf64()
    245  xor_sse_4(unsigned long bytes, unsigned long * __restrict p1,   in xor_sse_4() argument
    250  unsigned long lines = bytes >> 8;   in xor_sse_4()
    318  unsigned long lines = bytes >> 8;   in xor_sse_4_pf64()
    360  unsigned long lines = bytes >> 8;   in xor_sse_5()
    [all …]
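Each SSE routine has a `_pf64` twin that issues prefetches ahead of the working position so the loads for an upcoming block overlap the XOR arithmetic on the current one; both variants divide `bytes` by 256 (`>> 8`), the bytes consumed per unrolled iteration. The idea in portable C (a sketch: `__builtin_prefetch` stands in for the prefetch instructions, and the one-block prefetch distance is an assumption, not the kernel's tuned value):

```c
static void xor_2_pf(unsigned long bytes, unsigned long *__restrict p1,
                     const unsigned long *__restrict p2)
{
	unsigned long lines = bytes >> 8;   /* 256-byte blocks, as in xor_sse_2 */
	unsigned long words = 256 / sizeof(unsigned long);

	while (lines--) {
		__builtin_prefetch(p1 + words, 1);   /* next block, will be written */
		__builtin_prefetch(p2 + words, 0);   /* next block, read-only */
		for (unsigned long i = 0; i < words; i++)
			p1[i] ^= p2[i];
		p1 += words;
		p2 += words;
	}
}
```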
|
xor_avx.h
     29  static void xor_avx_2(unsigned long bytes, unsigned long * __restrict p0,   in xor_avx_2() argument
     32  unsigned long lines = bytes >> 9;   in xor_avx_2()
     56  static void xor_avx_3(unsigned long bytes, unsigned long * __restrict p0,   in xor_avx_3() argument
     60  unsigned long lines = bytes >> 9;   in xor_avx_3()
     87  static void xor_avx_4(unsigned long bytes, unsigned long * __restrict p0,   in xor_avx_4() argument
     92  unsigned long lines = bytes >> 9;   in xor_avx_4()
    122  static void xor_avx_5(unsigned long bytes, unsigned long * __restrict p0,   in xor_avx_5() argument
    128  unsigned long lines = bytes >> 9;   in xor_avx_5()
|
/arch/powerpc/mm/

cacheflush.c
     37  unsigned long bytes = l1_icache_bytes();   in invalidate_icache_range() local
     38  char *addr = (char *)(start & ~(bytes - 1));   in invalidate_icache_range()
     39  unsigned long size = stop - (unsigned long)addr + (bytes - 1);   in invalidate_icache_range()
     42  for (i = 0; i < size >> shift; i++, addr += bytes)   in invalidate_icache_range()
     86  unsigned long bytes = l1_dcache_bytes();   in flush_dcache_icache_phys() local
     87  unsigned long nb = PAGE_SIZE / bytes;   in flush_dcache_icache_phys()
    114  : "r" (nb), "r" (msr), "i" (bytes), "r" (msr0)   in flush_dcache_icache_phys()
|
/arch/x86/kernel/

alternative.c
    141  u8 *bytes = thunk;   in its_init_thunk() local
    155  bytes[i++] = 0xfd;   in its_init_thunk()
    165  bytes[i++] = 0xff;   in its_init_thunk()
    167  bytes[i++] = 0xcc;   in its_init_thunk()
    359  if (insn->opcode.bytes[0] == 0x0F && insn->opcode.bytes[1] == 0x1F)   in insn_is_nop()
    746  bytes[i++] = modrm;   in emit_indirect()
    762  bytes[i++] = op;   in __emit_trampoline()
    907  bytes[i++] = 0x0f;   in patch_retpoline()
    952  u8 bytes[16];   in apply_retpolines() local
   1059  u8 bytes[16];   in apply_returns() local
    [all …]
|