| /linux/arch/x86/power/ |
| hibernate_asm_64.S | 37 movq %cr3, %rcx; # flush TLB · 38 movq %rcx, %cr3 · 48 movq pt_regs_cx(%rax), %rcx · 79 movq %rcx, pt_regs_cx(%rax) · 117 jmpq *%rcx · 125 movq %rbx, %rcx · 126 andq $~(X86_CR4_PGE), %rcx · 127 movq %rcx, %cr4; # turn off PGE · 128 movq %cr3, %rcx; # flush TLB · 129 movq %rcx, %cr3; [all …]
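The movq %cr3, %rcx / movq %rcx, %cr3 pairs above are the standard way to flush non-global TLB entries, and clearing CR4.PGE first (lines 126-127) drops the global ones as well. A minimal standalone sketch of that pattern, assuming ring 0, the usual X86_CR4_PGE definition, and a made-up label:

        flush_tlb_sketch:
                movq    %cr4, %rbx              # remember the original CR4 (clobbers %rbx)
                movq    %rbx, %rcx
                andq    $~(X86_CR4_PGE), %rcx
                movq    %rcx, %cr4              # turn off PGE: global entries are invalidated
                movq    %cr3, %rcx              # reloading CR3 flushes non-global entries
                movq    %rcx, %cr3
                movq    %rbx, %cr4              # restore PGE
                ret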
|
| /linux/arch/x86/crypto/ |
| blake2s-core.S | 57 movq %rcx,%xmm15 · 68 leaq SIGMA(%rip),%rcx · 70 movzbl (%rcx),%eax · 72 movzbl 0x1(%rcx),%eax · 74 movzbl 0x2(%rcx),%eax · 76 movzbl 0x3(%rcx),%eax · 91 movzbl 0x4(%rcx),%eax · 93 movzbl 0x5(%rcx),%eax · 160 addq $0x10,%rcx · 161 cmpq %r8,%rcx [all …]
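Here %rcx walks the SIGMA message-permutation table: leaq SIGMA(%rip),%rcx takes the table address RIP-relative, each movzbl n(%rcx),%eax pulls out one byte-sized index, and addq $0x10,%rcx steps to the next 16-byte row until %rcx reaches the limit in %r8. The bare table-walk skeleton, with the actual hashing left out and the loop shape assumed:

        leaq    SIGMA(%rip), %rcx       # start of the permutation table
.Lrow_sketch:
        movzbl  (%rcx), %eax            # first byte index of this row
        movzbl  0x1(%rcx), %edx         # second byte index
        # ... use the indices to select message words for this round ...
        addq    $0x10, %rcx             # advance to the next 16-byte row
        cmpq    %r8, %rcx               # %r8 assumed to hold the end of the table
        jne     .Lrow_sketch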
|
| chacha-avx512vl-x86_64.S | 115 cmp $0x10,%rcx · 122 cmp $0x20,%rcx · 129 cmp $0x30,%rcx · 136 cmp $0x40,%rcx · 144 cmp $0x50,%rcx · 150 cmp $0x60,%rcx · 156 cmp $0x70,%rcx · 162 cmp $0x80,%rcx · 173 mov %rcx,%rax · 174 and $0xf,%rcx [all …]
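The ladder of cmp $0x10 ... $0x80, %rcx picks how much whole-block work is left, and mov %rcx,%rax / and $0xf,%rcx separates the byte count into full 16-byte blocks plus a partial tail. A hedged sketch of that split, with the register roles assumed rather than taken from the file:

        # %rcx = remaining length in bytes
        mov     %rcx, %rax
        and     $0xf, %rcx              # %rcx = tail bytes (length mod 16)
        sub     %rcx, %rax              # %rax = bytes covered by whole blocks
        shr     $4, %rax                # %rax = number of full 16-byte blocks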
|
| camellia-aesni-avx-asm_64.S | 194 %rcx, (%r9)); · 726 leaq 8 * 16(%rax), %rcx; · 730 %xmm15, %rax, %rcx); · 734 %xmm15, %rax, %rcx, 0); · 746 %xmm15, %rax, %rcx, 8); · 766 vmovdqu 0 * 16(%rcx), %xmm8; · 767 vmovdqu 1 * 16(%rcx), %xmm9; · 813 leaq 8 * 16(%rax), %rcx; · 817 %xmm15, %rax, %rcx); · 852 vmovdqu 0 * 16(%rcx), %xmm8; [all …]
|
| camellia-aesni-avx2-asm_64.S | 227 %rcx, (%r9)); · 760 leaq 8 * 32(%rax), %rcx; · 764 %ymm15, %rax, %rcx); · 768 %ymm15, %rax, %rcx, 0); · 780 %ymm15, %rax, %rcx, 8); · 800 vmovdqu 0 * 32(%rcx), %ymm8; · 801 vmovdqu 1 * 32(%rcx), %ymm9; · 847 leaq 8 * 32(%rax), %rcx; · 851 %ymm15, %rax, %rcx); · 886 vmovdqu 0 * 32(%rcx), %ymm8; [all …]
|
| sm4-aesni-avx-asm_64.S | 161 cmpq $2, %rcx; · 166 cmpq $3, %rcx; · 236 cmpq $2, %rcx; · 241 cmpq $3, %rcx; · 371 cmpq $5, %rcx; · 386 cmpq $7, %rcx; · 395 cmpq $6, %rcx; · 405 cmpq $7, %rcx; · 429 vmovdqu (%rcx), RA0; · 462 vmovdqu RTMP1, (%rcx); [all …]
|
| aria-aesni-avx2-asm_64.S | 1065 vmovdqu %ymm8, (0 * 32)(%rcx); · 1066 vmovdqu %ymm9, (1 * 32)(%rcx); · 1093 vmovdqu (0 * 32)(%rcx), %ymm0; · 1094 vmovdqu (1 * 32)(%rcx), %ymm1; · 1095 vmovdqu (2 * 32)(%rcx), %ymm2; · 1096 vmovdqu (3 * 32)(%rcx), %ymm3; · 1097 vmovdqu (4 * 32)(%rcx), %ymm4; · 1192 leaq (%rcx), %rsi; · 1193 leaq (%rcx), %rdx; · 1412 leaq (%rcx), %rsi; [all …]
|
| poly1305-x86_64-cryptogams.pl | 266 cmovc %rcx,%r11 · 389 mov %r9,%rcx · 1473 add %rcx,%rax · 1487 mov %r10,%rcx · 1490 and \$-4,%rcx · 1491 add %rcx,%rax · 1498 mov %r9,%rcx · 2879 lea (%rcx,%rcx,4),%rcx # *5 · 3846 adc %rcx,%r9 · 3851 mov %r9,%rcx [all …]
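Line 2879's lea (%rcx,%rcx,4),%rcx multiplies by 5 without a MUL, which Poly1305 needs because reduction modulo 2^130 - 5 folds the high limbs back in multiplied by 5. Small constant multiplies are routinely built from LEA this way; for example:

        lea     (%rcx,%rcx,4), %rcx     # %rcx *= 5  (x + 4x)
        lea     (%rax,%rax,2), %rax     # %rax *= 3  (x + 2x)
        lea     (%rdx,%rdx,8), %rdx     # %rdx *= 9  (x + 8x)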
|
| /linux/arch/x86/kernel/cpu/sgx/ |
| encls.h | 80 #define __encls_ret_1(rax, rcx) \ · 82 __encls_ret_N(rax, "c"(rcx)); \ · 85 #define __encls_ret_2(rax, rbx, rcx) \ · 87 __encls_ret_N(rax, "b"(rbx), "c"(rcx)); \ · 90 #define __encls_ret_3(rax, rbx, rcx, rdx) \ · 92 __encls_ret_N(rax, "b"(rbx), "c"(rcx), "d"(rdx)); \ · 124 #define __encls_2(rax, rbx, rcx) \ · 127 __encls_N(rax, ign_rbx_out, "b"(rbx), "c"(rcx)); \ · 130 #define __encls_1_1(rax, data, rcx) \ · 133 int ret = __encls_N(rax, rbx_out, "c"(rcx)); \
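The "b"(rbx), "c"(rcx) and "d"(rdx) constraints in these macros pin each operand to the fixed register the ENCLS instruction expects: the leaf number goes in %eax and the leaf-specific operands in %rbx/%rcx/%rdx, with the error code for the __encls_ret_* leaves coming back in %eax. At the instruction level the contract looks roughly like this sketch, using ECREATE as the example leaf and made-up operand labels:

        mov     $0, %eax                        # ENCLS leaf 0: ECREATE
        lea     pageinfo(%rip), %rbx            # hypothetical PAGEINFO structure
        mov     secs_page(%rip), %rcx           # hypothetical destination EPC page
        encls                                   # ring 0 only; older assemblers need .byte 0x0f, 0x01, 0xcf
        # for the error-returning leaves, %eax now holds 0 or an SGX error code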
|
| /linux/arch/x86/kernel/ |
| ftrace_64.S | 84 movq %rcx, RCX(%rsp) · 127 movq RCX(%rsp), %rcx · 162 movq %rcx, RSP(%rsp) · 170 leaq (%rsp), %rcx · 224 movq %rcx, EFLAGS(%rsp) · 226 movq $__KERNEL_DS, %rcx · 227 movq %rcx, SS(%rsp) · 228 movq $__KERNEL_CS, %rcx · 229 movq %rcx, CS(%rsp) · 232 movq %rcx, RSP(%rsp) [all …]
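In this ftrace trampoline %rcx is spilled to and reloaded from a pt_regs-shaped frame on the stack; RCX, CS, SS, RSP and EFLAGS are byte offsets into struct pt_regs generated at build time. Stripped to the basic save/restore idiom (frame size and surrounding bookkeeping are placeholders, not copied from the file):

        subq    $PT_REGS_SIZE_SKETCH, %rsp      # hypothetical: room for a struct pt_regs
        movq    %rcx, RCX(%rsp)                 # save the live value into pt_regs->cx
        # ... a tracer may read or rewrite the frame here ...
        movq    RCX(%rsp), %rcx                 # reload the (possibly updated) value
        addq    $PT_REGS_SIZE_SKETCH, %rsp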
|
| head_64.S | 107 leaq early_top_pgt(%rip), %rcx · 108 addq %rcx, %rax · 212 movq %cr4, %rcx · 217 movq %rcx, %cr4 · 223 movq %rcx, %cr4 · 268 movq apic_mmio_base(%rip), %rcx · 269 addq $APIC_ID, %rcx · 270 movl (%rcx), %eax · 284 cmpl (%rbx,%rcx,4), %eax · 432 movq TASK_threadsp(%rcx), %rsp [all …]
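Lines 268-270 read the boot CPU's APIC ID straight from the memory-mapped APIC: base address plus the APIC_ID register offset, then a 32-bit load. As a standalone sketch, reusing the symbols from the snippet and assuming the xAPIC layout where the ID sits in bits 31:24:

        movq    apic_mmio_base(%rip), %rcx      # virtual address of the local APIC
        addq    $APIC_ID, %rcx                  # offset of the ID register
        movl    (%rcx), %eax                    # 32-bit MMIO read
        shrl    $24, %eax                       # assumed xAPIC: ID in the top byte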
|
| relocate_kernel_64.S | 174 movq %rcx, %r11 · 262 movq %rdi, %rcx /* Put the indirection_page in %rcx */ · 269 movq (%rbx), %rcx · 274 movq %rcx, %rdi · 280 movq %rcx, %rbx · 290 movq %rcx, %rsi /* For ever source page do a copy */
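Once the indirection-page walk leaves a source page address in %rcx, it is moved to %rsi and the page is copied with a rep-movsq. A sketch of copying one 4 KiB page that way, assuming the destination is already in %rdi:

        movq    %rcx, %rsi              # source page taken from the indirection entry
        movl    $512, %ecx              # 4096 bytes / 8 bytes per movsq
        rep movsq                       # copy one page from (%rsi) to (%rdi)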
|
| /linux/arch/x86/lib/ |
| copy_user_64.S | 33 cmpq $64,%rcx · 47 dec %rcx · 77 movq %rcx,%rax · 78 shrq $3,%rcx · 86 1: leaq (%rax,%rcx,8),%rcx
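Lines 77-78 split a byte count into whole qwords (shrq $3) while the low three bits are kept aside, and the fixup at line 86 reverses that in a single LEA: bytes still uncopied = 8 * remaining qwords + tail bytes. A sketch of that round trip, with the fault-handling plumbing omitted:

        movq    %rcx, %rax              # %rcx = total bytes; keep a copy
        shrq    $3, %rcx                # %rcx = whole qwords to copy
        andl    $7, %eax                # %rax = trailing bytes (0..7)
        # ... the qword copy loop decrements %rcx; a fault lands here ...
        leaq    (%rax,%rcx,8), %rcx     # bytes not copied = 8*qwords + tail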
|
| memset_64.S | 36 movq %rdx,%rcx · 52 imulq %rcx,%rax · 60 movq %rdx,%rcx · 61 shrq $6,%rcx · 66 decq %rcx
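The imulq %rcx,%rax at line 52 is the byte-broadcast trick: multiplying the zero-extended fill byte by 0x0101010101010101 replicates it into all eight byte lanes of %rax, and shrq $6,%rcx then counts 64-byte chunks for the store loop. A hedged sketch of the same idea, using rep stosq instead of the unrolled loop and assuming a length that is a multiple of 8:

        # %rdi = destination, %esi = fill byte, %rdx = length in bytes
        movzbl  %sil, %eax
        movabs  $0x0101010101010101, %rcx
        imulq   %rcx, %rax              # fill byte replicated across %rax
        movq    %rdx, %rcx
        shrq    $3, %rcx                # number of qword stores
        rep stosq                       # fill (%rdi) with %rax, %rcx times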
|
| csum-copy_64.S | 134 movq %rcx, %r10 · 219 subq $2, %rcx · 229 subq $4, %rcx · 242 leaq -1(%rcx, %rcx), %rcx · 243 rorq $1, %rcx
|
| clear_page_64.S | 66 cmpq $64,%rcx · 78 dec %rcx · 105 subq $64,%rcx · 106 cmpq $64,%rcx
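The subq $64,%rcx / cmpq $64,%rcx pair drives an unrolled loop that zeroes 64 bytes per iteration; the file also carries a plain rep-store variant. Clearing one 4 KiB page the simple way looks roughly like this, assuming the page address is already in %rdi:

        xorl    %eax, %eax              # value to store: zero
        movl    $512, %ecx              # 4096 / 8 qword stores
        rep stosq                       # zero the page at (%rdi)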
|
| memmove_64.S | 42 #define MEMMOVE_BYTES movq %rdx, %rcx; rep movsb; RET · 86 movq %rdx, %rcx · 89 shrq $3, %rcx · 100 movq %rdx, %rcx · 105 shrq $3, %rcx
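MEMMOVE_BYTES is the forward rep movsb path; the rest of the file switches to a backwards copy when the destination overlaps the tail of the source. A sketch of the overlap check and both directions (labels are invented, not the file's):

        # %rdi = dest, %rsi = src, %rdx = length
        cmp     %rsi, %rdi
        jb      .Lfwd_sketch            # dest below src: forward copy is safe
        lea     (%rsi,%rdx), %rcx
        cmp     %rcx, %rdi
        jb      .Lbwd_sketch            # dest inside [src, src+len): copy backwards
.Lfwd_sketch:
        movq    %rdx, %rcx
        rep movsb
        ret
.Lbwd_sketch:
        std                             # direction flag: copy downwards
        lea     -1(%rsi,%rdx), %rsi     # last source byte
        lea     -1(%rdi,%rdx), %rdi     # last destination byte
        movq    %rdx, %rcx
        rep movsb
        cld                             # restore the direction flag
        ret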
|
| /linux/arch/x86/mm/ |
| mem_encrypt_boot.S | 37 movq %rcx, %rax /* Workarea stack page */ · 49 movq $(.L__enc_copy_end - __enc_copy), %rcx /* Encryption routine length */ · 56 movq %r12, %rcx /* Area length */ · 111 movq %rcx, %r9 /* Save area length */ · 135 movq %r12, %rcx · 140 movq %r12, %rcx
|
| /linux/tools/arch/x86/lib/ |
| memset_64.S | 36 movq %rdx,%rcx · 52 imulq %rcx,%rax · 60 movq %rdx,%rcx · 61 shrq $6,%rcx · 66 decq %rcx
|
| /linux/arch/x86/xen/ |
| xen-asm.S | 42 push %rcx · 58 pop %rcx · 126 pop %rcx · 170 pop %rcx · 240 popq %rcx · 260 popq %rcx · 282 popq %rcx
|
| /linux/arch/x86/entry/vdso/ |
| vsgx.S | 45 mov SGX_ENCLAVE_OFFSET_OF_RUN(%rbp), %rcx · 50 cmpq $0, (%rcx, %rbx) · 57 mov SGX_ENCLAVE_RUN_TCS(%rcx), %rbx · 58 lea .Lasync_exit_pointer(%rip), %rcx · 104 mov %rsp, %rcx
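For ENCLU[EENTER] the hardware contract is: leaf number in %eax, TCS address in %rbx, and the AEP (asynchronous exit pointer) in %rcx, which is exactly what lines 57-58 load. A minimal sketch of that setup with made-up symbol names:

        mov     $2, %eax                        # ENCLU leaf 2: EENTER
        mov     tcs_addr(%rip), %rbx            # hypothetical: enclave TCS address
        lea     aep_handler(%rip), %rcx         # hypothetical: async exit handler
        enclu                                   # older assemblers need .byte 0x0f, 0x01, 0xd7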
|
| /linux/arch/x86/entry/ |
| calling.h | 68 .macro PUSH_REGS rdx=%rdx rcx=%rcx rax=%rax save_ret=0 unwind_hint=1 · 78 pushq \rcx /* pt_regs->cx */ · 125 .macro PUSH_AND_CLEAR_REGS rdx=%rdx rcx=%rcx rax=%rax save_ret=0 clear_bp=1 unwind_hint=1 · 126 PUSH_REGS rdx=\rdx, rcx=\rcx, rax=\rax, save_ret=\save_ret unwind_hint=\unwind_hint · 142 popq %rcx · 440 pushq %rcx · 454 popq %rcx
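PUSH_REGS pushes the general-purpose registers in struct pt_regs order, so the resulting stack doubles as a pt_regs, and the rdx=/rcx=/rax= macro arguments let a caller substitute a register that already holds the saved value. A toy illustration of that macro mechanism (not the real layout, just the idiom):

.macro PUSH_TWO_REGS_SKETCH rcx=%rcx rax=%rax
        pushq   \rcx            # lands in what would be pt_regs->cx
        pushq   \rax            # lands in what would be pt_regs->ax
.endm

        PUSH_TWO_REGS_SKETCH rcx=%r8    # caller stashed the original %rcx in %r8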
|
| /linux/tools/testing/selftests/kvm/x86_64/ |
| userspace_io_test.c | 84 if (regs.rcx == 2) · 85 regs.rcx = 1; · 86 if (regs.rcx == 3) · 87 regs.rcx = 8192;
|
| /linux/arch/x86/platform/efi/ |
| efi_stub_64.S | 22 mov %rcx, %r8 · 23 mov %rsi, %rcx
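EFI runtime services follow the Microsoft x64 calling convention, so the stub shuffles the SysV argument registers into the positions that ABI expects; the moves are ordered so no register is overwritten before it has been read. A sketch of the shuffle (shadow-space setup and stack arguments omitted):

        # SysV in: %rdi = EFI function, %rsi/%rdx/%rcx/%r8 = arguments 1-4
        mov     %r8,  %r9               # SysV arg4 -> MS arg4
        mov     %rcx, %r8               # SysV arg3 -> MS arg3
        mov     %rsi, %rcx              # SysV arg1 -> MS arg1 (%rdx is already MS arg2)
        call    *%rdi                   # call the EFI service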
|
| /linux/arch/x86/purgatory/ |
| entry64.S | 40 movq rcx(%rip), %rcx · 63 rcx: .quad 0x0
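The purgatory entry code reloads each register from a same-named data word (rcx: .quad 0x0) that the kexec loader patches into the blob, so the next stage starts with a fully known register state. The pattern for a single register, with hypothetical label names and the section handling assumed:

entry_sketch:
        movq    saved_rcx(%rip), %rcx   # value written into the blob by the loader
        jmpq    *saved_rip(%rip)        # continue at the patched entry point

        .data
saved_rcx:      .quad   0
saved_rip:      .quad   0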
|