/arch/x86/virt/vmx/tdx/

tdxcall.S
    52  movq TDX_MODULE_rcx(%rsi), %rcx
    53  movq TDX_MODULE_rdx(%rsi), %rdx
    54  movq TDX_MODULE_r8(%rsi), %r8
    55  movq TDX_MODULE_r9(%rsi), %r9
    56  movq TDX_MODULE_r10(%rsi), %r10
    57  movq TDX_MODULE_r11(%rsi), %r11
    72  movq TDX_MODULE_r12(%rsi), %r12
    73  movq TDX_MODULE_r13(%rsi), %r13
    80  pushq %rsi
    85  movq TDX_MODULE_rsi(%rsi), %rsi
    [all …]
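The TDX_MODULE_* operands above are asm-offsets into the argument structure whose address arrives in %rsi; the stub simply loads each field into the register of the same name before issuing the TDCALL/SEAMCALL. A minimal sketch of that pattern, using an illustrative stand-in for the real struct tdx_module_args:

    #include <stddef.h>
    #include <stdint.h>

    /* Illustrative stand-in, not the kernel's definition. */
    struct tdx_args_sketch {
            uint64_t rcx, rdx, r8, r9, r10, r11, r12, r13, r14, r15, rsi, rdi;
    };

    /* An "asm offset" such as TDX_MODULE_rcx boils down to an offsetof()
     * value exported to assembly, so TDX_MODULE_rcx(%rsi) addresses the
     * rcx field of the structure %rsi points to. */
    #define TDX_MODULE_rcx_SKETCH offsetof(struct tdx_args_sketch, rcx)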
|
/arch/x86/lib/

copy_page_64.S
    34  movq 0x8*0(%rsi), %rax
    35  movq 0x8*1(%rsi), %rbx
    36  movq 0x8*2(%rsi), %rdx
    37  movq 0x8*3(%rsi), %r8
    38  movq 0x8*4(%rsi), %r9
    39  movq 0x8*5(%rsi), %r10
    43  prefetcht0 5*64(%rsi)
    54  leaq 64 (%rsi), %rsi
    67  movq 0x8*3(%rsi), %r8
    68  movq 0x8*4(%rsi), %r9
    [all …]
|
memmove_64.S
    35  cmp %rdi, %rsi
    37  mov %rsi, %r8
    70  movq 2*8(%rsi), %r9
    71  movq 3*8(%rsi), %r8
    72  leaq 4*8(%rsi), %rsi
    102  movq (%rsi), %r11
    104  leaq -8(%rsi, %rdx), %rsi
    128  addq %rdx, %rsi
    140  leaq -4*8(%rsi), %rsi
    152  subq %rdx, %rsi
    [all …]
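The first two hits show the overlap test: the destination in %rdi is compared against the source in %rsi to choose between a forward and a backward copy. A plain-C sketch of just that decision (not the kernel's unrolled implementation):

    #include <stddef.h>

    static void *memmove_sketch(void *dst, const void *src, size_t n)
    {
            unsigned char *d = dst;
            const unsigned char *s = src;

            if (d < s) {
                    while (n--)
                            *d++ = *s++;    /* no overlap hazard: copy forward */
            } else {
                    while (n--)
                            d[n] = s[n];    /* dst overlaps past src: copy from the end */
            }
            return dst;
    }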
|
memcpy_64.S
    66  movq 0*8(%rsi), %r8
    67  movq 1*8(%rsi), %r9
    68  movq 2*8(%rsi), %r10
    69  movq 3*8(%rsi), %r11
    70  leaq 4*8(%rsi), %rsi
    85  addq %rdx, %rsi
    99  leaq -4*8(%rsi), %rsi
    111  subq %rdx, %rsi
    120  movq 0*8(%rsi), %r8
    121  movq 1*8(%rsi), %r9
    [all …]
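All of these loads go through %rsi because, in the x86-64 SysV calling convention, the second argument of memcpy(dst, src, len) arrives there; %rdi carries dst and %rdx carries len. A minimal sketch (not the kernel routine) that relies on the same register assignment via GCC constraint letters:

    #include <stddef.h>

    /* "+S" pins src to %rsi and "+D" pins dst to %rdi, so "rep movsb"
     * reads from %rsi and writes to %rdi, mirroring the hand-written
     * copy loops listed above. */
    static void *memcpy_sketch(void *dst, const void *src, size_t len)
    {
            void *ret = dst;

            asm volatile("rep movsb"
                         : "+D" (dst), "+S" (src), "+c" (len)
                         :
                         : "memory");
            return ret;
    }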
|
csum-copy_64.S
    107  movq %rbx, (%rsi)
    109  movq %r8, 8(%rsi)
    125  leaq 64(%rsi), %rsi
    147  leaq 8(%rsi), %rsi /* preserve carry */
    176  movw %bx, (%rsi)
    178  leaq 2(%rsi), %rsi
    190  movb %bl, (%rsi)
    217  movw %bx, (%rsi)
    220  leaq 2(%rsi), %rsi
    230  leaq 4(%rsi), %rsi
    [all …]
|
copy_user_uncached_64.S
    42  10: movq (%rsi),%r8
    43  11: movq 8(%rsi),%r9
    59  addq $64,%rsi
    106  addq $8,%rsi
    125  addq $4,%rsi
    132  70: movw (%rsi),%ax
    134  addq $2,%rsi
    174  inc %rsi
    184  addq $2,%rsi
    194  addq $4,%rsi
    [all …]
|
cmpxchg16b_emu.S
    26  cmpq __percpu (%rsi), %rax
    28  cmpq __percpu 8(%rsi), %rdx
    32  movq %rbx, __percpu (%rsi)
    33  movq %rcx, __percpu 8(%rsi)
    45  movq __percpu (%rsi), %rax
    46  movq __percpu 8(%rsi), %rdx
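In this emulation %rsi holds the address of the 16-byte per-cpu variable, while rax:rdx carry the expected value and rbx:rcx the replacement, mirroring the register usage of the real CMPXCHG16B instruction. A rough C model of the semantics the lines above implement (compare at lines 26/28, store at 32/33, reload on mismatch at 45/46); names and types are illustrative, not the kernel's:

    #include <stdbool.h>
    #include <stdint.h>

    struct u128_sketch { uint64_t lo, hi; };

    static bool cmpxchg16b_model(struct u128_sketch *ptr,      /* %rsi      */
                                 struct u128_sketch *old,      /* %rax:%rdx */
                                 struct u128_sketch new_val)   /* %rbx:%rcx */
    {
            if (ptr->lo == old->lo && ptr->hi == old->hi) {
                    *ptr = new_val;         /* matched: store the new pair */
                    return true;
            }
            *old = *ptr;                    /* mismatch: report the current value */
            return false;
    }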
|
copy_user_64.S
    46  0: movb (%rsi),%al
    49  inc %rsi
    60  2: movq (%rsi),%rax
    62  addq $8,%rsi
    81  0: movq (%rsi),%rax
    94  addq %rax,%rsi
|
copy_mc_64.S
    37  movb (%rsi), %al
    40  incq %rsi
    52  movq (%rsi), %r8
    55  addq $8, %rsi
    68  movb (%rsi), %al
    71  incq %rsi
|
bhi.S
    57  cmovne %r10, %rsi
    68  cmovne %r10, %rsi
    80  cmovne %r10, %rsi
    93  cmovne %r10, %rsi
    114  cmovne %r10, %rsi
    129  cmovne %r10, %rsi
|
/arch/x86/kernel/

sev_verify_cbit.S
    25  movq sme_me_mask(%rip), %rsi
    26  testq %rsi, %rsi
    30  movq sev_status(%rip), %rsi
    31  testq %rsi, %rsi
    35  movq %cr4, %rsi
    38  movq %rsi, %rdx
    71  movq %rsi, %cr4
|
relocate_kernel_64.S
    124  jmp *%rsi
    156  pushq %rsi
    370  movq %rdx, %rsi
    468  1: jmp *%rsi
    479  jmp *%rsi
    485  call *%rsi
    488  call *%rsi
    491  call *%rsi
    494  call *%rsi
    545  pushq %rsi
    [all …]
|
/arch/x86/crypto/

sm4-aesni-avx-asm_64.S
    235  vmovdqu RA0, 0*16(%rsi);
    238  vmovdqu RA1, 1*16(%rsi);
    240  vmovdqu RA2, 2*16(%rsi);
    243  vmovdqu RA3, 3*16(%rsi);
    396  vmovdqu RA0, (0 * 16)(%rsi);
    397  vmovdqu RA1, (1 * 16)(%rsi);
    398  vmovdqu RA2, (2 * 16)(%rsi);
    399  vmovdqu RA3, (3 * 16)(%rsi);
    400  vmovdqu RB0, (4 * 16)(%rsi);
    402  vmovdqu RB1, (5 * 16)(%rsi);
    [all …]
|
aes-gcm-aesni-x86_64.S
    748  lea 16(KEY), %rsi
    751  movdqa (%rsi), TMP0
    753  add $16, %rsi
    756  movdqa (%rsi), TMP0
    770  lea 16(KEY), %rsi
    774  movdqa (%rsi), TMP0
    775  add $16, %rsi
    784  add $16, %rsi
    794  add $16, %rsi
    930  lea 16(KEY), %rsi
    [all …]
|
sm4-aesni-avx2-asm_64.S
    376  vmovdqu RA0, (0 * 32)(%rsi);
    377  vmovdqu RA1, (1 * 32)(%rsi);
    378  vmovdqu RA2, (2 * 32)(%rsi);
    379  vmovdqu RA3, (3 * 32)(%rsi);
    380  vmovdqu RB0, (4 * 32)(%rsi);
    381  vmovdqu RB1, (5 * 32)(%rsi);
    382  vmovdqu RB2, (6 * 32)(%rsi);
    383  vmovdqu RB3, (7 * 32)(%rsi);
    429  vmovdqu RA0, (0 * 32)(%rsi);
    430  vmovdqu RA1, (1 * 32)(%rsi);
    [all …]
|
blowfish-x86_64-asm_64.S
    22  #define RIO %rsi
    45  #define RT1 %rsi
    111  movq %rsi, %r10;
    142  movq %rsi, %r10;
    285  movq %rsi, %r11;
    323  movq %rsi, %r11;
|
/arch/x86/mm/

mem_encrypt_boot.S
    43  movq %rsi, %r11 /* Decrypted area */
    48  leaq __enc_copy(%rip), %rsi /* Encryption routine */
    54  movq %r11, %rsi /* Decrypted area */
    114  movq %rsi, %r11 /* Save decrypted area address */
    134  movq %r11, %rsi /* Source - decrypted area */
    139  movq %r8, %rsi /* Source - intermediate copy buffer */
|
/arch/x86/boot/compressed/

idt_handlers_64.S
    24  pushq %rsi
    42  movq ORIG_RAX(%rsp), %rsi
    59  popq %rsi
|
head_64.S
    317  movl BP_kernel_alignment(%rsi), %eax
    329  movl BP_init_size(%rsi), %ebx
    374  movq %rsi, %r15
    408  leaq rva(top_pgtable)(%rbx), %rsi
    419  leaq (_bss-8)(%rip), %rsi
    468  movq %rbp, %rsi
    474  movq %r15, %rsi
|
/arch/x86/um/

setjmp_64.S
    25  pop %rsi # Return address, and adjust the stack
    29  push %rsi # Make the call/return stack happy
    35  movq %rsi,56(%rdi) # Return address
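Here %rsi is only a scratch register: the return address is popped into it and then stored at offset 56 of the jump buffer whose address arrived in %rdi (the first argument). A hedged sketch of an eight-quadword buffer consistent with that store; only the final return-address slot is confirmed by the lines above, the ordering of the other slots is an assumption:

    /* Illustrative layout only; field names are invented for readability. */
    struct jmp_buf_sketch {
            unsigned long rbx;      /* offset  0 */
            unsigned long rsp;      /* offset  8 */
            unsigned long rbp;      /* offset 16 */
            unsigned long r12;      /* offset 24 */
            unsigned long r13;      /* offset 32 */
            unsigned long r14;      /* offset 40 */
            unsigned long r15;      /* offset 48 */
            unsigned long rip;      /* offset 56: return address saved from %rsi */
    };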
|
/arch/x86/net/

bpf_timed_may_goto.S
    27  pushq %rsi
    49  popq %rsi
|
/arch/x86/purgatory/

entry64.S
    42  movq rsi(%rip), %rsi
    68  rsi: .quad 0x0          (label)
|
/arch/x86/entry/

calling.h
    70  pushq %rsi /* pt_regs->si */
    71  movq 8(%rsp), %rsi /* temporarily store the return address in %rsi */
    77  pushq %rsi /* pt_regs->si */
    98  pushq %rsi /* return address on top of stack */
    146  popq %rsi
    441  pushq %rsi
    459  popq %rsi
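The "pt_regs->si" comments refer to the register frame the entry macros build on the kernel stack: the pushes are ordered so that the resulting frame lines up with struct pt_regs, and this pushq fills the si slot. A heavily abridged sketch of that layout, written from memory rather than copied from the tree (arch/x86/include/asm/ptrace.h has the authoritative definition):

    /* Abridged, illustrative view of the saved-register frame. */
    struct pt_regs_sketch {
            /* ... remaining general-purpose registers elided ... */
            unsigned long ax, cx, dx;
            unsigned long si;               /* filled by the "pushq %rsi" above */
            unsigned long di;
            unsigned long orig_ax;          /* syscall number / error code */
            unsigned long ip, cs, flags, sp, ss;    /* hardware-saved frame */
    };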
|
/arch/x86/kernel/acpi/

wakeup_64.S
    41  movq saved_rsi(%rip), %rsi
    58  movq %rsi, pt_regs_si(%rax)
    80  movq %rsi, saved_rsi(%rip)
    106  movq pt_regs_si(%rax), %rsi
|
/arch/x86/kernel/cpu/

vmware.c
    74  unsigned long out0, rbx, rcx, rdx, rsi, rdi;    in vmware_hypercall_slow() (local)
    80  "=d" (rdx), "=S" (rsi), "=D" (rdi)              in vmware_hypercall_slow()
    92  "=d" (rdx), "=S" (rsi), "=D" (rdi)              in vmware_hypercall_slow()
    104  "=d" (rdx), "=S" (rsi), "=D" (rdi)             in vmware_hypercall_slow()
    123  *out4 = rsi;                                   in vmware_hypercall_slow()
    521  args.rsi = in4;                                in vmware_tdx_hypercall()
    539  *out4 = args.rsi;                              in vmware_tdx_hypercall()
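The "=S" constraint in these asm statements is what binds the local C variable rsi to the hardware %rsi register, so the hypercall's fourth output value can be read back and returned through *out4. A minimal, self-contained illustration of that constraint; the asm body is a placeholder, not the VMware hypercall:

    /* "S" pins the operand to %rsi; the matching "0" input ties in4 to the
     * same register, so the value travels through %rsi in both directions. */
    static unsigned long roundtrip_through_rsi(unsigned long in4)
    {
            unsigned long rsi_out;

            asm("addq $1, %0"       /* stand-in for the real hypercall */
                : "=S" (rsi_out)
                : "0" (in4));
            return rsi_out;
    }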
|