| /arch/x86/kernel/ |
| ftrace_64.S |
|    72  movq %rsp, %rbp
|    76  movq %rsp, %rbp
|    88  movq %r8, R8(%rsp)
|    89  movq %r9, R9(%rsp)
|   122  movq R9(%rsp), %r9
|   123  movq R8(%rsp), %r8
|   172  leaq (%rsp), %rcx
|   175  movq $0, CS(%rsp)
|   307  add $8, %rsp
|   363  movq %rsp, %rdi
|   [all …]
|
| relocate_kernel_64.S |
|    86  lidt (%rsp)
|    87  lgdt (%rsp)
|    88  addq $8, %rsp
|   118  lea PAGE_SIZE(%rsi), %rsp
|   147  lgdt (%rsp)
|   148  addq $10, %rsp
|   158  lidt (%rsp)
|   159  addq $10, %rsp
|   256  leaq PAGE_SIZE(%r10), %rsp
|   275  lea PAGE_SIZE(%r8), %rsp
|   [all …]
|
| head_64.S |
|   344  subq $16, %rsp
|   345  movw $(GDT_SIZE-1), (%rsp)
|   347  movq %rax, 2(%rsp)
|   348  lgdt (%rsp)
|   349  addq $16, %rsp
|   460  movq %rsp, %rdi
|   461  movq ORIG_RAX(%rsp), %rsi
|   470  addq $8, %rsp
|   563  movq %rsp, %rdi
|   564  movq ORIG_RAX(%rsp), %rsi
|   [all …]
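
The head_64.S hits at 344-349 build a temporary GDT descriptor on the stack before loading it. A minimal sketch of the same 2-byte-limit plus 8-byte-base layout, assuming a hypothetical table bounded by gdt_start/gdt_end (lgdt is privileged, so this only runs in a boot or kernel context):

```
	.text
	.globl	load_gdt_demo
load_gdt_demo:
	subq	$16, %rsp			/* scratch for the 10-byte descriptor */
	movw	$(gdt_end - gdt_start - 1), (%rsp)	/* limit = table size - 1 */
	leaq	gdt_start(%rip), %rax
	movq	%rax, 2(%rsp)			/* 64-bit linear base address */
	lgdt	(%rsp)				/* GDTR <- {limit, base} */
	addq	$16, %rsp
	ret

	.section .rodata
gdt_start:
	.quad	0				/* mandatory null descriptor */
	.quad	0x00af9a000000ffff		/* 64-bit code segment (example value) */
gdt_end:
```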
|
| /arch/x86/entry/ |
| entry_64.S |
|   112  movq %rsp, %rdi
|   145  movq %rsp, %rdi
|   161  popq %rsp
|   355  pushq 5*8(%rsp)
|   590  movq %rsp, %rdi
|   718  movq %rax, %rsp
|  1170  movq %rsp, %rdx
|  1192  movq %rsp, %rdi
|  1305  subq $8, %rsp
|  1405  movq %rsp, %rdi
|  [all …]
|
| entry_64_compat.S |
|    60  movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
|   125  movq %rsp, %rdi
|   193  SWITCH_TO_KERNEL_CR3 scratch_reg=%rsp
|   196  movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
|   217  movq %rsp, %rdi
|   235  movq RBX(%rsp), %rbx        /* pt_regs->rbx */
|   236  movq RBP(%rsp), %rbp        /* pt_regs->rbp */
|   237  movq EFLAGS(%rsp), %r11     /* pt_regs->flags (in r11) */
|   238  movq RIP(%rsp), %rcx        /* pt_regs->ip (in rcx) */
|   239  addq $RAX, %rsp             /* Skip r8-r15 */
|   [all …]
|
| entry_64_fred.S |
|    21  movq %rsp, %rdi             /* %rdi -> pt_regs */
|    63  mov %rsp, %rbp
|    76  sub $(FRED_CONFIG_REDZONE_AMOUNT << 6), %rsp
|    77  and $FRED_STACK_FRAME_RSP_MASK, %rsp
|   116  movq %rsp, %rdi             /* %rdi -> pt_regs */
|
| /arch/x86/platform/efi/ |
| efi_thunk_64.S |
|    37  movq %rsp, %rax
|    44  subq $0x24, %rsp
|    48  movl %ebp, 0x18(%rsp)
|    49  movl %ebx, 0x1c(%rsp)
|    50  movl %eax, 0x20(%rsp)
|    64  movl %esi, 0x4(%rsp)
|    65  movl %edx, 0x8(%rsp)
|    66  movl %ecx, 0xc(%rsp)
|    67  movl %r8d, 0x10(%rsp)
|    68  movl %r9d, 0x14(%rsp)
|    [all …]
|
| efi_stub_64.S |
|    15  movq %rsp, %rbp
|    16  and $~0xf, %rsp
|    18  subq $48, %rsp
|    19  mov %r9, 32(%rsp)
|    20  mov %rax, 40(%rsp)
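
The efi_stub_64.S hits show the call-out pattern for firmware that expects a Microsoft-ABI stack: align, reserve shadow space, call, then unwind through the saved frame pointer. A sketch under those assumptions, with fw_service as a hypothetical stub callee:

```
	.text
	.globl	aligned_call_demo
aligned_call_demo:
	pushq	%rbp
	movq	%rsp, %rbp		/* frame pointer remembers the old %rsp */
	and	$~0xf, %rsp		/* force 16-byte alignment */
	subq	$32, %rsp		/* 32-byte MS-ABI shadow space, stays aligned */
	call	fw_service		/* hypothetical firmware entry point */
	leave				/* %rbp -> %rsp undoes align + sub in one move */
	ret

fw_service:				/* stub so the sketch stands on its own */
	ret
```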
|
| /arch/x86/lib/ |
| csum-copy_64.S |
|    40  subq $5*8, %rsp
|    41  movq %rbx, 0*8(%rsp)
|    42  movq %r12, 1*8(%rsp)
|    43  movq %r14, 2*8(%rsp)
|    44  movq %r13, 3*8(%rsp)
|    45  movq %r15, 4*8(%rsp)
|   198  movq 0*8(%rsp), %rbx
|   199  movq 1*8(%rsp), %r12
|   200  movq 2*8(%rsp), %r14
|   201  movq 3*8(%rsp), %r13
|   [all …]
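
The csum-copy_64.S hits at 40-45 and 198-201 are the classic callee-saved spill/fill: make room once, park the registers at fixed slots, restore in the epilogue. A freestanding sketch of the pattern (the leaq body is filler work, not anything from the original file):

```
	.text
	.globl	spill_fill_demo
spill_fill_demo:
	subq	$2*8, %rsp		/* room for two callee-saved registers */
	movq	%rbx, 0*8(%rsp)		/* spill ... */
	movq	%r12, 1*8(%rsp)
	movq	%rdi, %rbx		/* ... clobber them freely ... */
	movq	%rsi, %r12
	leaq	(%rbx,%r12), %rax	/* trivial work: return rdi + rsi */
	movq	0*8(%rsp), %rbx		/* ... fill in the epilogue */
	movq	1*8(%rsp), %r12
	addq	$2*8, %rsp
	ret
```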
|
| copy_page_64.S |
|    26  subq $2*8, %rsp
|    27  movq %rbx, (%rsp)
|    28  movq %r12, 1*8(%rsp)
|    86  movq (%rsp), %rbx
|    87  movq 1*8(%rsp), %r12
|    88  addq $2*8, %rsp
|
| cmpxchg16b_emu.S |
|    36  orl $X86_EFLAGS_ZF, (%rsp)
|    49  andl $(~X86_EFLAGS_ZF), (%rsp)
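
cmpxchg16b_emu.S reports its result in ZF by editing the rflags image that pushfq left on the stack, so the final popfq both restores the saved flags and delivers the outcome. A sketch of that trick (X86_EFLAGS_ZF is bit 6; the cmpq here is a stand-in test):

```
	.text
	.globl	zf_via_saved_flags_demo
zf_via_saved_flags_demo:
	pushfq				/* save rflags on the stack */
	cmpq	%rsi, %rdi		/* some test whose outcome must be reported */
	jne	1f
	orl	$(1 << 6), (%rsp)	/* match: set ZF in the saved image */
	jmp	2f
1:	andl	$~(1 << 6), (%rsp)	/* no match: clear ZF in the saved image */
2:	popfq				/* caller sees ZF, other flags restored */
	ret
```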
|
| /arch/x86/entry/vdso/ |
| vsgx.S |
|    32  mov %rsp, %rbp
|    82  .cfi_def_cfa %rsp, 8
|   104  mov %rsp, %rcx
|   110  mov %rsp, %rbx
|   117  and $-0x10, %rsp
|   135  lea 0x10(%rsp, %rbx), %rsp
|
| /arch/x86/xen/ |
| xen-asm.S |
|   253  addq $8, %rsp               /* skip regs->orig_ax */
|   284  movq $__USER_DS, 4*8(%rsp)
|   285  movq $__USER_CS, 1*8(%rsp)
|   304  movq $__USER_DS, 4*8(%rsp)
|   305  movq $__USER32_CS, 1*8(%rsp)
|   326  movq $__USER_DS, 4*8(%rsp)
|   327  movq $__USER32_CS, 1*8(%rsp)
|   338  lea 16(%rsp), %rsp          /* strip %rcx, %r11 */
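
The xen-asm.S hits at 284-327 rewrite segment slots inside a hardware IRET frame before returning to user mode. A fragment sketching the five-slot layout those 1*8/4*8 offsets assume, with Linux's usual selector values filled in as examples (it presumes a frame is already on the stack, so it is not callable on its own):

```
	.equ	__USER_CS, 0x33		/* 64-bit user code selector (Linux layout) */
	.equ	__USER_DS, 0x2b		/* user data/stack selector */

	.text
iret_frame_sketch:
	/* 64-bit IRET frame, lowest address first:
	 *   0*8(%rsp) RIP    1*8(%rsp) CS    2*8(%rsp) RFLAGS
	 *   3*8(%rsp) RSP    4*8(%rsp) SS
	 */
	movq	$__USER_DS, 4*8(%rsp)	/* patch the SS slot */
	movq	$__USER_CS, 1*8(%rsp)	/* patch the CS slot */
	iretq				/* consumes all five slots */
```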
|
| xen-head.S |
|    32  leaq __top_init_kernel_stack(%rip), %rsp
|    60  mov %rdi, %rsp
|
| /arch/x86/kernel/acpi/ |
| wakeup_64.S |
|    37  movq saved_rsp(%rip), %rsp
|    51  subq $8, %rsp
|    56  movq %rsp, pt_regs_sp(%rax)
|    76  movq %rsp, saved_rsp(%rip)
|    82  addq $8, %rsp
|   104  movq pt_regs_sp(%rax), %rsp
|   125  movq %rsp, %rdi
|   130  addq $8, %rsp
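
wakeup_64.S parks %rsp in a memory slot before sleeping and reloads it on the resume path, which is entered with no usable stack. A sketch of that handoff; resume_continue is a hypothetical stand-in for the real resume work:

```
	.text
before_sleep:
	movq	%rsp, saved_rsp(%rip)	/* stash the stack pointer ... */
	/* ... program the sleep state and halt ... */
	ret

wakeup_entry:				/* resume path: %rsp is garbage here */
	movq	saved_rsp(%rip), %rsp	/* ... reload it before touching the stack */
	jmp	resume_continue

resume_continue:			/* hypothetical continuation */
	ret				/* returns on the reloaded stack */

	.data
saved_rsp:	.quad 0
```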
|
| /arch/x86/um/ |
| setjmp_64.S |
|    11  # %rsp (post-return)
|    28  movq %rsp,8(%rdi)           # Post-return %rsp!
|    47  movq 8(%rdi),%rsp
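
The "post-return %rsp" comment is the whole point of setjmp_64.S: the return address is popped first, so the value saved is what %rsp will be after setjmp returns, which is exactly what longjmp must restore. A sketch with a hypothetical jmp_buf layout (return address at offset 0, %rsp at offset 8):

```
	.text
	.globl	setjmp_sp_demo
setjmp_sp_demo:				/* %rdi -> hypothetical jmp_buf */
	pop	%rsi			/* return address off the stack ... */
	movq	%rsi, 0(%rdi)		/* ... saved for longjmp */
	movq	%rsp, 8(%rdi)		/* post-return %rsp! */
	push	%rsi			/* make the call/return stack happy */
	xorl	%eax, %eax		/* direct invocation returns 0 */
	ret
```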
|
| /arch/x86/include/asm/ |
| unwind_hints.h |
|    23  .macro UNWIND_HINT_REGS base=%rsp offset=0 indirect=0 extra=1 partial=0 signal=1
|    24  .if \base == %rsp
|    56  .macro UNWIND_HINT_IRET_REGS base=%rsp offset=0 signal=1
|    60  .macro UNWIND_HINT_IRET_ENTRY base=%rsp offset=0 signal=1
|
| /arch/x86/purgatory/ |
| entry64.S |
|    30  leaq stack_init(%rip), %rsp
|    44  movq rsp(%rip), %rsp
|    66  rsp: .quad 0x0              label
|
| /arch/x86/boot/compressed/ |
| idt_handlers_64.S |
|    40  movq %rsp, %rdi
|    42  movq ORIG_RAX(%rsp), %rsi
|    63  addq $8, %rsp
|
| /arch/s390/pci/ |
| pci_clp.c |
|    42  unsigned int rsp;                                in zpci_err_clp() member
|    44  } __packed data = {rsp, rc};                     in zpci_err_clp()
|   143  if (!rc && rrb->response.hdr.rsp == CLP_RC_OK)   in clp_query_pci_fngrp()
|   147  zpci_err_clp(rrb->response.hdr.rsp, rc);         in clp_query_pci_fngrp()
|   214  if (!rc && rrb->response.hdr.rsp == CLP_RC_OK) { in clp_query_pci_fn()
|   221  zpci_err_clp(rrb->response.hdr.rsp, rc);         in clp_query_pci_fn()
|   276  zpci_err_clp(rrb->response.hdr.rsp, rc);         in clp_set_pci_fn()
|   278  rc = rrb->response.hdr.rsp;                      in clp_set_pci_fn()
|   311  zpci_err_clp(rrb->response.hdr.rsp, rc);         in clp_setup_writeback_mio()
|   360  if (rc || rrb->response.hdr.rsp != CLP_RC_OK) {  in clp_list_pci_req()
|   [all …]
|
| /arch/x86/mm/ |
| mem_encrypt_boot.S |
|    34  movq %rsp, %rbp             /* RBP now has original stack pointer */
|    38  leaq PAGE_SIZE(%rax), %rsp  /* Set new stack pointer */
|    65  movq %rbp, %rsp             /* Restore original stack pointer */
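
mem_encrypt_boot.S switches to a scratch stack and back using only %rbp as the bookmark. A freestanding sketch, taking the base of a spare page in %rdi and assuming PAGE_SIZE is 4096 (the push/pop pair is filler showing the new stack is live):

```
	.text
	.globl	stack_switch_demo
stack_switch_demo:			/* %rdi = base of an unused page */
	pushq	%rbp
	movq	%rsp, %rbp		/* remember the original stack pointer */
	leaq	4096(%rdi), %rsp	/* stacks grow down, so start at the top */
	pushq	$0			/* the new stack is usable now */
	popq	%rax
	movq	%rbp, %rsp		/* back on the original stack */
	popq	%rbp
	ret
```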
|
| /arch/x86/crypto/ |
| sm3-avx-asm_64.S |
|   340  movq %rsp, %rbp;
|   344  subq $STACK_SIZE, %rsp;
|   345  andq $(~63), %rsp;
|   347  movq %rbx, (STACK_REG_SAVE + 0 * 8)(%rsp);
|   348  movq %r15, (STACK_REG_SAVE + 1 * 8)(%rsp);
|   349  movq %r14, (STACK_REG_SAVE + 2 * 8)(%rsp);
|   350  movq %r13, (STACK_REG_SAVE + 3 * 8)(%rsp);
|   351  movq %r12, (STACK_REG_SAVE + 4 * 8)(%rsp);
|   501  movq (STACK_REG_SAVE + 0 * 8)(%rsp), %rbx;
|   502  movq (STACK_REG_SAVE + 1 * 8)(%rsp), %r15;
|   [all …]
|
| /arch/x86/boot/startup/ |
| la57toggle.S |
|    35  movq %rsp, %rbx
|    52  orq %rbx, %rsp
|
| /arch/arm64/boot/dts/ti/ |
| k3-am65-iot2050-common-pg1.dtsi |
|    69  <&main_udmap 0x4102>,       /* mgmnt rsp slice 0 */
|    70  <&main_udmap 0x4103>;       /* mgmnt rsp slice 1 */
|
| /arch/x86/hyperv/ |
| hv_vtl.c |
|   121  u64 rsp = (unsigned long)idle->thread.sp;    in hv_vtl_bringup_vcpu() local
|   156  input->vp_context.rsp = rsp;                 in hv_vtl_bringup_vcpu()
|