Searched refs: _ASM_AX (Results 1 – 11 of 11) sorted by relevance
/arch/x86/kvm/vmx/

vmenter.S
    138  mov (%_ASM_SP), %_ASM_AX
    151  mov VCPU_R8 (%_ASM_AX), %r8
    152  mov VCPU_R9 (%_ASM_AX), %r9
    153  mov VCPU_R10(%_ASM_AX), %r10
    154  mov VCPU_R11(%_ASM_AX), %r11
    161  mov VCPU_RAX(%_ASM_AX), %_ASM_AX
    199  push %_ASM_AX
    228  pop %_ASM_AX
    280  mov %_ASM_BX, %_ASM_AX
    324  push %_ASM_AX
    [all …]
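The VCPU_R8/VCPU_RAX indices above are build-time constants generated by KVM's asm-offsets machinery, letting the VM-entry path index the guest register file off the vcpu pointer held in %_ASM_AX. A minimal sketch of that idea, with a hypothetical simplified layout (the real struct and field names differ):

/* asm-offsets sketch: emit numeric struct offsets for use from .S
 * code. The layout below is a hypothetical stand-in, not KVM's. */
#include <stddef.h>
#include <stdio.h>

struct vcpu_regs {
	unsigned long rax, rbx, rcx, rdx;
	unsigned long r8, r9, r10, r11;
};

int main(void)
{
	printf("#define VCPU_RAX %zu\n", offsetof(struct vcpu_regs, rax));
	printf("#define VCPU_R8  %zu\n", offsetof(struct vcpu_regs, r8));
	printf("#define VCPU_R10 %zu\n", offsetof(struct vcpu_regs, r10));
	return 0;
}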
vmx.c
    6600  "movzbl (%[flush_pages], %%" _ASM_AX "), %%ecx\n\t"   in vmx_l1d_flush()
    6609  "movzbl (%[flush_pages], %%" _ASM_AX "), %%ecx\n\t"   in vmx_l1d_flush()
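Both hits are byte loads inside the inline asm of vmx_l1d_flush() that walk the flush buffer with an indexed addressing mode, %_ASM_AX supplying the offset. A hedged x86-64 user-space sketch of the same access pattern (buffer, stride, and function name are assumptions; the real routine also walks individual cache lines):

/* Sketch of the movzbl-based fill: touch one byte per 4 KiB page so
 * every load pulls fresh data, mirroring (%[flush_pages], %%_ASM_AX). */
#include <stddef.h>

static void touch_pages(const unsigned char *buf, size_t size)
{
	unsigned int sink;
	size_t off;

	for (off = 0; off < size; off += 4096)
		asm volatile("movzbl (%1, %2), %0"
			     : "=r" (sink)
			     : "r" (buf), "r" (off)
			     : "memory");
	(void)sink;
}

int main(void)
{
	static unsigned char buf[4096 * 4];

	touch_pages(buf, sizeof(buf));
	return 0;
}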
/arch/x86/kvm/svm/

vmenter.S
    147  1: vmload %_ASM_AX
    152  mov KVM_VMCB_pa(%_ASM_AX), %_ASM_AX
    176  3: vmrun %_ASM_AX
    179  pop %_ASM_AX
    189  mov %r8, VCPU_R8 (%_ASM_AX)
    190  mov %r9, VCPU_R9 (%_ASM_AX)
    191  mov %r10, VCPU_R10(%_ASM_AX)
    200  mov %_ASM_AX, %_ASM_DI
    203  5: vmsave %_ASM_AX
    207  pop %_ASM_AX
    [all …]
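The SVM path mirrors the VMX one but also shows the save direction: after vmrun, guest GPRs are stored back at fixed offsets off the vcpu pointer in %_ASM_AX. A sketch of one such store using a compile-time offset, with a hypothetical layout standing in for asm-offsets output:

/* Sketch of "mov %r10, VCPU_R10(%_ASM_AX)": a constant offset picks
 * the slot, the base register carries the struct pointer. Layout and
 * names here are hypothetical. */
#include <stddef.h>
#include <stdio.h>

struct regfile { unsigned long r[16]; };
#define VCPU_R10 offsetof(struct regfile, r[10])

static void save_r10(struct regfile *rf, unsigned long val)
{
	asm volatile("mov %[v], %c[off](%[base])"
		     : /* no outputs */
		     : [v] "r" (val), [base] "r" (rf), [off] "i" (VCPU_R10)
		     : "memory");
}

int main(void)
{
	struct regfile rf = { { 0 } };

	save_r10(&rf, 0x1234);
	printf("%#lx\n", rf.r[10]);
	return 0;
}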
/arch/x86/lib/

getuser.S
    66   UACCESS movzbl (%_ASM_AX),%edx
    77   UACCESS movzwl (%_ASM_AX),%edx
    88   UACCESS movl (%_ASM_AX),%edx
    103  UACCESS movq (%_ASM_AX),%rdx
    105  UACCESS movl (%_ASM_AX),%edx
    106  UACCESS movl 4(%_ASM_AX),%ecx
    141  UACCESS movl (%_ASM_AX),%edx
    153  UACCESS movq (%_ASM_AX),%rdx
    156  UACCESS movl (%_ASM_AX),%edx
    157  UACCESS movl 4(%_ASM_AX),%ecx
    [all …]
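These are the faulting loads of the __get_user_N helpers: the user pointer arrives in %_ASM_AX and the value comes back in %edx/%rdx (a pair of movl's covers 8-byte reads on 32-bit). A user-space imitation of the 4-byte case, minus the exception-table entry that the UACCESS marker attaches so a bad pointer yields -EFAULT instead of an oops:

/* Imitation of "movl (%_ASM_AX),%edx": pointer in %rax via the "a"
 * constraint, result in %edx via "d". No fault handling here. */
#include <stdio.h>

static unsigned int read_u32(const unsigned int *p)
{
	unsigned int val;

	asm("movl (%%rax), %%edx"
	    : "=d" (val)
	    : "a" (p)
	    : "memory");
	return val;
}

int main(void)
{
	unsigned int x = 0xdeadbeef;

	printf("%#x\n", read_u32(&x));
	return 0;
}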
putuser.S
    115  7: mov %_ASM_AX,(%_ASM_CX)
    128  9: mov %_ASM_AX,(%_ASM_CX)
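putuser.S is the mirror image: the value to store sits in %_ASM_AX and the destination pointer in %_ASM_CX. The same operand placement from C, again without the fault handling of the real helper:

/* Mirror of the two putuser.S hits: value in %rax ("a"), destination
 * in %rcx ("c"). */
#include <stdio.h>

static void write_ulong(unsigned long *p, unsigned long v)
{
	asm volatile("mov %%rax, (%%rcx)"
		     : /* no outputs */
		     : "a" (v), "c" (p)
		     : "memory");
}

int main(void)
{
	unsigned long x = 0;

	write_ulong(&x, 42);
	printf("%lu\n", x);
	return 0;
}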
/arch/x86/xen/

xen-asm.S
    135  _ASM_MOV PER_CPU_VAR(xen_vcpu), %_ASM_AX
    136  _ASM_MOV XEN_vcpu_info_arch_cr2(%_ASM_AX), %_ASM_AX
    144  _ASM_MOV PER_CPU_VAR(xen_vcpu_info + XEN_vcpu_info_arch_cr2), %_ASM_AX
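The two Xen paths differ only in indirection: lines 135-136 load the per-CPU xen_vcpu pointer and then read arch.cr2 through it, while line 144 reads the embedded per-CPU xen_vcpu_info copy in one move. A plain-C sketch of the two access patterns (types simplified and hypothetical; the real reads are %gs-relative per-CPU accesses):

/* Two ways to reach the same field: through the pointer, or straight
 * from the embedded copy. */
struct vcpu_info { unsigned long arch_cr2; };

static struct vcpu_info xen_vcpu_info;                /* embedded per-CPU copy */
static struct vcpu_info *xen_vcpu = &xen_vcpu_info;   /* pointer form */

/* lines 135-136: load the pointer, then read through it */
static unsigned long read_cr2_indirect(void)
{
	return xen_vcpu->arch_cr2;
}

/* line 144: one read straight from the embedded copy */
static unsigned long read_cr2_direct(void)
{
	return xen_vcpu_info.arch_cr2;
}

int main(void)
{
	xen_vcpu_info.arch_cr2 = 0xdead;
	return read_cr2_indirect() == read_cr2_direct() ? 0 : 1;
}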
/arch/x86/kernel/

irqflags.S
    14  pop %_ASM_AX
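The lone hit is the body of native_save_fl(): push the flags register, then pop it into the return register. The sequence works unchanged in user space:

/* pushf/pop pattern from irqflags.S: read EFLAGS into a GPR. */
#include <stdio.h>

static unsigned long save_flags(void)
{
	unsigned long flags;

	asm volatile("pushf\n\t"
		     "pop %0"
		     : "=r" (flags)
		     : /* no inputs */
		     : "memory");
	return flags;
}

int main(void)
{
	printf("flags = %#lx\n", save_flags());
	return 0;
}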
/arch/x86/include/asm/

asm.h
    45  #define _ASM_AX __ASM_REG(ax)   (macro definition)
    60  #define _ASM_ARG1 _ASM_AX
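Line 45 is the definition every other hit resolves to: __ASM_REG() selects the register width for the build, so _ASM_AX expands to eax on 32-bit and rax on 64-bit kernels; line 60 is the 32-bit branch that also makes it the first argument register. A compressed user-space sketch of the token-pasting scheme (the kernel's real macros go through a few more layers):

/* Width-agnostic register naming, boiled down. On a 64-bit build
 * _ASM_AX becomes the string "rax"; a 32-bit build yields "eax". */
#include <stdio.h>

#ifdef __x86_64__
# define __ASM_SEL(a, b)	b
#else
# define __ASM_SEL(a, b)	a
#endif
#define __ASM_FORM(x)		#x
#define __ASM_REG(reg)		__ASM_SEL(__ASM_FORM(e##reg), __ASM_FORM(r##reg))
#define _ASM_AX			__ASM_REG(ax)

int main(void)
{
	unsigned long v = 41;

	/* the macro splices into the asm template as plain text */
	asm("inc %%" _ASM_AX : "+a" (v));
	printf("_ASM_AX = %%%s, v = %lu\n", _ASM_AX, v);
	return 0;
}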
kexec.h
    88  asm volatile("mov %%" _ASM_AX ",%0" : "=m"(newregs->ax));   in crash_setup_regs()
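crash_setup_regs() snapshots the live register state at crash time, one mov per register into a "=m" output. The same capture pattern, with %rax seeded first so the demo is deterministic:

/* Register-to-memory snapshot, as in crash_setup_regs(). */
#include <stdio.h>

struct pt_regs_ax { unsigned long ax; };

int main(void)
{
	struct pt_regs_ax newregs;

	asm volatile("mov $42, %%rax\n\t"
		     "mov %%rax, %0"
		     : "=m" (newregs.ax)
		     : /* no inputs */
		     : "rax");
	printf("captured ax = %lu\n", newregs.ax);
	return 0;
}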
paravirt.h
    580  "xor %%" _ASM_AX ", %%" _ASM_AX ";",   in pv_vcpu_is_preempted()
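This hit is the ALTERNATIVE replacement body: when the preemption hook is patched out, vcpu_is_preempted() collapses to an xor of the return register with itself, i.e. an unconditional false. The xor-to-zero idiom in isolation:

/* xor of a register with itself clears it; the patched-in
 * instruction is just "return 0". */
#include <stdio.h>

static long always_false(void)
{
	long ret;

	asm("xor %%rax, %%rax" : "=a" (ret));
	return ret;
}

int main(void)
{
	printf("%ld\n", always_false());	/* prints 0 */
	return 0;
}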
uaccess.h
    174  register __typeof__(*(ptr)) __val_pu asm("%"_ASM_AX); \
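Line 174 pins put_user()'s value operand to %[er]ax with a GCC local register variable, which is how the out-of-line stubs in putuser.S know where to find it. The same pinning trick in user space (x86-64 assumed):

/* Local register variable: the compiler must keep val in %rax for
 * any asm that references it. */
#include <stdio.h>

static long bump(long x)
{
	register long val asm("rax") = x;

	asm("inc %0" : "+r" (val));	/* %0 is guaranteed to be %rax */
	return val;
}

int main(void)
{
	printf("%ld\n", bump(41));	/* prints 42 */
	return 0;
}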
Completed in 30 milliseconds