
Searched refs:vmx_pages (Results 1 – 15 of 15) sorted by relevance

/linux-6.3-rc2/tools/testing/selftests/kvm/x86_64/
state_test.c:76 static void vmx_l1_guest_code(struct vmx_pages *vmx_pages) in vmx_l1_guest_code() argument
80 GUEST_ASSERT(vmx_pages->vmcs_gpa); in vmx_l1_guest_code()
81 GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages)); in vmx_l1_guest_code()
83 GUEST_ASSERT(load_vmcs(vmx_pages)); in vmx_l1_guest_code()
84 GUEST_ASSERT(vmptrstz() == vmx_pages->vmcs_gpa); in vmx_l1_guest_code()
87 GUEST_ASSERT(vmptrstz() == vmx_pages->vmcs_gpa); in vmx_l1_guest_code()
89 prepare_vmcs(vmx_pages, vmx_l2_guest_code, in vmx_l1_guest_code()
93 GUEST_ASSERT(vmptrstz() == vmx_pages->vmcs_gpa); in vmx_l1_guest_code()
95 GUEST_ASSERT(vmptrstz() == vmx_pages->vmcs_gpa); in vmx_l1_guest_code()
115 GUEST_ASSERT(!vmptrld(vmx_pages->shadow_vmcs_gpa)); in vmx_l1_guest_code()
[all …]
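
The state_test.c hits above show the canonical L1 entry sequence shared by most of these tests. Below is a minimal sketch of that flow, assuming the vmx.h selftest helpers (vmcall(), vmlaunch(), vmreadz()) as found in the 6.3 tree; the stack size, the l2_guest_code body, and the exit-reason check are illustrative, not copied from state_test.c.

    static void l2_guest_code(void)
    {
            vmcall();       /* force a VM-exit back to L1 */
    }

    static void sketch_l1_guest_code(struct vmx_pages *vmx_pages)
    {
            unsigned long l2_guest_stack[128];

            GUEST_ASSERT(vmx_pages->vmcs_gpa);
            GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages)); /* CR fixups + VMXON */
            GUEST_ASSERT(load_vmcs(vmx_pages));                 /* VMCLEAR + VMPTRLD */
            GUEST_ASSERT(vmptrstz() == vmx_pages->vmcs_gpa);    /* our VMCS is current */

            /* Point the current VMCS at L2's entry point and stack top. */
            prepare_vmcs(vmx_pages, l2_guest_code, &l2_guest_stack[128]);

            GUEST_ASSERT(!vmlaunch());                          /* run L2 until it exits */
            GUEST_ASSERT(vmreadz(VM_EXIT_REASON) == EXIT_REASON_VMCALL);
    }
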
vmx_preemption_timer_test.c:68 void l1_guest_code(struct vmx_pages *vmx_pages) in l1_guest_code() argument
76 GUEST_ASSERT(vmx_pages->vmcs_gpa); in l1_guest_code()
77 GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages)); in l1_guest_code()
78 GUEST_ASSERT(load_vmcs(vmx_pages)); in l1_guest_code()
79 GUEST_ASSERT(vmptrstz() == vmx_pages->vmcs_gpa); in l1_guest_code()
81 prepare_vmcs(vmx_pages, l2_guest_code, in l1_guest_code()
146 void guest_code(struct vmx_pages *vmx_pages) in guest_code() argument
148 if (vmx_pages) in guest_code()
149 l1_guest_code(vmx_pages); in guest_code()
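
guest_code() here doubles as a dispatcher: the host passes the vmx_pages guest address only when nested VMX is usable, and a NULL argument skips the L1 path. A sketch of that pattern (the GUEST_DONE() placement is assumed, not shown in the matches above):

    void guest_code(struct vmx_pages *vmx_pages)
    {
            if (vmx_pages)
                    l1_guest_code(vmx_pages);   /* nested-VMX variant of the test */

            GUEST_DONE();                       /* report completion to the host */
    }
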
vmx_apic_access_test.c:37 static void l1_guest_code(struct vmx_pages *vmx_pages, unsigned long high_gpa) in l1_guest_code() argument
43 GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages)); in l1_guest_code()
44 GUEST_ASSERT(load_vmcs(vmx_pages)); in l1_guest_code()
47 prepare_vmcs(vmx_pages, l2_guest_code, in l1_guest_code()
55 vmwrite(APIC_ACCESS_ADDR, vmx_pages->apic_access_gpa); in l1_guest_code()
77 struct vmx_pages *vmx; in main()
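
The APIC_ACCESS_ADDR write above only takes effect when APIC-access virtualization is enabled in the secondary execution controls. A hedged sketch of that pairing, using the VMX control names from the selftest vmx.h (the control plumbing is an assumption from the VMX architecture, not visible in these matches):

    /* Turn on secondary controls, then APIC-access virtualization. */
    vmwrite(CPU_BASED_VM_EXEC_CONTROL, vmreadz(CPU_BASED_VM_EXEC_CONTROL) |
            CPU_BASED_ACTIVATE_SECONDARY_CONTROLS);
    vmwrite(SECONDARY_VM_EXEC_CONTROL, vmreadz(SECONDARY_VM_EXEC_CONTROL) |
            SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES);
    vmwrite(APIC_ACCESS_ADDR, vmx_pages->apic_access_gpa);
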
vmx_close_while_nested_test.c:32 static void l1_guest_code(struct vmx_pages *vmx_pages) in l1_guest_code() argument
37 GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages)); in l1_guest_code()
38 GUEST_ASSERT(load_vmcs(vmx_pages)); in l1_guest_code()
41 prepare_vmcs(vmx_pages, l2_guest_code, in l1_guest_code()
vmx_invalid_nested_guest_state.c:26 static void l1_guest_code(struct vmx_pages *vmx_pages) in l1_guest_code() argument
31 GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages)); in l1_guest_code()
32 GUEST_ASSERT(load_vmcs(vmx_pages)); in l1_guest_code()
35 prepare_vmcs(vmx_pages, l2_guest_code, in l1_guest_code()
vmx_tsc_adjust_test.c:75 static void l1_guest_code(struct vmx_pages *vmx_pages) in l1_guest_code() argument
86 GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages)); in l1_guest_code()
87 GUEST_ASSERT(load_vmcs(vmx_pages)); in l1_guest_code()
90 prepare_vmcs(vmx_pages, l2_guest_code, in l1_guest_code()
smm_test.c:73 struct vmx_pages *vmx_pages = arg; in guest_code() local
90 GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages)); in guest_code()
91 GUEST_ASSERT(load_vmcs(vmx_pages)); in guest_code()
92 prepare_vmcs(vmx_pages, l2_guest_code, in guest_code()
vmx_nested_tsc_scaling_test.c:82 static void l1_guest_code(struct vmx_pages *vmx_pages) in l1_guest_code() argument
90 GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages)); in l1_guest_code()
91 GUEST_ASSERT(load_vmcs(vmx_pages)); in l1_guest_code()
94 prepare_vmcs(vmx_pages, l2_guest_code, &l2_guest_stack[L2_GUEST_STACK_SIZE]); in l1_guest_code()
hyperv_evmcs.c:79 void guest_code(struct vmx_pages *vmx_pages, struct hyperv_test_pages *hv_pages, in guest_code() argument
96 GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages)); in guest_code()
104 prepare_vmcs(vmx_pages, l2_guest_code, in guest_code()
145 __set_bit(MSR_FS_BASE & 0x1fff, vmx_pages->msr + 0x400); in guest_code()
157 __set_bit(MSR_GS_BASE & 0x1fff, vmx_pages->msr + 0x400); in guest_code()
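
The __set_bit() arithmetic in the hyperv_evmcs.c hits relies on the VMX MSR-bitmap layout (a fact from the Intel SDM, not from this listing): a 4 KiB page with the read bitmap for low MSRs at offset 0x000, the read bitmap for high MSRs (0xc0000000..0xc0001fff) at 0x400, and the write bitmaps at 0x800/0xc00. MSR_FS_BASE is 0xc0000100, so the code sets a read-intercept bit in the high-MSR read bitmap. A standalone worked example of the math:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint32_t msr = 0xc0000100;          /* MSR_FS_BASE */
            uint32_t bit = msr & 0x1fff;        /* bit index within the high-MSR range */
            uint32_t off = 0x400 + bit / 8;     /* byte offset inside the 4 KiB bitmap */

            /* Prints: bit 256, byte offset 0x420, mask 0x1 */
            printf("bit %u, byte offset 0x%x, mask 0x%x\n",
                   bit, off, 1u << (bit % 8));
            return 0;
    }
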
vmx_dirty_log_test.c:48 void l1_guest_code(struct vmx_pages *vmx) in l1_guest_code()
70 struct vmx_pages *vmx; in main()
triple_fault_event_test.c:27 void l1_guest_code_vmx(struct vmx_pages *vmx) in l1_guest_code_vmx()
nested_exceptions_test.c:129 static void l1_vmx_code(struct vmx_pages *vmx) in l1_vmx_code()
/linux-6.3-rc2/tools/testing/selftests/kvm/include/x86_64/
vmx.h:500 struct vmx_pages { struct
557 struct vmx_pages *vcpu_alloc_vmx(struct kvm_vm *vm, vm_vaddr_t *p_vmx_gva);
558 bool prepare_for_vmx_operation(struct vmx_pages *vmx);
559 void prepare_vmcs(struct vmx_pages *vmx, void *guest_rip, void *guest_rsp);
560 bool load_vmcs(struct vmx_pages *vmx);
564 void nested_pg_map(struct vmx_pages *vmx, struct kvm_vm *vm,
566 void nested_map(struct vmx_pages *vmx, struct kvm_vm *vm,
568 void nested_map_memslot(struct vmx_pages *vmx, struct kvm_vm *vm,
570 void nested_identity_map_1g(struct vmx_pages *vmx, struct kvm_vm *vm,
573 void prepare_eptp(struct vmx_pages *vmx, struct kvm_vm *vm,
[all …]
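
These declarations pair with a host-side setup pattern the tests above share: allocate the vmx_pages structure in guest memory, then hand its guest virtual address to guest_code() as the first argument. A sketch, assuming the kvm_util.h helpers vm_create_with_one_vcpu(), vcpu_args_set(), TEST_REQUIRE(), and kvm_cpu_has() as in the 6.3 selftest library:

    vm_vaddr_t vmx_pages_gva;
    struct kvm_vcpu *vcpu;
    struct kvm_vm *vm;

    TEST_REQUIRE(kvm_cpu_has(X86_FEATURE_VMX));

    vm = vm_create_with_one_vcpu(&vcpu, guest_code);
    vcpu_alloc_vmx(vm, &vmx_pages_gva);     /* backs struct vmx_pages in guest memory */
    vcpu_args_set(vcpu, 1, vmx_pages_gva);  /* guest_code() receives the GVA */
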
/linux-6.3-rc2/tools/testing/selftests/kvm/lib/x86_64/
vmx.c:74 struct vmx_pages *
78 struct vmx_pages *vmx = addr_gva2hva(vm, vmx_gva); in vcpu_alloc_vmx()
116 bool prepare_for_vmx_operation(struct vmx_pages *vmx) in prepare_for_vmx_operation()
160 bool load_vmcs(struct vmx_pages *vmx) in load_vmcs()
191 static inline void init_vmcs_control_fields(struct vmx_pages *vmx) in init_vmcs_control_fields()
399 void __nested_pg_map(struct vmx_pages *vmx, struct kvm_vm *vm, in __nested_pg_map()
450 void nested_pg_map(struct vmx_pages *vmx, struct kvm_vm *vm, in nested_pg_map()
473 void __nested_map(struct vmx_pages *vmx, struct kvm_vm *vm, in __nested_map()
490 void nested_map(struct vmx_pages *vmx, struct kvm_vm *vm, in nested_map()
499 void nested_map_memslot(struct vmx_pages *vmx, struct kvm_vm *vm, in nested_map_memslot()
[all …]
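
The nested_*() helpers above build L2's guest-physical view via EPT. Their trailing parameters are truncated in this listing; assuming the (nested_paddr, paddr, size) shape from the 6.3 tree, host-side usage looks roughly like:

    prepare_eptp(vmx, vm, 0);   /* allocate the EPT root in memslot 0 */
    /* Map 1 MiB of L1 guest-physical memory 1:1 into L2's view. */
    nested_map(vmx, vm, 0x100000, 0x100000, 0x100000);
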
memstress.c:32 static void memstress_l1_guest_code(struct vmx_pages *vmx, uint64_t vcpu_id) in memstress_l1_guest_code()
62 void memstress_setup_ept(struct vmx_pages *vmx, struct kvm_vm *vm) in memstress_setup_ept()
82 struct vmx_pages *vmx, *vmx0 = NULL; in memstress_setup_nested()
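
memstress_setup_nested() converts an existing memstress VM so the access pattern runs in L2 behind EPT. A hedged usage line (the nr_vcpus/vcpus parameter shape is an assumption; the full signature is not shown here):

    /* After creating the memstress VM and its vCPUs: */
    memstress_setup_nested(vm, nr_vcpus, vcpus);
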
