Home
last modified time | relevance | path

Searched refs:root_to_sp (Results 1 – 5 of 5) sorted by relevance

/arch/x86/kvm/mmu/
A D tdp_mmu.h:52 return root_to_sp(vcpu->arch.mmu->mirror_root_hpa); in tdp_mmu_get_root_for_fault()
54 return root_to_sp(vcpu->arch.mmu->root.hpa); in tdp_mmu_get_root_for_fault()
61 return root_to_sp(vcpu->arch.mmu->mirror_root_hpa); in tdp_mmu_get_root()
63 return root_to_sp(vcpu->arch.mmu->root.hpa); in tdp_mmu_get_root()
A D spte.h:266 static inline struct kvm_mmu_page *root_to_sp(hpa_t root) in root_to_sp() function
285 struct kvm_mmu_page *root = root_to_sp(vcpu->arch.mmu->root.hpa); in kvm_vcpu_can_access_host_mmio()
A D spte.c:143 struct kvm_mmu_page *root = root_to_sp(vcpu->arch.mmu->root.hpa); in kvm_track_host_mmio_mapping()
A D mmu.c:3733 sp = root_to_sp(*root_hpa); in mmu_free_root_page()
3789 } else if (root_to_sp(mmu->root.hpa)) { in kvm_mmu_free_roots()
3833 sp = root_to_sp(root_hpa); in kvm_mmu_free_guest_mode_roots()
4209 sp = root_to_sp(root); in is_unsync_root()
4243 sp = root_to_sp(root); in kvm_mmu_sync_roots()
4738 struct kvm_mmu_page *sp = root_to_sp(vcpu->arch.mmu->root.hpa); in is_page_fault_stale()
5011 sp = root_to_sp(root->hpa); in is_root_usable()
5088 if (VALID_PAGE(mmu->root.hpa) && !root_to_sp(mmu->root.hpa)) in fast_pgd_switch()
5135 struct kvm_mmu_page *sp = root_to_sp(vcpu->arch.mmu->root.hpa); in kvm_mmu_new_pgd()
6012 sp = root_to_sp(root_hpa); in is_obsolete_root()
A D tdp_mmu.c:1931 struct kvm_mmu_page *root = root_to_sp(vcpu->arch.mmu->root.hpa); in kvm_tdp_mmu_get_walk()
1948 leaf = __kvm_tdp_mmu_get_walk(vcpu, gpa, sptes, root_to_sp(root)); in kvm_tdp_mmu_gpa_is_mapped()

Completed in 28 milliseconds