| /linux/arch/powerpc/kvm/ |
| A D | book3s_32_mmu.c |
|    69  static int kvmppc_mmu_book3s_32_xlate_bat(struct kvm_vcpu *vcpu, gva_t eaddr,
|    75  static u32 find_sr(struct kvm_vcpu *vcpu, gva_t eaddr) in find_sr()
|    80  static u64 kvmppc_mmu_book3s_32_ea_to_vp(struct kvm_vcpu *vcpu, gva_t eaddr, in kvmppc_mmu_book3s_32_ea_to_vp()
|    94  u32 sre, gva_t eaddr, in kvmppc_mmu_book3s_32_get_pteg()
|   121  static u32 kvmppc_mmu_book3s_32_get_ptem(u32 sre, gva_t eaddr, bool primary) in kvmppc_mmu_book3s_32_get_ptem()
|   127  static int kvmppc_mmu_book3s_32_xlate_bat(struct kvm_vcpu *vcpu, gva_t eaddr, in kvmppc_mmu_book3s_32_xlate_bat()
|   182  static int kvmppc_mmu_book3s_32_xlate_pte(struct kvm_vcpu *vcpu, gva_t eaddr, in kvmppc_mmu_book3s_32_xlate_pte()
|   290  static int kvmppc_mmu_book3s_32_xlate(struct kvm_vcpu *vcpu, gva_t eaddr, in kvmppc_mmu_book3s_32_xlate()
|
| A D | e500.h |
|   129  int kvmppc_e500_emul_tlbivax(struct kvm_vcpu *vcpu, gva_t ea);
|   130  int kvmppc_e500_emul_tlbilx(struct kvm_vcpu *vcpu, int type, gva_t ea);
|   131  int kvmppc_e500_emul_tlbsx(struct kvm_vcpu *vcpu, gva_t ea);
|   156  static inline gva_t get_tlb_eaddr(const struct kvm_book3e_206_tlb_entry *tlbe) in get_tlb_eaddr()
|   167  static inline gva_t get_tlb_end(const struct kvm_book3e_206_tlb_entry *tlbe) in get_tlb_end()
|
| A D | e500_mmu.c |
|    48  static int tlb0_set_base(gva_t addr, int sets, int ways) in tlb0_set_base()
|    81  gva_t eaddr, int tlbsel, unsigned int pid, int as) in kvmppc_e500_tlb_index()
|   127  gva_t eaddr, int as) in kvmppc_e500_deliver_tlb_miss()
|   155  gva_t eaddr; in kvmppc_recalc_tlb1map_range()
|   245  int kvmppc_e500_emul_tlbivax(struct kvm_vcpu *vcpu, gva_t ea) in kvmppc_e500_emul_tlbivax()
|   293  gva_t ea) in tlbilx_one()
|   341  int kvmppc_e500_emul_tlbsx(struct kvm_vcpu *vcpu, gva_t ea) in kvmppc_e500_emul_tlbsx()
|   454  gva_t eaddr, unsigned int pid, int as) in kvmppc_e500_tlb_search()
|   473  gva_t eaddr; in kvmppc_core_vcpu_translate()
|   495  int kvmppc_mmu_itlb_index(struct kvm_vcpu *vcpu, gva_t eaddr) in kvmppc_mmu_itlb_index()
|   [all …]
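Taken together, the e500.h accessors and the e500_mmu.c lookup routines above show the basic shape of the e500 software TLB: each guest TLB entry covers a range of guest effective addresses, and emulation of tlbsx/tlbivax walks the entries comparing a gva_t against that range. The sketch below illustrates only that comparison; find_matching_entry() is a made-up helper, and the real lookup, kvmppc_e500_tlb_index(), additionally checks the valid bit, PID, TID and address space.

```c
/*
 * Hedged sketch (not kernel code): the range comparison an e500 software-TLB
 * lookup performs with the gva_t accessors from e500.h above.
 */
static struct kvm_book3e_206_tlb_entry *
find_matching_entry(struct kvm_book3e_206_tlb_entry *entries, int nentries,
		    gva_t eaddr)
{
	int i;

	for (i = 0; i < nentries; i++) {
		struct kvm_book3e_206_tlb_entry *tlbe = &entries[i];

		/* get_tlb_eaddr()/get_tlb_end() bound the entry's guest EA range. */
		if (eaddr >= get_tlb_eaddr(tlbe) && eaddr <= get_tlb_end(tlbe))
			return tlbe;
	}

	return NULL;
}
```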
|
| A D | book3s_64_mmu.c |
|    29  gva_t eaddr) in kvmppc_mmu_book3s_64_find_slbe()
|    73  static u64 kvmppc_slb_calc_vpn(struct kvmppc_slb *slb, gva_t eaddr) in kvmppc_slb_calc_vpn()
|    81  static u64 kvmppc_mmu_book3s_64_ea_to_vp(struct kvm_vcpu *vcpu, gva_t eaddr, in kvmppc_mmu_book3s_64_ea_to_vp()
|   109  static u32 kvmppc_mmu_book3s_64_get_page(struct kvmppc_slb *slbe, gva_t eaddr) in kvmppc_mmu_book3s_64_get_page()
|   117  struct kvmppc_slb *slbe, gva_t eaddr, in kvmppc_mmu_book3s_64_get_pteg()
|   155  static u64 kvmppc_mmu_book3s_64_get_avpn(struct kvmppc_slb *slbe, gva_t eaddr) in kvmppc_mmu_book3s_64_get_avpn()
|   191  static int kvmppc_mmu_book3s_64_xlate(struct kvm_vcpu *vcpu, gva_t eaddr, in kvmppc_mmu_book3s_64_xlate()
|   412  static int kvmppc_mmu_book3s_64_slbfee(struct kvm_vcpu *vcpu, gva_t eaddr, in kvmppc_mmu_book3s_64_slbfee()
|
| A D | book3s_64_mmu_radix.c |
|    34  gva_t eaddr, void *to, void *from, in __kvmhv_copy_tofrom_guest_radix()
|    90  static long kvmhv_copy_tofrom_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr, in kvmhv_copy_tofrom_guest_radix()
|   113  long kvmhv_copy_from_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr, void *to, in kvmhv_copy_from_guest_radix()
|   125  long kvmhv_copy_to_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr, void *from, in kvmhv_copy_to_guest_radix()
|   131  int kvmppc_mmu_walk_radix_tree(struct kvm_vcpu *vcpu, gva_t eaddr, in kvmppc_mmu_walk_radix_tree()
|   226  int kvmppc_mmu_radix_translate_table(struct kvm_vcpu *vcpu, gva_t eaddr, in kvmppc_mmu_radix_translate_table()
|   257  int kvmppc_mmu_radix_xlate(struct kvm_vcpu *vcpu, gva_t eaddr, in kvmppc_mmu_radix_xlate()
|
| A D | e500_emulate.c | 135 gva_t ea; in kvmppc_core_emulate_op_e500()
|
| A D | e500mc.c | 59 gva_t eaddr; in kvmppc_e500_tlbil_one()
|
| A D | book3s_64_mmu_hv.c |
|   303  gva_t eaddr) in kvmppc_mmu_book3s_hv_find_slbe()
|   332  static int kvmppc_mmu_book3s_64_hv_xlate(struct kvm_vcpu *vcpu, gva_t eaddr, in kvmppc_mmu_book3s_64_hv_xlate()
|   421  unsigned long gpa, gva_t ea, int is_store) in kvmppc_hv_emulate_mmio()
|
| A D | e500_mmu_host.c | 628 gva_t geaddr; in kvmppc_load_last_inst()
|
| /linux/arch/x86/kvm/ |
| A D | x86.h |
|   225  gva_t gva, gfn_t gfn, unsigned access) in vcpu_cache_mmio_info()
|   251  #define MMIO_GVA_ANY (~(gva_t)0)
|   253  static inline void vcpu_clear_mmio_info(struct kvm_vcpu *vcpu, gva_t gva) in vcpu_clear_mmio_info()
|   310  gva_t addr, void *val, unsigned int bytes,
|   314  gva_t addr, void *val, unsigned int bytes,
|   329  void kvm_fixup_and_inject_pf_error(struct kvm_vcpu *vcpu, gva_t gva, u16 error_code);
|   452  int kvm_handle_invpcid(struct kvm_vcpu *vcpu, unsigned long type, gva_t gva);
|
| A D | x86.c |
|  6519  gpa_t kvm_mmu_gva_to_gpa_read(struct kvm_vcpu *vcpu, gva_t gva, in kvm_mmu_gva_to_gpa_read()
|  6527  gpa_t kvm_mmu_gva_to_gpa_fetch(struct kvm_vcpu *vcpu, gva_t gva, in kvm_mmu_gva_to_gpa_fetch()
|  6535  gpa_t kvm_mmu_gva_to_gpa_write(struct kvm_vcpu *vcpu, gva_t gva, in kvm_mmu_gva_to_gpa_write()
|  6545  gpa_t kvm_mmu_gva_to_gpa_system(struct kvm_vcpu *vcpu, gva_t gva, in kvm_mmu_gva_to_gpa_system()
|  6551  static int kvm_read_guest_virt_helper(gva_t addr, void *val, unsigned int bytes, in kvm_read_guest_virt_helper()
|  6584  gva_t addr, void *val, unsigned int bytes, in kvm_fetch_guest_virt()
|  6610  gva_t addr, void *val, unsigned int bytes, in kvm_read_guest_virt()
|  6628  gva_t addr, void *val, unsigned int bytes, in emulator_read_std()
|  6649  static int kvm_write_guest_virt_helper(gva_t addr, void *val, unsigned int bytes, in kvm_write_guest_virt_helper()
|  6694  int kvm_write_guest_virt_system(struct kvm_vcpu *vcpu, gva_t addr, void *val, in kvm_write_guest_virt_system()
|   [all …]
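The x86.c entries above are the per-access-type gva_t → gpa_t translators (read/fetch/write/system) plus the guest-virtual read/write helpers built on top of them. Below is a minimal sketch of combining one of those translators with the generic memslot accessor kvm_vcpu_read_guest(); read_guest_u64_by_gva() is a made-up helper, and the translation-failure constant is INVALID_GPA on recent kernels (UNMAPPED_GVA on older ones).

```c
/*
 * Hedged sketch (not kernel code): translate a guest virtual address for a
 * data read, then fetch the value from guest physical memory.
 */
static int read_guest_u64_by_gva(struct kvm_vcpu *vcpu, gva_t gva, u64 *val)
{
	struct x86_exception exception;
	gpa_t gpa;

	/* Walk the guest page tables as a data read in the current mode. */
	gpa = kvm_mmu_gva_to_gpa_read(vcpu, gva, &exception);
	if (gpa == INVALID_GPA)		/* UNMAPPED_GVA on older trees */
		return -EFAULT;

	/* Read from guest physical memory through the memslots. */
	return kvm_vcpu_read_guest(vcpu, gpa, val, sizeof(*val));
}
```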
|
| A D | trace.h |
|   818  TP_PROTO(gva_t gva, gpa_t gpa, bool write, bool gpa_match),
|   822  __field(gva_t, gva)
|
| /linux/arch/x86/kvm/vmx/ |
| A D | sgx.c |
|    24  int size, int alignment, gva_t *gva) in sgx_get_encls_gva()
|    72  static int sgx_gva_to_gpa(struct kvm_vcpu *vcpu, gva_t gva, bool write, in sgx_gva_to_gpa()
|   103  static int sgx_inject_fault(struct kvm_vcpu *vcpu, gva_t gva, int trapnr) in sgx_inject_fault()
|   142  gva_t secs_gva) in __handle_encls_ecreate()
|   208  gva_t pageinfo_gva, secs_gva; in handle_encls_ecreate()
|   209  gva_t metadata_gva, contents_gva; in handle_encls_ecreate()
|   290  gva_t sig_gva, secs_gva, token_gva; in handle_encls_einit()
|
| A D | vmx_ops.h |
|    19  void invvpid_error(unsigned long ext, u16 vpid, gva_t gva);
|   257  static inline void __invvpid(unsigned long ext, u16 vpid, gva_t gva) in __invvpid()
|   298  static inline void vpid_sync_vcpu_addr(int vpid, gva_t addr) in vpid_sync_vcpu_addr()
|
| A D | nested.h | 34 u32 vmx_instruction_info, bool wr, int len, gva_t *ret);
|
| A D | nested.c |
|  4651  u32 vmx_instruction_info, bool wr, int len, gva_t *ret) in get_vmx_mem_address()
|  4653  gva_t off; in get_vmx_mem_address()
|  4683  off = (gva_t)sign_extend64(off, 31); in get_vmx_mem_address()
|  4685  off = (gva_t)sign_extend64(off, 15); in get_vmx_mem_address()
|  4798  gva_t gva; in nested_vmx_get_vmptr()
|  5070  gva_t gva = 0; in handle_vmread()
|  5154  gva_t gva; in handle_vmwrite()
|  5340  gva_t gva; in handle_vmptrst()
|  5368  gva_t gva; in handle_invept()
|  5447  gva_t gva; in handle_invvpid()
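The nested.c handlers above share one pattern: get_vmx_mem_address() (declared in nested.h) decodes the VMX instruction's memory operand into a gva_t, and the operand is then fetched through kvm_read_guest_virt(). The sketch below is a simplified, hedged illustration of that pattern for an INVVPID-style 16-byte descriptor; the real handlers also validate the operand and inject the exception recorded on failure.

```c
/*
 * Hedged sketch (not kernel code) of the operand-decode pattern used by
 * handle_invvpid() and friends.  fetch_invvpid_operand() is made up.
 */
struct invvpid_desc {
	u64 vpid;	/* only the low 16 bits are used */
	u64 gla;	/* guest linear address */
};

static int fetch_invvpid_operand(struct kvm_vcpu *vcpu,
				 unsigned long exit_qual, u32 insn_info,
				 struct invvpid_desc *desc)
{
	struct x86_exception e;
	gva_t gva;

	/* Decode base/index/displacement into a guest virtual address. */
	if (get_vmx_mem_address(vcpu, exit_qual, insn_info, false,
				sizeof(*desc), &gva))
		return 1;

	/* Pull the 16-byte descriptor in through the guest page tables. */
	if (kvm_read_guest_virt(vcpu, gva, desc, sizeof(*desc), &e))
		return 1;	/* real code injects the fault recorded in e */

	return 0;
}
```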
|
| /linux/arch/powerpc/include/asm/ |
| A D | kvm_book3s.h |
|   160  extern long kvmppc_hv_find_lock_hpte(struct kvm *kvm, gva_t eaddr,
|   163  unsigned long gpa, gva_t ea, int is_store);
|   179  gva_t eaddr, void *to, void *from,
|   181  extern long kvmhv_copy_from_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr,
|   183  extern long kvmhv_copy_to_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr,
|   185  extern int kvmppc_mmu_walk_radix_tree(struct kvm_vcpu *vcpu, gva_t eaddr,
|   188  extern int kvmppc_mmu_radix_translate_table(struct kvm_vcpu *vcpu, gva_t eaddr,
|   191  extern int kvmppc_mmu_radix_xlate(struct kvm_vcpu *vcpu, gva_t eaddr,
|
| A D | kvm_host.h |
|   391  int (*slbfee)(struct kvm_vcpu *vcpu, gva_t eaddr, ulong *ret_slb);
|   397  int (*xlate)(struct kvm_vcpu *vcpu, gva_t eaddr,
|   401  u64 (*ea_to_vp)(struct kvm_vcpu *vcpu, gva_t eaddr, bool data);
|   707  gva_t vaddr_accessed;
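The powerpc kvm_host.h hits above are members of the per-vCPU Book3S MMU callback table; the xlate hook translates a guest effective address into a kvmppc_pte. The sketch below shows how such a hook is typically invoked, assuming the full signature int (*xlate)(struct kvm_vcpu *, gva_t, struct kvmppc_pte *, bool data, bool iswrite) and the usual vcpu->arch.mmu access path; translate_data_ea() is a made-up wrapper.

```c
/*
 * Hedged sketch (not kernel code): translate a guest effective address via
 * the Book3S MMU xlate callback listed above.
 */
static int translate_data_ea(struct kvm_vcpu *vcpu, gva_t eaddr,
			     struct kvmppc_pte *pte)
{
	/* Data access (not an instruction fetch), read-only intent. */
	return vcpu->arch.mmu.xlate(vcpu, eaddr, pte, true, false);
}
```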
|
| A D | kvm_ppc.h |
|   109  extern int kvmppc_mmu_dtlb_index(struct kvm_vcpu *vcpu, gva_t eaddr);
|   110  extern int kvmppc_mmu_itlb_index(struct kvm_vcpu *vcpu, gva_t eaddr);
|   112  gva_t eaddr);
|
| /linux/arch/x86/include/asm/ |
| A D | kvm_host.h |
|   429  void (*invlpg)(struct kvm_vcpu *vcpu, gva_t gva, hpa_t root_hpa);
|  1366  void (*tlb_flush_gva)(struct kvm_vcpu *vcpu, gva_t addr);
|  1767  gpa_t kvm_mmu_gva_to_gpa_read(struct kvm_vcpu *vcpu, gva_t gva,
|  1769  gpa_t kvm_mmu_gva_to_gpa_fetch(struct kvm_vcpu *vcpu, gva_t gva,
|  1771  gpa_t kvm_mmu_gva_to_gpa_write(struct kvm_vcpu *vcpu, gva_t gva,
|  1773  gpa_t kvm_mmu_gva_to_gpa_system(struct kvm_vcpu *vcpu, gva_t gva,
|  1788  void kvm_mmu_invlpg(struct kvm_vcpu *vcpu, gva_t gva);
|  1790  gva_t gva, hpa_t root_hpa);
|  1791  void kvm_mmu_invpcid_gva(struct kvm_vcpu *vcpu, gva_t gva, unsigned long pcid);
|
| /linux/include/linux/ |
| A D | kvm_types.h | 36 typedef unsigned long gva_t;
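For context, gva_t is defined next to the other guest/host address types in include/linux/kvm_types.h; on recent kernels the grouping looks like the block below (virtual addresses are unsigned long, physical addresses and frame numbers are u64).

```c
/* Guest/host address types as grouped in include/linux/kvm_types.h (recent kernels). */
typedef unsigned long  gva_t;   /* guest virtual address */
typedef u64            gpa_t;   /* guest physical address */
typedef u64            gfn_t;   /* guest frame number */

typedef unsigned long  hva_t;   /* host virtual address */
typedef u64            hpa_t;   /* host physical address */
typedef u64            hfn_t;   /* host frame number */
```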
|
| /linux/arch/x86/kvm/mmu/ |
| A D | paging_tmpl.h |
|   552  struct kvm_vcpu *vcpu, gva_t addr, in FNAME()
|   943  static void FNAME(invlpg)(struct kvm_vcpu *vcpu, gva_t gva, hpa_t root_hpa) in FNAME()
|
| A D | mmu.c |
|  2073  gva_t gaddr, in kvm_mmu_get_page()
|  2553  static int kvm_mmu_unprotect_page_virt(struct kvm_vcpu *vcpu, gva_t gva) in kvm_mmu_unprotect_page_virt()
|  3025  gva_t gva = fault->is_tdp ? 0 : fault->addr; in handle_abnormal_pfn()
|  3350  static hpa_t mmu_alloc_root(struct kvm_vcpu *vcpu, gfn_t gfn, gva_t gva, in mmu_alloc_root()
|  3897  static void shadow_page_table_clear_flood(struct kvm_vcpu *vcpu, gva_t addr) in shadow_page_table_clear_flood()
|  5359  gva_t gva, hpa_t root_hpa) in kvm_mmu_invalidate_gva()
|  5397  void kvm_mmu_invlpg(struct kvm_vcpu *vcpu, gva_t gva) in kvm_mmu_invlpg()
|  5405  void kvm_mmu_invpcid_gva(struct kvm_vcpu *vcpu, gva_t gva, unsigned long pcid) in kvm_mmu_invpcid_gva()
|
| /linux/arch/mips/include/asm/ |
| A D | kvm_host.h | 739 gpa_t (*gva_to_gpa)(gva_t gva);
|
| /linux/arch/x86/kvm/svm/ |
| A D | svm.c |
|  2391  gva_t gva = kvm_rax_read(vcpu); in invlpga_interception()
|  3118  gva_t gva; in invpcid_interception()
|  3687  static void svm_flush_tlb_gva(struct kvm_vcpu *vcpu, gva_t gva) in svm_flush_tlb_gva()
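The svm.c hits above show the SVM side of gva_t handling: INVLPGA emulation picks the guest virtual address out of RAX, and svm_flush_tlb_gva() implements the tlb_flush_gva hook. The sketch below is a hedged illustration of the RAX-to-flush flow only; the ASID argument and the nested/error handling of the real invlpga_interception() are omitted, and flush_gva_from_rax() is a made-up name.

```c
/*
 * Hedged sketch (not kernel code): flush a single guest virtual address
 * taken from RAX, as in INVLPGA-style emulation.
 */
static void flush_gva_from_rax(struct kvm_vcpu *vcpu)
{
	gva_t gva = kvm_rax_read(vcpu);

	/* Invalidate this guest virtual address in the MMU and the TLB. */
	kvm_mmu_invlpg(vcpu, gva);
}
```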
|