Cross-references for struct kvm_s2_mmu

/linux/arch/arm64/kvm/hyp/nvhe/
tlb.c
     14  struct kvm_s2_mmu *mmu;
     19  static void enter_vmid_context(struct kvm_s2_mmu *mmu,    in enter_vmid_context()
     23  struct kvm_s2_mmu *host_s2_mmu = &host_mmu.arch.mmu;    in enter_vmid_context()
    120  struct kvm_s2_mmu *mmu = cxt->mmu;    in exit_vmid_context()
    148  void __kvm_tlb_flush_vmid_ipa(struct kvm_s2_mmu *mmu,    in __kvm_tlb_flush_vmid_ipa()
    178  void __kvm_tlb_flush_vmid_ipa_nsh(struct kvm_s2_mmu *mmu,    in __kvm_tlb_flush_vmid_ipa_nsh()
    208  void __kvm_tlb_flush_vmid_range(struct kvm_s2_mmu *mmu,    in __kvm_tlb_flush_vmid_range()
    235  void __kvm_tlb_flush_vmid(struct kvm_s2_mmu *mmu)    in __kvm_tlb_flush_vmid()
    249  void __kvm_flush_cpu_context(struct kvm_s2_mmu *mmu)    in __kvm_flush_cpu_context()
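Every flush helper in the nVHE tlb.c follows the same shape: enter_vmid_context() switches the hardware stage-2 context to the target VMID, the TLBI is issued, and exit_vmid_context() restores the host. A simplified sketch of __kvm_tlb_flush_vmid_ipa() under that pattern (the real enter/exit helpers also juggle the host MMU and errata workarounds):

```c
void __kvm_tlb_flush_vmid_ipa(struct kvm_s2_mmu *mmu,
			      phys_addr_t ipa, int level)
{
	struct tlb_inv_context cxt;

	/* Load the target VMID so the TLBI below is scoped to it. */
	enter_vmid_context(mmu, &cxt, false);

	/* Invalidate the stage-2 entries for this IPA at the given level... */
	ipa >>= 12;
	__tlbi_level(ipas2e1is, ipa, level);
	dsb(ish);

	/* ...then any stage-1 entries that cached the combined walk. */
	__tlbi(vmalle1is);
	dsb(ish);
	isb();

	/* Restore the host's translation context. */
	exit_vmid_context(&cxt);
}
```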
|
hyp-main.c
    201  DECLARE_REG(struct kvm_s2_mmu *, mmu, host_ctxt, 1);    in handle___kvm_tlb_flush_vmid_ipa()
    210  DECLARE_REG(struct kvm_s2_mmu *, mmu, host_ctxt, 1);    in handle___kvm_tlb_flush_vmid_ipa_nsh()
    220  DECLARE_REG(struct kvm_s2_mmu *, mmu, host_ctxt, 1);    in handle___kvm_tlb_flush_vmid_range()
    229  DECLARE_REG(struct kvm_s2_mmu *, mmu, host_ctxt, 1);    in handle___kvm_tlb_flush_vmid()
    236  DECLARE_REG(struct kvm_s2_mmu *, mmu, host_ctxt, 1);    in handle___kvm_flush_cpu_context()
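Each hit in hyp-main.c is one of the hypercall shims that bridge the host's kvm_call_hyp() into the EL2 functions above. DECLARE_REG() pulls an argument out of the saved host register file, and the host-kernel pointer has to be converted to a hyp VA before it can be dereferenced at EL2. The VMID-flush handler, close to what mainline has:

```c
static void handle___kvm_tlb_flush_vmid(struct kvm_cpu_context *host_ctxt)
{
	/* x1 of the trapped HVC holds the host's kvm_s2_mmu pointer. */
	DECLARE_REG(struct kvm_s2_mmu *, mmu, host_ctxt, 1);

	/* Host kernel VAs are not mapped at EL2; translate first. */
	__kvm_tlb_flush_vmid(kern_hyp_va(mmu));
}
```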
|
switch.c
    274  struct kvm_s2_mmu *mmu;    in __kvm_vcpu_run()
|
mem_protect.c
    140  struct kvm_s2_mmu *mmu = &host_mmu.arch.mmu;    in kvm_host_prepare_stage2()
    234  struct kvm_s2_mmu *mmu = &vm->kvm.arch.mmu;    in kvm_guest_prepare_stage2()
    291  struct kvm_s2_mmu *mmu = &host_mmu.arch.mmu;    in __pkvm_prot_finalize()
|
pkvm.c
    365  struct kvm_s2_mmu *mmu = &hyp_vm->kvm.arch.mmu;    in insert_vm_table_entry()
|
/linux/arch/arm64/include/asm/
kvm_mmu.h
    169  void kvm_stage2_unmap_range(struct kvm_s2_mmu *mmu, phys_addr_t start,
    171  void kvm_stage2_flush_range(struct kvm_s2_mmu *mmu, phys_addr_t addr, phys_addr_t end);
    172  void kvm_stage2_wp_range(struct kvm_s2_mmu *mmu, phys_addr_t addr, phys_addr_t end);
    175  int kvm_init_stage2_mmu(struct kvm *kvm, struct kvm_s2_mmu *mmu, unsigned long type);
    177  void kvm_free_stage2_pgd(struct kvm_s2_mmu *mmu);
    301  static __always_inline u64 kvm_get_vttbr(struct kvm_s2_mmu *mmu)
    317  static __always_inline void __load_stage2(struct kvm_s2_mmu *mmu,
    331  static inline struct kvm *kvm_s2_mmu_to_kvm(struct kvm_s2_mmu *mmu)
    342  static inline bool kvm_s2_mmu_valid(struct kvm_s2_mmu *mmu)
    347  static inline bool kvm_is_nested_s2_mmu(struct kvm *kvm, struct kvm_s2_mmu *mmu)
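Two of these inlines do the hardware plumbing: kvm_get_vttbr() packs a kvm_s2_mmu into a VTTBR_EL2 value (stage-2 PGD base, VMID tag, and the CnP bit where supported), and __load_stage2() writes it together with VTCR_EL2. A sketch of the former, close to mainline modulo version drift:

```c
static __always_inline u64 kvm_get_vttbr(struct kvm_s2_mmu *mmu)
{
	struct kvm_vmid *vmid = &mmu->vmid;
	u64 vmid_field, baddr;
	u64 cnp = system_supports_cnp() ? VTTBR_CNP_BIT : 0;

	/* Physical base of the stage-2 page tables... */
	baddr = mmu->pgd_phys;
	/* ...tagged with the VMID in the top bits of the register. */
	vmid_field = atomic64_read(&vmid->id) << VTTBR_VMID_SHIFT;
	return kvm_phys_to_vttbr(baddr) | vmid_field | cnp;
}
```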
|
kvm_asm.h
    212  struct kvm_s2_mmu;
    227  extern void __kvm_flush_cpu_context(struct kvm_s2_mmu *mmu);
    228  extern void __kvm_tlb_flush_vmid_ipa(struct kvm_s2_mmu *mmu, phys_addr_t ipa,
    230  extern void __kvm_tlb_flush_vmid_ipa_nsh(struct kvm_s2_mmu *mmu,
    233  extern void __kvm_tlb_flush_vmid_range(struct kvm_s2_mmu *mmu,
    235  extern void __kvm_tlb_flush_vmid(struct kvm_s2_mmu *mmu);
    237  extern int __kvm_tlbi_s1e2(struct kvm_s2_mmu *mmu, u64 va, u64 sys_encoding);
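kvm_asm.h only forward-declares struct kvm_s2_mmu (line 212); the extern declarations are the EL2 entry points the host reaches through kvm_call_hyp(). Under nVHE that macro becomes an HVC dispatched to the handle___* shims in hyp-main.c above; under VHE it collapses into a direct call. Typical call sites, as found in arch/arm64/kvm/mmu.c:

```c
/* Flush one IPA's worth of TLB entries for a given stage-2 context. */
kvm_call_hyp(__kvm_tlb_flush_vmid_ipa, mmu, ipa, level);

/* Drop every TLB entry tagged with the VM's VMID. */
kvm_call_hyp(__kvm_tlb_flush_vmid, &kvm->arch.mmu);
```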
|
kvm_nested.h
     69  extern void kvm_init_nested_s2_mmu(struct kvm_s2_mmu *mmu);
     70  extern struct kvm_s2_mmu *lookup_s2_mmu(struct kvm_vcpu *vcpu);
     76  void (*)(struct kvm_s2_mmu *,
    132  unsigned long compute_tlb_inval_range(struct kvm_s2_mmu *mmu, u64 val);
|
kvm_pgtable.h
    421  struct kvm_s2_mmu *mmu;
    524  int __kvm_pgtable_stage2_init(struct kvm_pgtable *pgt, struct kvm_s2_mmu *mmu,
    829  void kvm_tlb_flush_vmid_range(struct kvm_s2_mmu *mmu,
|
kvm_host.h
    148  struct kvm_s2_mmu {    (struct definition)
    282  struct kvm_s2_mmu mmu;
    297  struct kvm_s2_mmu *nested_mmus;
    696  struct kvm_s2_mmu *hw_mmu;
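kvm_host.h is where the type itself is defined (line 148). The other three hits show the ownership model: each VM embeds one canonical stage-2 MMU in kvm_arch, nested virtualization adds an array of shadow MMUs, and each vCPU records which context is currently live in hardware. A simplified sketch of those relationships (the real field sets are larger and vary across kernel versions):

```c
struct kvm_s2_mmu {
	struct kvm_vmid vmid;		/* VMID tagging this stage-2 context */
	phys_addr_t pgd_phys;		/* physical base of the stage-2 tables */
	struct kvm_pgtable *pgt;	/* descriptor used by hyp/pgtable.c */
	int __percpu *last_vcpu_ran;	/* tracks CPU migration for TLB maintenance */
	struct kvm_arch *arch;		/* back-pointer to the owning VM */
};

struct kvm_arch {
	struct kvm_s2_mmu mmu;		/* the VM's canonical stage-2 MMU */
	struct kvm_s2_mmu *nested_mmus;	/* shadow stage-2 contexts for NV */
	/* ... */
};

struct kvm_vcpu_arch {
	struct kvm_s2_mmu *hw_mmu;	/* context currently loaded in VTTBR_EL2 */
	/* ... */
};
```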
|
/linux/arch/arm64/kvm/hyp/vhe/
tlb.c
     14  struct kvm_s2_mmu *mmu;
     20  static void enter_vmid_context(struct kvm_s2_mmu *mmu,    in enter_vmid_context()
     92  void __kvm_tlb_flush_vmid_ipa(struct kvm_s2_mmu *mmu,    in __kvm_tlb_flush_vmid_ipa()
    124  void __kvm_tlb_flush_vmid_ipa_nsh(struct kvm_s2_mmu *mmu,    in __kvm_tlb_flush_vmid_ipa_nsh()
    156  void __kvm_tlb_flush_vmid_range(struct kvm_s2_mmu *mmu,    in __kvm_tlb_flush_vmid_range()
    185  void __kvm_tlb_flush_vmid(struct kvm_s2_mmu *mmu)    in __kvm_tlb_flush_vmid()
    201  void __kvm_flush_cpu_context(struct kvm_s2_mmu *mmu)    in __kvm_flush_cpu_context()
    232  int __kvm_tlbi_s1e2(struct kvm_s2_mmu *mmu, u64 va, u64 sys_encoding)    in __kvm_tlbi_s1e2()
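The VHE file exports the same API as its nVHE twin, but the mechanics differ: the host kernel already runs at EL2 with HCR_EL2.TGE set, so enter_vmid_context() must load the guest's stage-2 registers and clear TGE before an EL1&0-regime TLBI can target the guest at all. A simplified sketch (the real function also handles the SPECULATIVE_AT errata and nested contexts):

```c
static void enter_vmid_context(struct kvm_s2_mmu *mmu,
			       struct tlb_inv_context *cxt)
{
	u64 val;

	local_irq_save(cxt->flags);

	/* Point the EL1&0 regime at the guest's stage-2 tables... */
	__load_stage2(mmu, mmu->arch);

	/* ...and clear HCR_EL2.TGE so TLBIs hit that regime, not the host. */
	val = read_sysreg(hcr_el2);
	val &= ~HCR_TGE;
	write_sysreg(val, hcr_el2);
	isb();
}
```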
|
/linux/arch/arm64/kvm/
nested.c
     54  struct kvm_s2_mmu *tmp;    in kvm_vcpu_init_nested()
    538  void (*tlbi_callback)(struct kvm_s2_mmu *,    in kvm_s2_mmu_iterate_by_vmid() (argument)
    544  struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i];    in kvm_s2_mmu_iterate_by_vmid()
    586  struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i];    in lookup_s2_mmu()
    608  struct kvm_s2_mmu *s2_mmu;    in get_s2_mmu_nested()
    666  void kvm_init_nested_s2_mmu(struct kvm_s2_mmu *mmu)    in kvm_init_nested_s2_mmu()
    751  struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i];    in kvm_nested_s2_wp()
    765  struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i];    in kvm_nested_s2_unmap()
    779  struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i];    in kvm_nested_s2_flush()
    791  struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i];    in kvm_arch_flush_shadow_all()
    [all …]
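The string of `&kvm->arch.nested_mmus[i]` hits is one recurring idiom: nested-virt maintenance walks the whole shadow-MMU array and applies an operation to every valid entry. The shape shared by kvm_nested_s2_wp(), _unmap() and _flush(), sketched with mainline's field names (which may drift between versions):

```c
void kvm_nested_s2_flush(struct kvm *kvm)
{
	int i;

	lockdep_assert_held_write(&kvm->mmu_lock);

	for (i = 0; i < kvm->arch.nested_mmus_size; i++) {
		struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i];

		/* Skip slots whose shadow stage-2 was never instantiated. */
		if (kvm_s2_mmu_valid(mmu))
			kvm_stage2_flush_range(mmu, 0, kvm_phys_size(mmu));
	}
}
```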
|
mmu.c
     56  static int stage2_apply_range(struct kvm_s2_mmu *mmu, phys_addr_t addr,    in stage2_apply_range()
    319  static void __unmap_stage2_range(struct kvm_s2_mmu *mmu, phys_addr_t start, u64 size,    in __unmap_stage2_range()
    331  void kvm_stage2_unmap_range(struct kvm_s2_mmu *mmu, phys_addr_t start,    in kvm_stage2_unmap_range()
    337  void kvm_stage2_flush_range(struct kvm_s2_mmu *mmu, phys_addr_t addr, phys_addr_t end)    in kvm_stage2_flush_range()
    866  static int kvm_init_ipa_range(struct kvm_s2_mmu *mmu, unsigned long type)    in kvm_init_ipa_range()
    913  int kvm_init_stage2_mmu(struct kvm *kvm, struct kvm_s2_mmu *mmu, unsigned long type)    in kvm_init_stage2_mmu()
   1053  void kvm_free_stage2_pgd(struct kvm_s2_mmu *mmu)    in kvm_free_stage2_pgd()
   1114  struct kvm_s2_mmu *mmu = &kvm->arch.mmu;    in kvm_phys_addr_ioremap()
   1152  void kvm_stage2_wp_range(struct kvm_s2_mmu *mmu, phys_addr_t addr, phys_addr_t end)    in kvm_stage2_wp_range()
   1712  struct kvm_s2_mmu *mmu;    in handle_access_fault()
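kvm_stage2_unmap_range(), _flush_range() and _wp_range() are thin wrappers over stage2_apply_range(), which splits a large IPA range into top-level-block-sized chunks so the write-held mmu_lock can be yielded between them. Simplified from the mainline version:

```c
static int stage2_apply_range(struct kvm_s2_mmu *mmu, phys_addr_t addr,
			      phys_addr_t end,
			      int (*fn)(struct kvm_pgtable *, u64, u64),
			      bool resched)
{
	struct kvm *kvm = kvm_s2_mmu_to_kvm(mmu);
	u64 next;
	int ret;

	do {
		struct kvm_pgtable *pgt = mmu->pgt;

		if (!pgt)
			return -EINVAL;

		/* Process at most one top-level block per iteration... */
		next = stage2_range_addr_end(addr, end);
		ret = fn(pgt, addr, next - addr);
		if (ret)
			break;

		/* ...so contended walkers can take the lock in between. */
		if (resched && next != end)
			cond_resched_rwlock_write(&kvm->mmu_lock);
	} while (addr = next, addr != end);

	return ret;
}
```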
|
ptdump.c
     99  struct kvm_s2_mmu *mmu = &kvm->arch.mmu;    in kvm_ptdump_parser_create()
    134  struct kvm_s2_mmu *mmu = &kvm->arch.mmu;    in kvm_ptdump_guest_show()
|
at.c
    881  struct kvm_s2_mmu *mmu;    in __kvm_at_s1e01_fast()
    995  struct kvm_s2_mmu *mmu;    in __kvm_at_s1e2()
|
sys_regs.c
   2991  static void s2_mmu_unmap_range(struct kvm_s2_mmu *mmu,    in s2_mmu_unmap_range()
   3093  static void s2_mmu_unmap_ipa(struct kvm_s2_mmu *mmu,    in s2_mmu_unmap_ipa()
   3139  static void s2_mmu_tlbi_s1e1(struct kvm_s2_mmu *mmu,    in s2_mmu_tlbi_s1e1()
|
arm.c
    578  struct kvm_s2_mmu *mmu;    in kvm_arch_vcpu_load()
|
/linux/arch/arm64/kvm/hyp/
pgtable.c
    585  struct kvm_s2_mmu *mmu;
    653  void kvm_tlb_flush_vmid_range(struct kvm_s2_mmu *mmu,    in kvm_tlb_flush_vmid_range()
    783  struct kvm_s2_mmu *mmu)    in stage2_try_break_pte()
    847  struct kvm_s2_mmu *mmu,    in stage2_unmap_put_pte()
   1112  struct kvm_s2_mmu *mmu = pgt->mmu;    in stage2_unmap_walker()
   1448  struct kvm_s2_mmu *mmu;    in stage2_split_walker()
   1487  mmu = container_of(mc, struct kvm_s2_mmu, split_page_cache);    in stage2_split_walker()
   1526  int __kvm_pgtable_stage2_init(struct kvm_pgtable *pgt, struct kvm_s2_mmu *mmu,    in __kvm_pgtable_stage2_init()
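kvm_tlb_flush_vmid_range() (line 653) is the range-invalidation front end shared by the VHE and nVHE backends: with FEAT_TLBIRANGE it issues range TLBIs in MAX_TLBI_RANGE_PAGES-sized batches, and without it falls back to flushing the whole VMID. This tracks the mainline implementation closely:

```c
void kvm_tlb_flush_vmid_range(struct kvm_s2_mmu *mmu,
			      phys_addr_t addr, size_t size)
{
	unsigned long pages, inval_pages;

	/* No FEAT_TLBIRANGE: flushing the whole VMID is the cheaper option. */
	if (!system_supports_tlb_range()) {
		kvm_call_hyp(__kvm_tlb_flush_vmid, mmu);
		return;
	}

	pages = size >> PAGE_SHIFT;
	while (pages > 0) {
		/* Each range TLBI covers at most MAX_TLBI_RANGE_PAGES pages. */
		inval_pages = min(pages, MAX_TLBI_RANGE_PAGES);
		kvm_call_hyp(__kvm_tlb_flush_vmid_range, mmu, addr, inval_pages);

		addr += inval_pages << PAGE_SHIFT;
		pages -= inval_pages;
	}
}
```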
|