
Searched refs:kvm_mmu (Results 1 – 25 of 51) sorted by relevance


/linux/arch/x86/kvm/mmu/
tdp_mmu.h:94  static inline bool is_tdp_mmu(struct kvm_mmu *mmu) in is_tdp_mmu()
114 static inline bool is_tdp_mmu(struct kvm_mmu *mmu) { return false; } in is_tdp_mmu()
mmu.c:3363  struct kvm_mmu *mmu = vcpu->arch.mmu; in mmu_alloc_direct_roots()
3471 struct kvm_mmu *mmu = vcpu->arch.mmu; in mmu_alloc_shadow_roots()
3581 struct kvm_mmu *mmu = vcpu->arch.mmu; in mmu_alloc_special_roots()
4157 struct kvm_mmu *mmu = vcpu->arch.mmu; in cached_root_available()
4181 struct kvm_mmu *mmu = vcpu->arch.mmu; in fast_pgd_switch()
4379 struct kvm_mmu *context) in reset_rsvds_bits_mask()
4422 struct kvm_mmu *context, bool execonly) in reset_rsvds_bits_mask_ept()
4439 struct kvm_mmu *context) in reset_shadow_zero_bits_mask()
4487 struct kvm_mmu *context) in reset_tdp_shadow_zero_bits_mask()
4675 struct kvm_mmu *mmu) in reset_guest_paging_metadata()
[all …]
paging_tmpl.h:104  static inline void FNAME(protect_clean_gpte)(struct kvm_mmu *mmu, unsigned *access, in FNAME()
140 static bool FNAME(is_rsvd_bits_set)(struct kvm_mmu *mmu, u64 gpte, int level) in FNAME()
146 static int FNAME(cmpxchg_gpte)(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, in FNAME()
236 struct kvm_mmu *mmu, in FNAME()
308 static inline bool FNAME(is_last_gpte)(struct kvm_mmu *mmu, in FNAME()
341 struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, in FNAME()
tdp_mmu.c:955  struct kvm_mmu *mmu = vcpu->arch.mmu; in kvm_tdp_mmu_map()
1489 struct kvm_mmu *mmu = vcpu->arch.mmu; in kvm_tdp_mmu_get_walk()
1518 struct kvm_mmu *mmu = vcpu->arch.mmu; in kvm_tdp_mmu_fast_pf_get_last_sptep()
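
The two tdp_mmu.h hits above are the two halves of a compile-time switch: when the TDP MMU is built in, is_tdp_mmu() inspects the MMU it is given, and otherwise a stub that always returns false is compiled so the TDP-specific paths fold away. A minimal sketch of that pattern, assuming a CONFIG_X86_64-style guard and a deliberately simplified body for the real helper (the in-tree version at tdp_mmu.h:94 examines the shadow page backing the MMU's root):

    #include <stdbool.h>
    #include <stddef.h>

    struct kvm_mmu;                 /* opaque here; defined in kvm_host.h */

    #ifdef CONFIG_X86_64
    /* Real helper: decides from the MMU itself.  The condition below is a
     * placeholder for this sketch, not the in-tree check. */
    static inline bool is_tdp_mmu(struct kvm_mmu *mmu)
    {
            return mmu != NULL;     /* placeholder condition */
    }
    #else
    /* TDP MMU not built in: callers see a constant false, so the compiler
     * can drop the TDP-only branches entirely. */
    static inline bool is_tdp_mmu(struct kvm_mmu *mmu) { return false; }
    #endif
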
/linux/arch/x86/include/asm/
kvm_host.h:417  struct kvm_mmu {
655 struct kvm_mmu *mmu;
658 struct kvm_mmu root_mmu;
661 struct kvm_mmu guest_mmu;
671 struct kvm_mmu nested_mmu;
677 struct kvm_mmu *walk_mmu;
1593 int load_pdptrs(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, unsigned long cr3);
1762 void kvm_mmu_free_roots(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu,
1764 void kvm_mmu_free_guest_mode_roots(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu);
1789 void kvm_mmu_invalidate_gva(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu,
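
Taken together, the kvm_host.h hits show that each vCPU carries several kvm_mmu instances plus two pointers that select among them: arch.mmu is the active MMU that the mmu.c and tdp_mmu.c hits load first thing, while arch.walk_mmu is the one used to walk guest page tables (see the vmx.c PDPTR hits below). A reduced sketch of that layout, keeping only the fields that appear in the hits; the struct name and the comments summarize how KVM typically uses them and are not taken verbatim from the header:

    struct kvm_mmu {
            void *root;     /* placeholder: real struct holds the root,
                             * paging callbacks, reserved-bit masks, ... */
    };

    struct kvm_vcpu_arch_sketch {           /* stand-in for kvm_vcpu_arch */
            /*
             * Active MMU: normally points at root_mmu; switched to
             * guest_mmu while an L2 guest runs with nested paging.
             */
            struct kvm_mmu *mmu;

            struct kvm_mmu root_mmu;        /* MMU for L1 */
            struct kvm_mmu guest_mmu;       /* MMU for L2 under nested paging */
            struct kvm_mmu nested_mmu;      /* models L2's own page tables */

            /*
             * MMU used for guest page-table walks; usually equal to mmu,
             * but points at nested_mmu while L2 is running.
             */
            struct kvm_mmu *walk_mmu;
    };

Call sites then pick whichever pointer matches their job, e.g. struct kvm_mmu *mmu = vcpu->arch.mmu; in the root-allocation and fault paths versus vcpu->arch.walk_mmu in the PDPTR handlers.
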
/linux/arch/x86/kvm/
mmu.h:250  static inline u8 permission_fault(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, in permission_fault()
/linux/arch/arm64/kvm/hyp/
.entry.nvhe.o.cmd:147  arch/arm64/include/asm/kvm_mmu.h \
.entry.o.cmd:147  arch/arm64/include/asm/kvm_mmu.h \
.reserved_mem.o.cmd:1282  arch/arm64/include/asm/kvm_mmu.h \
.vgic-v3-sr.o.cmd:1278  arch/arm64/include/asm/kvm_mmu.h \
/linux/arch/arm64/kvm/hyp/nvhe/
.host.nvhe.o.cmd:147  arch/arm64/include/asm/kvm_mmu.h \
.hyp-init.nvhe.o.cmd:156  arch/arm64/include/asm/kvm_mmu.h \
.debug-sr.nvhe.o.cmd:1276  arch/arm64/include/asm/kvm_mmu.h \
.page_alloc.nvhe.o.cmd:1277  arch/arm64/include/asm/kvm_mmu.h \
.psci-relay.nvhe.o.cmd:1275  arch/arm64/include/asm/kvm_mmu.h \
/linux/arch/x86/kvm/vmx/
vmx.c:2926  struct kvm_mmu *mmu = vcpu->arch.mmu; in vmx_flush_tlb_current()
2963 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in vmx_ept_load_pdptrs()
2978 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in ept_save_pdptrs()
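
The vmx.c hits show the mmu/walk_mmu split in practice: the TLB-flush path operates on the active MMU, while the PDPTR paths go through walk_mmu, whose pdptrs[] array is kept in sync with the four GUEST_PDPTR fields in the VMCS. A hedged sketch of the save direction in VMX kernel context, assuming PAE paging is in effect; the in-tree ept_save_pdptrs() additionally marks the PDPTR register state as available/dirty:

    static void ept_save_pdptrs(struct kvm_vcpu *vcpu)
    {
            struct kvm_mmu *mmu = vcpu->arch.walk_mmu;

            /* Copy the guest's four PAE page-directory pointers out of the
             * VMCS into the MMU used for guest page-table walks. */
            mmu->pdptrs[0] = vmcs_read64(GUEST_PDPTR0);
            mmu->pdptrs[1] = vmcs_read64(GUEST_PDPTR1);
            mmu->pdptrs[2] = vmcs_read64(GUEST_PDPTR2);
            mmu->pdptrs[3] = vmcs_read64(GUEST_PDPTR3);
    }
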
/linux/arch/arm64/kvm/
.fpsimd.o.cmd:1275  arch/arm64/include/asm/kvm_mmu.h \
.pvtime.o.cmd:1274  arch/arm64/include/asm/kvm_mmu.h \
.reset.o.cmd:1279  arch/arm64/include/asm/kvm_mmu.h \
.sys_regs.o.cmd:1277  arch/arm64/include/asm/kvm_mmu.h \
.va_layout.o.cmd:1280  arch/arm64/include/asm/kvm_mmu.h \
/linux/arch/arm64/kvm/hyp/vhe/
.tlb.o.cmd:1275  arch/arm64/include/asm/kvm_mmu.h \
/linux/arch/arm64/kvm/vgic/
.vgic-its.o.cmd:1278  arch/arm64/include/asm/kvm_mmu.h \
.vgic-kvm-device.o.cmd:1274  arch/arm64/include/asm/kvm_mmu.h \
.vgic-mmio-v3.o.cmd:1277  arch/arm64/include/asm/kvm_mmu.h \

