/arch/s390/mm/

gmap.c
      37  #define IS_GADDR_VALID(gaddr) ((gaddr) & VALID_GADDR_FLAG)                   argument
      38  #define MAKE_VALID_GADDR(gaddr) (((gaddr) & HPAGE_MASK) | VALID_GADDR_FLAG)  argument
     364  unsigned long gaddr;                                      in __gmap_unlink_by_vmaddr()  local
     516  unsigned long gaddr);
    1890  gaddr < end) {                                            in gmap_shadow_notify()
    1959  gmap_call_notifier(gmap, gaddr, gaddr + PAGE_SIZE - 1);   in ptep_notify()
    1969  gmap_call_notifier(gmap, gaddr, gaddr + HPAGE_SIZE - 1);  in pmdp_notify_gmap()
    1985  gaddr &= HPAGE_MASK;                                      in gmap_pmdp_xchg()
    2003  unsigned long gaddr;                                      in gmap_pmdp_clear()  local
    2053  unsigned long gaddr;                                      in gmap_pmdp_idte_local()  local
    [all …]
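The two macros at lines 37 and 38 tag a huge-page-aligned guest address with a validity bit. A stand-alone sketch of the idea follows; the HPAGE_SHIFT and VALID_GADDR_FLAG values are illustrative assumptions, not the kernel's definitions.

    /* Sketch of the IS_GADDR_VALID()/MAKE_VALID_GADDR() idea shown above.
     * HPAGE_SHIFT and VALID_GADDR_FLAG are assumed values for illustration only. */
    #include <stdio.h>

    #define HPAGE_SHIFT          20UL   /* assumption: 1 MiB huge pages */
    #define HPAGE_MASK           (~((1UL << HPAGE_SHIFT) - 1))
    #define VALID_GADDR_FLAG     1UL    /* assumption: a tag bit cleared by HPAGE_MASK */

    #define IS_GADDR_VALID(gaddr)    ((gaddr) & VALID_GADDR_FLAG)
    #define MAKE_VALID_GADDR(gaddr)  (((gaddr) & HPAGE_MASK) | VALID_GADDR_FLAG)

    int main(void)
    {
            unsigned long gaddr = 0x12345678UL;
            unsigned long tagged = MAKE_VALID_GADDR(gaddr);

            /* The tag lives in low bits that HPAGE_MASK clears, so the huge-page
             * frame address and the validity flag coexist in one word. */
            printf("tagged=%#lx valid=%lu\n", tagged, IS_GADDR_VALID(tagged));
            return 0;
    }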
|
/arch/s390/include/asm/

gmap.h
     111  unsigned long __gmap_translate(struct gmap *, unsigned long gaddr);
     112  int __gmap_link(struct gmap *gmap, unsigned long gaddr, unsigned long vmaddr);
     113  void __gmap_zap(struct gmap *, unsigned long gaddr);
     116  int gmap_read_table(struct gmap *gmap, unsigned long gaddr, unsigned long *val);
     132  int gmap_protect_one(struct gmap *gmap, unsigned long gaddr, int prot, unsigned long bits);
     135  unsigned long gaddr, unsigned long vmaddr);
     140  unsigned long *gmap_table_walk(struct gmap *gmap, unsigned long gaddr, int level);
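The prototypes at lines 111 and 112 are typically paired: __gmap_translate() resolves a guest address to the host virtual address backing it, and __gmap_link() then wires that mapping into the guest DAT tables (compare the __kvm_s390_fixup_fault_sync() hits in kvm-s390.c further down). A reduced sketch, with a hypothetical helper name and error handling trimmed:

    /* Sketch only (hypothetical helper): resolve a guest address and link it,
     * mirroring the __gmap_translate()/__gmap_link() pairing declared above. */
    static int example_link_gaddr(struct gmap *gmap, unsigned long gaddr)
    {
            unsigned long vmaddr;

            vmaddr = __gmap_translate(gmap, gaddr);     /* guest address -> host vmaddr */
            if (IS_ERR_VALUE(vmaddr))
                    return -EFAULT;
            return __gmap_link(gmap, gaddr, vmaddr);    /* populate the gmap DAT entry */
    }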
|
uv.h
     219  u64 gaddr;    member
     245  u64 gaddr;    member
     325  u64 gaddr;    member
|
pgtable.h
    1380  int ptep_force_prot(struct mm_struct *mm, unsigned long gaddr,
|
/arch/s390/kvm/

pv.c
      52  vmaddr = gfn_to_hva(kvm, gpa_to_gfn(gaddr));         in kvm_s390_pv_make_secure()
      64  .gaddr = gaddr,                                      in kvm_s390_pv_convert_to_secure()
      67  return kvm_s390_pv_make_secure(kvm, gaddr, &uvcb);   in kvm_s390_pv_convert_to_secure()
      87  page = gfn_to_page(kvm, gpa_to_gfn(gaddr));          in kvm_s390_pv_destroy_page()
     696  .gaddr = addr,                                       in unpack_one()
     725  uvcb.gaddr, *rc, *rrc);                              in unpack_one()
     827  u64 *gaddr, u64 buff_user_len, u16 *rc, u16 *rrc)    in kvm_s390_pv_dump_stor_state()  argument
     833  .gaddr = *gaddr,                                     in kvm_s390_pv_dump_stor_state()
     844  if (!IS_ALIGNED(*gaddr, HPAGE_SIZE))                 in kvm_s390_pv_dump_stor_state()
     884  uvcb.gaddr += HPAGE_SIZE;                            in kvm_s390_pv_dump_stor_state()
    [all …]
|
kvm-s390.h
     308  u64 *gaddr, u64 buff_user_len, u16 *rc, u16 *rrc);
     311  int kvm_s390_pv_destroy_page(struct kvm *kvm, unsigned long gaddr);
     312  int kvm_s390_pv_convert_to_secure(struct kvm *kvm, unsigned long gaddr);
     313  int kvm_s390_pv_make_secure(struct kvm *kvm, unsigned long gaddr, void *uvcb);
     464  int __kvm_s390_handle_dat_fault(struct kvm_vcpu *vcpu, gfn_t gfn, gpa_t gaddr, unsigned int flags);
     468  static inline int kvm_s390_handle_dat_fault(struct kvm_vcpu *vcpu, gpa_t gaddr, unsigned int flags)  in kvm_s390_handle_dat_fault()  argument
     470  return __kvm_s390_handle_dat_fault(vcpu, gpa_to_gfn(gaddr), gaddr, flags);  in kvm_s390_handle_dat_fault()
|
priv.c
     258  unsigned long gaddr, vmaddr;                         in handle_iske()  local
     275  gaddr = vcpu->run->s.regs.gprs[reg2] & PAGE_MASK;    in handle_iske()
     276  gaddr = kvm_s390_logical_to_effective(vcpu, gaddr);  in handle_iske()
     277  gaddr = kvm_s390_real_to_abs(vcpu, gaddr);           in handle_iske()
     278  vmaddr = gfn_to_hva(vcpu->kvm, gpa_to_gfn(gaddr));   in handle_iske()
     306  unsigned long vmaddr, gaddr;                         in handle_rrbe()  local
     322  gaddr = vcpu->run->s.regs.gprs[reg2] & PAGE_MASK;    in handle_rrbe()
     323  gaddr = kvm_s390_logical_to_effective(vcpu, gaddr);  in handle_rrbe()
     324  gaddr = kvm_s390_real_to_abs(vcpu, gaddr);           in handle_rrbe()
     325  vmaddr = gfn_to_hva(vcpu->kvm, gpa_to_gfn(gaddr));   in handle_rrbe()
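Both handle_iske() and handle_rrbe() push the guest register value through the same chain before touching host memory. A condensed sketch of that chain (helper name hypothetical, error handling dropped):

    /* Sketch of the address chain visible in handle_iske()/handle_rrbe() above. */
    static unsigned long example_gpr_to_hva(struct kvm_vcpu *vcpu, int reg2)
    {
            unsigned long gaddr;

            gaddr = vcpu->run->s.regs.gprs[reg2] & PAGE_MASK;    /* raw guest address */
            gaddr = kvm_s390_logical_to_effective(vcpu, gaddr);  /* logical -> effective */
            gaddr = kvm_s390_real_to_abs(vcpu, gaddr);           /* apply low-address prefixing */
            return gfn_to_hva(vcpu->kvm, gpa_to_gfn(gaddr));     /* guest frame -> host VA */
    }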
|
kvm-s390.c
     700  unsigned long gaddr, vmaddr;                         in kvm_arch_sync_dirty_log()  local
     708  gaddr = gfn_to_gpa(cur_gfn);                         in kvm_arch_sync_dirty_log()
    2925  if (!kvm_is_gpa_in_memslot(kvm, mop->gaddr)) {       in kvm_s390_vm_mem_op_abs()
    2987  if (!kvm_is_gpa_in_memslot(kvm, mop->gaddr)) {       in kvm_s390_vm_mem_op_cmpxchg()
    4522  gfn_t gfn = gpa_to_gfn(gaddr);                       in __kvm_s390_fixup_fault_sync()
    4529  tmp = __gmap_translate(gmap, gaddr);                 in __kvm_s390_fixup_fault_sync()
    4536  rc = __gmap_link(gmap, gaddr, vmaddr);               in __kvm_s390_fixup_fault_sync()
    4574  gpa_t gaddr = kvm_s390_get_prefix(vcpu);             in kvm_s390_mprotect_notify_prefix()  local
    4920  rc = __gmap_link(vcpu->arch.gmap, gaddr, vmaddr);    in __kvm_s390_handle_dat_fault()
    4933  gfn = gpa_to_gfn(gaddr);                             in vcpu_dat_fault_handler()
    [all …]
|
pci.c
     231  phys_addr_t gaddr;                                              in kvm_s390_pci_aif_enable()  local
     260  gaddr = page_to_phys(aibv_page) + (fib->fmt0.aibv & ~PAGE_MASK);  in kvm_s390_pci_aif_enable()
     261  fib->fmt0.aibv = gaddr;                                         in kvm_s390_pci_aif_enable()
|
intercept.c
     538  .gaddr = guest_uvcb->paddr,                                 in handle_pv_uvc()
     547  rc = kvm_s390_pv_make_secure(vcpu->kvm, uvcb.gaddr, &uvcb); in handle_pv_uvc()
|
/arch/riscv/include/asm/

insn-def.h
     190  #define HFENCE_GVMA(gaddr, vmid) \      argument
     192  __RD(0), RS1(gaddr), RS2(vmid))
     227  #define HINVAL_GVMA(gaddr, vmid) \      argument
     229  __RD(0), RS1(gaddr), RS2(vmid))
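These macros emit the hypervisor guest-TLB maintenance instructions; a typical inline-asm invocation looks like the sketch below (compare the callers in arch/riscv/kvm/). The gpa >> 2 shift follows the HFENCE.GVMA operand encoding in the RISC-V hypervisor extension; treat the exact operands as an assumption rather than a verbatim kernel quote.

    /* Sketch: flush guest-physical TLB entries for one address and VMID. */
    static inline void example_hfence_gvma_gpa(unsigned long gpa, unsigned long vmid)
    {
            asm volatile (HFENCE_GVMA(%0, %1)
                          : : "r" (gpa >> 2), "r" (vmid) : "memory");
    }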
|
/arch/s390/include/uapi/asm/

kvm.h
      62  __u64 gaddr; /* the guest address */         member
     242  __u64 gaddr; /* For dump storage state */    member
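The first hit is the gaddr field of struct kvm_s390_mem_op, the payload of the KVM_S390_MEM_OP ioctl. A minimal user-space sketch, assuming an already-created vcpu file descriptor (s390 only; error handling omitted):

    /* Sketch: read one page of guest memory at gaddr via KVM_S390_MEM_OP. */
    #include <stdint.h>
    #include <sys/ioctl.h>
    #include <linux/kvm.h>

    static int read_guest_page(int vcpu_fd, uint64_t gaddr, void *buf)
    {
            struct kvm_s390_mem_op mop = {
                    .gaddr = gaddr,                        /* the guest address */
                    .size  = 4096,
                    .op    = KVM_S390_MEMOP_LOGICAL_READ,
                    .buf   = (uint64_t)(uintptr_t)buf,     /* user buffer to fill */
            };

            return ioctl(vcpu_fd, KVM_S390_MEM_OP, &mop);
    }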
|