Lines Matching refs:pte_index

69 			     unsigned long *rmap, long pte_index, int realmode)  in kvmppc_add_revmap_chain()  argument
84 tail->forw = pte_index; in kvmppc_add_revmap_chain()
85 head->back = pte_index; in kvmppc_add_revmap_chain()
87 rev->forw = rev->back = pte_index; in kvmppc_add_revmap_chain()
89 pte_index | KVMPPC_RMAP_PRESENT | KVMPPC_RMAP_HPT; in kvmppc_add_revmap_chain()
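The kvmppc_add_revmap_chain() matches above link a newly inserted HPTE into the per-page reverse-map chain: forw and back hold HPT indices forming a circular doubly linked list, and the rmap word records the chain head plus KVMPPC_RMAP_PRESENT | KVMPPC_RMAP_HPT. Below is a minimal userspace sketch of that linking pattern; the types and flag values are simplified stand-ins, not the kernel's definitions, and it assumes 64-bit unsigned long as on ppc64.

#include <stdio.h>

#define RMAP_PRESENT	(1UL << 62)		/* stand-in for KVMPPC_RMAP_PRESENT */
#define RMAP_INDEX	0x0000ffffffffffffUL	/* stand-in for KVMPPC_RMAP_INDEX */

struct revmap_entry { unsigned long forw, back; };

static struct revmap_entry rev[16];		/* one entry per HPTE slot */

static void add_to_chain(unsigned long *rmap, unsigned long pte_index)
{
	if (*rmap & RMAP_PRESENT) {
		unsigned long head = *rmap & RMAP_INDEX;
		unsigned long tail = rev[head].back;

		/* splice the new entry in between the old tail and the head */
		rev[tail].forw = pte_index;
		rev[head].back = pte_index;
		rev[pte_index].forw = head;
		rev[pte_index].back = tail;
	} else {
		/* empty chain: the new entry points at itself */
		rev[pte_index].forw = rev[pte_index].back = pte_index;
	}
	*rmap = (*rmap & ~RMAP_INDEX) | pte_index | RMAP_PRESENT;
}

int main(void)
{
	unsigned long rmap = 0;

	add_to_chain(&rmap, 3);
	add_to_chain(&rmap, 7);
	printf("head=%lu forw=%lu back=%lu\n", rmap & RMAP_INDEX,
	       rev[rmap & RMAP_INDEX].forw, rev[rmap & RMAP_INDEX].back);
	return 0;
}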
147 static void remove_revmap_chain(struct kvm *kvm, long pte_index, in remove_revmap_chain() argument
170 if (head == pte_index) { in remove_revmap_chain()
172 if (head == pte_index) in remove_revmap_chain()
185 long pte_index, unsigned long pteh, unsigned long ptel, in kvmppc_do_h_enter() argument
303 if (pte_index >= kvmppc_hpt_npte(&kvm->arch.hpt)) in kvmppc_do_h_enter()
306 pte_index &= ~7UL; in kvmppc_do_h_enter()
307 hpte = (__be64 *)(kvm->arch.hpt.virt + (pte_index << 4)); in kvmppc_do_h_enter()
336 pte_index += i; in kvmppc_do_h_enter()
338 hpte = (__be64 *)(kvm->arch.hpt.virt + (pte_index << 4)); in kvmppc_do_h_enter()
355 rev = &kvm->arch.hpt.rev[pte_index]; in kvmppc_do_h_enter()
376 kvmppc_add_revmap_chain(kvm, rev, rmap, pte_index, in kvmppc_do_h_enter()
396 *pte_idx_ret = pte_index; in kvmppc_do_h_enter()
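The kvmppc_do_h_enter() matches show the indexing convention used throughout this file: an HPTE is two 64-bit doublewords, so pte_index << 4 is its byte offset in the HPT, pte_index is bounds-checked against kvmppc_hpt_npte(), and when the caller does not demand an exact slot the low three bits are cleared (pte_index &= ~7UL) so the whole 8-entry HPTE group can be searched before pte_index += i selects the chosen slot. A standalone sketch of just that arithmetic, using a made-up HPT size:

#include <stdio.h>

#define HPTE_BYTES	16UL	/* two 64-bit doublewords per HPTE */
#define HPTES_PER_GROUP	8UL

int main(void)
{
	unsigned long npte = 1UL << 18;		/* hypothetical HPT with 2^18 entries */
	unsigned long pte_index = 0x12345;

	if (pte_index >= npte)			/* bounds check, as in kvmppc_do_h_enter() */
		return 1;

	unsigned long group = pte_index & ~(HPTES_PER_GROUP - 1);	/* pte_index &= ~7UL */

	/* without an exact-slot request, any free slot i in the 8-entry group may be used */
	for (unsigned long i = 0; i < HPTES_PER_GROUP; ++i)
		printf("slot %lu -> pte_index %#lx, byte offset %#lx\n",
		       i, group + i, (group + i) * HPTE_BYTES);	/* == pte_index << 4 */
	return 0;
}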
402 long pte_index, unsigned long pteh, unsigned long ptel) in kvmppc_h_enter() argument
404 return kvmppc_do_h_enter(vcpu->kvm, flags, pte_index, pteh, ptel, in kvmppc_h_enter()
486 unsigned long pte_index, unsigned long avpn, in kvmppc_do_h_remove() argument
496 if (pte_index >= kvmppc_hpt_npte(&kvm->arch.hpt)) in kvmppc_do_h_remove()
498 hpte = (__be64 *)(kvm->arch.hpt.virt + (pte_index << 4)); in kvmppc_do_h_remove()
514 rev = real_vmalloc_addr(&kvm->arch.hpt.rev[pte_index]); in kvmppc_do_h_remove()
518 rb = compute_tlbie_rb(v, pte_r, pte_index); in kvmppc_do_h_remove()
529 remove_revmap_chain(kvm, pte_index, rev, v, in kvmppc_do_h_remove()
548 unsigned long pte_index, unsigned long avpn) in kvmppc_h_remove() argument
550 return kvmppc_do_h_remove(vcpu->kvm, flags, pte_index, avpn, in kvmppc_h_remove()
562 unsigned long flags, req, pte_index, rcbits; in kvmppc_h_bulk_remove() local
575 pte_index = args[j]; in kvmppc_h_bulk_remove()
576 flags = pte_index >> 56; in kvmppc_h_bulk_remove()
577 pte_index &= ((1ul << 56) - 1); in kvmppc_h_bulk_remove()
585 pte_index >= kvmppc_hpt_npte(&kvm->arch.hpt)) { in kvmppc_h_bulk_remove()
587 args[j] = ((0xa0 | flags) << 56) + pte_index; in kvmppc_h_bulk_remove()
591 hp = (__be64 *) (kvm->arch.hpt.virt + (pte_index << 4)); in kvmppc_h_bulk_remove()
623 args[j] = ((0x90 | flags) << 56) + pte_index; in kvmppc_h_bulk_remove()
627 args[j] = ((0x80 | flags) << 56) + pte_index; in kvmppc_h_bulk_remove()
628 rev = real_vmalloc_addr(&kvm->arch.hpt.rev[pte_index]); in kvmppc_h_bulk_remove()
643 tlbrb[n] = compute_tlbie_rb(hp0, hp1, pte_index); in kvmppc_h_bulk_remove()
659 pte_index = args[j] & ((1ul << 56) - 1); in kvmppc_h_bulk_remove()
662 remove_revmap_chain(kvm, pte_index, rev, in kvmppc_h_bulk_remove()
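kvmppc_h_bulk_remove() packs each request into one 64-bit args[] slot: the top byte carries the flags, the low 56 bits the pte_index, and on return the top byte is replaced with a status code (0x80, 0x90 or 0xa0 ORed with the flag bits in this code). A minimal sketch of that packing and unpacking, with invented values:

#include <stdio.h>

#define PTE_INDEX_MASK	((1UL << 56) - 1)

/* pack an 8-bit flags field and a pte_index into one hcall argument slot */
static unsigned long pack(unsigned long flags, unsigned long pte_index)
{
	return (flags << 56) | (pte_index & PTE_INDEX_MASK);
}

int main(void)
{
	unsigned long arg = pack(0x4, 0x1234);		/* hypothetical request slot */

	unsigned long flags = arg >> 56;		/* as in kvmppc_h_bulk_remove() */
	unsigned long pte_index = arg & PTE_INDEX_MASK;

	/* report success for this entry, echoing the flag bits back */
	arg = ((0x80 | flags) << 56) + pte_index;

	printf("flags=%#lx pte_index=%#lx result=%#lx\n", flags, pte_index, arg);
	return 0;
}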
675 unsigned long pte_index, unsigned long avpn) in kvmppc_h_protect() argument
685 if (pte_index >= kvmppc_hpt_npte(&kvm->arch.hpt)) in kvmppc_h_protect()
688 hpte = (__be64 *)(kvm->arch.hpt.virt + (pte_index << 4)); in kvmppc_h_protect()
708 rev = real_vmalloc_addr(&kvm->arch.hpt.rev[pte_index]); in kvmppc_h_protect()
727 rb = compute_tlbie_rb(v, r, pte_index); in kvmppc_h_protect()
746 unsigned long pte_index) in kvmppc_h_read() argument
756 if (pte_index >= kvmppc_hpt_npte(&kvm->arch.hpt)) in kvmppc_h_read()
759 pte_index &= ~3; in kvmppc_h_read()
762 rev = real_vmalloc_addr(&kvm->arch.hpt.rev[pte_index]); in kvmppc_h_read()
763 for (i = 0; i < n; ++i, ++pte_index) { in kvmppc_h_read()
764 hpte = (__be64 *)(kvm->arch.hpt.virt + (pte_index << 4)); in kvmppc_h_read()
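When several entries are requested, kvmppc_h_read() rounds pte_index down to a multiple of four and copies out n consecutive HPTEs, two doublewords each, starting at byte offset pte_index << 4. A toy version of that loop over a fake in-memory hash table:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t hpt[8][2] = { { 0 } };		/* fake HPT: 8 entries of two doublewords */
	uint64_t out[4][2];
	unsigned long pte_index = 6;
	int n = 4;				/* four entries requested */

	pte_index &= ~3UL;			/* align down, as in kvmppc_h_read() */
	for (int i = 0; i < n; ++i, ++pte_index) {
		uint64_t *hpte = hpt[pte_index];	/* byte offset is pte_index << 4 */
		out[i][0] = hpte[0];
		out[i][1] = hpte[1];
	}
	printf("returned entries %lu..%lu, first word %#llx\n",
	       pte_index - n, pte_index - 1, (unsigned long long)out[0][0]);
	return 0;
}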
787 unsigned long pte_index) in kvmppc_h_clear_ref() argument
798 if (pte_index >= kvmppc_hpt_npte(&kvm->arch.hpt)) in kvmppc_h_clear_ref()
801 rev = real_vmalloc_addr(&kvm->arch.hpt.rev[pte_index]); in kvmppc_h_clear_ref()
802 hpte = (__be64 *)(kvm->arch.hpt.virt + (pte_index << 4)); in kvmppc_h_clear_ref()
818 kvmppc_clear_ref_hpte(kvm, hpte, pte_index); in kvmppc_h_clear_ref()
836 unsigned long pte_index) in kvmppc_h_clear_mod() argument
846 if (pte_index >= kvmppc_hpt_npte(&kvm->arch.hpt)) in kvmppc_h_clear_mod()
849 rev = real_vmalloc_addr(&kvm->arch.hpt.rev[pte_index]); in kvmppc_h_clear_mod()
850 hpte = (__be64 *)(kvm->arch.hpt.virt + (pte_index << 4)); in kvmppc_h_clear_mod()
866 kvmppc_invalidate_hpte(kvm, hpte, pte_index); in kvmppc_h_clear_mod()
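The H_CLEAR_REF and H_CLEAR_MOD matches locate the HPTE for pte_index and clear the referenced (R) or changed (C) bit in its second doubleword before flushing the old translation via kvmppc_clear_ref_hpte()/kvmppc_invalidate_hpte(). The sketch below shows only the bit manipulation on a fake HPTE pair; the bit positions are stand-ins rather than the architected values, and the TLB flush is omitted.

#include <stdint.h>
#include <stdio.h>

#define HPTE_R_REF	(1ULL << 8)	/* stand-in for the referenced (R) bit */
#define HPTE_R_CHG	(1ULL << 7)	/* stand-in for the changed (C) bit */

int main(void)
{
	/* fake HPTE: word 0 is the valid/AVPN doubleword, word 1 holds the
	 * real page number, protection bits, and the R and C bits */
	uint64_t hpte[2] = { 0x8000000000000001ULL,
			     0x00000000000ab000ULL | HPTE_R_REF | HPTE_R_CHG };

	hpte[1] &= ~HPTE_R_REF;		/* H_CLEAR_REF: drop R, keep everything else */
	printf("after clear_ref: %#llx\n", (unsigned long long)hpte[1]);

	hpte[1] &= ~HPTE_R_CHG;		/* H_CLEAR_MOD: drop C */
	printf("after clear_mod: %#llx\n", (unsigned long long)hpte[1]);
	return 0;
}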
1017 unsigned long pte_index) in kvmppc_invalidate_hpte() argument
1029 rb = compute_tlbie_rb(hp0, hp1, pte_index); in kvmppc_invalidate_hpte()
1035 unsigned long pte_index) in kvmppc_clear_ref_hpte() argument
1047 rb = compute_tlbie_rb(hp0, hp1, pte_index); in kvmppc_clear_ref_hpte()
1211 index = cache_entry->pte_index; in kvmppc_hpte_hv_fault()
1287 cache_entry->pte_index = index; in kvmppc_hpte_hv_fault()
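The final two matches save and reload pte_index through a cache entry, so kvmppc_hpte_hv_fault() can reuse the HPTE slot it found on an earlier fault instead of searching the hash table again. A tiny illustration of that idea with a hypothetical single-slot cache; every name here except pte_index is invented.

#include <stdio.h>

/* hypothetical one-slot cache: remember which HPTE served a guest address */
struct hpte_cache_entry {
	unsigned long eaddr;		/* guest effective address (invented field) */
	long pte_index;			/* HPT slot found on the previous fault */
	int valid;
};

static long slow_hpt_search(unsigned long eaddr)
{
	return (long)(eaddr >> 12) & 0xff;	/* stand-in for the real HPT search */
}

static long lookup(struct hpte_cache_entry *c, unsigned long eaddr)
{
	if (c->valid && c->eaddr == eaddr)
		return c->pte_index;		/* index = cache_entry->pte_index */

	long index = slow_hpt_search(eaddr);
	c->eaddr = eaddr;
	c->pte_index = index;			/* cache_entry->pte_index = index */
	c->valid = 1;
	return index;
}

int main(void)
{
	struct hpte_cache_entry cache = { 0 };

	printf("first fault:  index %ld\n", lookup(&cache, 0x3000));
	printf("repeat fault: index %ld\n", lookup(&cache, 0x3000));
	return 0;
}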