Lines matching refs: memslot

Each entry below shows the source line number, the matching code, and the enclosing function; "argument" and "local" flag the line where memslot is declared.

369 const struct kvm_memory_slot *memslot) in kvm_flush_remote_tlbs_memslot() argument
379 kvm_flush_remote_tlbs_range(kvm, memslot->base_gfn, memslot->npages); in kvm_flush_remote_tlbs_memslot()
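
The two matches above give essentially the whole helper: a thin wrapper that flushes the remote TLBs for every gfn backed by the slot. A minimal sketch, assuming the first parameter is the struct kvm and that kvm_flush_remote_tlbs_range() takes (kvm, start_gfn, nr_pages):

    void kvm_flush_remote_tlbs_memslot(struct kvm *kvm,
                                       const struct kvm_memory_slot *memslot)
    {
        /* Flush the slot's entire gfn range on all remote TLBs. */
        kvm_flush_remote_tlbs_range(kvm, memslot->base_gfn, memslot->npages);
    }
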
970 static void kvm_destroy_dirty_bitmap(struct kvm_memory_slot *memslot) in kvm_destroy_dirty_bitmap() argument
972 if (!memslot->dirty_bitmap) in kvm_destroy_dirty_bitmap()
975 vfree(memslot->dirty_bitmap); in kvm_destroy_dirty_bitmap()
976 memslot->dirty_bitmap = NULL; in kvm_destroy_dirty_bitmap()
995 struct kvm_memory_slot *memslot; in kvm_free_memslots() local
1007 hash_for_each_safe(slots->id_hash, bkt, idnode, memslot, id_node[1]) in kvm_free_memslots()
1008 kvm_free_memslot(kvm, memslot); in kvm_free_memslots()
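
The iteration at line 1007 implies memslots are kept in an id-keyed hash table and carry more than one id_node link (presumably one per memslot set, since index [1] is named explicitly); teardown walks the table with the _safe iterator because each callback frees the node it stands on. A sketch under those assumptions:

    static void kvm_free_memslots(struct kvm *kvm, struct kvm_memslots *slots)
    {
        struct hlist_node *idnode;
        struct kvm_memory_slot *memslot;
        int bkt;

        /* _safe: kvm_free_memslot() frees the entry we are standing on. */
        hash_for_each_safe(slots->id_hash, bkt, idnode, memslot, id_node[1])
            kvm_free_memslot(kvm, memslot);
    }
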
1417 static int kvm_alloc_dirty_bitmap(struct kvm_memory_slot *memslot) in kvm_alloc_dirty_bitmap() argument
1419 unsigned long dirty_bytes = kvm_dirty_bitmap_bytes(memslot); in kvm_alloc_dirty_bitmap()
1421 memslot->dirty_bitmap = __vcalloc(2, dirty_bytes, GFP_KERNEL_ACCOUNT); in kvm_alloc_dirty_bitmap()
1422 if (!memslot->dirty_bitmap) in kvm_alloc_dirty_bitmap()
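
Note the first argument of __vcalloc(): the allocation is twice kvm_dirty_bitmap_bytes(), leaving room for a second bitmap directly behind the live one; the kvm_second_dirty_bitmap() calls at lines 2240 and 2350 use that tail as a transfer buffer. kvm_destroy_dirty_bitmap() at lines 970-976 is the vfree() counterpart. A sketch of the allocator, with the error path filled in as an assumption:

    static int kvm_alloc_dirty_bitmap(struct kvm_memory_slot *memslot)
    {
        unsigned long dirty_bytes = kvm_dirty_bitmap_bytes(memslot);

        /* Two bitmaps back to back: the live one plus a scratch buffer. */
        memslot->dirty_bitmap = __vcalloc(2, dirty_bytes, GFP_KERNEL_ACCOUNT);
        if (!memslot->dirty_bitmap)
            return -ENOMEM;    /* assumed error code */

        return 0;
    }
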
2137 int *is_dirty, struct kvm_memory_slot **memslot) in kvm_get_dirty_log() argument
2148 *memslot = NULL; in kvm_get_dirty_log()
2157 *memslot = id_to_memslot(slots, id); in kvm_get_dirty_log()
2158 if (!(*memslot) || !(*memslot)->dirty_bitmap) in kvm_get_dirty_log()
2161 kvm_arch_sync_dirty_log(kvm, *memslot); in kvm_get_dirty_log()
2163 n = kvm_dirty_bitmap_bytes(*memslot); in kvm_get_dirty_log()
2166 any = (*memslot)->dirty_bitmap[i]; in kvm_get_dirty_log()
2168 if (copy_to_user(log->dirty_bitmap, (*memslot)->dirty_bitmap, n)) in kvm_get_dirty_log()
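
Read together, the kvm_get_dirty_log() matches give the full flow: resolve the slot id to a memslot, bail out if it has no dirty bitmap, let the architecture fold any hardware dirty state into the bitmap, scan for any set bit, and copy the bitmap to userspace. A reconstruction around the matched lines (locals, locking, slot-id validation, and the exact error codes are assumptions):

    int kvm_get_dirty_log(struct kvm *kvm, struct kvm_dirty_log *log,
                          int *is_dirty, struct kvm_memory_slot **memslot)
    {
        struct kvm_memslots *slots = __kvm_memslots(kvm, log->slot >> 16);
        int i, id = (u16)log->slot;
        unsigned long n, any = 0;

        *memslot = NULL;    /* cleared up front, per line 2148 */
        *is_dirty = 0;

        /* (validation of log->slot elided) */
        *memslot = id_to_memslot(slots, id);
        if (!(*memslot) || !(*memslot)->dirty_bitmap)
            return -ENOENT;    /* assumed error code */

        kvm_arch_sync_dirty_log(kvm, *memslot);    /* pull arch dirty state */

        n = kvm_dirty_bitmap_bytes(*memslot);
        for (i = 0; !any && i < n / sizeof(long); ++i)
            any = (*memslot)->dirty_bitmap[i];

        if (copy_to_user(log->dirty_bitmap, (*memslot)->dirty_bitmap, n))
            return -EFAULT;

        *is_dirty = !!any;
        return 0;
    }
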
2202 struct kvm_memory_slot *memslot; in kvm_get_dirty_log_protect() local
2219 memslot = id_to_memslot(slots, id); in kvm_get_dirty_log_protect()
2220 if (!memslot || !memslot->dirty_bitmap) in kvm_get_dirty_log_protect()
2223 dirty_bitmap = memslot->dirty_bitmap; in kvm_get_dirty_log_protect()
2225 kvm_arch_sync_dirty_log(kvm, memslot); in kvm_get_dirty_log_protect()
2227 n = kvm_dirty_bitmap_bytes(memslot); in kvm_get_dirty_log_protect()
2240 dirty_bitmap_buffer = kvm_second_dirty_bitmap(memslot); in kvm_get_dirty_log_protect()
2256 kvm_arch_mmu_enable_log_dirty_pt_masked(kvm, memslot, in kvm_get_dirty_log_protect()
2263 kvm_flush_remote_tlbs_memslot(kvm, memslot); in kvm_get_dirty_log_protect()
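
The *_protect variant additionally re-arms write protection on the pages it reports, so the next guest write dirties them again. The second bitmap from the double-size allocation serves as the transfer buffer: dirty words are grabbed out of the live bitmap, logging is re-enabled for them via kvm_arch_mmu_enable_log_dirty_pt_masked(), and the TLBs are flushed once at the end (line 2263). A condensed sketch of that inner loop; locking and the manual-protect mode are elided, and the word-at-a-time xchg() is an assumption based on the buffer use:

    /* Inside kvm_get_dirty_log_protect(), after the slot checks: */
    unsigned long *dirty_bitmap = memslot->dirty_bitmap;
    unsigned long *dirty_bitmap_buffer = kvm_second_dirty_bitmap(memslot);
    unsigned long mask, n = kvm_dirty_bitmap_bytes(memslot);
    bool flush = false;
    gfn_t offset;
    int i;

    for (i = 0; i < n / sizeof(long); i++) {
        if (!dirty_bitmap[i])
            continue;

        flush = true;
        mask = xchg(&dirty_bitmap[i], 0);    /* grab-and-clear one word */
        dirty_bitmap_buffer[i] = mask;

        offset = i * BITS_PER_LONG;
        kvm_arch_mmu_enable_log_dirty_pt_masked(kvm, memslot,
                                                offset, mask);
    }

    if (flush)
        kvm_flush_remote_tlbs_memslot(kvm, memslot);

    if (copy_to_user(log->dirty_bitmap, dirty_bitmap_buffer, n))
        return -EFAULT;    /* assumed: buffer is what userspace sees */
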
2313 struct kvm_memory_slot *memslot; in kvm_clear_dirty_log_protect() local
2334 memslot = id_to_memslot(slots, id); in kvm_clear_dirty_log_protect()
2335 if (!memslot || !memslot->dirty_bitmap) in kvm_clear_dirty_log_protect()
2338 dirty_bitmap = memslot->dirty_bitmap; in kvm_clear_dirty_log_protect()
2342 if (log->first_page > memslot->npages || in kvm_clear_dirty_log_protect()
2343 log->num_pages > memslot->npages - log->first_page || in kvm_clear_dirty_log_protect()
2344 (log->num_pages < memslot->npages - log->first_page && (log->num_pages & 63))) in kvm_clear_dirty_log_protect()
2347 kvm_arch_sync_dirty_log(kvm, memslot); in kvm_clear_dirty_log_protect()
2350 dirty_bitmap_buffer = kvm_second_dirty_bitmap(memslot); in kvm_clear_dirty_log_protect()
2373 kvm_arch_mmu_enable_log_dirty_pt_masked(kvm, memslot, in kvm_clear_dirty_log_protect()
2380 kvm_flush_remote_tlbs_memslot(kvm, memslot); in kvm_clear_dirty_log_protect()
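
The three-way check at lines 2342-2344 validates the user-supplied clear range: first_page must lie within the slot, num_pages must fit in the remainder, and, unless the range runs exactly to the end of the slot, num_pages must be a multiple of 64 so the clear covers whole bitmap words. Worked examples, assuming a slot with npages == 1000:

    /* memslot->npages == 1000 (assumed example):
     *   first_page=0,   num_pages=1024 -> rejected (past the end)
     *   first_page=0,   num_pages=100  -> rejected (not a multiple of 64
     *                                     and does not reach the slot end)
     *   first_page=64,  num_pages=936  -> accepted (runs to the slot end,
     *                                     so 64-alignment is not required)
     *   first_page=128, num_pages=640  -> accepted (640 = 10 * 64)
     */
    if (log->first_page > memslot->npages ||
        log->num_pages > memslot->npages - log->first_page ||
        (log->num_pages < memslot->npages - log->first_page &&
         (log->num_pages & 63)))
        return -EINVAL;
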
2633 struct kvm_memory_slot *memslot = gfn_to_memslot(kvm, gfn); in kvm_is_visible_gfn() local
2635 return kvm_is_visible_memslot(memslot); in kvm_is_visible_gfn()
2641 struct kvm_memory_slot *memslot = kvm_vcpu_gfn_to_memslot(vcpu, gfn); in kvm_vcpu_is_visible_gfn() local
2643 return kvm_is_visible_memslot(memslot); in kvm_vcpu_is_visible_gfn()
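
Both visibility helpers are thin wrappers: translate the gfn to a memslot (per-VM vs. per-vCPU address space) and test it. A sketch of the pair, assuming kvm_is_visible_memslot() returns bool:

    bool kvm_is_visible_gfn(struct kvm *kvm, gfn_t gfn)
    {
        struct kvm_memory_slot *memslot = gfn_to_memslot(kvm, gfn);

        return kvm_is_visible_memslot(memslot);
    }

    bool kvm_vcpu_is_visible_gfn(struct kvm_vcpu *vcpu, gfn_t gfn)
    {
        struct kvm_memory_slot *memslot = kvm_vcpu_gfn_to_memslot(vcpu, gfn);

        return kvm_is_visible_memslot(memslot);
    }
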
3364 struct kvm_memory_slot *memslot, gfn_t gfn, in __kvm_write_guest_page() argument
3373 addr = gfn_to_hva_memslot(memslot, gfn); in __kvm_write_guest_page()
3379 mark_page_dirty_in_slot(kvm, memslot, gfn); in __kvm_write_guest_page()
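
The write path: resolve the slot-relative gfn to a host virtual address, copy into it, and record the page as dirty so dirty logging sees the write. A sketch around the matched lines; the copy step and error codes are assumptions:

    static int __kvm_write_guest_page(struct kvm *kvm,
                                      struct kvm_memory_slot *memslot,
                                      gfn_t gfn, const void *data,
                                      int offset, int len)
    {
        unsigned long addr;

        addr = gfn_to_hva_memslot(memslot, gfn);
        if (kvm_is_error_hva(addr))
            return -EFAULT;
        if (__copy_to_user((void __user *)addr + offset, data, len))
            return -EFAULT;
        mark_page_dirty_in_slot(kvm, memslot, gfn);
        return 0;
    }
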
3466 ghc->memslot = __gfn_to_memslot(slots, start_gfn); in __kvm_gfn_to_hva_cache_init()
3467 ghc->hva = gfn_to_hva_many(ghc->memslot, start_gfn, in __kvm_gfn_to_hva_cache_init()
3477 ghc->memslot = NULL; in __kvm_gfn_to_hva_cache_init()
3511 if (unlikely(!ghc->memslot)) in kvm_write_guest_offset_cached()
3517 mark_page_dirty_in_slot(kvm, ghc->memslot, gpa >> PAGE_SHIFT); in kvm_write_guest_offset_cached()
3549 if (unlikely(!ghc->memslot)) in kvm_read_guest_offset_cached()
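
The gfn_to_hva_cache matches show the caching pattern: init resolves both the memslot and the hva once for a gpa range (lines 3466-3467) and stores NULL on failure (3477); the cached read/write helpers bail early if ghc->memslot is NULL (3511, 3549), and writes dirty the page through the cached slot (3517). A hypothetical usage sketch; my_shared_state and the variable names are made up, and the helper signatures follow the (kvm, cache, ...) convention seen in the matches:

    struct my_shared_state st = {};    /* hypothetical guest-shared struct */
    struct gfn_to_hva_cache ghc;

    /* One-time translation: caches ghc->memslot and ghc->hva. */
    if (kvm_gfn_to_hva_cache_init(kvm, &ghc, gpa, sizeof(st)))
        return -EFAULT;    /* gpa not backed by a memslot */

    /* Hot path: no gfn lookup, and the write marks the page dirty. */
    return kvm_write_guest_offset_cached(kvm, &ghc, &st, 0, sizeof(st));
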
3588 const struct kvm_memory_slot *memslot, in mark_page_dirty_in_slot() argument
3600 if (memslot && kvm_slot_dirty_track_enabled(memslot)) { in mark_page_dirty_in_slot()
3601 unsigned long rel_gfn = gfn - memslot->base_gfn; in mark_page_dirty_in_slot()
3602 u32 slot = (memslot->as_id << 16) | memslot->id; in mark_page_dirty_in_slot()
3606 else if (memslot->dirty_bitmap) in mark_page_dirty_in_slot()
3607 set_bit_le(rel_gfn, memslot->dirty_bitmap); in mark_page_dirty_in_slot()
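
Lines 3600-3607 show the dispatch: dirty tracking must be enabled on the slot; the gfn is made slot-relative and the slot id packed as (as_id << 16) | id; and the "else if" at 3606 implies a preferred branch above it, presumably the dirty ring, with the legacy bitmap as fallback. A sketch under those assumptions (the dirty-ring branch is not among the matches and is assumed):

    void mark_page_dirty_in_slot(struct kvm *kvm,
                                 const struct kvm_memory_slot *memslot,
                                 gfn_t gfn)
    {
        struct kvm_vcpu *vcpu = kvm_get_running_vcpu();

        if (memslot && kvm_slot_dirty_track_enabled(memslot)) {
            unsigned long rel_gfn = gfn - memslot->base_gfn;
            u32 slot = (memslot->as_id << 16) | memslot->id;

            /* Assumed: the branch elided between lines 3602 and 3606
             * pushes (slot, rel_gfn) onto the vCPU's dirty ring. */
            if (kvm->dirty_ring_size && vcpu)
                kvm_dirty_ring_push(vcpu, slot, rel_gfn);
            else if (memslot->dirty_bitmap)
                set_bit_le(rel_gfn, memslot->dirty_bitmap);
        }
    }
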
3614 struct kvm_memory_slot *memslot; in mark_page_dirty() local
3616 memslot = gfn_to_memslot(kvm, gfn); in mark_page_dirty()
3617 mark_page_dirty_in_slot(kvm, memslot, gfn); in mark_page_dirty()
3623 struct kvm_memory_slot *memslot; in kvm_vcpu_mark_page_dirty() local
3625 memslot = kvm_vcpu_gfn_to_memslot(vcpu, gfn); in kvm_vcpu_mark_page_dirty()
3626 mark_page_dirty_in_slot(vcpu->kvm, memslot, gfn); in kvm_vcpu_mark_page_dirty()
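
Finally, the two public entry points just resolve the gfn to a memslot (again per-VM vs. per-vCPU) and defer to mark_page_dirty_in_slot(); a slot miss is harmless because the helper tolerates a NULL memslot (line 3600 checks it). Reconstructed almost verbatim from the matches:

    void mark_page_dirty(struct kvm *kvm, gfn_t gfn)
    {
        struct kvm_memory_slot *memslot = gfn_to_memslot(kvm, gfn);

        mark_page_dirty_in_slot(kvm, memslot, gfn);
    }

    void kvm_vcpu_mark_page_dirty(struct kvm_vcpu *vcpu, gfn_t gfn)
    {
        struct kvm_memory_slot *memslot = kvm_vcpu_gfn_to_memslot(vcpu, gfn);

        mark_page_dirty_in_slot(vcpu->kvm, memslot, gfn);
    }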