Lines matching refs: slots
507 struct kvm_memslots *slots; in __kvm_handle_hva_range() local
518 slots = __kvm_memslots(kvm, i); in __kvm_handle_hva_range()
519 kvm_for_each_memslot(slot, slots) { in __kvm_handle_hva_range()
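
These three lines are the whole-VM walk in __kvm_handle_hva_range(): fetch each address space's memslots, then visit every active slot. A minimal userspace sketch of that double loop; AS_NUM, struct memslots and the slot layout are illustrative stand-ins, not the kernel's definitions.

#include <stdio.h>

#define AS_NUM    2   /* stand-in for KVM_ADDRESS_SPACE_NUM */
#define SLOTS_MAX 4

struct memslot { unsigned long base_gfn, npages; };

struct memslots {
    int used_slots;
    struct memslot slot[SLOTS_MAX];
};

int main(void)
{
    struct memslots as[AS_NUM] = {
        { .used_slots = 2, .slot = { { 0x000, 16 }, { 0x100, 32 } } },
        { .used_slots = 1, .slot = { { 0x200,  8 } } },
    };

    for (int i = 0; i < AS_NUM; i++)                /* every address space */
        for (int j = 0; j < as[i].used_slots; j++)  /* every active slot */
            printf("as %d slot %d: gfn 0x%lx, %lu pages\n",
                   i, j, as[i].slot[j].base_gfn, as[i].slot[j].npages);
    return 0;
}
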
857 struct kvm_memslots *slots; in kvm_alloc_memslots() local
859 slots = kvzalloc(sizeof(struct kvm_memslots), GFP_KERNEL_ACCOUNT); in kvm_alloc_memslots()
860 if (!slots) in kvm_alloc_memslots()
864 slots->id_to_index[i] = -1; in kvm_alloc_memslots()
866 return slots; in kvm_alloc_memslots()
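
kvm_alloc_memslots() (lines 857-866) zero-allocates the container and marks every slot id absent in the id-to-index map. The same shape in a standalone sketch, with calloc() standing in for kvzalloc() and SLOTS_NUM an assumed bound.

#include <stdlib.h>

#define SLOTS_NUM 32  /* stand-in for KVM_MEM_SLOTS_NUM */

struct memslots {
    unsigned long long generation;
    short id_to_index[SLOTS_NUM];  /* slot id -> array index, -1 if absent */
};

static struct memslots *alloc_memslots(void)
{
    struct memslots *slots = calloc(1, sizeof(*slots));

    if (!slots)
        return NULL;
    for (int i = 0; i < SLOTS_NUM; i++)
        slots->id_to_index[i] = -1;  /* no memslot with this id yet */
    return slots;
}

int main(void)
{
    struct memslots *slots = alloc_memslots();

    if (!slots)
        return 1;
    free(slots);
    return 0;
}
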
888 static void kvm_free_memslots(struct kvm *kvm, struct kvm_memslots *slots) in kvm_free_memslots() argument
892 if (!slots) in kvm_free_memslots()
895 kvm_for_each_memslot(memslot, slots) in kvm_free_memslots()
898 kvfree(slots); in kvm_free_memslots()
1065 struct kvm_memslots *slots = kvm_alloc_memslots(); in kvm_create_vm() local
1067 if (!slots) in kvm_create_vm()
1070 slots->generation = i; in kvm_create_vm()
1071 rcu_assign_pointer(kvm->memslots[i], slots); in kvm_create_vm()
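
In kvm_create_vm() each address space gets a fresh memslots object; the generation is seeded with the address-space index so the spaces never share a generation number, and the pointer is published for lock-free readers. A sketch with rcu_assign_pointer() modeled as a C11 release store; struct fields and AS_NUM are assumptions.

#include <stdatomic.h>
#include <stdlib.h>

#define AS_NUM 2   /* stand-in for KVM_ADDRESS_SPACE_NUM */

struct memslots { unsigned long long generation; };

struct vm { _Atomic(struct memslots *) memslots[AS_NUM]; };

static int vm_init_slots(struct vm *vm)
{
    for (int i = 0; i < AS_NUM; i++) {
        struct memslots *slots = calloc(1, sizeof(*slots));

        if (!slots)
            return -1;
        slots->generation = i;   /* distinct seed per address space */
        atomic_store_explicit(&vm->memslots[i], slots,
                              memory_order_release);  /* publish */
    }
    return 0;
}

int main(void)
{
    struct vm vm = { 0 };

    return vm_init_slots(&vm) ? 1 : 0;
}
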
1266 static inline void kvm_memslot_delete(struct kvm_memslots *slots, in kvm_memslot_delete() argument
1269 struct kvm_memory_slot *mslots = slots->memslots; in kvm_memslot_delete()
1272 if (WARN_ON(slots->id_to_index[memslot->id] == -1)) in kvm_memslot_delete()
1275 slots->used_slots--; in kvm_memslot_delete()
1277 if (atomic_read(&slots->last_used_slot) >= slots->used_slots) in kvm_memslot_delete()
1278 atomic_set(&slots->last_used_slot, 0); in kvm_memslot_delete()
1280 for (i = slots->id_to_index[memslot->id]; i < slots->used_slots; i++) { in kvm_memslot_delete()
1282 slots->id_to_index[mslots[i].id] = i; in kvm_memslot_delete()
1285 slots->id_to_index[memslot->id] = -1; in kvm_memslot_delete()
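
kvm_memslot_delete() (lines 1266-1285) removes one entry from the array, shifts the tail left one position while keeping id_to_index in sync, and resets the last_used_slot hint if it now points past the shrunken array. A simplified sketch; it omits the kernel's final copy of the invalidated slot into the now-unused tail entry, and all types are stand-ins.

#define SLOTS_NUM 8

struct memslot { int id; unsigned long base_gfn; };

struct memslots {
    int used_slots;
    int last_used_slot;
    short id_to_index[SLOTS_NUM];
    struct memslot slot[SLOTS_NUM];
};

static void memslot_delete(struct memslots *s, int id)
{
    int i = s->id_to_index[id];

    if (i == -1)
        return;                       /* id not present */
    s->used_slots--;
    if (s->last_used_slot >= s->used_slots)
        s->last_used_slot = 0;        /* cached hint is now stale */
    for (; i < s->used_slots; i++) {  /* shift the tail left by one */
        s->slot[i] = s->slot[i + 1];
        s->id_to_index[s->slot[i].id] = i;
    }
    s->id_to_index[id] = -1;
}

int main(void)
{
    struct memslots s = { .used_slots = 2, .last_used_slot = 1 };

    for (int i = 0; i < SLOTS_NUM; i++)
        s.id_to_index[i] = -1;
    s.slot[0] = (struct memslot){ .id = 1, .base_gfn = 0x100 };
    s.slot[1] = (struct memslot){ .id = 0, .base_gfn = 0x000 };
    s.id_to_index[1] = 0;
    s.id_to_index[0] = 1;

    memslot_delete(&s, 1);   /* id 1 goes away, id 0 shifts to index 0 */
    return (s.used_slots == 1 && s.id_to_index[0] == 0 &&
            s.id_to_index[1] == -1) ? 0 : 1;
}
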
1292 static inline int kvm_memslot_insert_back(struct kvm_memslots *slots) in kvm_memslot_insert_back() argument
1294 return slots->used_slots++; in kvm_memslot_insert_back()
1304 static inline int kvm_memslot_move_backward(struct kvm_memslots *slots, in kvm_memslot_move_backward() argument
1307 struct kvm_memory_slot *mslots = slots->memslots; in kvm_memslot_move_backward()
1310 if (WARN_ON_ONCE(slots->id_to_index[memslot->id] == -1) || in kvm_memslot_move_backward()
1311 WARN_ON_ONCE(!slots->used_slots)) in kvm_memslot_move_backward()
1319 for (i = slots->id_to_index[memslot->id]; i < slots->used_slots - 1; i++) { in kvm_memslot_move_backward()
1327 slots->id_to_index[mslots[i].id] = i; in kvm_memslot_move_backward()
1339 static inline int kvm_memslot_move_forward(struct kvm_memslots *slots, in kvm_memslot_move_forward() argument
1343 struct kvm_memory_slot *mslots = slots->memslots; in kvm_memslot_move_forward()
1354 slots->id_to_index[mslots[i].id] = i; in kvm_memslot_move_forward()
1400 static void update_memslots(struct kvm_memslots *slots, in update_memslots() argument
1407 kvm_memslot_delete(slots, memslot); in update_memslots()
1410 i = kvm_memslot_insert_back(slots); in update_memslots()
1412 i = kvm_memslot_move_backward(slots, memslot); in update_memslots()
1413 i = kvm_memslot_move_forward(slots, memslot, i); in update_memslots()
1419 slots->memslots[i] = *memslot; in update_memslots()
1420 slots->id_to_index[memslot->id] = i; in update_memslots()
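
update_memslots() (lines 1400-1420) dispatches on the change: DELETE goes through kvm_memslot_delete(); CREATE appends with kvm_memslot_insert_back(); MOVE/FLAGS re-sort the existing entry with the move_backward/move_forward pair; then the new contents land at index i and id_to_index is updated. A compressed sketch of the re-sort step, swapping entries instead of shifting as the kernel does, on an array assumed sorted by base_gfn in descending order.

#include <stdio.h>

#define SLOTS_NUM 8

struct memslot { int id; unsigned long base_gfn; };

struct memslots {
    int used_slots;
    short id_to_index[SLOTS_NUM];
    struct memslot slot[SLOTS_NUM];
};

static void swap_slots(struct memslots *s, int a, int b)
{
    struct memslot tmp = s->slot[a];

    s->slot[a] = s->slot[b];
    s->slot[b] = tmp;
    s->id_to_index[s->slot[a].id] = a;
    s->id_to_index[s->slot[b].id] = b;
}

/* Restore descending gfn order after the slot at index i changed. */
static int resort_changed(struct memslots *s, int i)
{
    while (i + 1 < s->used_slots &&           /* "move backward" */
           s->slot[i].base_gfn < s->slot[i + 1].base_gfn) {
        swap_slots(s, i, i + 1);
        i++;
    }
    while (i > 0 &&                           /* "move forward" */
           s->slot[i].base_gfn > s->slot[i - 1].base_gfn) {
        swap_slots(s, i, i - 1);
        i--;
    }
    return i;
}

int main(void)
{
    struct memslots s = { .used_slots = 3 };
    struct memslot init[3] = {
        { .id = 0, .base_gfn = 0x300 },
        { .id = 1, .base_gfn = 0x200 },
        { .id = 2, .base_gfn = 0x100 },
    };

    for (int i = 0; i < 3; i++) {
        s.slot[i] = init[i];
        s.id_to_index[init[i].id] = i;
    }
    s.slot[2].base_gfn = 0x400;   /* a MOVE: id 2 now has the largest gfn */
    printf("id 2 resorted to index %d\n", resort_changed(&s, 2)); /* 0 */
    return 0;
}
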
1439 int as_id, struct kvm_memslots *slots) in install_new_memslots() argument
1445 slots->generation = gen | KVM_MEMSLOT_GEN_UPDATE_IN_PROGRESS; in install_new_memslots()
1461 rcu_assign_pointer(kvm->memslots[as_id], slots); in install_new_memslots()
1479 gen = slots->generation & ~KVM_MEMSLOT_GEN_UPDATE_IN_PROGRESS; in install_new_memslots()
1492 slots->generation = gen; in install_new_memslots()
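
install_new_memslots() (lines 1439-1492) wraps every publish in a generation handshake: the incoming set first carries the old generation with KVM_MEMSLOT_GEN_UPDATE_IN_PROGRESS set, old readers drain, then a strictly larger final generation lands, advanced by the number of address spaces so generations stay unique across spaces. A sketch of that protocol; the flag bit is illustrative and the SRCU wait is only a comment.

#include <stdatomic.h>
#include <stdio.h>

#define GEN_UPDATE_IN_PROGRESS (1ULL << 63)  /* illustrative flag bit */
#define AS_NUM 2

struct memslots { unsigned long long generation; };

static _Atomic(struct memslots *) active;

/* Publish 'new' and return the previous set. */
static struct memslots *install(struct memslots *new)
{
    struct memslots *old = atomic_load(&active);
    unsigned long long gen = old->generation;

    /* readers sampling the generation now see "update in progress" */
    new->generation = gen | GEN_UPDATE_IN_PROGRESS;
    atomic_store_explicit(&active, new, memory_order_release);

    /* ...wait for old readers here (synchronize_srcu in the kernel)... */

    /* drop the in-progress flag and advance past every address space */
    new->generation = (new->generation & ~GEN_UPDATE_IN_PROGRESS) + AS_NUM;
    return old;
}

int main(void)
{
    struct memslots a = { .generation = 0 }, b;

    atomic_init(&active, &a);
    install(&b);
    printf("new generation: %llu\n", b.generation);  /* prints 2 */
    return 0;
}
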
1497 static size_t kvm_memslots_size(int slots) in kvm_memslots_size() argument
1500 (sizeof(struct kvm_memory_slot) * slots); in kvm_memslots_size()
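
kvm_memslots_size() exists because the slot array sits at the tail of struct kvm_memslots, so the allocation size scales with the slot count. The same flexible-array-member pattern in miniature; the field names are stand-ins.

#include <stdlib.h>

struct memslot { unsigned long base_gfn, npages; };

struct memslots {
    unsigned long long generation;
    int used_slots;
    struct memslot slot[];   /* flexible array member */
};

static size_t memslots_size(int n)
{
    return sizeof(struct memslots) + sizeof(struct memslot) * n;
}

int main(void)
{
    /* room for exactly 3 slots, no over-allocation */
    struct memslots *s = calloc(1, memslots_size(3));

    if (!s)
        return 1;
    s->used_slots = 3;
    free(s);
    return 0;
}
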
1517 struct kvm_memslots *slots; in kvm_dup_memslots() local
1525 slots = kvzalloc(new_size, GFP_KERNEL_ACCOUNT); in kvm_dup_memslots()
1526 if (likely(slots)) in kvm_dup_memslots()
1527 kvm_copy_memslots(slots, old); in kvm_dup_memslots()
1529 return slots; in kvm_dup_memslots()
1538 struct kvm_memslots *slots; in kvm_set_memslot() local
1557 slots = kvm_dup_memslots(__kvm_memslots(kvm, as_id), change); in kvm_set_memslot()
1558 if (!slots) { in kvm_set_memslot()
1568 slot = id_to_memslot(slots, new->id); in kvm_set_memslot()
1576 slots = install_new_memslots(kvm, as_id, slots); in kvm_set_memslot()
1596 kvm_copy_memslots(slots, __kvm_memslots(kvm, as_id)); in kvm_set_memslot()
1606 slot = id_to_memslot(slots, new->id); in kvm_set_memslot()
1623 update_memslots(slots, new, change); in kvm_set_memslot()
1624 slots = install_new_memslots(kvm, as_id, slots); in kvm_set_memslot()
1632 kvfree(slots); in kvm_set_memslot()
1637 slot = id_to_memslot(slots, new->id); in kvm_set_memslot()
1639 slots = install_new_memslots(kvm, as_id, slots); in kvm_set_memslot()
1643 kvfree(slots); in kvm_set_memslot()
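
Lines 1538-1643 are kvm_set_memslot(): duplicate the active set with kvm_dup_memslots(), for DELETE/MOVE first install an intermediate copy with the old slot invalidated, then apply update_memslots() and install again, freeing whichever copy lost. A heavily reduced single-phase sketch of that copy-update-publish cycle: no SRCU, no intermediate invalid step, simplified types, and a plain pointer where the kernel uses rcu_assign_pointer().

#include <stdlib.h>
#include <string.h>

#define SLOTS_MAX 8

struct memslot { int id; unsigned long base_gfn, npages; unsigned flags; };

struct memslots {
    unsigned long long generation;
    int used_slots;
    struct memslot slot[SLOTS_MAX];
};

static struct memslots *active;   /* published set (RCU in the kernel) */

static struct memslots *dup_memslots(const struct memslots *old)
{
    struct memslots *new = malloc(sizeof(*new));

    if (new)
        memcpy(new, old, sizeof(*new));  /* kvm_copy_memslots() */
    return new;
}

/* Replace the slot with new->id (or append it) in a private copy, then
 * swap the copy in and free the old set. */
static int set_memslot(const struct memslot *new)
{
    struct memslots *slots = dup_memslots(active);
    struct memslots *old = active;
    int i;

    if (!slots)
        return -1;
    for (i = 0; i < slots->used_slots; i++)
        if (slots->slot[i].id == new->id)
            break;
    if (i == slots->used_slots) {
        if (i == SLOTS_MAX) {
            free(slots);
            return -1;
        }
        slots->used_slots++;         /* CREATE: append */
    }
    slots->slot[i] = *new;           /* update_memslots(), much reduced */
    slots->generation++;             /* install_new_memslots(), ditto */
    active = slots;                  /* publish the new set */
    free(old);                       /* old set is now unreachable */
    return 0;
}

int main(void)
{
    struct memslot s0 = { .id = 0, .base_gfn = 0x100, .npages = 16 };
    int ret;

    active = calloc(1, sizeof(*active));
    if (!active)
        return 1;
    ret = set_memslot(&s0);
    free(active);
    return ret ? 1 : 0;
}
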
1826 struct kvm_memslots *slots; in kvm_get_dirty_log() local
1843 slots = __kvm_memslots(kvm, as_id); in kvm_get_dirty_log()
1844 *memslot = id_to_memslot(slots, id); in kvm_get_dirty_log()
1888 struct kvm_memslots *slots; in kvm_get_dirty_log_protect() local
1905 slots = __kvm_memslots(kvm, as_id); in kvm_get_dirty_log_protect()
1906 memslot = id_to_memslot(slots, id); in kvm_get_dirty_log_protect()
1999 struct kvm_memslots *slots; in kvm_clear_dirty_log_protect() local
2020 slots = __kvm_memslots(kvm, as_id); in kvm_clear_dirty_log_protect()
2021 memslot = id_to_memslot(slots, id); in kvm_clear_dirty_log_protect()
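
kvm_get_dirty_log(), kvm_get_dirty_log_protect() and kvm_clear_dirty_log_protect() all begin the same way: split the user-supplied slot word into an address-space id (high 16 bits) and a slot id (low 16 bits), validate both, then resolve the slot with id_to_memslot(). A sketch of that decode; the bound values are stand-ins.

#include <stdint.h>
#include <stdio.h>

#define AS_NUM       2     /* stand-in for KVM_ADDRESS_SPACE_NUM */
#define USER_SLOTS 509     /* stand-in for KVM_USER_MEM_SLOTS */

/* Split the packed slot word: high 16 bits select the address space,
 * low 16 bits the slot id within it. */
static int decode_slot(uint32_t packed, int *as_id, int *id)
{
    *as_id = packed >> 16;
    *id = (uint16_t)packed;
    if (*as_id >= AS_NUM || *id >= USER_SLOTS)
        return -1;         /* -EINVAL in the kernel */
    return 0;
}

int main(void)
{
    int as_id, id;

    if (decode_slot((1u << 16) | 7, &as_id, &id))
        return 1;
    printf("as %d, slot %d\n", as_id, id);   /* as 1, slot 7 */
    return 0;
}
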
2094 struct kvm_memslots *slots = kvm_vcpu_memslots(vcpu); in kvm_vcpu_gfn_to_memslot() local
2098 slot = try_get_memslot(slots, vcpu->last_used_slot, gfn); in kvm_vcpu_gfn_to_memslot()
2107 slot = search_memslots(slots, gfn, &slot_index); in kvm_vcpu_gfn_to_memslot()
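
kvm_vcpu_gfn_to_memslot() (lines 2094-2107) tries the per-vCPU last_used_slot hint first and only falls back to a binary search, refreshing the hint on a hit. A sketch of both paths; the descending sort by base_gfn matches how this kernel version keeps the array, everything else is simplified.

#include <stdio.h>

#define SLOTS_MAX 8
typedef unsigned long gfn_t;

struct memslot { gfn_t base_gfn; unsigned long npages; };

struct memslots {
    int used_slots;
    struct memslot slot[SLOTS_MAX];   /* sorted by base_gfn, descending */
};

static struct memslot *try_get(struct memslots *s, int i, gfn_t gfn)
{
    if (i < 0 || i >= s->used_slots)
        return NULL;
    struct memslot *m = &s->slot[i];

    return (gfn >= m->base_gfn && gfn < m->base_gfn + m->npages) ? m : NULL;
}

static struct memslot *search(struct memslots *s, gfn_t gfn, int *index)
{
    int start = 0, end = s->used_slots;

    while (start < end) {             /* binary search, descending order */
        int mid = start + (end - start) / 2;

        if (gfn >= s->slot[mid].base_gfn)
            end = mid;
        else
            start = mid + 1;
    }
    *index = start;
    return try_get(s, start, gfn);
}

static struct memslot *gfn_to_memslot(struct memslots *s, int *last_used,
                                      gfn_t gfn)
{
    struct memslot *m = try_get(s, *last_used, gfn);  /* fast path: hint */
    int idx;

    if (m)
        return m;
    m = search(s, gfn, &idx);         /* slow path: binary search */
    if (m)
        *last_used = idx;             /* refresh the hint on a hit */
    return m;
}

int main(void)
{
    struct memslots s = { .used_slots = 2, .slot = {
        { .base_gfn = 0x100, .npages = 16 },
        { .base_gfn = 0x000, .npages = 16 },
    } };
    int last_used = 0;
    struct memslot *m = gfn_to_memslot(&s, &last_used, 0x5);

    printf("hit at index %d\n", last_used);   /* prints 1 */
    return m ? 0 : 1;
}
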
2877 static int __kvm_gfn_to_hva_cache_init(struct kvm_memslots *slots, in __kvm_gfn_to_hva_cache_init() argument
2888 ghc->generation = slots->generation; in __kvm_gfn_to_hva_cache_init()
2900 ghc->memslot = __gfn_to_memslot(slots, start_gfn); in __kvm_gfn_to_hva_cache_init()
2921 struct kvm_memslots *slots = kvm_memslots(kvm); in kvm_gfn_to_hva_cache_init() local
2922 return __kvm_gfn_to_hva_cache_init(slots, ghc, gpa, len); in kvm_gfn_to_hva_cache_init()
2930 struct kvm_memslots *slots = kvm_memslots(kvm); in kvm_write_guest_offset_cached() local
2937 if (slots->generation != ghc->generation) { in kvm_write_guest_offset_cached()
2938 if (__kvm_gfn_to_hva_cache_init(slots, ghc, ghc->gpa, ghc->len)) in kvm_write_guest_offset_cached()
2968 struct kvm_memslots *slots = kvm_memslots(kvm); in kvm_read_guest_offset_cached() local
2975 if (slots->generation != ghc->generation) { in kvm_read_guest_offset_cached()
2976 if (__kvm_gfn_to_hva_cache_init(slots, ghc, ghc->gpa, ghc->len)) in kvm_read_guest_offset_cached()
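
The final references are the generation check that keeps a gfn-to-hva cache coherent: kvm_write_guest_offset_cached() and kvm_read_guest_offset_cached() compare the cache's recorded generation against the live memslots generation and re-run __kvm_gfn_to_hva_cache_init() on mismatch. The pattern in miniature; the struct fields and the translation arithmetic are placeholders, not struct gfn_to_hva_cache.

#include <stdio.h>

typedef unsigned long gpa_t;

struct memslots { unsigned long long generation; };

struct hva_cache {
    unsigned long long generation;  /* memslots generation at init time */
    gpa_t gpa;
    unsigned long hva;              /* cached translation */
};

/* Recompute the translation; __kvm_gfn_to_hva_cache_init() in the
 * kernel.  The arithmetic here is a placeholder. */
static void cache_init(struct hva_cache *c, const struct memslots *s,
                       gpa_t gpa)
{
    c->generation = s->generation;
    c->gpa = gpa;
    c->hva = 0x7f0000000000ul + gpa;
}

static unsigned long cached_hva(struct hva_cache *c, const struct memslots *s)
{
    if (s->generation != c->generation)   /* memslots changed under us */
        cache_init(c, s, c->gpa);
    return c->hva;
}

int main(void)
{
    struct memslots s = { .generation = 4 };
    struct hva_cache c;

    cache_init(&c, &s, 0x1000);
    s.generation += 2;                    /* a memslot update happened */
    printf("hva %#lx (gen %llu)\n", cached_hva(&c, &s), c.generation);
    return 0;
}
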