Lines matching refs:gmap (cross-reference hits in the s390 guest address space code, arch/s390/mm/gmap.c; each entry gives the source line number, the matching line, and the enclosing function)

34 static struct gmap *gmap_alloc(unsigned long limit)  in gmap_alloc()
36 struct gmap *gmap; in gmap_alloc() local
58 gmap = kzalloc(sizeof(struct gmap), GFP_KERNEL_ACCOUNT); in gmap_alloc()
59 if (!gmap) in gmap_alloc()
61 INIT_LIST_HEAD(&gmap->crst_list); in gmap_alloc()
62 INIT_LIST_HEAD(&gmap->children); in gmap_alloc()
63 INIT_LIST_HEAD(&gmap->pt_list); in gmap_alloc()
64 INIT_RADIX_TREE(&gmap->guest_to_host, GFP_KERNEL_ACCOUNT); in gmap_alloc()
65 INIT_RADIX_TREE(&gmap->host_to_guest, GFP_ATOMIC | __GFP_ACCOUNT); in gmap_alloc()
66 INIT_RADIX_TREE(&gmap->host_to_rmap, GFP_ATOMIC | __GFP_ACCOUNT); in gmap_alloc()
67 spin_lock_init(&gmap->guest_table_lock); in gmap_alloc()
68 spin_lock_init(&gmap->shadow_lock); in gmap_alloc()
69 refcount_set(&gmap->ref_count, 1); in gmap_alloc()
74 list_add(&page->lru, &gmap->crst_list); in gmap_alloc()
77 gmap->table = table; in gmap_alloc()
78 gmap->asce = atype | _ASCE_TABLE_LENGTH | in gmap_alloc()
80 gmap->asce_end = limit; in gmap_alloc()
81 return gmap; in gmap_alloc()
84 kfree(gmap); in gmap_alloc()
96 struct gmap *gmap_create(struct mm_struct *mm, unsigned long limit) in gmap_create()
98 struct gmap *gmap; in gmap_create() local
101 gmap = gmap_alloc(limit); in gmap_create()
102 if (!gmap) in gmap_create()
104 gmap->mm = mm; in gmap_create()
106 list_add_rcu(&gmap->list, &mm->context.gmap_list); in gmap_create()
108 gmap_asce = gmap->asce; in gmap_create()
113 return gmap; in gmap_create()
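
The gmap_alloc()/gmap_create() lines above build a guest address space tied to a host mm and registered in mm->context.gmap_list. A minimal caller-side sketch, assuming only the declarations from arch/s390/include/asm/gmap.h; the helper name and the 4 TB limit are illustrative, not taken from the listing:

    #include <linux/sched.h>        /* current */
    #include <asm/gmap.h>           /* struct gmap, gmap_create(), gmap_remove() */

    /* Hypothetical helper: create a guest address space limited to 4 TB - 1. */
    static struct gmap *example_create_guest_as(void)
    {
            struct gmap *g;

            g = gmap_create(current->mm, (1UL << 42) - 1);
            if (!g)
                    return NULL;    /* allocation failure in gmap_alloc() */
            /* ... store g, e.g. in the per-VM state ... */
            return g;               /* released later via gmap_remove(), which
                                     * drops the initial reference (line 266) */
    }
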
117 static void gmap_flush_tlb(struct gmap *gmap) in gmap_flush_tlb() argument
120 __tlb_flush_idte(gmap->asce); in gmap_flush_tlb()
182 static void gmap_free(struct gmap *gmap) in gmap_free() argument
187 if (!(gmap_is_shadow(gmap) && gmap->removed)) in gmap_free()
188 gmap_flush_tlb(gmap); in gmap_free()
190 list_for_each_entry_safe(page, next, &gmap->crst_list, lru) in gmap_free()
192 gmap_radix_tree_free(&gmap->guest_to_host); in gmap_free()
193 gmap_radix_tree_free(&gmap->host_to_guest); in gmap_free()
196 if (gmap_is_shadow(gmap)) { in gmap_free()
198 list_for_each_entry_safe(page, next, &gmap->pt_list, lru) in gmap_free()
200 gmap_rmap_radix_tree_free(&gmap->host_to_rmap); in gmap_free()
202 gmap_put(gmap->parent); in gmap_free()
205 kfree(gmap); in gmap_free()
214 struct gmap *gmap_get(struct gmap *gmap) in gmap_get() argument
216 refcount_inc(&gmap->ref_count); in gmap_get()
217 return gmap; in gmap_get()
227 void gmap_put(struct gmap *gmap) in gmap_put() argument
229 if (refcount_dec_and_test(&gmap->ref_count)) in gmap_put()
230 gmap_free(gmap); in gmap_put()
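
gmap_free() is never called directly by users; it runs from gmap_put() once ref_count (set to 1 in gmap_alloc()) reaches zero. A hedged sketch of the refcounting contract, with a hypothetical helper name:

    static void example_share_gmap(struct gmap *g)
    {
            struct gmap *g2;

            g2 = gmap_get(g);       /* ref_count 1 -> 2; returns g for convenience */
            /* ... hand g2 to another long-lived user ... */
            gmap_put(g2);           /* ref_count 2 -> 1 */
            /* the last reference is normally dropped by gmap_remove(); only
             * then does gmap_free() release the tables and radix trees */
    }
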
238 void gmap_remove(struct gmap *gmap) in gmap_remove() argument
240 struct gmap *sg, *next; in gmap_remove()
244 if (!list_empty(&gmap->children)) { in gmap_remove()
245 spin_lock(&gmap->shadow_lock); in gmap_remove()
246 list_for_each_entry_safe(sg, next, &gmap->children, list) { in gmap_remove()
250 spin_unlock(&gmap->shadow_lock); in gmap_remove()
253 spin_lock(&gmap->mm->context.lock); in gmap_remove()
254 list_del_rcu(&gmap->list); in gmap_remove()
255 if (list_empty(&gmap->mm->context.gmap_list)) in gmap_remove()
257 else if (list_is_singular(&gmap->mm->context.gmap_list)) in gmap_remove()
258 gmap_asce = list_first_entry(&gmap->mm->context.gmap_list, in gmap_remove()
259 struct gmap, list)->asce; in gmap_remove()
262 WRITE_ONCE(gmap->mm->context.gmap_asce, gmap_asce); in gmap_remove()
263 spin_unlock(&gmap->mm->context.lock); in gmap_remove()
266 gmap_put(gmap); in gmap_remove()
274 void gmap_enable(struct gmap *gmap) in gmap_enable() argument
276 S390_lowcore.gmap = (unsigned long) gmap; in gmap_enable()
284 void gmap_disable(struct gmap *gmap) in gmap_disable() argument
286 S390_lowcore.gmap = 0UL; in gmap_disable()
295 struct gmap *gmap_get_enabled(void) in gmap_get_enabled()
297 return (struct gmap *) S390_lowcore.gmap; in gmap_get_enabled()
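
gmap_enable()/gmap_disable() only publish or clear the active gmap pointer in the CPU's lowcore, and gmap_get_enabled() reads it back, e.g. from the host fault handler. A sketch of the bracket a hypervisor places around guest execution; the SIE entry itself is outside the listing and only hinted at in the comment:

    #include <linux/bug.h>          /* WARN_ON */
    #include <asm/gmap.h>

    static void example_run_guest(struct gmap *g)
    {
            gmap_enable(g);                 /* S390_lowcore.gmap = (unsigned long)g */

            /* ... enter SIE; on a host fault, gmap_get_enabled() identifies
             *     the guest address space that triggered it ... */
            WARN_ON(gmap_get_enabled() != g);

            gmap_disable(g);                /* S390_lowcore.gmap = 0UL */
    }
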
304 static int gmap_alloc_table(struct gmap *gmap, unsigned long *table, in gmap_alloc_table() argument
316 spin_lock(&gmap->guest_table_lock); in gmap_alloc_table()
318 list_add(&page->lru, &gmap->crst_list); in gmap_alloc_table()
324 spin_unlock(&gmap->guest_table_lock); in gmap_alloc_table()
354 static int __gmap_unlink_by_vmaddr(struct gmap *gmap, unsigned long vmaddr) in __gmap_unlink_by_vmaddr() argument
359 BUG_ON(gmap_is_shadow(gmap)); in __gmap_unlink_by_vmaddr()
360 spin_lock(&gmap->guest_table_lock); in __gmap_unlink_by_vmaddr()
361 entry = radix_tree_delete(&gmap->host_to_guest, vmaddr >> PMD_SHIFT); in __gmap_unlink_by_vmaddr()
366 spin_unlock(&gmap->guest_table_lock); in __gmap_unlink_by_vmaddr()
377 static int __gmap_unmap_by_gaddr(struct gmap *gmap, unsigned long gaddr) in __gmap_unmap_by_gaddr() argument
381 vmaddr = (unsigned long) radix_tree_delete(&gmap->guest_to_host, in __gmap_unmap_by_gaddr()
383 return vmaddr ? __gmap_unlink_by_vmaddr(gmap, vmaddr) : 0; in __gmap_unmap_by_gaddr()
394 int gmap_unmap_segment(struct gmap *gmap, unsigned long to, unsigned long len) in gmap_unmap_segment() argument
399 BUG_ON(gmap_is_shadow(gmap)); in gmap_unmap_segment()
406 mmap_write_lock(gmap->mm); in gmap_unmap_segment()
408 flush |= __gmap_unmap_by_gaddr(gmap, to + off); in gmap_unmap_segment()
409 mmap_write_unlock(gmap->mm); in gmap_unmap_segment()
411 gmap_flush_tlb(gmap); in gmap_unmap_segment()
425 int gmap_map_segment(struct gmap *gmap, unsigned long from, in gmap_map_segment() argument
431 BUG_ON(gmap_is_shadow(gmap)); in gmap_map_segment()
435 from + len - 1 > TASK_SIZE_MAX || to + len - 1 > gmap->asce_end) in gmap_map_segment()
439 mmap_write_lock(gmap->mm); in gmap_map_segment()
442 flush |= __gmap_unmap_by_gaddr(gmap, to + off); in gmap_map_segment()
444 if (radix_tree_insert(&gmap->guest_to_host, in gmap_map_segment()
449 mmap_write_unlock(gmap->mm); in gmap_map_segment()
451 gmap_flush_tlb(gmap); in gmap_map_segment()
454 gmap_unmap_segment(gmap, to, len); in gmap_map_segment()
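
gmap_map_segment()/gmap_unmap_segment() fill and clear the guest_to_host radix tree at 1 MB (PMD) segment granularity; the host page tables themselves are linked lazily by __gmap_link() on first fault. A hedged sketch; the to/len parameters of gmap_map_segment(), truncated in the line above, follow the prototype in asm/gmap.h:

    #include <asm/gmap.h>

    static int example_map_segment(struct gmap *g, unsigned long host_from,
                                   unsigned long guest_to, unsigned long len)
    {
            int rc;

            /* all three values must be 1 MB segment aligned */
            rc = gmap_map_segment(g, host_from, guest_to, len);
            if (rc)
                    return rc;      /* -EINVAL (alignment/range) or -ENOMEM */

            /* ... guest uses [guest_to, guest_to + len) ... */

            return gmap_unmap_segment(g, guest_to, len);
    }
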
472 unsigned long __gmap_translate(struct gmap *gmap, unsigned long gaddr) in __gmap_translate() argument
477 radix_tree_lookup(&gmap->guest_to_host, gaddr >> PMD_SHIFT); in __gmap_translate()
492 unsigned long gmap_translate(struct gmap *gmap, unsigned long gaddr) in gmap_translate() argument
496 mmap_read_lock(gmap->mm); in gmap_translate()
497 rc = __gmap_translate(gmap, gaddr); in gmap_translate()
498 mmap_read_unlock(gmap->mm); in gmap_translate()
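
gmap_translate() maps a guest address back to the host user-space address by consulting guest_to_host under mmap_read_lock; __gmap_translate() is the variant for callers that already hold the mmap lock. A fragment, assuming it runs in a function that can return an errno (IS_ERR_VALUE comes from linux/err.h):

    unsigned long vmaddr;

    vmaddr = gmap_translate(g, gaddr);      /* takes mmap_read_lock() internally */
    if (IS_ERR_VALUE(vmaddr))
            return -EFAULT;                 /* segment was never mapped */
    /* vmaddr is the host virtual address backing guest address gaddr */
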
512 struct gmap *gmap; in gmap_unlink() local
516 list_for_each_entry_rcu(gmap, &mm->context.gmap_list, list) { in gmap_unlink()
517 flush = __gmap_unlink_by_vmaddr(gmap, vmaddr); in gmap_unlink()
519 gmap_flush_tlb(gmap); in gmap_unlink()
524 static void gmap_pmdp_xchg(struct gmap *gmap, pmd_t *old, pmd_t new,
538 int __gmap_link(struct gmap *gmap, unsigned long gaddr, unsigned long vmaddr) in __gmap_link() argument
550 BUG_ON(gmap_is_shadow(gmap)); in __gmap_link()
552 table = gmap->table; in __gmap_link()
553 if ((gmap->asce & _ASCE_TYPE_MASK) >= _ASCE_TYPE_REGION1) { in __gmap_link()
556 gmap_alloc_table(gmap, table, _REGION2_ENTRY_EMPTY, in __gmap_link()
561 if ((gmap->asce & _ASCE_TYPE_MASK) >= _ASCE_TYPE_REGION2) { in __gmap_link()
564 gmap_alloc_table(gmap, table, _REGION3_ENTRY_EMPTY, in __gmap_link()
569 if ((gmap->asce & _ASCE_TYPE_MASK) >= _ASCE_TYPE_REGION3) { in __gmap_link()
572 gmap_alloc_table(gmap, table, _SEGMENT_ENTRY_EMPTY, in __gmap_link()
579 mm = gmap->mm; in __gmap_link()
592 if (pmd_large(*pmd) && !gmap->mm->context.allow_gmap_hpage_1m) in __gmap_link()
599 spin_lock(&gmap->guest_table_lock); in __gmap_link()
601 rc = radix_tree_insert(&gmap->host_to_guest, in __gmap_link()
617 gmap_pmdp_xchg(gmap, (pmd_t *)table, __pmd(unprot), gaddr); in __gmap_link()
619 spin_unlock(&gmap->guest_table_lock); in __gmap_link()
634 int gmap_fault(struct gmap *gmap, unsigned long gaddr, in gmap_fault() argument
641 mmap_read_lock(gmap->mm); in gmap_fault()
645 vmaddr = __gmap_translate(gmap, gaddr); in gmap_fault()
650 if (fixup_user_fault(gmap->mm, vmaddr, fault_flags, in gmap_fault()
662 rc = __gmap_link(gmap, gaddr, vmaddr); in gmap_fault()
664 mmap_read_unlock(gmap->mm); in gmap_fault()
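
gmap_fault() resolves a guest fault end to end: it translates the guest address, fixes up the host mapping with fixup_user_fault(), and links the host page table into the guest ASCE via __gmap_link(). A hedged caller-side sketch; FAULT_FLAG_WRITE is the generic mm flag, and the fault_flags parameter, truncated above, follows the prototype in asm/gmap.h:

    #include <linux/mm.h>           /* FAULT_FLAG_WRITE */
    #include <asm/gmap.h>

    static int example_resolve_guest_fault(struct gmap *g, unsigned long gaddr,
                                           bool write)
    {
            /* 0 on success; -EFAULT if gaddr lies outside every mapped segment */
            return gmap_fault(g, gaddr, write ? FAULT_FLAG_WRITE : 0);
    }
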
672 void __gmap_zap(struct gmap *gmap, unsigned long gaddr) in __gmap_zap() argument
680 vmaddr = (unsigned long) radix_tree_lookup(&gmap->guest_to_host, in __gmap_zap()
685 vma = vma_lookup(gmap->mm, vmaddr); in __gmap_zap()
690 ptep = get_locked_pte(gmap->mm, vmaddr, &ptl); in __gmap_zap()
692 ptep_zap_unused(gmap->mm, vmaddr, ptep, 0); in __gmap_zap()
699 void gmap_discard(struct gmap *gmap, unsigned long from, unsigned long to) in gmap_discard() argument
704 mmap_read_lock(gmap->mm); in gmap_discard()
709 radix_tree_lookup(&gmap->guest_to_host, in gmap_discard()
715 vma = find_vma(gmap->mm, vmaddr); in gmap_discard()
727 mmap_read_unlock(gmap->mm); in gmap_discard()
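
__gmap_zap() drops a single unused host PTE behind a guest page, while gmap_discard() walks a guest range, resolves each segment back to its host VMA and zaps the backing pages (used, for example, on guest memory discards). A fragment-style sketch; the locking shown reflects that gmap_discard() takes the mmap lock itself while __gmap_zap() expects the caller to hold it:

    /* drop the backing store of guest physical range [from, to) */
    gmap_discard(g, from, to);              /* takes mmap_read_lock() itself */

    /* drop a single unused page; here the caller must hold the mmap lock */
    mmap_read_lock(g->mm);
    __gmap_zap(g, gaddr & PAGE_MASK);
    mmap_read_unlock(g->mm);
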
765 static void gmap_call_notifier(struct gmap *gmap, unsigned long start, in gmap_call_notifier() argument
771 nb->notifier_call(gmap, start, end); in gmap_call_notifier()
792 static inline unsigned long *gmap_table_walk(struct gmap *gmap, in gmap_table_walk() argument
795 const int asce_type = gmap->asce & _ASCE_TYPE_MASK; in gmap_table_walk()
796 unsigned long *table = gmap->table; in gmap_table_walk()
798 if (gmap_is_shadow(gmap) && gmap->removed) in gmap_table_walk()
854 static pte_t *gmap_pte_op_walk(struct gmap *gmap, unsigned long gaddr, in gmap_pte_op_walk() argument
859 BUG_ON(gmap_is_shadow(gmap)); in gmap_pte_op_walk()
861 table = gmap_table_walk(gmap, gaddr, 1); /* get segment pointer */ in gmap_pte_op_walk()
864 return pte_alloc_map_lock(gmap->mm, (pmd_t *) table, gaddr, ptl); in gmap_pte_op_walk()
878 static int gmap_pte_op_fixup(struct gmap *gmap, unsigned long gaddr, in gmap_pte_op_fixup() argument
881 struct mm_struct *mm = gmap->mm; in gmap_pte_op_fixup()
885 BUG_ON(gmap_is_shadow(gmap)); in gmap_pte_op_fixup()
893 return __gmap_link(gmap, gaddr, vmaddr); in gmap_pte_op_fixup()
914 static inline pmd_t *gmap_pmd_op_walk(struct gmap *gmap, unsigned long gaddr) in gmap_pmd_op_walk() argument
918 BUG_ON(gmap_is_shadow(gmap)); in gmap_pmd_op_walk()
919 pmdp = (pmd_t *) gmap_table_walk(gmap, gaddr, 1); in gmap_pmd_op_walk()
924 if (!gmap->mm->context.allow_gmap_hpage_1m) in gmap_pmd_op_walk()
927 spin_lock(&gmap->guest_table_lock); in gmap_pmd_op_walk()
929 spin_unlock(&gmap->guest_table_lock); in gmap_pmd_op_walk()
935 spin_unlock(&gmap->guest_table_lock); in gmap_pmd_op_walk()
944 static inline void gmap_pmd_op_end(struct gmap *gmap, pmd_t *pmdp) in gmap_pmd_op_end() argument
947 spin_unlock(&gmap->guest_table_lock); in gmap_pmd_op_end()
964 static int gmap_protect_pmd(struct gmap *gmap, unsigned long gaddr, in gmap_protect_pmd() argument
977 gmap_pmdp_xchg(gmap, pmdp, new, gaddr); in gmap_protect_pmd()
983 gmap_pmdp_xchg(gmap, pmdp, new, gaddr); in gmap_protect_pmd()
1009 static int gmap_protect_pte(struct gmap *gmap, unsigned long gaddr, in gmap_protect_pte() argument
1020 ptep = pte_alloc_map_lock(gmap->mm, pmdp, gaddr, &ptl); in gmap_protect_pte()
1027 rc = ptep_force_prot(gmap->mm, gaddr, ptep, prot, pbits); in gmap_protect_pte()
1045 static int gmap_protect_range(struct gmap *gmap, unsigned long gaddr, in gmap_protect_range() argument
1052 BUG_ON(gmap_is_shadow(gmap)); in gmap_protect_range()
1055 pmdp = gmap_pmd_op_walk(gmap, gaddr); in gmap_protect_range()
1058 rc = gmap_protect_pte(gmap, gaddr, pmdp, prot, in gmap_protect_range()
1065 rc = gmap_protect_pmd(gmap, gaddr, pmdp, prot, in gmap_protect_range()
1073 gmap_pmd_op_end(gmap, pmdp); in gmap_protect_range()
1080 vmaddr = __gmap_translate(gmap, gaddr); in gmap_protect_range()
1083 rc = gmap_pte_op_fixup(gmap, gaddr, vmaddr, prot); in gmap_protect_range()
1105 int gmap_mprotect_notify(struct gmap *gmap, unsigned long gaddr, in gmap_mprotect_notify() argument
1110 if ((gaddr & ~PAGE_MASK) || (len & ~PAGE_MASK) || gmap_is_shadow(gmap)) in gmap_mprotect_notify()
1114 mmap_read_lock(gmap->mm); in gmap_mprotect_notify()
1115 rc = gmap_protect_range(gmap, gaddr, len, prot, GMAP_NOTIFY_MPROT); in gmap_mprotect_notify()
1116 mmap_read_unlock(gmap->mm); in gmap_mprotect_notify()
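
gmap_mprotect_notify() protects a guest range and arms the MPROT notification bit, so a later host-side change to those pages ends up in ptep_notify() and gmap_call_notifier(), which runs the registered callbacks. A hedged sketch; gmap_register_pte_notifier() and the notifier_call prototype are taken from asm/gmap.h and are not part of the listing itself, and the example_* names are hypothetical:

    #include <linux/mman.h>         /* PROT_READ */
    #include <asm/gmap.h>           /* struct gmap_notifier, gmap_register_pte_notifier() */

    /* Hypothetical callback: invoked with the affected guest range. */
    static void example_gmap_notifier(struct gmap *gmap, unsigned long start,
                                      unsigned long end)
    {
            /* e.g. kick VCPUs so they revalidate [start, end] */
    }

    static struct gmap_notifier example_notifier = {
            .notifier_call = example_gmap_notifier,
    };

    static int example_arm_protection(struct gmap *g, unsigned long gaddr,
                                      unsigned long len)
    {
            gmap_register_pte_notifier(&example_notifier);  /* normally done once */
            /* read-protect the range; when the host mapping changes,
             * ptep_notify() -> gmap_call_notifier() runs the callback */
            return gmap_mprotect_notify(g, gaddr, len, PROT_READ);
    }
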
1134 int gmap_read_table(struct gmap *gmap, unsigned long gaddr, unsigned long *val) in gmap_read_table() argument
1141 if (gmap_is_shadow(gmap)) in gmap_read_table()
1146 ptep = gmap_pte_op_walk(gmap, gaddr, &ptl); in gmap_read_table()
1161 vmaddr = __gmap_translate(gmap, gaddr); in gmap_read_table()
1166 rc = gmap_pte_op_fixup(gmap, gaddr, vmaddr, PROT_READ); in gmap_read_table()
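
gmap_read_table() reads an 8-byte value from guest memory through the gmap, faulting the page in read-only via gmap_pte_op_fixup() when it is not yet mapped. A fragment-style sketch:

    unsigned long val;
    int rc;

    rc = gmap_read_table(g, gaddr & ~7UL, &val);    /* 8-byte aligned guest address */
    if (rc)
            return rc;              /* -EFAULT if the address cannot be resolved */
    /* val holds the doubleword at guest real address gaddr */
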
1182 static inline void gmap_insert_rmap(struct gmap *sg, unsigned long vmaddr, in gmap_insert_rmap()
1217 static int gmap_protect_rmap(struct gmap *sg, unsigned long raddr, in gmap_protect_rmap()
1220 struct gmap *parent; in gmap_protect_rmap()
1298 static void gmap_unshadow_page(struct gmap *sg, unsigned long raddr) in gmap_unshadow_page()
1318 static void __gmap_unshadow_pgt(struct gmap *sg, unsigned long raddr, in __gmap_unshadow_pgt()
1335 static void gmap_unshadow_pgt(struct gmap *sg, unsigned long raddr) in gmap_unshadow_pgt()
1365 static void __gmap_unshadow_sgt(struct gmap *sg, unsigned long raddr, in __gmap_unshadow_sgt()
1393 static void gmap_unshadow_sgt(struct gmap *sg, unsigned long raddr) in gmap_unshadow_sgt()
1423 static void __gmap_unshadow_r3t(struct gmap *sg, unsigned long raddr, in __gmap_unshadow_r3t()
1451 static void gmap_unshadow_r3t(struct gmap *sg, unsigned long raddr) in gmap_unshadow_r3t()
1481 static void __gmap_unshadow_r2t(struct gmap *sg, unsigned long raddr, in __gmap_unshadow_r2t()
1509 static void gmap_unshadow_r2t(struct gmap *sg, unsigned long raddr) in gmap_unshadow_r2t()
1539 static void __gmap_unshadow_r1t(struct gmap *sg, unsigned long raddr, in __gmap_unshadow_r1t()
1570 static void gmap_unshadow(struct gmap *sg) in gmap_unshadow()
1607 static struct gmap *gmap_find_shadow(struct gmap *parent, unsigned long asce, in gmap_find_shadow()
1610 struct gmap *sg; in gmap_find_shadow()
1636 int gmap_shadow_valid(struct gmap *sg, unsigned long asce, int edat_level) in gmap_shadow_valid()
1659 struct gmap *gmap_shadow(struct gmap *parent, unsigned long asce, in gmap_shadow()
1662 struct gmap *sg, *new; in gmap_shadow()
1751 int gmap_shadow_r2t(struct gmap *sg, unsigned long saddr, unsigned long r2t, in gmap_shadow_r2t()
1835 int gmap_shadow_r3t(struct gmap *sg, unsigned long saddr, unsigned long r3t, in gmap_shadow_r3t()
1919 int gmap_shadow_sgt(struct gmap *sg, unsigned long saddr, unsigned long sgt, in gmap_shadow_sgt()
2003 int gmap_shadow_pgt_lookup(struct gmap *sg, unsigned long saddr, in gmap_shadow_pgt_lookup()
2043 int gmap_shadow_pgt(struct gmap *sg, unsigned long saddr, unsigned long pgt, in gmap_shadow_pgt()
2122 int gmap_shadow_page(struct gmap *sg, unsigned long saddr, pte_t pte) in gmap_shadow_page()
2124 struct gmap *parent; in gmap_shadow_page()
2190 static void gmap_shadow_notify(struct gmap *sg, unsigned long vmaddr, in gmap_shadow_notify()
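
The gmap_shadow*() family builds a shadow (nested) guest address space for VSIE: gmap_shadow() finds or creates a child gmap for a given guest ASCE and EDAT level, and gmap_shadow_r2t/r3t/sgt/pgt/page() populate the shadow tables on demand from the fault path. A hedged sketch of the lookup/creation step only; table population is omitted, and the edat_level/asce parameters, truncated above, follow the prototypes in asm/gmap.h:

    #include <linux/err.h>          /* IS_ERR() */
    #include <asm/gmap.h>

    static struct gmap *example_get_shadow(struct gmap *parent,
                                           unsigned long guest_asce, int edat_level)
    {
            struct gmap *sg;

            sg = gmap_shadow(parent, guest_asce, edat_level);
            if (IS_ERR(sg))
                    return sg;      /* e.g. -ENOMEM, -EAGAIN or -EFAULT */
            /* an existing, still-valid shadow for this ASCE/EDAT level is
             * reused with its reference count bumped; otherwise a fresh one
             * is created and protected in the parent */
            return sg;              /* gmap_shadow_valid() rechecks a cached sg;
                                     * drop with gmap_put() when done */
    }
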
2257 struct gmap *gmap, *sg, *next; in ptep_notify() local
2262 list_for_each_entry_rcu(gmap, &mm->context.gmap_list, list) { in ptep_notify()
2263 spin_lock(&gmap->guest_table_lock); in ptep_notify()
2264 table = radix_tree_lookup(&gmap->host_to_guest, in ptep_notify()
2268 spin_unlock(&gmap->guest_table_lock); in ptep_notify()
2272 if (!list_empty(&gmap->children) && (bits & PGSTE_VSIE_BIT)) { in ptep_notify()
2273 spin_lock(&gmap->shadow_lock); in ptep_notify()
2275 &gmap->children, list) in ptep_notify()
2277 spin_unlock(&gmap->shadow_lock); in ptep_notify()
2280 gmap_call_notifier(gmap, gaddr, gaddr + PAGE_SIZE - 1); in ptep_notify()
2286 static void pmdp_notify_gmap(struct gmap *gmap, pmd_t *pmdp, in pmdp_notify_gmap() argument
2290 gmap_call_notifier(gmap, gaddr, gaddr + HPAGE_SIZE - 1); in pmdp_notify_gmap()
2303 static void gmap_pmdp_xchg(struct gmap *gmap, pmd_t *pmdp, pmd_t new, in gmap_pmdp_xchg() argument
2307 pmdp_notify_gmap(gmap, pmdp, gaddr); in gmap_pmdp_xchg()
2310 __pmdp_idte(gaddr, (pmd_t *)pmdp, IDTE_GUEST_ASCE, gmap->asce, in gmap_pmdp_xchg()
2323 struct gmap *gmap; in gmap_pmdp_clear() local
2327 list_for_each_entry_rcu(gmap, &mm->context.gmap_list, list) { in gmap_pmdp_clear()
2328 spin_lock(&gmap->guest_table_lock); in gmap_pmdp_clear()
2329 pmdp = (pmd_t *)radix_tree_delete(&gmap->host_to_guest, in gmap_pmdp_clear()
2333 pmdp_notify_gmap(gmap, pmdp, gaddr); in gmap_pmdp_clear()
2340 spin_unlock(&gmap->guest_table_lock); in gmap_pmdp_clear()
2376 struct gmap *gmap; in gmap_pmdp_idte_local() local
2380 list_for_each_entry_rcu(gmap, &mm->context.gmap_list, list) { in gmap_pmdp_idte_local()
2381 spin_lock(&gmap->guest_table_lock); in gmap_pmdp_idte_local()
2382 entry = radix_tree_delete(&gmap->host_to_guest, in gmap_pmdp_idte_local()
2387 pmdp_notify_gmap(gmap, pmdp, gaddr); in gmap_pmdp_idte_local()
2392 gmap->asce, IDTE_LOCAL); in gmap_pmdp_idte_local()
2397 spin_unlock(&gmap->guest_table_lock); in gmap_pmdp_idte_local()
2411 struct gmap *gmap; in gmap_pmdp_idte_global() local
2415 list_for_each_entry_rcu(gmap, &mm->context.gmap_list, list) { in gmap_pmdp_idte_global()
2416 spin_lock(&gmap->guest_table_lock); in gmap_pmdp_idte_global()
2417 entry = radix_tree_delete(&gmap->host_to_guest, in gmap_pmdp_idte_global()
2422 pmdp_notify_gmap(gmap, pmdp, gaddr); in gmap_pmdp_idte_global()
2427 gmap->asce, IDTE_GLOBAL); in gmap_pmdp_idte_global()
2434 spin_unlock(&gmap->guest_table_lock); in gmap_pmdp_idte_global()
2449 static bool gmap_test_and_clear_dirty_pmd(struct gmap *gmap, pmd_t *pmdp, in gmap_test_and_clear_dirty_pmd() argument
2462 gmap_protect_pmd(gmap, gaddr, pmdp, PROT_READ, 0); in gmap_test_and_clear_dirty_pmd()
2476 void gmap_sync_dirty_log_pmd(struct gmap *gmap, unsigned long bitmap[4], in gmap_sync_dirty_log_pmd() argument
2484 pmdp = gmap_pmd_op_walk(gmap, gaddr); in gmap_sync_dirty_log_pmd()
2489 if (gmap_test_and_clear_dirty_pmd(gmap, pmdp, gaddr)) in gmap_sync_dirty_log_pmd()
2493 ptep = pte_alloc_map_lock(gmap->mm, pmdp, vmaddr, &ptl); in gmap_sync_dirty_log_pmd()
2496 if (ptep_test_and_clear_uc(gmap->mm, vmaddr, ptep)) in gmap_sync_dirty_log_pmd()
2501 gmap_pmd_op_end(gmap, pmdp); in gmap_sync_dirty_log_pmd()
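
gmap_sync_dirty_log_pmd() transfers the change state of one 1 MB guest segment (256 pages, i.e. 4 x 64-bit bitmap words) into a caller-supplied bitmap, handling both large pmds and 4 KB ptes; kvm-s390 uses it to build the dirty log. A fragment-style sketch; the gaddr/vmaddr parameters, truncated above, follow the prototype in asm/gmap.h, and gaddr is assumed to be 1 MB aligned:

    unsigned long dirty[4] = { 0 };
    unsigned long vmaddr;

    mmap_read_lock(g->mm);
    vmaddr = __gmap_translate(g, gaddr);    /* mmap lock already held here */
    if (!IS_ERR_VALUE(vmaddr))
            gmap_sync_dirty_log_pmd(g, dirty, gaddr, vmaddr);
    mmap_read_unlock(g->mm);
    /* each set bit in dirty[] marks one changed 4 KB page of the segment */
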
2806 void s390_unlist_old_asce(struct gmap *gmap) in s390_unlist_old_asce() argument
2810 old = virt_to_page(gmap->table); in s390_unlist_old_asce()
2811 spin_lock(&gmap->guest_table_lock); in s390_unlist_old_asce()
2828 spin_unlock(&gmap->guest_table_lock); in s390_unlist_old_asce()
2842 int s390_replace_asce(struct gmap *gmap) in s390_replace_asce() argument
2848 s390_unlist_old_asce(gmap); in s390_replace_asce()
2854 memcpy(table, gmap->table, 1UL << (CRST_ALLOC_ORDER + PAGE_SHIFT)); in s390_replace_asce()
2861 spin_lock(&gmap->guest_table_lock); in s390_replace_asce()
2862 list_add(&page->lru, &gmap->crst_list); in s390_replace_asce()
2863 spin_unlock(&gmap->guest_table_lock); in s390_replace_asce()
2866 asce = (gmap->asce & ~_ASCE_ORIGIN) | __pa(table); in s390_replace_asce()
2867 WRITE_ONCE(gmap->asce, asce); in s390_replace_asce()
2868 WRITE_ONCE(gmap->mm->context.gmap_asce, asce); in s390_replace_asce()
2869 WRITE_ONCE(gmap->table, table); in s390_replace_asce()
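
s390_unlist_old_asce()/s390_replace_asce() swap in a fresh copy of the top-level CRST table so the old ASCE can no longer be used to walk guest pages, which is needed when tearing down protected (PV) guests. A minimal caller-side fragment, assuming a function that can return an errno:

    int rc;

    rc = s390_replace_asce(g);      /* copy top-level table, publish the new ASCE */
    if (rc)
            return rc;              /* negative errno, e.g. -ENOMEM */
    /* g->asce, g->table and g->mm->context.gmap_asce now refer to the copy;
     * the old top-level table was unlisted by s390_unlist_old_asce() above */
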