Lines Matching refs:folio

136 int uv_destroy_folio(struct folio *folio) in uv_destroy_folio() argument
141 if (unlikely(folio_test_large(folio))) in uv_destroy_folio()
144 folio_get(folio); in uv_destroy_folio()
145 rc = uv_destroy(folio_to_phys(folio)); in uv_destroy_folio()
147 clear_bit(PG_arch_1, &folio->flags); in uv_destroy_folio()
148 folio_put(folio); in uv_destroy_folio()
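
The hits above are the refs:folio lines inside uv_destroy_folio(). A minimal sketch of how they fit together; the "if (!rc)" guard around clear_bit() and the return values are assumptions, since the lines between the hits are not part of this listing:

int uv_destroy_folio(struct folio *folio)
{
	int rc;

	/* Large folios are never secure, so there is nothing to destroy. */
	if (unlikely(folio_test_large(folio)))
		return 0;

	folio_get(folio);                        /* hold the folio across the UV call */
	rc = uv_destroy(folio_to_phys(folio));   /* ask the ultravisor to destroy it */
	if (!rc)
		clear_bit(PG_arch_1, &folio->flags);  /* no longer possibly secure */
	folio_put(folio);
	return rc;
}
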
185 int uv_convert_from_secure_folio(struct folio *folio) in uv_convert_from_secure_folio() argument
190 if (unlikely(folio_test_large(folio))) in uv_convert_from_secure_folio()
193 folio_get(folio); in uv_convert_from_secure_folio()
194 rc = uv_convert_from_secure(folio_to_phys(folio)); in uv_convert_from_secure_folio()
196 clear_bit(PG_arch_1, &folio->flags); in uv_convert_from_secure_folio()
197 folio_put(folio); in uv_convert_from_secure_folio()
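
uv_convert_from_secure_folio() follows the same pattern, with uv_convert_from_secure() performing the export. Sketch under the same assumptions (the guard before clear_bit() and the return values are not in the listing):

int uv_convert_from_secure_folio(struct folio *folio)
{
	int rc;

	if (unlikely(folio_test_large(folio)))
		return 0;                        /* large folios are never secure */

	folio_get(folio);
	rc = uv_convert_from_secure(folio_to_phys(folio));  /* export from secure memory */
	if (!rc)
		clear_bit(PG_arch_1, &folio->flags);
	folio_put(folio);
	return rc;
}
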
250 static int expected_folio_refs(struct folio *folio) in expected_folio_refs() argument
254 res = folio_mapcount(folio); in expected_folio_refs()
255 if (folio_test_swapcache(folio)) { in expected_folio_refs()
257 } else if (folio_mapping(folio)) { in expected_folio_refs()
259 if (folio->private) in expected_folio_refs()
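
expected_folio_refs() counts how many references a folio should currently hold so that the refcount can later be frozen before the Ultravisor call. Sketch reconstructed from the listed hits; the exact placement of the res++ increments between them is assumed:

static int expected_folio_refs(struct folio *folio)
{
	int res;

	res = folio_mapcount(folio);              /* one reference per mapping */
	if (folio_test_swapcache(folio)) {
		res++;                            /* swap cache holds a reference */
	} else if (folio_mapping(folio)) {
		res++;                            /* page cache holds a reference */
		if (folio->private)
			res++;                    /* private data holds a reference */
	}
	return res;
}
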
283 static int __make_folio_secure(struct folio *folio, struct uv_cb_header *uvcb) in __make_folio_secure() argument
287 if (folio_test_writeback(folio)) in __make_folio_secure()
289 expected = expected_folio_refs(folio) + 1; in __make_folio_secure()
290 if (!folio_ref_freeze(folio, expected)) in __make_folio_secure()
292 set_bit(PG_arch_1, &folio->flags); in __make_folio_secure()
302 folio_ref_unfreeze(folio, expected); in __make_folio_secure()
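
__make_folio_secure() freezes the folio refcount, marks the folio as possibly secure, and issues the Ultravisor call described by uvcb. In the sketch below, the __uv_call() invocation, the -EBUSY returns, and the final error mapping are assumptions; only the writeback check, the freeze/unfreeze pair, and the PG_arch_1 set appear in the listing:

static int __make_folio_secure(struct folio *folio, struct uv_cb_header *uvcb)
{
	int expected, cc;

	if (folio_test_writeback(folio))
		return -EBUSY;                    /* assumed return value */

	/* Freeze the refcount so nobody can grab the folio during the UV call. */
	expected = expected_folio_refs(folio) + 1;    /* +1 for our own reference */
	if (!folio_ref_freeze(folio, expected))
		return -EBUSY;                    /* assumed return value */

	set_bit(PG_arch_1, &folio->flags);        /* mark as possibly secure */
	cc = __uv_call(0, (u64)uvcb);             /* assumed: issue the Ultravisor call */
	folio_ref_unfreeze(folio, expected);

	/* Error-code mapping simplified; the real function translates cc and uvcb->rc. */
	return cc ? -EAGAIN : 0;
}
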
314 static int make_folio_secure(struct mm_struct *mm, struct folio *folio, struct uv_cb_header *uvcb) in make_folio_secure() argument
318 if (!folio_trylock(folio)) in make_folio_secure()
321 uv_convert_from_secure(folio_to_phys(folio)); in make_folio_secure()
322 rc = __make_folio_secure(folio, uvcb); in make_folio_secure()
323 folio_unlock(folio); in make_folio_secure()
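
make_folio_secure() is the locked wrapper around __make_folio_secure(). In the sketch, the should_export_before_import() guard is assumed; the listing only shows the uv_convert_from_secure() call itself:

static int make_folio_secure(struct mm_struct *mm, struct folio *folio,
			     struct uv_cb_header *uvcb)
{
	int rc;

	if (!folio_trylock(folio))
		return -EAGAIN;                   /* caller retries instead of sleeping */
	if (should_export_before_import(uvcb, mm))    /* guard assumed; not in the listing */
		uv_convert_from_secure(folio_to_phys(folio));
	rc = __make_folio_secure(folio, uvcb);
	folio_unlock(folio);
	return rc;
}
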
341 static int s390_wiggle_split_folio(struct mm_struct *mm, struct folio *folio) in s390_wiggle_split_folio() argument
346 folio_wait_writeback(folio); in s390_wiggle_split_folio()
349 if (!folio_test_large(folio)) in s390_wiggle_split_folio()
357 folio_lock(folio); in s390_wiggle_split_folio()
358 rc = split_folio(folio); in s390_wiggle_split_folio()
360 folio_unlock(folio); in s390_wiggle_split_folio()
386 if (!folio_test_dirty(folio) || folio_test_anon(folio) || in s390_wiggle_split_folio()
387 !folio->mapping || !mapping_can_writeback(folio->mapping)) { in s390_wiggle_split_folio()
388 folio_unlock(folio); in s390_wiggle_split_folio()
399 mapping = folio->mapping; in s390_wiggle_split_folio()
400 lstart = folio_pos(folio); in s390_wiggle_split_folio()
401 lend = lstart + folio_size(folio) - 1; in s390_wiggle_split_folio()
403 folio_unlock(folio); in s390_wiggle_split_folio()
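
s390_wiggle_split_folio() tries to get a folio into a state where it can be made secure: wait for writeback, split it if it is large, and, if the split fails because a dirty pagecache folio holds extra references, write it back so a later attempt can succeed. The sketch below is approximate: the -EBUSY/-EAGAIN handling and the filemap_write_and_wait_range() call are assumptions, and several branches between the listed lines are elided:

static int s390_wiggle_split_folio(struct mm_struct *mm, struct folio *folio)
{
	struct address_space *mapping;
	loff_t lstart, lend;
	int rc;

	/* mm is unused in this simplified sketch; kept for signature parity. */
	folio_wait_writeback(folio);              /* let pending writeback finish */

	if (!folio_test_large(folio))
		return 0;                         /* assumed: nothing to split */

	folio_lock(folio);
	rc = split_folio(folio);                  /* try to split the large folio */
	if (rc != -EBUSY) {                       /* assumed: only retry the -EBUSY case */
		folio_unlock(folio);
		return rc;
	}

	/*
	 * The split failed due to extra references. If this is not a dirty,
	 * writeback-capable pagecache folio, there is nothing more to do here
	 * (condition taken from the listed lines; return value assumed).
	 */
	if (!folio_test_dirty(folio) || folio_test_anon(folio) ||
	    !folio->mapping || !mapping_can_writeback(folio->mapping)) {
		folio_unlock(folio);
		return -EAGAIN;
	}

	/* Otherwise write the folio's byte range back so the references go away. */
	mapping = folio->mapping;
	lstart = folio_pos(folio);
	lend = lstart + folio_size(folio) - 1;
	folio_unlock(folio);
	filemap_write_and_wait_range(mapping, lstart, lend);  /* assumed helper */

	return -EAGAIN;                           /* assumed: caller retries */
}
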
418 struct folio *folio; in make_hva_secure() local
427 folio = folio_walk_start(&fw, vma, hva, 0); in make_hva_secure()
428 if (!folio) { in make_hva_secure()
433 folio_get(folio); in make_hva_secure()
442 if (folio_test_hugetlb(folio)) in make_hva_secure()
444 else if (folio_test_large(folio)) in make_hva_secure()
449 rc = make_folio_secure(mm, folio, uvcb); in make_hva_secure()
454 rc = s390_wiggle_split_folio(mm, folio); in make_hva_secure()
458 folio_put(folio); in make_hva_secure()
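
make_hva_secure() resolves a host virtual address to a folio via a folio walk and tries to make it secure, falling back to s390_wiggle_split_folio() when the folio is too large or busy. The signature, the vma_lookup() step, the specific error codes, and the elided pte checks are assumptions:

int make_hva_secure(struct mm_struct *mm, unsigned long hva, struct uv_cb_header *uvcb)
{
	struct vm_area_struct *vma;
	struct folio_walk fw;
	struct folio *folio;
	int rc;

	mmap_read_lock(mm);
	vma = vma_lookup(mm, hva);                /* assumed lookup before the walk */
	if (!vma) {
		mmap_read_unlock(mm);
		return -EFAULT;                   /* assumed error code */
	}
	folio = folio_walk_start(&fw, vma, hva, 0);
	if (!folio) {
		mmap_read_unlock(mm);
		return -ENXIO;                    /* assumed error code */
	}

	folio_get(folio);                         /* keep the folio past the walk end */
	if (folio_test_hugetlb(folio))
		rc = -EFAULT;                     /* hugetlb cannot become secure (assumed code) */
	else if (folio_test_large(folio))
		rc = -E2BIG;                      /* must be split first (assumed code) */
	else
		rc = make_folio_secure(mm, folio, uvcb);   /* pte checks elided */
	folio_walk_end(&fw, vma);
	mmap_read_unlock(mm);

	/* If the folio was too big or busy, try to split/drain it and retry later. */
	if (rc == -E2BIG || rc == -EBUSY)
		rc = s390_wiggle_split_folio(mm, folio);
	folio_put(folio);

	return rc;
}
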
470 int arch_make_folio_accessible(struct folio *folio) in arch_make_folio_accessible() argument
475 if (unlikely(folio_test_large(folio))) in arch_make_folio_accessible()
486 if (!test_bit(PG_arch_1, &folio->flags)) in arch_make_folio_accessible()
489 rc = uv_pin_shared(folio_to_phys(folio)); in arch_make_folio_accessible()
491 clear_bit(PG_arch_1, &folio->flags); in arch_make_folio_accessible()
495 rc = uv_convert_from_secure(folio_to_phys(folio)); in arch_make_folio_accessible()
497 clear_bit(PG_arch_1, &folio->flags); in arch_make_folio_accessible()
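
arch_make_folio_accessible() is the export path: if a small folio might be secure (PG_arch_1 set), first try uv_pin_shared(), then fall back to a full export with uv_convert_from_secure(). Sketch; the early returns and clearing PG_arch_1 only on success follow the listed hits plus assumptions about the surrounding branches:

int arch_make_folio_accessible(struct folio *folio)
{
	int rc;

	/* Large folios are never secure, so they are always accessible. */
	if (unlikely(folio_test_large(folio)))
		return 0;

	/* PG_arch_1 clear means the folio cannot be secure: nothing to do. */
	if (!test_bit(PG_arch_1, &folio->flags))
		return 0;

	/* First try to pin the page shared; cheap if the guest shares it. */
	rc = uv_pin_shared(folio_to_phys(folio));
	if (!rc) {
		clear_bit(PG_arch_1, &folio->flags);
		return 0;
	}

	/* Otherwise export the page from secure memory. */
	rc = uv_convert_from_secure(folio_to_phys(folio));
	if (!rc)
		clear_bit(PG_arch_1, &folio->flags);
	return rc;
}
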