Searched refs: xas (Results 1 – 25 of 44), sorted by relevance

/linux-6.3-rc2/lib/
xarray.c
149 xas->xa_offset = get_offset(xas->xa_index, xas->xa_node); in xas_set_offset()
1168 xas->xa_node = xa_parent(xas->xa, xas->xa_node); in __xas_prev()
1174 entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset); in __xas_prev()
1207 xas->xa_node = xa_parent(xas->xa, xas->xa_node); in __xas_next()
1213 entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset); in __xas_next()
1265 xas->xa_node = xa_parent(xas->xa, xas->xa_node); in xas_find()
1269 entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset); in xas_find()
1335 xas->xa_offset = xas->xa_index >> xas->xa_node->shift; in xas_find_marked()
1341 xas->xa_node = xa_parent(xas->xa, xas->xa_node); in xas_find_marked()
1368 entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset); in xas_find_marked()
[all …]
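
The xas_find()/__xas_next() walkers listed above are the engine behind the xas_for_each() iterator. As a hedged illustration (not code from xarray.c), a typical read-side walk over an XArray of pointers looks like the sketch below; count_entries() and its xa parameter are hypothetical placeholders.

#include <linux/limits.h>
#include <linux/rcupdate.h>
#include <linux/xarray.h>

/* Hypothetical helper: count the present entries in an XArray of pointers. */
static unsigned long count_entries(struct xarray *xa)
{
        XA_STATE(xas, xa, 0);                   /* cursor starting at index 0 */
        void *entry;
        unsigned long n = 0;

        rcu_read_lock();                        /* readers need only RCU, not the xa_lock */
        xas_for_each(&xas, entry, ULONG_MAX) {
                if (xas_retry(&xas, entry))     /* internal retry entry: reread the slot */
                        continue;
                n++;
        }
        rcu_read_unlock();
        return n;
}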
test_xarray.c
78 xas_lock(&xas); in xa_store_order()
80 xas_unlock(&xas); in xa_store_order()
116 xas_reset(&xas); in check_xas_retry()
131 xas_lock(&xas); in check_xas_retry()
132 xas_set(&xas, 0); in check_xas_retry()
139 xas_store(&xas, xa_mk_index(xas.xa_index)); in check_xas_retry()
263 xas_lock(&xas); in check_xa_mark_2()
264 xas_load(&xas); in check_xa_mark_2()
281 xas_lock(&xas); in check_xa_mark_2()
339 xas_lock(&xas); in check_xa_shrink()
[all …]
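
The selftest lines above exercise the advanced xas_* API directly. For contrast, and only as a hedged aside, a single-slot store of a tagged integer can go through the plain xa_* wrappers, which take the lock and allocate nodes themselves; remember_number() and its arguments are made-up names.

#include <linux/printk.h>
#include <linux/xarray.h>

/* Hypothetical helper: store the integer v at index as a value entry. */
static int remember_number(struct xarray *xa, unsigned long index, unsigned long v)
{
        void *old = xa_store(xa, index, xa_mk_value(v), GFP_KERNEL);

        if (xa_is_err(old))                     /* e.g. -ENOMEM from node allocation */
                return xa_err(old);
        if (xa_is_value(xa_load(xa, index)))
                pr_debug("stored %lu\n", xa_to_value(xa_load(xa, index)));
        return 0;
}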
idr.c
395 xas_lock_irqsave(&xas, flags); in ida_alloc_range()
422 xas_store(&xas, bitmap); in ida_alloc_range()
423 if (xas_error(&xas)) { in ida_alloc_range()
450 xas_store(&xas, bitmap); in ida_alloc_range()
454 if (xas_nomem(&xas, gfp)) { in ida_alloc_range()
461 if (xas_error(&xas)) in ida_alloc_range()
462 return xas_error(&xas); in ida_alloc_range()
498 bitmap = xas_load(&xas); in ida_free()
518 xas_store(&xas, NULL); in ida_free()
543 XA_STATE(xas, &ida->xa, 0); in ida_destroy()
[all …]
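
The ida_alloc_range() lines above show the standard advanced-API allocation loop: try the store under the lock, and if the XArray could not allocate a node, let xas_nomem() allocate one with the caller's gfp flags and retry. A minimal sketch of that loop, with hypothetical names (insert_item, xa, index, item), follows.

#include <linux/xarray.h>

/* Hypothetical helper: store item at index, allocating nodes as needed. */
static int insert_item(struct xarray *xa, unsigned long index, void *item, gfp_t gfp)
{
        XA_STATE(xas, xa, index);

        do {
                xas_lock(&xas);
                xas_store(&xas, item);
                xas_unlock(&xas);
        } while (xas_nomem(&xas, gfp)); /* true: memory was allocated, retry the store */

        return xas_error(&xas);         /* 0 on success, negative errno on failure */
}

Because xas_nomem() allocates outside the lock, the store itself never sleeps while the xa_lock is held.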
iov_iter.c
85 XA_STATE(xas, i->xarray, index); \
89 xas_for_each(&xas, folio, ULONG_MAX) { \
92 if (xas_retry(&xas, folio)) \
1339 XA_STATE(xas, xa, index); in iter_xarray_populate_pages()
1344 for (page = xas_load(&xas); page; page = xas_next(&xas)) { in iter_xarray_populate_pages()
1345 if (xas_retry(&xas, page)) in iter_xarray_populate_pages()
1350 xas_reset(&xas); in iter_xarray_populate_pages()
1939 XA_STATE(xas, i->xarray, index); in iov_iter_extract_xarray_pages()
1950 for (page = xas_load(&xas); page; page = xas_next(&xas)) { in iov_iter_extract_xarray_pages()
1951 if (xas_retry(&xas, page)) in iov_iter_extract_xarray_pages()
[all …]
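
iter_xarray_populate_pages() above walks consecutive indices and stops at the first hole. A hedged sketch of that loop shape, under RCU and with hypothetical names (count_contig, xa, start), looks like this.

#include <linux/rcupdate.h>
#include <linux/xarray.h>

/* Hypothetical helper: count how many consecutive slots from start are occupied. */
static unsigned int count_contig(struct xarray *xa, unsigned long start)
{
        XA_STATE(xas, xa, start);
        unsigned int n = 0;
        void *entry;

        rcu_read_lock();
        /* the loop ends at the first empty slot */
        for (entry = xas_load(&xas); entry; entry = xas_next(&xas)) {
                if (xas_retry(&xas, entry))     /* transient retry entry: look again */
                        continue;
                n++;
        }
        rcu_read_unlock();
        return n;
}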
/linux-6.3-rc2/include/linux/
xarray.h
1389 #define xas_trylock(xas) xa_trylock((xas)->xa)
1390 #define xas_lock(xas) xa_lock((xas)->xa)
1391 #define xas_unlock(xas) xa_unlock((xas)->xa)
1392 #define xas_lock_bh(xas) xa_lock_bh((xas)->xa)
1393 #define xas_unlock_bh(xas) xa_unlock_bh((xas)->xa)
1394 #define xas_lock_irq(xas) xa_lock_irq((xas)->xa)
1395 #define xas_unlock_irq(xas) xa_unlock_irq((xas)->xa)
1456 return xas_valid(xas) && xas->xa_node; in xas_is_node()
1676 xas->xa_offset != (xas->xa_index & XA_CHUNK_MASK))) in xas_next_entry()
1836 return xa_entry(xas->xa, node, xas->xa_offset); in xas_prev()
[all …]
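
The xas_lock family above simply takes the xa_lock of the backing array in the flavour the calling context needs (plain, _bh, _irq). A hedged example of choosing the irq-disabling variant for an array that is also touched from interrupt context; mark_entry_dirty(), xa and index are invented names.

#include <linux/xarray.h>

/* Hypothetical helper: set XA_MARK_0 on an occupied slot. */
static void mark_entry_dirty(struct xarray *xa, unsigned long index)
{
        XA_STATE(xas, xa, index);

        xas_lock_irq(&xas);             /* xa_lock_irq() on xas->xa under the hood */
        if (xas_load(&xas))             /* only mark a slot that holds an entry */
                xas_set_mark(&xas, XA_MARK_0);
        xas_unlock_irq(&xas);
}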
/linux-6.3-rc2/fs/
dax.c
170 key->xa = xas->xa; in dax_entry_waitqueue()
245 xas_reset(xas); in get_unlocked_entry()
248 xas_lock_irq(xas); in get_unlocked_entry()
295 xas_reset(xas); in dax_unlock_entry()
296 xas_lock_irq(xas); in dax_unlock_entry()
596 xas_lock_irq(xas); in grab_mapping_entry()
633 xas_reset(xas); in grab_mapping_entry()
743 xas_pause(&xas); in dax_layout_busy_page_range()
879 xas_reset(xas); in dax_insert_entry()
993 xas_reset(xas); in dax_writeback_one()
[all …]
/linux-6.3-rc2/tools/testing/radix-tree/
multiorder.c
22 xas_lock(&xas); in item_insert_order()
23 xas_store(&xas, item); in item_insert_order()
24 xas_unlock(&xas); in item_insert_order()
27 if (!xas_error(&xas)) in item_insert_order()
31 return xas_error(&xas); in item_insert_order()
36 XA_STATE(xas, xa, 0); in multiorder_iteration()
56 xas_set(&xas, j); in multiorder_iteration()
76 XA_STATE(xas, xa, 0); in multiorder_tagged_iteration()
107 xas_set(&xas, j); in multiorder_tagged_iteration()
135 xas_set(&xas, j); in multiorder_tagged_iteration()
[all …]
iteration_check.c
23 XA_STATE(xas, xa, index); in my_item_insert()
28 xas_lock(&xas); in my_item_insert()
34 xas_store(&xas, item); in my_item_insert()
35 xas_set_mark(&xas, TAG); in my_item_insert()
38 xas_unlock(&xas); in my_item_insert()
69 XA_STATE(xas, &array, 0); in tagged_iteration_fn()
75 xas_set(&xas, 0); in tagged_iteration_fn()
82 xas_pause(&xas); in tagged_iteration_fn()
102 XA_STATE(xas, &array, 0); in untagged_iteration_fn()
108 xas_set(&xas, 0); in untagged_iteration_fn()
[all …]
test.c
176 XA_STATE(xas, xa, start); in tag_tagged_items()
183 xas_lock_irq(&xas); in tag_tagged_items()
185 xas_set_mark(&xas, thentag); in tag_tagged_items()
189 xas_pause(&xas); in tag_tagged_items()
190 xas_unlock_irq(&xas); in tag_tagged_items()
192 xas_lock_irq(&xas); in tag_tagged_items()
194 xas_unlock_irq(&xas); in tag_tagged_items()
257 XA_STATE(xas, xa, 0); in item_kill_tree()
260 xas_for_each(&xas, entry, ULONG_MAX) { in item_kill_tree()
262 item_free(entry, xas.xa_index); in item_kill_tree()
[all …]
iteration_check_2.c
15 XA_STATE(xas, arg, 0); in iterator()
21 xas_set(&xas, 0); in iterator()
23 xas_for_each_marked(&xas, entry, ULONG_MAX, XA_MARK_0) in iterator()
26 assert(xas.xa_index >= 100); in iterator()
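iteration_check_2.c iterates only the marked entries. A hedged read-side sketch of xas_for_each_marked(), with count_marked() and xa as placeholder names:

#include <linux/limits.h>
#include <linux/rcupdate.h>
#include <linux/xarray.h>

/* Hypothetical helper: count entries carrying XA_MARK_0. */
static unsigned long count_marked(struct xarray *xa)
{
        XA_STATE(xas, xa, 0);
        void *entry;
        unsigned long n = 0;

        rcu_read_lock();
        xas_for_each_marked(&xas, entry, ULONG_MAX, XA_MARK_0) {
                if (xas_retry(&xas, entry))
                        continue;
                n++;
        }
        rcu_read_unlock();
        return n;
}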
regression1.c
82 XA_STATE(xas, &mt_tree, start); in find_get_pages()
87 xas_for_each(&xas, page, ULONG_MAX) { in find_get_pages()
88 if (xas_retry(&xas, page)) in find_get_pages()
99 if (unlikely(page != xas_reload(&xas))) in find_get_pages()
108 xas_reset(&xas); in find_get_pages()
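
regression1.c shows the lockless lookup idiom: find an object under RCU, take a reference, then confirm with xas_reload() that the slot still points at it, resetting the cursor and retrying if not. A hedged sketch with an invented refcounted type (struct item) and helper name (find_get_item) follows; a real caller would use the object's own get/put helpers.

#include <linux/rcupdate.h>
#include <linux/refcount.h>
#include <linux/xarray.h>

/* Hypothetical refcounted object stored in the XArray. */
struct item {
        refcount_t ref;
};

/* Hypothetical helper: return a referenced item at index, or NULL. */
static struct item *find_get_item(struct xarray *xa, unsigned long index)
{
        XA_STATE(xas, xa, index);
        struct item *it;

        rcu_read_lock();
repeat:
        xas_reset(&xas);                /* forget cached position, walk from the top */
        it = xas_load(&xas);
        if (xas_retry(&xas, it))
                goto repeat;
        if (!it)
                goto out;
        if (!refcount_inc_not_zero(&it->ref))
                goto repeat;            /* object is being freed, look again */
        if (unlikely(it != xas_reload(&xas))) {
                refcount_dec(&it->ref); /* raced with removal; real code calls its put helper */
                goto repeat;
        }
out:
        rcu_read_unlock();
        return it;
}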
/linux-6.3-rc2/mm/
memfd.c
40 xas_lock_irq(xas); in memfd_tag_pins()
58 xas_pause(xas); in memfd_tag_pins()
59 xas_unlock_irq(xas); in memfd_tag_pins()
61 xas_lock_irq(xas); in memfd_tag_pins()
63 xas_unlock_irq(xas); in memfd_tag_pins()
81 memfd_tag_pins(&xas); in memfd_wait_for_pins()
96 xas_set(&xas, 0); in memfd_wait_for_pins()
97 xas_lock_irq(&xas); in memfd_wait_for_pins()
126 xas_pause(&xas); in memfd_wait_for_pins()
129 xas_lock_irq(&xas); in memfd_wait_for_pins()
[all …]
filemap.c
143 xas_init_marks(&xas); in page_cache_delete()
872 unsigned int order = xa_get_order(xas.xa, xas.xa_index); in __filemap_add_folio()
876 xas_split_alloc(&xas, xa_load(xas.xa, xas.xa_index), in __filemap_add_folio()
891 order = xa_get_order(xas.xa, xas.xa_index); in __filemap_add_folio()
1857 xas_reset(&xas); in mapping_get_entry()
2032 xas_reset(xas); in find_get_entry()
2239 for (folio = xas_load(&xas); folio && xas.xa_index <= end; in filemap_get_folios_contig()
2269 xas_reset(&xas); in filemap_get_folios_contig()
2378 for (folio = xas_load(&xas); folio; folio = xas_next(&xas)) { in filemap_get_read_batch()
2984 xas_pause(xas); in folio_seek_hole_data()
[all …]
list_lru.c
382 xas_lock_irq(&xas); in memcg_destroy_list_lru()
385 xas_store(&xas, NULL); in memcg_destroy_list_lru()
387 xas_unlock_irq(&xas); in memcg_destroy_list_lru()
515 xas_set(&xas, index); in memcg_list_lru_alloc()
517 if (unlikely(index < 0 || xas_error(&xas) || xas_load(&xas))) { in memcg_list_lru_alloc()
520 xas_store(&xas, mlru); in memcg_list_lru_alloc()
524 xas_set_err(&xas, 0); in memcg_list_lru_alloc()
535 else if (!xas_error(&xas) && index != xas.xa_index) in memcg_list_lru_alloc()
542 if (xas.xa_alloc) in memcg_list_lru_alloc()
543 xas_nomem(&xas, gfp); in memcg_list_lru_alloc()
[all …]
swap_state.c
107 xas_lock_irq(&xas); in add_to_swap_cache()
108 xas_create_range(&xas); in add_to_swap_cache()
109 if (xas_error(&xas)) in add_to_swap_cache()
113 old = xas_load(&xas); in add_to_swap_cache()
119 xas_store(&xas, folio); in add_to_swap_cache()
120 xas_next(&xas); in add_to_swap_cache()
126 xas_unlock_irq(&xas); in add_to_swap_cache()
129 if (!xas_error(&xas)) in add_to_swap_cache()
134 return xas_error(&xas); in add_to_swap_cache()
160 xas_next(&xas); in __delete_from_swap_cache()
[all …]
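
add_to_swap_cache() above stores one folio into a run of consecutive slots: describe the range with XA_STATE_ORDER(), preallocate the nodes with xas_create_range(), then store slot by slot with xas_next(). A hedged sketch of that shape, with invented names (store_range, xa, entry) and no claim to match the real function beyond its structure:

#include <linux/xarray.h>

/* Hypothetical helper: store entry at 2^order naturally aligned slots from index. */
static int store_range(struct xarray *xa, unsigned long index,
                       unsigned int order, void *entry, gfp_t gfp)
{
        XA_STATE_ORDER(xas, xa, index, order);  /* cursor describing the whole range */
        unsigned long i, nr = 1UL << order;

        do {
                xas_lock_irq(&xas);
                xas_create_range(&xas);         /* make sure every slot can be stored to */
                if (xas_error(&xas))
                        goto unlock;
                for (i = 0; i < nr; i++) {
                        xas_store(&xas, entry);
                        xas_next(&xas);         /* advance to the next index */
                }
unlock:
                xas_unlock_irq(&xas);
        } while (xas_nomem(&xas, gfp));         /* allocate memory and retry on -ENOMEM */

        return xas_error(&xas);
}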
khugepaged.c
1795 xas_lock_irq(&xas); in collapse_file()
1797 if (!xas_error(&xas)) in collapse_file()
1799 xas_unlock_irq(&xas); in collapse_file()
1818 xas_set(&xas, start); in collapse_file()
1969 xas_lock_irq(&xas); in collapse_file()
1970 xas_set(&xas, index); in collapse_file()
2033 xas_unlock_irq(&xas); in collapse_file()
2092 xas_lock_irq(&xas); in collapse_file()
2117 xas_pause(&xas); in collapse_file()
2121 xas_lock_irq(&xas); in collapse_file()
[all …]
page-writeback.c
2344 XA_STATE(xas, &mapping->i_pages, start); in tag_pages_for_writeback()
2348 xas_lock_irq(&xas); in tag_pages_for_writeback()
2350 xas_set_mark(&xas, PAGECACHE_TAG_TOWRITE); in tag_pages_for_writeback()
2354 xas_pause(&xas); in tag_pages_for_writeback()
2355 xas_unlock_irq(&xas); in tag_pages_for_writeback()
2357 xas_lock_irq(&xas); in tag_pages_for_writeback()
2359 xas_unlock_irq(&xas); in tag_pages_for_writeback()
3061 xas_lock_irqsave(&xas, flags); in __folio_start_writeback()
3062 xas_load(&xas); in __folio_start_writeback()
3088 xas_clear_mark(&xas, PAGECACHE_TAG_DIRTY); in __folio_start_writeback()
[all …]
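
tag_pages_for_writeback() above shows how a long, mark-setting walk periodically parks the cursor with xas_pause() so the lock can be dropped and the CPU rescheduled. A hedged sketch of the same shape using generic marks; retag_range() and the BATCH constant are invented:

#include <linux/sched.h>
#include <linux/xarray.h>

#define BATCH 4096                              /* hypothetical: entries handled per lock hold */

/* Hypothetical helper: copy XA_MARK_0 to XA_MARK_1 over [start, end]. */
static void retag_range(struct xarray *xa, unsigned long start, unsigned long end)
{
        XA_STATE(xas, xa, start);
        unsigned int tagged = 0;
        void *entry;

        xas_lock_irq(&xas);
        xas_for_each_marked(&xas, entry, end, XA_MARK_0) {
                xas_set_mark(&xas, XA_MARK_1);
                if (++tagged % BATCH)
                        continue;
                xas_pause(&xas);                /* remember the position so the walk can resume */
                xas_unlock_irq(&xas);
                cond_resched();
                xas_lock_irq(&xas);
        }
        xas_unlock_irq(&xas);
}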
/linux-6.3-rc2/fs/cachefiles/
ondemand.c
14 XA_STATE(xas, &cache->reqs, 0); in cachefiles_ondemand_fd_release()
28 xas_store(&xas, NULL); in cachefiles_ondemand_fd_release()
255 xas_set(&xas, 0); in cachefiles_ondemand_daemon_read()
275 id = xas.xa_index; in cachefiles_ondemand_daemon_read()
355 xas_lock(&xas); in cachefiles_ondemand_send_req()
358 xas_unlock(&xas); in cachefiles_ondemand_send_req()
368 xas_unlock(&xas); in cachefiles_ondemand_send_req()
373 xas.xa_index = 0; in cachefiles_ondemand_send_req()
377 xas_store(&xas, req); in cachefiles_ondemand_send_req()
380 xas_unlock(&xas); in cachefiles_ondemand_send_req()
[all …]
/linux-6.3-rc2/drivers/infiniband/sw/rxe/
rxe_mr.c
111 xas_lock(&xas); in rxe_mr_fill_pages_from_sgt()
121 xas_store(&xas, page); in rxe_mr_fill_pages_from_sgt()
122 if (xas_error(&xas)) in rxe_mr_fill_pages_from_sgt()
124 xas_next(&xas); in rxe_mr_fill_pages_from_sgt()
128 xas_unlock(&xas); in rxe_mr_fill_pages_from_sgt()
131 return xas_error(&xas); in rxe_mr_fill_pages_from_sgt()
173 xas_lock(&xas); in rxe_mr_alloc()
176 if (xas_error(&xas)) in rxe_mr_alloc()
178 xas_next(&xas); in rxe_mr_alloc()
181 xas_unlock(&xas); in rxe_mr_alloc()
[all …]
/linux-6.3-rc2/drivers/infiniband/core/
ib_core_uverbs.c
268 XA_STATE(xas, &ucontext->mmap_xa, min_pgoff); in rdma_user_mmap_entry_insert_range()
294 xas_find_marked(&xas, max_pgoff, XA_FREE_MARK); in rdma_user_mmap_entry_insert_range()
295 if (xas.xa_node == XAS_RESTART) in rdma_user_mmap_entry_insert_range()
298 xa_first = xas.xa_index; in rdma_user_mmap_entry_insert_range()
308 xas_next_entry(&xas, xa_last - 1); in rdma_user_mmap_entry_insert_range()
309 if (xas.xa_node == XAS_BOUNDS || xas.xa_index >= xa_last) in rdma_user_mmap_entry_insert_range()
/linux-6.3-rc2/drivers/iommu/iommufd/
pages.c
547 xas_lock(&xas); in batch_from_xarray_clear()
555 xas_store(&xas, NULL); in batch_from_xarray_clear()
560 xas_unlock(&xas); in batch_from_xarray_clear()
569 xas_lock(&xas); in clear_xarray()
572 xas_unlock(&xas); in clear_xarray()
585 xas_lock(&xas); in pages_to_xarray()
590 xas_unlock(&xas); in pages_to_xarray()
597 if (xas_error(&xas)) in pages_to_xarray()
601 xas_next(&xas); in pages_to_xarray()
603 xas_unlock(&xas); in pages_to_xarray()
[all …]
/linux-6.3-rc2/arch/x86/kernel/cpu/sgx/
encl.c
525 xas_lock(&xas); in sgx_encl_may_map()
534 xas_pause(&xas); in sgx_encl_may_map()
535 xas_unlock(&xas); in sgx_encl_may_map()
541 xas_lock(&xas); in sgx_encl_may_map()
544 xas_unlock(&xas); in sgx_encl_may_map()
690 xas_lock(&xas); in sgx_encl_release()
691 xas_for_each(&xas, entry, max_page_index) { in sgx_encl_release()
711 xas_pause(&xas); in sgx_encl_release()
712 xas_unlock(&xas); in sgx_encl_release()
716 xas_lock(&xas); in sgx_encl_release()
[all …]
/linux-6.3-rc2/fs/afs/
write.c
296 XA_STATE(xas, &mapping->i_pages, start / PAGE_SIZE); in afs_pages_written_back()
304 xas_for_each(&xas, folio, end) { in afs_pages_written_back()
478 XA_STATE(xas, &mapping->i_pages, index); in afs_extend_writeback()
488 xas_for_each(&xas, folio, ULONG_MAX) { in afs_extend_writeback()
490 if (xas_retry(&xas, folio)) in afs_extend_writeback()
498 xas_reset(&xas); in afs_extend_writeback()
503 if (unlikely(folio != xas_reload(&xas))) { in afs_extend_writeback()
545 xas_pause(&xas); in afs_extend_writeback()
/linux-6.3-rc2/fs/netfs/
buffered_read.c
25 XA_STATE(xas, &rreq->mapping->i_pages, start_page); in netfs_rreq_unlock_folios()
47 xas_for_each(&xas, folio, last_page) { in netfs_rreq_unlock_folios()
51 if (xas_retry(&xas, folio)) in netfs_rreq_unlock_folios()
iterator.c
282 XA_STATE(xas, xa, index); in netfs_extract_xarray_to_sg()
286 xas_for_each(&xas, folio, ULONG_MAX) { in netfs_extract_xarray_to_sg()
287 if (xas_retry(&xas, folio)) in netfs_extract_xarray_to_sg()
