Searched refs:xa (Results 1 – 25 of 44) sorted by relevance

/drivers/infiniband/core/
restrack.c
35 xa_init_flags(&rt[i].xa, XA_FLAGS_ALLOC); in rdma_restrack_init()
50 struct xarray *xa = &dev->res[i].xa; in rdma_restrack_clean() local
52 WARN_ON(!xa_empty(xa)); in rdma_restrack_clean()
53 xa_destroy(xa); in rdma_restrack_clean()
69 XA_STATE(xas, &rt->xa, 0); in rdma_restrack_count()
72 xa_lock(&rt->xa); in rdma_restrack_count()
78 xa_unlock(&rt->xa); in rdma_restrack_count()
251 xa_lock(&rt->xa); in rdma_restrack_get_byid()
252 res = xa_load(&rt->xa, id); in rdma_restrack_get_byid()
255 xa_unlock(&rt->xa); in rdma_restrack_get_byid()
[all …]
counters.c
299 xa_lock(&rt->xa); in rdma_get_counter_auto_mode()
300 xa_for_each(&rt->xa, id, res) { in rdma_get_counter_auto_mode()
314 xa_unlock(&rt->xa); in rdma_get_counter_auto_mode()
412 xa_lock(&rt->xa); in get_running_counters_hwstat_sum()
413 xa_for_each(&rt->xa, id, res) { in get_running_counters_hwstat_sum()
417 xa_unlock(&rt->xa); in get_running_counters_hwstat_sum()
427 xa_lock(&rt->xa); in get_running_counters_hwstat_sum()
431 xa_unlock(&rt->xa); in get_running_counters_hwstat_sum()
restrack.h
19 struct xarray xa; member
nldev.c
809 xa_lock(&rt->xa); in fill_res_srq_qps()
810 xa_for_each(&rt->xa, id, res) { in fill_res_srq_qps()
836 xa_unlock(&rt->xa); in fill_res_srq_qps()
846 xa_unlock(&rt->xa); in fill_res_srq_qps()
954 xa_lock(&rt->xa); in fill_stat_counter_qps()
965 xa_unlock(&rt->xa); in fill_stat_counter_qps()
970 xa_unlock(&rt->xa); in fill_stat_counter_qps()
1626 xa_lock(&rt->xa); in res_get_common_dumpit()
1639 xa_unlock(&rt->xa); in res_get_common_dumpit()
1663 again: xa_lock(&rt->xa); in res_get_common_dumpit()
[all …]
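
Note: the restrack.c and counters.c hits above show the recurring pattern in this directory: an ID-keyed table initialised with XA_FLAGS_ALLOC, with lookups and iteration done under xa_lock(). A minimal sketch of that pattern follows; res_table and res_entry are hypothetical names used only for illustration.

#include <linux/bug.h>
#include <linux/xarray.h>

struct res_entry { u32 id; };

struct res_table {
	struct xarray xa;		/* id -> struct res_entry */
};

static void res_table_init(struct res_table *rt)
{
	/* XA_FLAGS_ALLOC lets the XArray assign IDs via xa_alloc() */
	xa_init_flags(&rt->xa, XA_FLAGS_ALLOC);
}

static struct res_entry *res_table_get(struct res_table *rt, u32 id)
{
	struct res_entry *res;

	xa_lock(&rt->xa);		/* keep the entry stable while inspecting it */
	res = xa_load(&rt->xa, id);
	/* a real user takes a reference here before unlocking,
	 * as rdma_restrack_get_byid() does */
	xa_unlock(&rt->xa);
	return res;
}

static unsigned int res_table_count(struct res_table *rt)
{
	struct res_entry *res;
	unsigned long id;
	unsigned int n = 0;

	xa_lock(&rt->xa);
	xa_for_each(&rt->xa, id, res)	/* walk every populated index */
		n++;
	xa_unlock(&rt->xa);
	return n;
}

static void res_table_fini(struct res_table *rt)
{
	WARN_ON(!xa_empty(&rt->xa));	/* callers must have removed everything */
	xa_destroy(&rt->xa);
}
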
/drivers/gpu/drm/xe/
xe_reg_sr.c
35 xa_for_each(&sr->xa, reg, entry) in reg_sr_fini()
38 xa_destroy(&sr->xa); in reg_sr_fini()
43 xa_init(&sr->xa); in xe_reg_sr_init()
79 struct xe_reg_sr_entry *pentry = xa_load(&sr->xa, idx); in xe_reg_sr_add()
102 ret = xa_err(xa_store(&sr->xa, idx, pentry, GFP_KERNEL)); in xe_reg_sr_add()
173 if (xa_empty(&sr->xa)) in xe_reg_sr_apply_mmio()
185 xa_for_each(&sr->xa, reg, entry) in xe_reg_sr_apply_mmio()
207 if (!sr->name || xa_empty(&sr->xa)) in xe_reg_sr_dump()
211 xa_for_each(&sr->xa, reg, entry) in xe_reg_sr_dump()
xe_reg_whitelist.c
105 xa_for_each(&sr->xa, reg, entry) { in whitelist_apply_to_hwe()
207 if (!sr->name || xa_empty(&sr->xa)) in xe_reg_whitelist_dump()
211 xa_for_each(&sr->xa, reg, entry) in xe_reg_whitelist_dump()
xe_device.c
97 xa_init_flags(&xef->vm.xa, XA_FLAGS_ALLOC1); in xe_file_open()
100 xa_init_flags(&xef->exec_queue.xa, XA_FLAGS_ALLOC1); in xe_file_open()
119 xa_destroy(&xef->exec_queue.xa); in xe_file_destroy()
121 xa_destroy(&xef->vm.xa); in xe_file_destroy()
171 xa_for_each(&xef->exec_queue.xa, idx, q) { in xe_file_close()
177 xa_for_each(&xef->vm.xa, idx, vm) in xe_file_close()
xe_reg_sr_types.h
23 struct xarray xa; member
xe_device_types.h
679 struct xarray xa; member
691 struct xarray xa; member
xe_gt.c
200 xa_for_each(&sr->xa, idx, entry) { in emit_wa_job()
236 xa_for_each(&sr->xa, idx, entry) { in emit_wa_job()
258 xa_for_each(&sr->xa, idx, entry) { in emit_wa_job()
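
Note: the xe_reg_sr hits above key save/restore entries by register offset, merge duplicates found via xa_load(), store with xa_err(xa_store(...)), walk with xa_for_each(), and bail out early in the apply/dump paths when xa_empty(); xe_device.c additionally uses XA_FLAGS_ALLOC1 so allocated handles start at 1 rather than 0. A rough sketch of the store/merge/teardown flow, with hypothetical reg_table/reg_entry names:

#include <linux/slab.h>
#include <linux/xarray.h>

struct reg_entry { u32 clr_bits, set_bits; };

struct reg_table {
	struct xarray xa;		/* register offset -> struct reg_entry */
};

static void reg_table_init(struct reg_table *rt)
{
	xa_init(&rt->xa);		/* plain index keys, no ID allocation */
}

static int reg_table_add(struct reg_table *rt, unsigned long reg,
			 struct reg_entry *e)
{
	struct reg_entry *old = xa_load(&rt->xa, reg);

	if (old) {			/* merge into the existing entry */
		old->clr_bits |= e->clr_bits;
		old->set_bits |= e->set_bits;
		kfree(e);
		return 0;
	}
	/* xa_store() returns the displaced entry or an xa_err()-encoded error */
	return xa_err(xa_store(&rt->xa, reg, e, GFP_KERNEL));
}

static void reg_table_fini(struct reg_table *rt)
{
	struct reg_entry *e;
	unsigned long reg;

	xa_for_each(&rt->xa, reg, e)	/* free the payloads ... */
		kfree(e);
	xa_destroy(&rt->xa);		/* ... then the XArray's own nodes */
}
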
/drivers/infiniband/sw/rxe/
rxe_pool.c
109 xa_init_flags(&pool->xa, XA_FLAGS_ALLOC); in rxe_pool_init()
116 WARN_ON(!xa_empty(&pool->xa)); in rxe_pool_cleanup()
141 err = xa_alloc_cyclic(&pool->xa, &elem->index, NULL, pool->limit, in __rxe_add_to_pool()
156 struct xarray *xa = &pool->xa; in rxe_pool_get_index() local
160 elem = xa_load(xa, index); in rxe_pool_get_index()
180 struct xarray *xa = &pool->xa; in __rxe_cleanup() local
190 xa_ret = xa_erase(xa, elem->index); in __rxe_cleanup()
253 xa_ret = xa_store(&elem->pool->xa, elem->index, elem, GFP_KERNEL); in __rxe_finalize()
rxe_pool.h
42 struct xarray xa; member
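
Note: rxe_pool.c above reserves an index with a NULL entry via xa_alloc_cyclic() and only publishes the object with xa_store() once it is fully initialised, so a concurrent xa_load() never sees a half-built element. A condensed sketch of that reserve-then-finalize pattern; obj_pool and its field names are hypothetical:

#include <linux/xarray.h>

struct pool_elem { u32 index; };

struct obj_pool {
	struct xarray xa;		/* index -> struct pool_elem */
	struct xa_limit limit;		/* index range handed out by xa_alloc_cyclic() */
	u32 next;			/* cursor for cyclic allocation */
};

static void pool_init(struct obj_pool *pool, u32 max_elem)
{
	xa_init_flags(&pool->xa, XA_FLAGS_ALLOC);
	pool->limit = XA_LIMIT(0, max_elem - 1);
	pool->next = 0;
}

static int pool_reserve(struct obj_pool *pool, struct pool_elem *elem)
{
	/* Reserve an index but leave the slot NULL until the object is ready */
	return xa_alloc_cyclic(&pool->xa, &elem->index, NULL, pool->limit,
			       &pool->next, GFP_KERNEL);
}

static int pool_finalize(struct obj_pool *pool, struct pool_elem *elem)
{
	/* Publish the fully initialised element under its reserved index */
	return xa_err(xa_store(&pool->xa, elem->index, elem, GFP_KERNEL));
}

static void pool_remove(struct obj_pool *pool, struct pool_elem *elem)
{
	xa_erase(&pool->xa, elem->index);
}
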
/drivers/gpu/drm/panthor/
panthor_heap.c
96 struct xarray xa; member
231 heap = xa_erase(&pool->xa, handle); in panthor_heap_destroy_locked()
331 ret = xa_alloc(&pool->xa, &id, heap, in panthor_heap_create()
383 heap = xa_load(&pool->xa, heap_id); in panthor_heap_return_chunk()
447 heap = xa_load(&pool->xa, heap_id); in panthor_heap_grow()
496 xa_destroy(&pool->xa); in panthor_heap_pool_release()
553 xa_init_flags(&pool->xa, XA_FLAGS_ALLOC); in panthor_heap_pool_create()
603 xa_for_each(&pool->xa, i, heap) in panthor_heap_pool_destroy()
panthor_mmu.c
110 struct xarray xa; member
1499 ret = xa_alloc(&pool->xa, &id, vm, in panthor_vm_pool_create_vm()
1548 vm = xa_erase(&pool->xa, handle); in panthor_vm_pool_destroy_vm()
1567 xa_lock(&pool->xa); in panthor_vm_pool_get_vm()
1568 vm = panthor_vm_get(xa_load(&pool->xa, handle)); in panthor_vm_pool_get_vm()
1569 xa_unlock(&pool->xa); in panthor_vm_pool_get_vm()
1591 xa_for_each(&pfile->vms->xa, i, vm) in panthor_vm_pool_destroy()
1594 xa_destroy(&pfile->vms->xa); in panthor_vm_pool_destroy()
1948 xa_lock(&pfile->vms->xa); in panthor_vm_heaps_sizes()
1949 xa_for_each(&pfile->vms->xa, i, vm) { in panthor_vm_heaps_sizes()
[all …]
panthor_sched.c
773 struct xarray xa; member
2880 xa_lock(&gpool->xa); in panthor_fdinfo_gather_group_samples()
2881 xa_for_each(&gpool->xa, i, group) { in panthor_fdinfo_gather_group_samples()
2888 xa_unlock(&gpool->xa); in panthor_fdinfo_gather_group_samples()
3591 xa_lock(&pool->xa); in group_from_handle()
3593 xa_unlock(&pool->xa); in group_from_handle()
3652 xa_for_each(&gpool->xa, i, group) in panthor_group_pool_destroy()
3655 xa_destroy(&gpool->xa); in panthor_group_pool_destroy()
3678 xa_lock(&gpool->xa); in panthor_fdinfo_gather_group_mem_info()
3679 xa_for_each(&gpool->xa, i, group) { in panthor_fdinfo_gather_group_mem_info()
[all …]
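
Note: the panthor heap/VM/group pools above hand userspace a small integer handle from xa_alloc() and, on lookup, take a reference while still holding xa_lock() so the object cannot be destroyed in between. A small sketch of such a handle pool; vm_pool, my_vm and my_vm_get() are hypothetical stand-ins:

#include <linux/limits.h>
#include <linux/xarray.h>

struct my_vm;
struct my_vm *my_vm_get(struct my_vm *vm);	/* hypothetical: takes a ref, tolerates NULL */

struct vm_pool {
	struct xarray xa;		/* u32 handle -> struct my_vm */
};

static void vm_pool_init(struct vm_pool *pool)
{
	xa_init_flags(&pool->xa, XA_FLAGS_ALLOC);
}

static int vm_pool_add(struct vm_pool *pool, struct my_vm *vm, u32 *handle)
{
	/* The XArray picks the handle; 0 is kept free as "no VM" here */
	return xa_alloc(&pool->xa, handle, vm, XA_LIMIT(1, U32_MAX), GFP_KERNEL);
}

static struct my_vm *vm_pool_get(struct vm_pool *pool, u32 handle)
{
	struct my_vm *vm;

	/* Hold xa_lock() so the entry cannot vanish before the ref is taken */
	xa_lock(&pool->xa);
	vm = my_vm_get(xa_load(&pool->xa, handle));
	xa_unlock(&pool->xa);
	return vm;
}

static struct my_vm *vm_pool_remove(struct vm_pool *pool, u32 handle)
{
	return xa_erase(&pool->xa, handle);	/* returns the removed entry */
}
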
/drivers/cxl/core/
cdat.c
390 static void discard_dsmas(struct xarray *xa) in discard_dsmas() argument
395 xa_for_each(xa, index, ent) { in discard_dsmas()
396 xa_erase(xa, index); in discard_dsmas()
399 xa_destroy(xa); in discard_dsmas()
726 static void free_perf_xa(struct xarray *xa) in free_perf_xa() argument
731 if (!xa) in free_perf_xa()
734 xa_for_each(xa, index, ctx) in free_perf_xa()
736 xa_destroy(xa); in free_perf_xa()
737 kfree(xa); in free_perf_xa()
876 xa_for_each(xa, index, ctx) { in cxl_rp_gather_bandwidth()
[all …]
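
Note: cdat.c above tears its tables down by iterating, erasing and freeing each entry, then destroying the XArray itself (free_perf_xa() also kfree()s the XArray because it was allocated dynamically). A generic sketch of that teardown helper, assuming the entries were kmalloc'd:

#include <linux/slab.h>
#include <linux/xarray.h>

static void free_all_entries(struct xarray *xa)
{
	unsigned long index;
	void *ent;

	if (!xa)
		return;

	xa_for_each(xa, index, ent) {	/* safe to erase while iterating */
		xa_erase(xa, index);	/* drop the slot ... */
		kfree(ent);		/* ... then free the entry it held */
	}
	xa_destroy(xa);			/* release the XArray's internal nodes */
}
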
/drivers/gpu/drm/amd/amdgpu/
amdgpu_userq_fence.c
116 static void amdgpu_userq_walk_and_drop_fence_drv(struct xarray *xa) in amdgpu_userq_walk_and_drop_fence_drv() argument
121 if (xa_empty(xa)) in amdgpu_userq_walk_and_drop_fence_drv()
124 xa_lock(xa); in amdgpu_userq_walk_and_drop_fence_drv()
125 xa_for_each(xa, index, fence_drv) { in amdgpu_userq_walk_and_drop_fence_drv()
126 __xa_erase(xa, index); in amdgpu_userq_walk_and_drop_fence_drv()
130 xa_unlock(xa); in amdgpu_userq_walk_and_drop_fence_drv()
180 struct xarray *xa = &adev->userq_xa; in amdgpu_userq_fence_driver_destroy() local
198 xa_lock_irqsave(xa, flags); in amdgpu_userq_fence_driver_destroy()
199 xa_for_each(xa, index, xa_fence_drv) in amdgpu_userq_fence_driver_destroy()
201 __xa_erase(xa, index); in amdgpu_userq_fence_driver_destroy()
[all …]
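
Note: amdgpu_userq_fence.c above walks the fence-driver table and erases entries with __xa_erase(), the variant that expects the caller to already hold the XArray lock; the destroy path does the same walk under xa_lock_irqsave(). A sketch of both walks, with a hypothetical fence_drv_put() reference drop:

#include <linux/xarray.h>

struct fence_drv;
void fence_drv_put(struct fence_drv *drv);	/* hypothetical: drops a reference */

static void drop_all_fence_drvs(struct xarray *xa)
{
	struct fence_drv *drv;
	unsigned long index;

	if (xa_empty(xa))
		return;

	xa_lock(xa);
	xa_for_each(xa, index, drv) {
		__xa_erase(xa, index);	/* lock already held, so not xa_erase() */
		fence_drv_put(drv);
	}
	xa_unlock(xa);
}

static void drop_all_fence_drvs_irqsafe(struct xarray *xa)
{
	struct fence_drv *drv;
	unsigned long index, flags;

	/* irqsave variant for tables also touched from interrupt context */
	xa_lock_irqsave(xa, flags);
	xa_for_each(xa, index, drv) {
		__xa_erase(xa, index);
		fence_drv_put(drv);
	}
	xa_unlock_irqrestore(xa, flags);
}
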
/drivers/infiniband/hw/hns/
hns_roce_srq.c
18 xa_lock(&srq_table->xa); in hns_roce_srq_event()
19 srq = xa_load(&srq_table->xa, srqn & (hr_dev->caps.num_srqs - 1)); in hns_roce_srq_event()
22 xa_unlock(&srq_table->xa); in hns_roce_srq_event()
126 ret = xa_err(xa_store_irq(&srq_table->xa, srq->srqn, srq, GFP_KERNEL)); in alloc_srqc()
139 xa_erase_irq(&srq_table->xa, srq->srqn); in alloc_srqc()
157 xa_erase_irq(&srq_table->xa, srq->srqn); in free_srqc()
545 xa_init(&srq_table->xa); in hns_roce_init_srq_table()
hns_roce_qp.c
309 struct xarray *xa = &hr_dev->qp_table_xa; in hns_roce_qp_store() local
315 ret = xa_err(xa_store_irq(xa, hr_qp->qpn, hr_qp, GFP_KERNEL)); in hns_roce_qp_store()
392 struct xarray *xa = &hr_dev->qp_table_xa; in hns_roce_qp_remove() local
404 xa_lock_irqsave(xa, flags); in hns_roce_qp_remove()
405 __xa_erase(xa, hr_qp->qpn); in hns_roce_qp_remove()
406 xa_unlock_irqrestore(xa, flags); in hns_roce_qp_remove()
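
Note: hns_roce above keys QPs and SRQs by their hardware number and uses the _irq lock variants because the same tables are read from the interrupt-driven event path. A brief sketch of that store/lookup/remove flow, with a hypothetical srq_table and my_srq:

#include <linux/xarray.h>

struct my_srq { u32 srqn; };

struct srq_table {
	struct xarray xa;		/* SRQ number -> struct my_srq */
};

static int srq_table_store(struct srq_table *tbl, struct my_srq *srq)
{
	/* _irq variant: the event handler below also takes the lock */
	return xa_err(xa_store_irq(&tbl->xa, srq->srqn, srq, GFP_KERNEL));
}

static void srq_table_remove(struct srq_table *tbl, struct my_srq *srq)
{
	xa_erase_irq(&tbl->xa, srq->srqn);
}

static struct my_srq *srq_event_lookup(struct srq_table *tbl, u32 srqn)
{
	struct my_srq *srq;

	xa_lock(&tbl->xa);		/* already in IRQ context here */
	srq = xa_load(&tbl->xa, srqn);
	xa_unlock(&tbl->xa);
	return srq;
}
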
/drivers/crypto/intel/qat/qat_common/
qat_asym_algs.c
85 dma_addr_t xa; member
89 dma_addr_t xa; member
105 char *xa; member
233 if (unlikely(!ctx->xa)) in qat_dh_compute_value()
265 qat_req->in.dh.in.xa = ctx->dma_xa; in qat_dh_compute_value()
270 qat_req->in.dh.in_g2.xa = ctx->dma_xa; in qat_dh_compute_value()
275 qat_req->in.dh.in.xa = ctx->dma_xa; in qat_dh_compute_value()
461 if (ctx->xa) { in qat_dh_clear_ctx()
462 memset(ctx->xa, 0, ctx->p_size); in qat_dh_clear_ctx()
464 ctx->xa = NULL; in qat_dh_clear_ctx()
[all …]
/drivers/platform/x86/intel/pmt/
class.c
303 ret = xa_alloc(ns->xa, &entry->devid, entry, PMT_XA_LIMIT, GFP_KERNEL); in intel_pmt_dev_register()
366 xa_erase(ns->xa, entry->devid); in intel_pmt_dev_register()
408 xa_erase(ns->xa, entry->devid); in intel_pmt_dev_destroy()
class.h
61 struct xarray *xa; member
/drivers/iommu/iommufd/
pages.c
534 static void batch_from_xarray(struct pfn_batch *batch, struct xarray *xa, in batch_from_xarray() argument
538 XA_STATE(xas, xa, start_index); in batch_from_xarray()
555 static void batch_from_xarray_clear(struct pfn_batch *batch, struct xarray *xa, in batch_from_xarray_clear() argument
559 XA_STATE(xas, xa, start_index); in batch_from_xarray_clear()
578 static void clear_xarray(struct xarray *xa, unsigned long start_index, in clear_xarray() argument
581 XA_STATE(xas, xa, start_index); in clear_xarray()
590 static int pages_to_xarray(struct xarray *xa, unsigned long start_index, in pages_to_xarray() argument
595 XA_STATE(xas, xa, start_index); in pages_to_xarray()
624 clear_xarray(xa, start_index, xas.xa_index - 1); in pages_to_xarray()
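
Note: pages.c above uses the advanced XA_STATE/xas_* API so long runs of consecutive indices can be walked or filled without re-descending the tree for every index, retrying with xas_nomem() when node allocation is needed. A simplified sketch of a read walk and a fill loop under those assumptions (void * entries, hypothetical function names, and no rollback of partial stores, which the real code handles via clear_xarray()):

#include <linux/rcupdate.h>
#include <linux/xarray.h>

/* Count populated slots in [start, last] with a single tree walk */
static unsigned long count_present(struct xarray *xa, unsigned long start,
				   unsigned long last)
{
	XA_STATE(xas, xa, start);
	unsigned long n = 0;
	void *entry;

	rcu_read_lock();
	xas_for_each(&xas, entry, last)
		n++;
	rcu_read_unlock();
	return n;
}

/* Store entries at consecutive indices, retrying on allocation pressure */
static int fill_range(struct xarray *xa, unsigned long start,
		      void **entries, unsigned long count)
{
	XA_STATE(xas, xa, start);
	unsigned long last = start + count - 1;

	do {
		xas_lock(&xas);
		while (xas.xa_index <= last) {
			xas_store(&xas, entries[xas.xa_index - start]);
			if (xas_error(&xas))
				break;		/* usually -ENOMEM; handled below */
			xas_next(&xas);
		}
		xas_unlock(&xas);
		/* xas_nomem() allocates the missing node and asks us to retry */
	} while (xas_nomem(&xas, GFP_KERNEL));

	return xas_error(&xas);
}
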
/drivers/nvme/target/
nvmet.h
37 #define nvmet_for_each_ns(xa, index, entry) \ argument
38 xa_for_each(xa, index, entry)
40 #define nvmet_for_each_enabled_ns(xa, index, entry) \ argument
41 xa_for_each_marked(xa, index, entry, NVMET_NS_ENABLED)
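
Note: the nvmet macros above are thin wrappers: one walks every namespace with xa_for_each(), the other only those carrying the NVMET_NS_ENABLED search mark via xa_for_each_marked(). A toy sketch of the same idea with a hypothetical MY_ITEM_ACTIVE mark (XArray search marks are XA_MARK_0..XA_MARK_2 under the hood):

#include <linux/xarray.h>

#define MY_ITEM_ACTIVE	XA_MARK_1	/* hypothetical mark for this sketch */

#define for_each_item(xa, index, entry) \
	xa_for_each(xa, index, entry)

#define for_each_active_item(xa, index, entry) \
	xa_for_each_marked(xa, index, entry, MY_ITEM_ACTIVE)

/* Marks are toggled per index once the entry is stored */
static void item_set_active(struct xarray *xa, unsigned long index, bool active)
{
	if (active)
		xa_set_mark(xa, index, MY_ITEM_ACTIVE);
	else
		xa_clear_mark(xa, index, MY_ITEM_ACTIVE);
}
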
/drivers/net/ethernet/mellanox/mlx5/core/
eswitch.h
701 #define mlx5_esw_for_each_entry_marked(xa, index, entry, last, filter) \ argument
702 for (index = 0, entry = xa_find(xa, &index, last, filter); \
703 entry; entry = xa_find_after(xa, &index, last, filter))
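
Note: the mlx5 eswitch macro above builds a filtered, bounded iterator from xa_find()/xa_find_after(): xa_find() positions at the first matching entry at or after index, xa_find_after() advances strictly past it, and both stop once index would exceed last. Expanded into a plain function it looks roughly like this (names are hypothetical; the filter may be a mark such as XA_MARK_0, or XA_PRESENT to accept any entry):

#include <linux/xarray.h>

static void walk_filtered(struct xarray *xa, unsigned long last,
			  xa_mark_t filter,
			  void (*fn)(unsigned long index, void *entry))
{
	unsigned long index = 0;
	void *entry;

	for (entry = xa_find(xa, &index, last, filter); entry;
	     entry = xa_find_after(xa, &index, last, filter))
		fn(index, entry);	/* index is updated by xa_find*() */
}
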

Completed in 74 milliseconds
