Searched refs: batch (Results 1 – 25 of 204, sorted by relevance)

/linux/mm/mmu_gather.c
27 if (batch->next) { in tlb_next_batch()
36 if (!batch) in tlb_next_batch()
41 batch->nr = 0; in tlb_next_batch()
148 for (batch = &tlb->local; batch && batch->nr; batch = batch->next) in tlb_batch_pages_flush()
157 for (batch = tlb->local.next; batch; batch = next) { in tlb_batch_list_free()
195 if (batch->nr >= batch->max - 1) { in __tlb_remove_folio_pages_size()
200 VM_BUG_ON_PAGE(batch->nr > batch->max - 1, page); in __tlb_remove_folio_pages_size()
322 struct mmu_table_batch **batch = &tlb->batch; in tlb_table_flush() local
324 if (*batch) { in tlb_table_flush()
333 struct mmu_table_batch **batch = &tlb->batch; in tlb_remove_table() local
[all …]
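
Note: the hits above outline mmu_gather's page batching: fixed-size batches chained through ->next, filled until ->nr reaches ->max, then walked at flush time and freed. A minimal userspace sketch of that structure follows; the names, batch size, and release callback are illustrative assumptions, not the kernel's API.

    #include <stdlib.h>

    #define BATCH_MAX 64

    struct page_batch {
        struct page_batch *next;  /* chain of spill batches */
        unsigned int nr;          /* slots used in pages[] */
        void *pages[BATCH_MAX];
    };

    /* Queue one page, spilling into a freshly allocated batch when full
     * (cf. tlb_next_batch() above). Returns -1 if the caller must flush. */
    static int batch_add(struct page_batch **tail, void *page)
    {
        struct page_batch *b = *tail;

        if (b->nr == BATCH_MAX) {
            b->next = calloc(1, sizeof(*b));
            if (!b->next)
                return -1;
            b = *tail = b->next;
        }
        b->pages[b->nr++] = page;
        return 0;
    }

    /* Release every queued page, then free the spill batches
     * (cf. tlb_batch_pages_flush() / tlb_batch_list_free()). */
    static void batch_flush(struct page_batch *local, void (*release)(void *))
    {
        struct page_batch *b, *next;

        for (b = local; b && b->nr; b = b->next)
            for (unsigned int i = 0; i < b->nr; i++)
                release(b->pages[i]);
        for (b = local->next; b; b = next) {
            next = b->next;
            free(b);
        }
        local->next = NULL;
        local->nr = 0;
    }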

/linux/drivers/gpu/drm/i915/selftests/igt_spinner.c
97 if (!spin->batch) { in igt_spinner_pin()
105 spin->batch = vaddr; in igt_spinner_pin()
131 u32 *batch; in igt_spinner_create_request() local
139 if (!spin->batch) { in igt_spinner_create_request()
160 batch = spin->batch; in igt_spinner_create_request()
168 *batch++ = 0; in igt_spinner_create_request()
172 *batch++ = 0; in igt_spinner_create_request()
178 *batch++ = rq->fence.seqno; in igt_spinner_create_request()
183 batch += 128; in igt_spinner_create_request()
229 if (!spin->batch) in igt_spinner_end()
[all …]

/linux/arch/powerpc/mm/book3s64/hash_tlb.c
51 i = batch->index; in hpte_need_flush()
103 if (!batch->active) { in hpte_need_flush()
119 if (i != 0 && (mm != batch->mm || batch->psize != psize || in hpte_need_flush()
125 batch->mm = mm; in hpte_need_flush()
129 batch->pte[i] = rpte; in hpte_need_flush()
130 batch->vpn[i] = vpn; in hpte_need_flush()
131 batch->index = ++i; in hpte_need_flush()
148 i = batch->index; in __flush_tlb_pending()
151 flush_hash_page(batch->vpn[0], batch->pte[0], in __flush_tlb_pending()
152 batch->psize, batch->ssize, local); in __flush_tlb_pending()
[all …]

/linux/drivers/iommu/iommufd/pages.c
294 batch->npfns[batch->end - 1] < keep_pfns); in batch_clear_carry()
297 batch->pfns[0] = batch->pfns[batch->end - 1] + in batch_clear_carry()
298 (batch->npfns[batch->end - 1] - keep_pfns); in batch_clear_carry()
308 WARN_ON(batch->total_pfns != batch->npfns[0]); in batch_skip_carry()
327 batch->npfns = (u32 *)(batch->pfns + batch->array_size); in __batch_init()
355 pfn == batch->pfns[batch->end - 1] + batch->npfns[batch->end - 1] && in batch_add_pfn()
357 batch->npfns[batch->end - 1]++; in batch_add_pfn()
361 if (batch->end == batch->array_size) in batch_add_pfn()
364 batch->pfns[batch->end] = pfn; in batch_add_pfn()
365 batch->npfns[batch->end] = 1; in batch_add_pfn()
[all …]
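
Note: batch_add_pfn() above run-length-compresses the PFN list: parallel arrays pfns[]/npfns[] hold the start and length of each contiguous run, the last run is extended when the incoming pfn adjoins it, and the function reports failure when the arrays are full so the caller can flush. A hedged standalone sketch (the types are simplified assumptions, and the kernel's extra overflow guard on npfns is omitted):

    #include <stdbool.h>
    #include <stdint.h>

    struct pfn_batch {
        uint64_t *pfns;          /* first pfn of each run */
        uint32_t *npfns;         /* pages in each run */
        unsigned int end;        /* runs currently stored */
        unsigned int array_size; /* capacity of both arrays */
    };

    /* Returns false when the batch is full and must be flushed first. */
    static bool batch_add_pfn(struct pfn_batch *b, uint64_t pfn)
    {
        /* Extend the last run if this pfn is contiguous with it. */
        if (b->end &&
            pfn == b->pfns[b->end - 1] + b->npfns[b->end - 1]) {
            b->npfns[b->end - 1]++;
            return true;
        }
        if (b->end == b->array_size)
            return false;
        b->pfns[b->end] = pfn;
        b->npfns[b->end] = 1;
        b->end++;
        return true;
    }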

/linux/include/trace/events/intel_ifs.h
13 TP_PROTO(int batch, int start, int stop, u64 status),
15 TP_ARGS(batch, start, stop, status),
18 __field( int, batch )
25 __entry->batch = batch;
32 __entry->batch,
40 TP_PROTO(int batch, union ifs_sbaf activate, union ifs_sbaf_status status),
42 TP_ARGS(batch, activate, status),
46 __field( int, batch )
53 __entry->batch = batch;
59 __entry->batch,
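
Note: these fragments are pieces of a TRACE_EVENT() definition. A generic skeleton of that shape is sketched below so the fragments can be read in context; the event name and format string are assumptions, not the actual header contents.

    TRACE_EVENT(ifs_sbaf_example,    /* hypothetical event name */
        TP_PROTO(int batch, int start, int stop, u64 status),
        TP_ARGS(batch, start, stop, status),
        TP_STRUCT__entry(
            __field(int, batch)
            __field(int, start)
            __field(int, stop)
            __field(u64, status)
        ),
        TP_fast_assign(
            __entry->batch  = batch;
            __entry->start  = start;
            __entry->stop   = stop;
            __entry->status = status;
        ),
        TP_printk("batch: %d, start: %.4x, stop: %.4x, status: %llx",
                  __entry->batch, __entry->start, __entry->stop,
                  __entry->status)
    );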

/linux/tools/testing/selftests/bpf/progs/test_bpf_ma.c
62 for (i = 0; i < batch; i++) { in batch_alloc()
89 for (i = 0; i < batch; i++) { in batch_free()
112 for (i = 0; i < batch; i++) { in batch_percpu_alloc()
140 for (i = 0; i < batch; i++) { in batch_percpu_free()
154 #define CALL_BATCH_ALLOC(size, batch, idx) \ argument
155 batch_alloc((struct bpf_map *)(&array_##size), batch, idx)
157 #define CALL_BATCH_ALLOC_FREE(size, batch, idx) \ argument
159 batch_alloc((struct bpf_map *)(&array_##size), batch, idx); \
160 batch_free((struct bpf_map *)(&array_##size), batch, idx); \
163 #define CALL_BATCH_PERCPU_ALLOC(size, batch, idx) \ argument
[all …]

/linux/drivers/gpu/drm/i915/gt/gen7_renderclear.c
257 batch_advance(batch, cs); in gen7_emit_state_base_address()
286 batch_advance(batch, cs); in gen7_emit_vfe_state()
294 u32 *cs = batch_alloc_items(batch, 8, 4); in gen7_emit_interface_descriptor_load()
305 batch_advance(batch, cs); in gen7_emit_interface_descriptor_load()
317 cs = batch_alloc_items(batch, 8, pkt); in gen7_emit_media_object()
337 batch_advance(batch, cs); in gen7_emit_media_object()
352 batch_advance(batch, cs); in gen7_emit_pipeline_flush()
373 batch_advance(batch, cs); in gen7_emit_pipeline_invalidate()
435 u32 *batch; in gen7_setup_clear_gpr_bb() local
444 if (IS_ERR(batch)) in gen7_setup_clear_gpr_bb()
[all …]

/linux/drivers/gpu/drm/i915/gt/intel_lrc.c
1642 *batch++ = 0; in gen8_emit_flush_coherentl3_wa()
1648 batch = gen8_emit_pipe_control(batch, in gen8_emit_flush_coherentl3_wa()
1657 *batch++ = 0; in gen8_emit_flush_coherentl3_wa()
1659 return batch; in gen8_emit_flush_coherentl3_wa()
1684 batch = gen8_emit_flush_coherentl3_wa(engine, batch); in gen8_init_indirectctx_bb()
1688 batch = gen8_emit_pipe_control(batch, in gen8_init_indirectctx_bb()
1707 return batch; in gen8_init_indirectctx_bb()
1726 return batch; in emit_lri()
1757 batch = gen8_emit_flush_coherentl3_wa(engine, batch); in gen9_init_indirectctx_bb()
1760 batch = gen8_emit_pipe_control(batch, in gen9_init_indirectctx_bb()
[all …]

/linux/drivers/gpu/drm/i915/gt/gen8_engine_cs.h
53 __gen8_emit_pipe_control(u32 *batch, u32 bit_group_0, in __gen8_emit_pipe_control() argument
56 memset(batch, 0, 6 * sizeof(u32)); in __gen8_emit_pipe_control()
58 batch[0] = GFX_OP_PIPE_CONTROL(6) | bit_group_0; in __gen8_emit_pipe_control()
59 batch[1] = bit_group_1; in __gen8_emit_pipe_control()
60 batch[2] = offset; in __gen8_emit_pipe_control()
62 return batch + 6; in __gen8_emit_pipe_control()
65 static inline u32 *gen8_emit_pipe_control(u32 *batch, in gen8_emit_pipe_control() argument
68 return __gen8_emit_pipe_control(batch, 0, bit_group_1, offset); in gen8_emit_pipe_control()
71 static inline u32 *gen12_emit_pipe_control(u32 *batch, u32 bit_group_0, in gen12_emit_pipe_control() argument
74 return __gen8_emit_pipe_control(batch, bit_group_0, in gen12_emit_pipe_control()
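
Note: hits 53-62 show almost all of __gen8_emit_pipe_control(); reassembled, with the elided parameter-list tail and braces filled in as an assumption, the helper reads:

    static inline u32 *
    __gen8_emit_pipe_control(u32 *batch, u32 bit_group_0,
                             u32 bit_group_1, u32 offset)
    {
        memset(batch, 0, 6 * sizeof(u32));  /* dwords 3-5 stay zero */

        batch[0] = GFX_OP_PIPE_CONTROL(6) | bit_group_0;
        batch[1] = bit_group_1;
        batch[2] = offset;

        return batch + 6;  /* caller keeps emitting at the returned cursor */
    }

The gen8/gen12 wrappers then just preset bit_group_0 (0 on gen8), which is exactly what hits 65-74 show.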

/linux/drivers/gpu/drm/i915/gt/selftest_hangcheck.c
38 u32 *batch; member
81 h->batch = vaddr; in hang_init()
113 u32 *batch; in hang_create_request() local
133 h->batch = vaddr; in hang_create_request()
171 batch = h->batch; in hang_create_request()
180 batch += 1024 / sizeof(*batch); in hang_create_request()
188 *batch++ = 0; in hang_create_request()
194 batch += 1024 / sizeof(*batch); in hang_create_request()
201 *batch++ = 0; in hang_create_request()
207 batch += 1024 / sizeof(*batch); in hang_create_request()
[all …]

/linux/drivers/gpu/drm/i915/gt/selftest_workarounds.c
504 struct i915_vma *batch; in check_dirty_whitelist() local
513 batch = create_batch(ce->vm); in check_dirty_whitelist()
514 if (IS_ERR(batch)) { in check_dirty_whitelist()
515 err = PTR_ERR(batch); in check_dirty_whitelist()
750 i915_vma_unpin_and_release(&batch, 0); in check_dirty_whitelist()
890 struct i915_vma *batch; in scrub_whitelisted_registers() local
894 batch = create_batch(ce->vm); in scrub_whitelisted_registers()
895 if (IS_ERR(batch)) in scrub_whitelisted_registers()
896 return PTR_ERR(batch); in scrub_whitelisted_registers()
919 i915_gem_object_flush_map(batch->obj); in scrub_whitelisted_registers()
[all …]

/linux/drivers/gpu/drm/i915/gt/selftest_tlb.c
42 struct drm_i915_gem_object *batch; in pte_tlbinv() local
50 batch = i915_gem_object_create_internal(ce->vm->i915, 4096); in pte_tlbinv()
51 if (IS_ERR(batch)) in pte_tlbinv()
52 return PTR_ERR(batch); in pte_tlbinv()
54 vma = i915_vma_instance(batch, ce->vm, NULL); in pte_tlbinv()
100 cs = i915_gem_object_pin_map_unlocked(batch, I915_MAP_WC); in pte_tlbinv()
122 i915_gem_object_flush_map(batch); in pte_tlbinv()
184 cs = page_mask_bits(batch->mm.mapping); in pte_tlbinv()
195 i915_gem_object_put(batch); in pte_tlbinv()

/linux/arch/powerpc/include/asm/book3s/64/tlbflush-hash.h
25 extern void __flush_tlb_pending(struct ppc64_tlb_batch *batch);
31 struct ppc64_tlb_batch *batch; in arch_enter_lazy_mmu_mode() local
40 batch = this_cpu_ptr(&ppc64_tlb_batch); in arch_enter_lazy_mmu_mode()
41 batch->active = 1; in arch_enter_lazy_mmu_mode()
46 struct ppc64_tlb_batch *batch; in arch_leave_lazy_mmu_mode() local
50 batch = this_cpu_ptr(&ppc64_tlb_batch); in arch_leave_lazy_mmu_mode()
52 if (batch->index) in arch_leave_lazy_mmu_mode()
53 __flush_tlb_pending(batch); in arch_leave_lazy_mmu_mode()
54 batch->active = 0; in arch_leave_lazy_mmu_mode()

/linux/drivers/iommu/intel/cache.c
267 if (!iommu || !batch->index) in qi_batch_flush_descs()
270 qi_submit_sync(iommu, batch->descs, batch->index, 0); in qi_batch_flush_descs()
273 memset(batch, 0, sizeof(*batch)); in qi_batch_flush_descs()
279 qi_batch_flush_descs(iommu, batch); in qi_batch_increment_index()
284 struct qi_batch *batch) in qi_batch_add_iotlb() argument
286 qi_desc_iotlb(iommu, did, addr, size_order, type, &batch->descs[batch->index]); in qi_batch_add_iotlb()
292 struct qi_batch *batch) in qi_batch_add_dev_iotlb() argument
301 qi_desc_dev_iotlb(sid, pfsid, qdep, addr, mask, &batch->descs[batch->index]); in qi_batch_add_dev_iotlb()
307 struct qi_batch *batch) in qi_batch_add_piotlb() argument
317 qi_desc_piotlb(did, pasid, addr, npages, ih, &batch->descs[batch->index]); in qi_batch_add_piotlb()
[all …]
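
Note: the qi_batch_* hits follow one pattern: descriptors accumulate in batch->descs[], qi_batch_increment_index() advances the cursor and flushes through qi_submit_sync() when the array fills, and qi_batch_flush_descs() submits whatever is pending and memsets the batch back to empty. A self-contained sketch of that flush-when-full cursor (capacity, types, and the submit callback are assumptions):

    #include <string.h>

    #define BATCH_CAP 16  /* assumed capacity */

    struct desc { unsigned long long qw[2]; };  /* stand-in descriptor */

    struct desc_batch {
        struct desc descs[BATCH_CAP];
        unsigned int index;
    };

    typedef void (*submit_fn)(struct desc *descs, unsigned int n);

    /* Submit everything queued so far, then reset
     * (cf. qi_batch_flush_descs() above). */
    static void batch_flush(struct desc_batch *b, submit_fn submit)
    {
        if (!b->index)
            return;
        submit(b->descs, b->index);
        memset(b, 0, sizeof(*b));
    }

    /* Advance past a just-written slot, flushing once the array is full
     * (cf. qi_batch_increment_index()). */
    static void batch_increment(struct desc_batch *b, submit_fn submit)
    {
        if (++b->index == BATCH_CAP)
            batch_flush(b, submit);
    }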

/linux/drivers/gpu/drm/vmwgfx/vmwgfx_mob.c
238 struct vmw_otable_batch *batch) in vmw_otable_batch_setup() argument
257 &batch->otable_bo); in vmw_otable_batch_setup()
263 if (!batch->otables[i].enabled) in vmw_otable_batch_setup()
267 &batch->otable_bo->tbo, in vmw_otable_batch_setup()
279 if (batch->otables[i].enabled) in vmw_otable_batch_setup()
281 &batch->otables[i]); in vmw_otable_batch_setup()
285 ttm_bo_put(&batch->otable_bo->tbo); in vmw_otable_batch_setup()
286 batch->otable_bo = NULL; in vmw_otable_batch_setup()
340 if (batch->otables[i].enabled) in vmw_otable_batch_takedown()
342 &batch->otables[i]); in vmw_otable_batch_takedown()
[all …]

/linux/drivers/xen/gntdev.c
811 batch->pages[batch->nr_pages++] = page; in gntdev_get_page()
821 unpin_user_pages_dirty_lock(batch->pages, batch->nr_pages, batch->writeable); in gntdev_put_pages()
822 batch->nr_pages = 0; in gntdev_put_pages()
830 gnttab_batch_copy(batch->ops, batch->nr_ops); in gntdev_copy()
854 batch->nr_ops = 0; in gntdev_copy()
900 op = &batch->ops[batch->nr_ops]; in gntdev_grant_copy_seg()
946 batch->status[batch->nr_ops] = status; in gntdev_grant_copy_seg()
947 batch->nr_ops++; in gntdev_grant_copy_seg()
963 batch.nr_ops = 0; in gntdev_ioctl_grant_copy()
964 batch.nr_pages = 0; in gntdev_ioctl_grant_copy()
[all …]

/linux/drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c
116 struct i915_vma *batch; in igt_gpu_fill_dw() local
123 batch = igt_emit_store_dw(vma, offset, count, val); in igt_gpu_fill_dw()
124 if (IS_ERR(batch)) in igt_gpu_fill_dw()
125 return PTR_ERR(batch); in igt_gpu_fill_dw()
133 err = igt_vma_move_to_active_unlocked(batch, rq, 0); in igt_gpu_fill_dw()
146 i915_vma_offset(batch), in igt_gpu_fill_dw()
147 i915_vma_size(batch), in igt_gpu_fill_dw()
155 i915_vma_unpin_and_release(&batch, 0); in igt_gpu_fill_dw()

/linux/drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c
103 struct i915_vma *batch; member
142 struct drm_i915_gem_object *batch) in prepare_blit() argument
251 i915_gem_object_flush_map(batch); in prepare_blit()
252 i915_gem_object_unpin_map(batch); in prepare_blit()
265 i915_vma_put(t->batch); in tiled_blits_destroy_buffers()
305 if (IS_ERR(t->batch)) in tiled_blits_create_buffers()
306 return PTR_ERR(t->batch); in tiled_blits_create_buffers()
310 i915_vma_put(t->batch); in tiled_blits_create_buffers()
512 i915_vma_offset(t->batch), in tiled_blit()
513 i915_vma_size(t->batch), in tiled_blit()
[all …]

/linux/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/rx.c
26 int batch, i; in mlx5e_xsk_alloc_rx_mpwqe() local
35 batch = xsk_buff_alloc_batch(rq->xsk_pool, xsk_buffs, in mlx5e_xsk_alloc_rx_mpwqe()
44 for (; batch < rq->mpwqe.pages_per_wqe; batch++) { in mlx5e_xsk_alloc_rx_mpwqe()
45 xsk_buffs[batch] = xsk_buff_alloc(rq->xsk_pool); in mlx5e_xsk_alloc_rx_mpwqe()
46 if (unlikely(!xsk_buffs[batch])) in mlx5e_xsk_alloc_rx_mpwqe()
55 for (i = 0; i < batch; i++) { in mlx5e_xsk_alloc_rx_mpwqe()
65 for (i = 0; i < batch; i++) { in mlx5e_xsk_alloc_rx_mpwqe()
78 for (i = 0; i < batch; i++) { in mlx5e_xsk_alloc_rx_mpwqe()
105 for (i = 0; i < batch; i++) { in mlx5e_xsk_alloc_rx_mpwqe()
152 while (--batch >= 0) in mlx5e_xsk_alloc_rx_mpwqe()
[all …]
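
Note: mlx5e_xsk_alloc_rx_mpwqe() shows the bulk-then-single allocation shape: take as many buffers as the batch API returns, top the array up one buffer at a time, and on failure unwind with "while (--batch >= 0)". A toy, self-contained version of that control flow (the pool and its API are stand-ins, not the XSK interface):

    #include <stdlib.h>

    struct pool { int remaining; };  /* toy buffer pool */

    static void *pool_alloc_one(struct pool *p)
    {
        if (p->remaining <= 0)
            return NULL;
        p->remaining--;
        return malloc(64);
    }

    static int pool_alloc_batch(struct pool *p, void **bufs, int want)
    {
        int n = 0;

        while (n < want && (bufs[n] = pool_alloc_one(p)))
            n++;
        return n;  /* may be fewer than want */
    }

    /* Fill bufs[0..want) completely, or free everything and fail. */
    static int fill_bufs(struct pool *p, void **bufs, int want)
    {
        int batch = pool_alloc_batch(p, bufs, want);

        for (; batch < want; batch++) {
            bufs[batch] = pool_alloc_one(p);
            if (!bufs[batch])
                goto unwind;
        }
        return want;

    unwind:
        while (--batch >= 0)
            free(bufs[batch]);
        return -1;
    }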

/linux/tools/testing/selftests/bpf/map_tests/htab_map_batch_ops.c
79 __u32 batch, count, total, total_success; in __test_map_lookup_and_delete_batch() local
109 err = bpf_map_lookup_and_delete_batch(map_fd, NULL, &batch, keys, in __test_map_lookup_and_delete_batch()
119 err = bpf_map_lookup_and_delete_batch(map_fd, NULL, &batch, keys, in __test_map_lookup_and_delete_batch()
127 err = bpf_map_lookup_and_delete_batch(map_fd, NULL, &batch, keys, in __test_map_lookup_and_delete_batch()
153 total ? &batch : NULL, in __test_map_lookup_and_delete_batch()
154 &batch, keys + total, in __test_map_lookup_and_delete_batch()
216 total ? &batch : NULL, in __test_map_lookup_and_delete_batch()
217 &batch, keys + total, in __test_map_lookup_and_delete_batch()
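
Note: the selftest drives bpf_map_lookup_and_delete_batch() with its cursor convention: in_batch is NULL on the first call ("total ? &batch : NULL" above) and thereafter points at the opaque cursor the previous call wrote through out_batch; iteration ends when the call fails with ENOENT. A hedged userspace sketch of that loop (buffer sizes and key/value types are assumptions):

    #include <errno.h>
    #include <bpf/bpf.h>

    #define CHUNK 64

    /* Drain a hash map in chunks; keys/vals must be able to hold every
     * entry in the map. Returns entries drained, or -errno. */
    static int drain_map(int map_fd, int *keys, long *vals)
    {
        __u32 batch, count, total = 0;
        int err;

        for (;;) {
            count = CHUNK;
            err = bpf_map_lookup_and_delete_batch(map_fd,
                                                  total ? &batch : NULL,
                                                  &batch,
                                                  keys + total,
                                                  vals + total,
                                                  &count, NULL);
            total += count;  /* entries actually returned this call */
            if (err)
                return errno == ENOENT ? (int)total : -errno;
        }
    }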

/linux/arch/riscv/mm/tlbflush.c
188 void arch_tlbbatch_add_pending(struct arch_tlbflush_unmap_batch *batch, in arch_tlbbatch_add_pending() argument
192 cpumask_or(&batch->cpumask, &batch->cpumask, mm_cpumask(mm)); in arch_tlbbatch_add_pending()
200 void arch_tlbbatch_flush(struct arch_tlbflush_unmap_batch *batch) in arch_tlbbatch_flush() argument
202 __flush_tlb_range(&batch->cpumask, FLUSH_TLB_NO_ASID, 0, in arch_tlbbatch_flush()
204 cpumask_clear(&batch->cpumask); in arch_tlbbatch_flush()
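
Note: the riscv hooks show TLB shootdown batching at its smallest: arch_tlbbatch_add_pending() only ORs each mm's CPU mask into the batch, and arch_tlbbatch_flush() performs one full-range flush over the accumulated mask and clears it. Reduced to a toy (a fixed 64-CPU bitmask standing in for struct cpumask):

    #include <stdint.h>

    struct flush_batch { uint64_t cpumask; };

    /* Record that the CPUs in mm_cpus need a flush, without flushing yet. */
    static void batch_add_pending(struct flush_batch *b, uint64_t mm_cpus)
    {
        b->cpumask |= mm_cpus;
    }

    /* One shootdown covers every deferred unmap, then the slate is wiped. */
    static void batch_flush(struct flush_batch *b, void (*flush)(uint64_t))
    {
        flush(b->cpumask);
        b->cpumask = 0;
    }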

/linux/lib/percpu_counter.c
93 void percpu_counter_add_batch(struct percpu_counter *fbc, s64 amount, s32 batch) in percpu_counter_add_batch() argument
100 if (unlikely(abs(count + amount) >= batch)) { in percpu_counter_add_batch()
120 void percpu_counter_add_batch(struct percpu_counter *fbc, s64 amount, s32 batch) in percpu_counter_add_batch() argument
127 if (abs(count) >= batch) { in percpu_counter_add_batch()
292 int __percpu_counter_compare(struct percpu_counter *fbc, s64 rhs, s32 batch) in __percpu_counter_compare() argument
298 if (abs(count - rhs) > (batch * num_online_cpus())) { in __percpu_counter_compare()
328 s64 limit, s64 amount, s32 batch) in __percpu_counter_limited_add() argument
339 unknown = batch * num_online_cpus(); in __percpu_counter_limited_add()
343 if (abs(count + amount) <= batch && in __percpu_counter_limited_add()
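
Note: percpu_counter_add_batch() keeps updates CPU-local until the local delta would reach the batch threshold, and only then folds it into the shared count; that is why __percpu_counter_compare() above trusts the cheap read only when it differs from rhs by more than batch * num_online_cpus(), the maximum drift the per-CPU slots can hide. The tradeoff in one self-contained, deliberately single-threaded sketch (the kernel version takes a lock and disables interrupts):

    #include <stdlib.h>

    struct pcpu_counter {
        long long count;   /* shared, approximate total */
        long long *pcpu;   /* one lazily-folded delta per CPU */
    };

    static void counter_add_batch(struct pcpu_counter *c, int cpu,
                                  long long amount, int batch)
    {
        long long v = c->pcpu[cpu] + amount;

        if (llabs(v) >= batch) {
            c->count += v;     /* fold: one write to the shared line */
            c->pcpu[cpu] = 0;
        } else {
            c->pcpu[cpu] = v;  /* stay CPU-local on the hot path */
        }
    }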

/linux/tools/virtio/virtio_test.c
170 bool delayed, int batch, int reset_n, int bufs) in run_test() argument
178 const bool random_batch = batch == RANDOM_BATCH; in run_test()
193 batch = (random() % vq->vring.num) + 1; in run_test()
196 (started - completed) < batch) { in run_test()
349 long batch = 1, reset = 0; in main() local
376 batch = RANDOM_BATCH; in main()
378 batch = strtol(optarg, NULL, 10); in main()
379 assert(batch > 0); in main()
380 assert(batch < (long)INT_MAX + 1); in main()
401 run_test(&dev, &dev.vqs[0], delayed, batch, reset, 0x100000); in main()

/linux/drivers/net/ethernet/netronome/nfp/flower/lag_conf.c
234 unsigned int member_cnt, enum nfp_fl_lag_batch *batch) in nfp_fl_lag_config_group() argument
254 if (*batch == NFP_FL_LAG_BATCH_FIRST) { in nfp_fl_lag_config_group()
257 *batch = NFP_FL_LAG_BATCH_MEMBER; in nfp_fl_lag_config_group()
263 *batch = NFP_FL_LAG_BATCH_FINISHED; in nfp_fl_lag_config_group()
269 if (*batch == NFP_FL_LAG_BATCH_FINISHED) { in nfp_fl_lag_config_group()
296 enum nfp_fl_lag_batch batch = NFP_FL_LAG_BATCH_FIRST; in nfp_fl_lag_do_work() local
318 &batch); in nfp_fl_lag_do_work()
391 active_count, &batch); in nfp_fl_lag_do_work()
405 if (batch == NFP_FL_LAG_BATCH_MEMBER) { in nfp_fl_lag_do_work()
406 batch = NFP_FL_LAG_BATCH_FINISHED; in nfp_fl_lag_do_work()
[all …]

/linux/tools/virtio/ringtest/main.c
22 int batch = 1; variable
116 int tokick = batch; in run_guest()
129 tokick = batch; in run_guest()
348 batch = c; in main()
372 if (batch > max_outstanding) in main()
373 batch = max_outstanding; in main()
