Searched refs:nents (Results 1 – 25 of 239) sorted by relevance

/linux/lib/
scatterlist.c
47 int nents; in sg_nents() local
49 nents++; in sg_nents()
50 return nents; in sg_nents()
70 int nents; in sg_nents_for_len() local
77 nents++; in sg_nents_for_len()
80 return nents; in sg_nents_for_len()
106 for_each_sg(sgl, sg, nents, i) in sg_last()
127 sg_init_marker(sgl, nents); in sg_init_table()
294 if (nents == 0) in __sg_alloc_table()
301 left = nents; in __sg_alloc_table()
[all …]
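The lib/scatterlist.c matches above are the sg_nents()/sg_nents_for_len() counting helpers, which walk the chain with sg_next() until it ends. A minimal sketch of that counting loop, reconstructed from the fragments shown rather than copied from the file (the example_ prefix is illustrative only):

    #include <linux/scatterlist.h>

    /* Count scatterlist entries by walking to the end of the chain
     * (a reconstruction of the sg_nents() logic shown above). */
    static int example_sg_nents(struct scatterlist *sg)
    {
        int nents;

        for (nents = 0; sg; sg = sg_next(sg))
            nents++;

        return nents;
    }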
sg_pool.c
40 static inline unsigned int sg_pool_index(unsigned short nents) in sg_pool_index() argument
44 BUG_ON(nents > SG_CHUNK_SIZE); in sg_pool_index()
46 if (nents <= 8) in sg_pool_index()
49 index = get_count_order(nents) - 3; in sg_pool_index()
58 sgp = sg_pools + sg_pool_index(nents); in sg_pool_free()
66 sgp = sg_pools + sg_pool_index(nents); in sg_pool_alloc()
117 BUG_ON(!nents); in sg_alloc_table_chained()
120 if (nents <= nents_first_chunk) { in sg_alloc_table_chained()
121 table->nents = table->orig_nents = nents; in sg_alloc_table_chained()
122 sg_init_table(table->sgl, nents); in sg_alloc_table_chained()
[all …]
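The sg_pool.c fragments above size a small set of scatterlist mempools by entry count: anything up to 8 entries shares index 0, and larger requests round up to the next power of two, so get_count_order(nents) - 3 gives index 1 for 9–16 entries, 2 for 17–32, and so on up to SG_CHUNK_SIZE (assumed here to be 128, as in mainline). A sketch of that bucket selection, reconstructed from the lines shown (the example_ names are illustrative):

    #include <linux/log2.h>

    #define EXAMPLE_SG_CHUNK_SIZE 128  /* assumed to mirror SG_CHUNK_SIZE */

    /* Map an entry count to a pool index: 1..8 -> 0, 9..16 -> 1, 17..32 -> 2, ...
     * Callers must not pass more than EXAMPLE_SG_CHUNK_SIZE entries. */
    static unsigned int example_sg_pool_index(unsigned short nents)
    {
        if (nents <= 8)
            return 0;

        return get_count_order(nents) - 3;
    }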
sg_split.c
13 int nents; member
20 static int sg_calculate_split(struct scatterlist *in, int nents, int nb_splits, in sg_calculate_split() argument
32 splitters[i].nents = 0; in sg_calculate_split()
35 for_each_sg(in, sg, nents, i) { in sg_calculate_split()
48 curr->nents++; in sg_calculate_split()
59 curr->nents = 1; in sg_calculate_split()
86 for (j = 0; j < split->nents; j++, out_sg++) { in sg_split_phys()
112 for (j = 0; j < split->nents; j++, out_sg++) { in sg_split_mapped()
168 splitters[i].out_sg = kmalloc_array(splitters[i].nents, in sg_split()
190 out_mapped_nents[i] = splitters[i].nents; in sg_split()
kfifo.c
296 int nents, unsigned int len) in setup_sgl_buf() argument
303 if (!nents) in setup_sgl_buf()
323 if (++n == nents || sgl == NULL) in setup_sgl_buf()
335 int nents, unsigned int len, unsigned int off) in setup_sgl() argument
350 n = setup_sgl_buf(sgl, fifo->data + off, nents, l); in setup_sgl()
357 struct scatterlist *sgl, int nents, unsigned int len) in __kfifo_dma_in_prepare() argument
365 return setup_sgl(fifo, sgl, nents, len, fifo->in); in __kfifo_dma_in_prepare()
370 struct scatterlist *sgl, int nents, unsigned int len) in __kfifo_dma_out_prepare() argument
378 return setup_sgl(fifo, sgl, nents, len, fifo->out); in __kfifo_dma_out_prepare()
551 BUG_ON(!nents); in __kfifo_dma_in_prepare_r()
[all …]
/linux/drivers/gpu/drm/virtio/
virtgpu_gem.c
172 objs->nents = 0; in virtio_gpu_array_alloc()
173 objs->total = nents; in virtio_gpu_array_alloc()
192 for (i = 0; i < nents; i++) { in virtio_gpu_array_from_handles()
195 objs->nents = i; in virtio_gpu_array_from_handles()
200 objs->nents = i; in virtio_gpu_array_from_handles()
211 objs->objs[objs->nents] = obj; in virtio_gpu_array_add_obj()
212 objs->nents++; in virtio_gpu_array_add_obj()
219 if (objs->nents == 1) { in virtio_gpu_array_lock_resv()
230 if (objs->nents == 1) { in virtio_gpu_array_unlock_resv()
243 for (i = 0; i < objs->nents; i++) in virtio_gpu_array_add_fence()
[all …]
virtgpu_object.c
153 unsigned int *nents) in virtio_gpu_object_shmem_init() argument
181 *nents = shmem->mapped = shmem->pages->nents; in virtio_gpu_object_shmem_init()
183 *nents = shmem->pages->orig_nents; in virtio_gpu_object_shmem_init()
186 *ents = kvmalloc_array(*nents, in virtio_gpu_object_shmem_init()
220 unsigned int nents; in virtio_gpu_object_create() local
249 ret = virtio_gpu_object_shmem_init(vgdev, bo, &ents, &nents); in virtio_gpu_object_create()
261 ents, nents); in virtio_gpu_object_create()
265 virtio_gpu_object_attach(vgdev, bo, ents, nents); in virtio_gpu_object_create()
269 virtio_gpu_object_attach(vgdev, bo, ents, nents); in virtio_gpu_object_create()
/linux/samples/kfifo/
dma-example.c
25 unsigned int nents; in example_init() local
63 nents = kfifo_dma_in_prepare(&fifo, sg, ARRAY_SIZE(sg), FIFO_SIZE); in example_init()
64 printk(KERN_INFO "DMA sgl entries: %d\n", nents); in example_init()
65 if (!nents) { in example_init()
73 for (i = 0; i < nents; i++) { in example_init()
93 nents = kfifo_dma_out_prepare(&fifo, sg, ARRAY_SIZE(sg), 8); in example_init()
94 printk(KERN_INFO "DMA sgl entries: %d\n", nents); in example_init()
95 if (!nents) { in example_init()
102 for (i = 0; i < nents; i++) { in example_init()
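The sample above is the reference user of the kfifo DMA helpers matched in lib/kfifo.c: kfifo_dma_in_prepare() fills a caller-supplied scatterlist describing the fifo's free space and returns the number of entries to program (0 when there is no room), and kfifo_dma_in_finish() commits the bytes once the transfer is done. A condensed sketch of that sequence with the DMA submission itself elided (EXAMPLE_FIFO_SIZE and the example_ names are assumptions for illustration):

    #include <linux/kernel.h>
    #include <linux/kfifo.h>
    #include <linux/scatterlist.h>

    #define EXAMPLE_FIFO_SIZE 4096  /* any power-of-two size */

    static DECLARE_KFIFO(example_fifo, unsigned char, EXAMPLE_FIFO_SIZE);

    /* Describe the fifo's free space as a scatterlist for a DMA engine. */
    static int example_dma_in(void)
    {
        struct scatterlist sg[8];
        unsigned int nents;

        nents = kfifo_dma_in_prepare(&example_fifo, sg, ARRAY_SIZE(sg),
                                     EXAMPLE_FIFO_SIZE);
        if (!nents)
            return -EBUSY;  /* fifo is full, nothing to DMA into */

        /* program sg[0..nents-1] into the DMA engine here; once the
         * hardware signals completion, commit the received bytes with
         * kfifo_dma_in_finish(&example_fifo, transferred); */
        return 0;
    }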
/linux/include/linux/
scatterlist.h
38 unsigned int nents; /* number of mapped entries */ member
166 for_each_sg((sgt)->sgl, sg, (sgt)->nents, i)
268 unsigned int nents) in sg_init_marker() argument
270 sg_mark_end(&sgl[nents - 1]); in sg_init_marker()
341 void sgl_free_n_order(struct scatterlist *sgl, int nents, int order);
358 size_t sg_zero_buffer(struct scatterlist *sgl, unsigned int nents,
389 int sg_alloc_table_chained(struct sg_table *table, int nents,
428 struct scatterlist *sglist, unsigned int nents,
461 #define for_each_sg_page(sglist, piter, nents, pgoffset) \ argument
506 for_each_sg_dma_page((sgt)->sgl, dma_iter, (sgt)->nents, pgoffset)
[all …]
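As the scatterlist.h member comment above ("number of mapped entries") hints, struct sg_table keeps two counts: orig_nents is the number of CPU-side entries created by sg_alloc_table()/sg_init_table(), while nents is only meaningful once the table has been DMA-mapped. A minimal sketch of building and walking the CPU side (one page per entry is an assumption, and the example_ name is illustrative):

    #include <linux/scatterlist.h>

    /* Build an sg_table with one page per entry and walk the CPU view. */
    static int example_fill_table(struct sg_table *sgt, struct page **pages,
                                  unsigned int npages)
    {
        struct scatterlist *sg;
        unsigned int i;
        int ret;

        ret = sg_alloc_table(sgt, npages, GFP_KERNEL);
        if (ret)
            return ret;

        /* iterate the CPU entries: use orig_nents here, not nents */
        for_each_sg(sgt->sgl, sg, sgt->orig_nents, i)
            sg_set_page(sg, pages[i], PAGE_SIZE, 0);

        return 0;
    }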
pci-p2pdma.h
30 unsigned int *nents, u32 length);
34 int nents, enum dma_data_direction dir, unsigned long attrs);
36 int nents, enum dma_data_direction dir, unsigned long attrs);
75 unsigned int *nents, u32 length) in pci_p2pmem_alloc_sgl() argument
87 struct scatterlist *sg, int nents, enum dma_data_direction dir, in pci_p2pdma_map_sg_attrs() argument
93 struct scatterlist *sg, int nents, enum dma_data_direction dir, in pci_p2pdma_unmap_sg_attrs() argument
123 int nents, enum dma_data_direction dir) in pci_p2pdma_map_sg() argument
125 return pci_p2pdma_map_sg_attrs(dev, sg, nents, dir, 0); in pci_p2pdma_map_sg()
129 struct scatterlist *sg, int nents, enum dma_data_direction dir) in pci_p2pdma_unmap_sg() argument
131 pci_p2pdma_unmap_sg_attrs(dev, sg, nents, dir, 0); in pci_p2pdma_unmap_sg()
/linux/drivers/gpu/drm/i915/
i915_scatterlist.c
22 if (orig_st->nents == orig_st->orig_nents) in i915_sg_trim()
25 if (sg_alloc_table(&new_st, orig_st->nents, GFP_KERNEL | __GFP_NOWARN)) in i915_sg_trim()
29 for_each_sg(orig_st->sgl, sg, orig_st->nents, i) { in i915_sg_trim()
76 st->nents = 0; in i915_sg_from_mm_node()
85 if (st->nents) in i915_sg_from_mm_node()
91 st->nents++; in i915_sg_from_mm_node()
148 st->nents = 0; in i915_sg_from_buddy_resource()
161 if (st->nents) in i915_sg_from_buddy_resource()
167 st->nents++; in i915_sg_from_buddy_resource()
/linux/drivers/crypto/ccree/
cc_buffer_mgr.c
77 unsigned int nents = 0; in cc_get_sgl_nents() local
82 nents++; in cc_get_sgl_nents()
91 return nents; in cc_get_sgl_nents()
109 u32 nents; in cc_copy_sg_portion() local
244 sgl_data->nents[index] = nents; in cc_add_sg_entry()
263 *nents = 0; in cc_map_sg()
265 *nents, max_sg_nents); in cc_map_sg()
271 *nents = 0; in cc_map_sg()
507 sg_virt(req->src), areq_ctx->src.nents, areq_ctx->assoc.nents, in cc_unmap_aead_request()
596 areq_ctx->assoc.nents); in cc_aead_chain_assoc()
[all …]
/linux/drivers/spi/
spi-dw-dma.c
237 u32 nents; in dw_spi_dma_wait_tx_done() local
239 nents = dw_readl(dws, DW_SPI_TXFLR); in dw_spi_dma_wait_tx_done()
241 delay.value = nents * dws->n_bytes * BITS_PER_BYTE; in dw_spi_dma_wait_tx_done()
285 unsigned int nents) in dw_spi_dma_submit_tx() argument
322 u32 nents; in dw_spi_dma_wait_rx_done() local
333 nents = dw_readl(dws, DW_SPI_RXFLR); in dw_spi_dma_wait_rx_done()
334 ns = 4U * NSEC_PER_SEC / dws->max_freq * nents; in dw_spi_dma_wait_rx_done()
386 unsigned int nents) in dw_spi_dma_submit_rx() argument
464 xfer->rx_sg.nents); in dw_spi_dma_transfer_all()
585 unsigned int nents; in dw_spi_dma_transfer() local
[all …]
spi-ep93xx.c
285 int i, ret, nents; in ep93xx_spi_dma_prepare() local
325 nents = DIV_ROUND_UP(len, PAGE_SIZE); in ep93xx_spi_dma_prepare()
326 if (nents != sgt->nents) { in ep93xx_spi_dma_prepare()
329 ret = sg_alloc_table(sgt, nents, GFP_KERNEL); in ep93xx_spi_dma_prepare()
335 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in ep93xx_spi_dma_prepare()
355 nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare()
356 if (!nents) in ep93xx_spi_dma_prepare()
359 txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, conf.direction, in ep93xx_spi_dma_prepare()
362 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare()
391 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_finish()
/linux/drivers/hwtracing/intel_th/
msu-sink.c
54 unsigned int nents; in msu_sink_alloc_window() local
62 nents = DIV_ROUND_UP(size, PAGE_SIZE); in msu_sink_alloc_window()
64 ret = sg_alloc_table(*sgt, nents, GFP_KERNEL); in msu_sink_alloc_window()
70 for_each_sg((*sgt)->sgl, sg_ptr, nents, i) { in msu_sink_alloc_window()
77 return nents; in msu_sink_alloc_window()
87 for_each_sg(sgt->sgl, sg_ptr, sgt->nents, i) { in msu_sink_free_window()
/linux/kernel/dma/
direct.h
20 int dma_direct_map_sg(struct device *dev, struct scatterlist *sgl, int nents,
27 int nents, enum dma_data_direction dir);
30 struct scatterlist *sgl, int nents, enum dma_data_direction dir) in dma_direct_sync_sg_for_device() argument
39 int nents, enum dma_data_direction dir, unsigned long attrs);
41 struct scatterlist *sgl, int nents, enum dma_data_direction dir);
44 struct scatterlist *sgl, int nents, enum dma_data_direction dir, in dma_direct_unmap_sg() argument
49 struct scatterlist *sgl, int nents, enum dma_data_direction dir) in dma_direct_sync_sg_for_cpu() argument
mapping.c
192 arch_dma_map_sg_direct(dev, sg, nents)) in __dma_map_sg_attrs()
195 ents = ops->map_sg(dev, sg, nents, dir, attrs); in __dma_map_sg_attrs()
262 int nents; in dma_map_sgtable() local
265 if (nents < 0) in dma_map_sgtable()
266 return nents; in dma_map_sgtable()
267 sgt->nents = nents; in dma_map_sgtable()
273 int nents, enum dma_data_direction dir, in dma_unmap_sg_attrs() argument
279 debug_dma_unmap_sg(dev, sg, nents, dir); in dma_unmap_sg_attrs()
281 arch_dma_unmap_sg_direct(dev, sg, nents)) in dma_unmap_sg_attrs()
284 ops->unmap_sg(dev, sg, nents, dir, attrs); in dma_unmap_sg_attrs()
[all …]
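The kernel/dma/mapping.c fragments above carry the contract most drivers in these results rely on: dma_map_sg() returns the number of DMA segments, which an IOMMU may coalesce to fewer than the CPU entries (and 0 on failure), while dma_unmap_sg() must still be given the original entry count; dma_map_sgtable() wraps this by storing the result in sgt->nents alongside sgt->orig_nents. A hedged sketch of that pattern (the example_ name is illustrative):

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Map an already-filled sg_table for a device read, use it, unmap it. */
    static int example_map_for_device(struct device *dev, struct sg_table *sgt)
    {
        int nents;

        nents = dma_map_sg(dev, sgt->sgl, sgt->orig_nents, DMA_TO_DEVICE);
        if (!nents)
            return -ENOMEM;     /* 0 means the mapping failed */
        sgt->nents = nents;     /* DMA-visible segment count */

        /* ... hand sgt->nents segments to the hardware ... */

        /* unmap with the original CPU entry count, not sgt->nents */
        dma_unmap_sg(dev, sgt->sgl, sgt->orig_nents, DMA_TO_DEVICE);
        return 0;
    }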
/linux/drivers/parisc/
iommu-helpers.h
15 iommu_fill_pdir(struct ioc *ioc, struct scatterlist *startsg, int nents, in iommu_fill_pdir() argument
30 while (nents-- > 0) { in iommu_fill_pdir()
34 DBG_RUN_SG(" %d : %08lx/%05x %p/%05x\n", nents, in iommu_fill_pdir()
102 struct scatterlist *startsg, int nents, in iommu_coalesce_chunks() argument
114 while (nents > 0) { in iommu_coalesce_chunks()
131 while(--nents > 0) { in iommu_coalesce_chunks()
/linux/drivers/target/iscsi/cxgbit/
cxgbit_ddp.c
153 unsigned int nents) in cxgbit_ddp_sgl_check() argument
155 unsigned int last_sgidx = nents - 1; in cxgbit_ddp_sgl_check()
158 for (i = 0; i < nents; i++, sg = sg_next(sg)) { in cxgbit_ddp_sgl_check()
177 unsigned int sgcnt = ttinfo->nents; in cxgbit_ddp_reserve()
184 xferlen, ttinfo->nents); in cxgbit_ddp_reserve()
246 ttinfo->nents = cmd->se_cmd.t_data_nents; in cxgbit_get_r2t_ttt()
251 csk, cmd, cmd->se_cmd.data_length, ttinfo->nents); in cxgbit_get_r2t_ttt()
254 ttinfo->nents = 0; in cxgbit_get_r2t_ttt()
286 ttinfo->nents, DMA_FROM_DEVICE); in cxgbit_unmap_cmd()
287 ttinfo->nents = 0; in cxgbit_unmap_cmd()
/linux/drivers/crypto/cavium/nitrox/
nitrox_req.h
555 static inline void *alloc_req_buf(int nents, int extralen, gfp_t gfp) in alloc_req_buf() argument
559 size = sizeof(struct scatterlist) * nents; in alloc_req_buf()
621 int nents, int ivsize) in alloc_src_req_buf() argument
625 nkreq->src = alloc_req_buf(nents, ivsize, creq->gfp); in alloc_src_req_buf()
643 int nents, int ivsize, in nitrox_creq_set_src_sg() argument
652 sg_init_table(sg, nents); in nitrox_creq_set_src_sg()
667 int nents) in alloc_dst_req_buf() argument
672 nkreq->dst = alloc_req_buf(nents, extralen, creq->gfp); in alloc_dst_req_buf()
701 int nents, int ivsize, in nitrox_creq_set_dst_sg() argument
710 sg_init_table(sg, nents); in nitrox_creq_set_dst_sg()
nitrox_aead.c
97 int nents = sg_nents_for_len(src, buflen); in alloc_src_sglist() local
100 if (nents < 0) in alloc_src_sglist()
101 return nents; in alloc_src_sglist()
104 nents += 1; in alloc_src_sglist()
106 ret = alloc_src_req_buf(nkreq, nents, ivsize); in alloc_src_sglist()
111 nitrox_creq_set_src_sg(nkreq, nents, ivsize, src, buflen); in alloc_src_sglist()
119 int nents = sg_nents_for_len(dst, buflen); in alloc_dst_sglist() local
122 if (nents < 0) in alloc_dst_sglist()
123 return nents; in alloc_dst_sglist()
126 nents += 3; in alloc_dst_sglist()
[all …]
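The nitrox_aead.c lines above show the usual guard around sg_nents_for_len(): it returns how many entries are needed to cover a byte length, or a negative errno when the scatterlist is shorter than that, so the value must be checked before it is used as an allocation count. A small sketch of the same pattern (the example_ name and the single extra entry are illustrative assumptions):

    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    /* Allocate a scatterlist covering "len" bytes of "src" plus one extra
     * entry (e.g. for an IV), mirroring the guard shown above. */
    static struct scatterlist *example_alloc_sgl(struct scatterlist *src,
                                                 unsigned int len)
    {
        int nents = sg_nents_for_len(src, len);
        struct scatterlist *sgl;

        if (nents < 0)
            return NULL;    /* src covers fewer than len bytes */

        nents += 1;         /* room for one extra entry */
        sgl = kcalloc(nents, sizeof(*sgl), GFP_KERNEL);
        if (!sgl)
            return NULL;

        sg_init_table(sgl, nents);
        return sgl;
    }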
/linux/drivers/mmc/core/
sdio_ops.c
122 unsigned int nents, left_size, i; in mmc_io_rw_extended() local
152 nents = DIV_ROUND_UP(left_size, seg_size); in mmc_io_rw_extended()
153 if (nents > 1) { in mmc_io_rw_extended()
154 if (sg_alloc_table(&sgtable, nents, GFP_KERNEL)) in mmc_io_rw_extended()
158 data.sg_len = nents; in mmc_io_rw_extended()
196 if (nents > 1) in mmc_io_rw_extended()
/linux/drivers/infiniband/core/
rw.c
96 u32 nents = min(sg_cnt, pages_per_mr); in rdma_rw_init_one_mr() local
106 if (ret < 0 || ret < nents) { in rdma_rw_init_one_mr()
141 u32 nents = min(sg_cnt, pages_per_mr); in rdma_rw_init_mr_wrs() local
173 sg_cnt -= nents; in rdma_rw_init_mr_wrs()
174 for (j = 0; j < nents; j++) in rdma_rw_init_mr_wrs()
288 int nents; in rdma_rw_map_sgtable() local
295 if (!nents) in rdma_rw_map_sgtable()
297 sgt->nents = nents; in rdma_rw_map_sgtable()
332 sg_cnt = sgt.nents; in rdma_rw_ctx_init()
448 sgt.nents + prot_sgt.nents); in rdma_rw_ctx_signature_init()
[all …]
/linux/arch/ia64/hp/common/
sba_iommu.c
389 while (nents-- > 0) { in sba_dump_sg()
401 int the_nents = nents; in sba_check_sg()
1200 int nents) in sba_fill_pdir() argument
1207 while (nents-- > 0) { in sba_fill_pdir()
1295 int nents) in sba_coalesce_chunks() argument
1306 while (nents > 0) { in sba_coalesce_chunks()
1324 while (--nents > 0) { in sba_coalesce_chunks()
1457 if (nents == 1) { in sba_map_sg_attrs()
1470 sba_dump_sg(ioc, sglist, nents); in sba_map_sg_attrs()
1506 sba_dump_sg(ioc, sglist, nents); in sba_map_sg_attrs()
[all …]
/linux/arch/sparc/mm/
iommu.c
249 int nents, enum dma_data_direction dir, unsigned long attrs, in __sbus_iommu_map_sg() argument
255 for_each_sg(sgl, sg, nents, j) { in __sbus_iommu_map_sg()
263 return nents; in __sbus_iommu_map_sg()
267 int nents, enum dma_data_direction dir, unsigned long attrs) in sbus_iommu_map_sg_gflush() argument
270 return __sbus_iommu_map_sg(dev, sgl, nents, dir, attrs, false); in sbus_iommu_map_sg_gflush()
274 int nents, enum dma_data_direction dir, unsigned long attrs) in sbus_iommu_map_sg_pflush() argument
276 return __sbus_iommu_map_sg(dev, sgl, nents, dir, attrs, true); in sbus_iommu_map_sg_pflush()
299 int nents, enum dma_data_direction dir, unsigned long attrs) in sbus_iommu_unmap_sg() argument
304 for_each_sg(sgl, sg, nents, i) { in sbus_iommu_unmap_sg()
/linux/arch/mips/jazz/
jazzdma.c
544 int nents, enum dma_data_direction dir, unsigned long attrs) in jazz_dma_map_sg() argument
549 for_each_sg(sglist, sg, nents, i) { in jazz_dma_map_sg()
559 return nents; in jazz_dma_map_sg()
563 int nents, enum dma_data_direction dir, unsigned long attrs) in jazz_dma_unmap_sg() argument
568 for_each_sg(sglist, sg, nents, i) { in jazz_dma_unmap_sg()
588 struct scatterlist *sgl, int nents, enum dma_data_direction dir) in jazz_dma_sync_sg_for_device() argument
593 for_each_sg(sgl, sg, nents, i) in jazz_dma_sync_sg_for_device()
598 struct scatterlist *sgl, int nents, enum dma_data_direction dir) in jazz_dma_sync_sg_for_cpu() argument
603 for_each_sg(sgl, sg, nents, i) in jazz_dma_sync_sg_for_cpu()
