
Searched refs:chunk (Results 1 – 25 of 159) sorted by relevance


/drivers/s390/cio/
itcw.c
184 void *chunk; in itcw_init() local
198 return chunk; in itcw_init()
199 itcw = chunk; in itcw_init()
214 return chunk; in itcw_init()
222 return chunk; in itcw_init()
232 return chunk; in itcw_init()
240 return chunk; in itcw_init()
246 return chunk; in itcw_init()
253 return chunk; in itcw_init()
260 return chunk; in itcw_init()
[all …]
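
Aside: the itcw_init() hits above show one pointer, chunk, doubling as the success result and the error carrier on every return path, which is the kernel's ERR_PTR convention. Below is a minimal userspace sketch of that convention; chunk_init() and its size/alignment checks are illustrative assumptions, not the s390 code.

#include <errno.h>
#include <stdint.h>
#include <stdlib.h>

#define MAX_ERRNO 4095

/* Encode a small negative errno in the pointer itself. */
static inline void *ERR_PTR(long err) { return (void *)err; }
static inline long IS_ERR(const void *p)
{
        return (uintptr_t)p >= (uintptr_t)-MAX_ERRNO;
}

static void *chunk_init(size_t size, size_t align)
{
        void *chunk;

        /* Hypothetical sanity checks; itcw_init()'s real ones differ. */
        if (!size || !align || (align & (align - 1)) || size % align)
                return ERR_PTR(-EINVAL);

        chunk = aligned_alloc(align, size);
        if (!chunk)
                return ERR_PTR(-ENOMEM);

        return chunk;  /* success and failure leave through the same variable */
}
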
/drivers/net/ethernet/mellanox/mlx4/
icm.c
60 dma_unmap_sg(&dev->persist->pdev->dev, chunk->sg, chunk->npages, in mlx4_free_icm_pages()
92 kfree(chunk); in mlx4_free_icm()
160 if (!chunk) { in mlx4_alloc_icm()
161 chunk = kzalloc_node(sizeof(*chunk), in mlx4_alloc_icm()
165 if (!chunk) { in mlx4_alloc_icm()
166 chunk = kzalloc(sizeof(*chunk), in mlx4_alloc_icm()
169 if (!chunk) in mlx4_alloc_icm()
188 &chunk->buf[chunk->npages], in mlx4_alloc_icm()
191 ret = mlx4_alloc_icm_pages(&chunk->sg[chunk->npages], in mlx4_alloc_icm()
208 chunk->sg, chunk->npages, in mlx4_alloc_icm()
[all …]
icm.h
74 struct mlx4_icm_chunk *chunk; member
100 iter->chunk = list_empty(&icm->chunk_list) ? in mlx4_icm_first()
108 return !iter->chunk; in mlx4_icm_last()
113 if (++iter->page_idx >= iter->chunk->nsg) { in mlx4_icm_next()
115 iter->chunk = NULL; in mlx4_icm_next()
119 iter->chunk = list_entry(iter->chunk->list.next, in mlx4_icm_next()
127 if (iter->chunk->coherent) in mlx4_icm_addr()
128 return iter->chunk->buf[iter->page_idx].dma_addr; in mlx4_icm_addr()
135 if (iter->chunk->coherent) in mlx4_icm_size()
136 return iter->chunk->buf[iter->page_idx].size; in mlx4_icm_size()
[all …]
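
Aside: the icm.h hits sketch a two-level iterator: an index walks the page array inside the current chunk, and when it runs off the end the iterator hops to the next chunk in the list. A hedged stand-alone rendering of that shape follows; the struct layout is an assumption (mlx4 uses a list_head, not a bare next pointer).

#include <stddef.h>

struct chunk {
        struct chunk *next;   /* singly linked for brevity */
        int nsg;              /* valid entries in buf[] */
        unsigned long buf[8]; /* stand-in for the per-page descriptors */
};

struct chunk_iter {
        struct chunk *chunk;
        int page_idx;
};

static void iter_first(struct chunk_iter *it, struct chunk *head)
{
        it->chunk = head;     /* NULL when the list is empty */
        it->page_idx = 0;
}

static int iter_last(const struct chunk_iter *it)
{
        return !it->chunk;    /* mirrors mlx4_icm_last() */
}

static void iter_next(struct chunk_iter *it)
{
        if (++it->page_idx >= it->chunk->nsg) { /* chunk exhausted */
                it->chunk = it->chunk->next;    /* NULL terminates the walk */
                it->page_idx = 0;
        }
}
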
/drivers/gpu/drm/amd/amdgpu/
amdgpu_ring_mux.c
104 if (chunk->sync_seq > last_seq && chunk->sync_seq <= seq) { in amdgpu_mux_resubmit_chunks()
106 chunk->sync_seq, in amdgpu_mux_resubmit_chunks()
108 if (chunk->sync_seq == in amdgpu_mux_resubmit_chunks()
119 chunk->start, in amdgpu_mux_resubmit_chunks()
120 chunk->end); in amdgpu_mux_resubmit_chunks()
184 list_del(&chunk->entry); in amdgpu_ring_mux_fini()
449 if (!chunk) { in amdgpu_ring_mux_start_ib()
454 chunk->start = ring->wptr; in amdgpu_ring_mux_start_ib()
498 if (!chunk) { in amdgpu_ring_mux_ib_mark_offset()
531 if (!chunk) { in amdgpu_ring_mux_end_ib()
[all …]
/drivers/infiniband/hw/irdma/
pble.c
18 struct irdma_chunk *chunk; in irdma_destroy_pble_prm() local
23 list_del(&chunk->list); in irdma_destroy_pble_prm()
27 kfree(chunk->chunkmem.va); in irdma_destroy_pble_prm()
90 struct irdma_chunk *chunk = info->chunk; in add_sd_direct() local
112 chunk->size, chunk->size, chunk->vaddr, chunk->fpm_addr); in add_sd_direct()
145 struct irdma_chunk *chunk = info->chunk; in add_bp_pages() local
160 addr = chunk->vaddr; in add_bp_pages()
230 chunk = chunkmem.va; in add_pble_prm()
233 chunk->dev = dev; in add_pble_prm()
240 info.chunk = chunk; in add_pble_prm()
[all …]
/drivers/gpu/drm/nouveau/
nouveau_dmem.c
99 return chunk->drm; in page_to_drm()
121 chunk->callocated--; in nouveau_dmem_page_free()
237 chunk = kzalloc(sizeof(*chunk), GFP_KERNEL); in nouveau_dmem_chunk_alloc()
238 if (chunk == NULL) { in nouveau_dmem_chunk_alloc()
251 chunk->drm = drm; in nouveau_dmem_chunk_alloc()
260 &chunk->bo); in nouveau_dmem_chunk_alloc()
282 chunk->callocated++; in nouveau_dmem_chunk_alloc()
293 release_mem_region(chunk->pagemap.range.start, range_len(&chunk->pagemap.range)); in nouveau_dmem_chunk_alloc()
295 kfree(chunk); in nouveau_dmem_chunk_alloc()
312 chunk->callocated++; in nouveau_dmem_page_alloc_locked()
[all …]
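
Aside: nouveau_dmem_chunk_alloc() above follows the common alloc-then-unwind shape: kzalloc() the chunk, claim further resources, and on any later failure release them before kfree(). A minimal sketch with a stand-in resource helper (claim_bo() is hypothetical):

#include <stdlib.h>

struct chunk {
        void *bo;        /* stand-in for the buffer object */
        int callocated;  /* pages handed out from this chunk */
};

static void *claim_bo(void) { return malloc(1); }

static struct chunk *chunk_alloc(void)
{
        struct chunk *chunk = calloc(1, sizeof(*chunk));

        if (!chunk)
                return NULL;

        chunk->bo = claim_bo();
        if (!chunk->bo) {
                free(chunk);  /* unwind in reverse acquisition order */
                return NULL;
        }
        chunk->callocated++;  /* first page allocated from the new chunk */
        return chunk;
}
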
/drivers/net/ethernet/mellanox/mlx5/core/steering/sws/
dr_icm_pool.c
84 return (u64)offset * chunk->seg; in mlx5dr_icm_pool_get_chunk_mr_addr()
96 return (u64)chunk->buddy_mem->icm_mr->icm_start_addr + size * chunk->seg; in mlx5dr_icm_pool_get_chunk_icm_addr()
102 chunk->buddy_mem->pool->icm_type); in mlx5dr_icm_pool_get_chunk_byte_size()
221 memset(chunk->ste_arr, 0, in dr_icm_chunk_ste_init()
329 chunk->seg = seg; in dr_icm_chunk_init()
330 chunk->size = chunk_size; in dr_icm_chunk_init()
331 chunk->buddy_mem = buddy_mem_pool; in dr_icm_chunk_init()
453 if (!chunk) in mlx5dr_icm_alloc_chunk()
464 return chunk; in mlx5dr_icm_alloc_chunk()
483 hot_chunk->seg = chunk->seg; in mlx5dr_icm_free_chunk()
[all …]
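
Aside: the dr_icm_pool.c hits compute a chunk's device address as a base plus chunk-size times segment index. The arithmetic, restated stand-alone (field names follow the snippet, the rest is assumed):

#include <stdint.h>

struct icm_buddy { uint64_t icm_start_addr; };
struct icm_chunk {
        struct icm_buddy *buddy_mem;
        uint32_t seg;   /* segment index inside the buddy allocation */
};

static uint64_t chunk_icm_addr(const struct icm_chunk *chunk,
                               uint64_t chunk_byte_size)
{
        return chunk->buddy_mem->icm_start_addr +
               chunk_byte_size * (uint64_t)chunk->seg;
}
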
/drivers/infiniband/hw/mthca/
mthca_memfree.c
69 dma_unmap_sg(&dev->pdev->dev, chunk->mem, chunk->npages, in mthca_free_icm_pages()
101 kfree(chunk); in mthca_free_icm()
158 if (!chunk) { in mthca_alloc_icm()
159 chunk = kmalloc(sizeof *chunk, in mthca_alloc_icm()
161 if (!chunk) in mthca_alloc_icm()
175 &chunk->mem[chunk->npages], in mthca_alloc_icm()
178 ret = mthca_alloc_icm_pages(&chunk->mem[chunk->npages], in mthca_alloc_icm()
185 ++chunk->nsg; in mthca_alloc_icm()
187 chunk->nsg = in mthca_alloc_icm()
197 chunk = NULL; in mthca_alloc_icm()
[all …]
mthca_memfree.h
76 struct mthca_icm_chunk *chunk; member
103 iter->chunk = list_empty(&icm->chunk_list) ? in mthca_icm_first()
111 return !iter->chunk; in mthca_icm_last()
116 if (++iter->page_idx >= iter->chunk->nsg) { in mthca_icm_next()
117 if (iter->chunk->list.next == &iter->icm->chunk_list) { in mthca_icm_next()
118 iter->chunk = NULL; in mthca_icm_next()
122 iter->chunk = list_entry(iter->chunk->list.next, in mthca_icm_next()
130 return sg_dma_address(&iter->chunk->mem[iter->page_idx]); in mthca_icm_addr()
135 return sg_dma_len(&iter->chunk->mem[iter->page_idx]); in mthca_icm_size()
/drivers/gpu/drm/panthor/
panthor_heap.c
129 list_del(&chunk->node); in panthor_free_heap_chunk()
136 kfree(chunk); in panthor_free_heap_chunk()
147 chunk = kmalloc(sizeof(*chunk), GFP_KERNEL); in panthor_alloc_heap_chunk()
148 if (!chunk) in panthor_alloc_heap_chunk()
156 if (IS_ERR(chunk->bo)) { in panthor_alloc_heap_chunk()
157 ret = PTR_ERR(chunk->bo); in panthor_alloc_heap_chunk()
165 hdr = chunk->bo->kmap; in panthor_alloc_heap_chunk()
196 kfree(chunk); in panthor_alloc_heap_chunk()
394 removed = chunk; in panthor_heap_return_chunk()
395 list_del(&chunk->node); in panthor_heap_return_chunk()
[all …]
/drivers/dma/dw-edma/
dw-hdma-v0-core.c
160 if (chunk->chan->dw->chip->flags & DW_EDMA_CHIP_LOCAL) { in dw_hdma_v0_write_ll_data()
200 if (chunk->cb) in dw_hdma_v0_core_write_chunk()
203 list_for_each_entry(child, &chunk->burst->list, list) in dw_hdma_v0_core_write_chunk()
208 if (!chunk->cb) in dw_hdma_v0_core_write_chunk()
211 dw_hdma_v0_write_ll_link(chunk, i, control, chunk->ll_region.paddr); in dw_hdma_v0_core_write_chunk()
225 readl(chunk->ll_region.vaddr.io); in dw_hdma_v0_sync_ll_data()
230 struct dw_edma_chan *chan = chunk->chan; in dw_hdma_v0_core_start()
234 dw_hdma_v0_core_write_chunk(chunk); in dw_hdma_v0_core_start()
252 lower_32_bits(chunk->ll_region.paddr)); in dw_hdma_v0_core_start()
254 upper_32_bits(chunk->ll_region.paddr)); in dw_hdma_v0_core_start()
[all …]
dw-edma-v0-core.c
322 struct dw_edma_chan *chan = chunk->chan; in dw_edma_v0_core_write_chunk()
326 if (chunk->cb) in dw_edma_v0_core_write_chunk()
329 j = chunk->bursts_alloc; in dw_edma_v0_core_write_chunk()
343 if (!chunk->cb) in dw_edma_v0_core_write_chunk()
346 dw_edma_v0_write_ll_link(chunk, i, control, chunk->ll_region.paddr); in dw_edma_v0_core_write_chunk()
360 readl(chunk->ll_region.vaddr.io); in dw_edma_v0_sync_ll_data()
365 struct dw_edma_chan *chan = chunk->chan; in dw_edma_v0_core_start()
369 dw_edma_v0_core_write_chunk(chunk); in dw_edma_v0_core_start()
425 lower_32_bits(chunk->ll_region.paddr)); in dw_edma_v0_core_start()
427 upper_32_bits(chunk->ll_region.paddr)); in dw_edma_v0_core_start()
[all …]
dw-edma-core.c
52 if (chunk->burst) { in dw_edma_alloc_burst()
71 chunk = kzalloc(sizeof(*chunk), GFP_NOWAIT); in dw_edma_alloc_chunk()
72 if (unlikely(!chunk)) in dw_edma_alloc_chunk()
76 chunk->chan = chan; in dw_edma_alloc_chunk()
92 if (desc->chunk) { in dw_edma_alloc_chunk()
95 kfree(chunk); in dw_edma_alloc_chunk()
99 list_add_tail(&chunk->list, &desc->chunk->list); in dw_edma_alloc_chunk()
104 desc->chunk = chunk; in dw_edma_alloc_chunk()
107 return chunk; in dw_edma_alloc_chunk()
147 if (!desc->chunk) in dw_edma_free_chunk()
[all …]
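
Aside: dw_edma_alloc_chunk() treats the descriptor's first chunk as the list anchor and appends later ones behind it. A simplified sketch with a singly linked list; the driver itself uses list_add_tail() on a list_head.

#include <stdlib.h>

struct chunk { struct chunk *next; };
struct desc  { struct chunk *chunk; };  /* head of the chunk list */

static struct chunk *alloc_chunk(struct desc *desc)
{
        struct chunk *chunk = calloc(1, sizeof(*chunk));

        if (!chunk)
                return NULL;

        if (desc->chunk) {
                /* append after the existing chain */
                struct chunk *tail = desc->chunk;

                while (tail->next)
                        tail = tail->next;
                tail->next = chunk;
        } else {
                desc->chunk = chunk;  /* first chunk doubles as the anchor */
        }
        return chunk;
}
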
/drivers/staging/media/atomisp/pci/isp/kernels/s3a/s3a_1.0/
ia_css_s3a.host.c
341 chunk = max(chunk, 1); in ia_css_s3a_vmem_decode()
348 kmax = (rest > chunk) ? chunk : rest; in ia_css_s3a_vmem_decode()
353 hi[elm + chunk * 0], lo[elm + chunk * 0]); in ia_css_s3a_vmem_decode()
355 hi[elm + chunk * 1], lo[elm + chunk * 1]); in ia_css_s3a_vmem_decode()
357 hi[elm + chunk * 2], lo[elm + chunk * 2]); in ia_css_s3a_vmem_decode()
359 hi[elm + chunk * 3], lo[elm + chunk * 3]); in ia_css_s3a_vmem_decode()
361 hi[elm + chunk * 4], lo[elm + chunk * 4]); in ia_css_s3a_vmem_decode()
363 hi[elm + chunk * 5], lo[elm + chunk * 5]); in ia_css_s3a_vmem_decode()
365 hi[elm + chunk * 6], lo[elm + chunk * 6]); in ia_css_s3a_vmem_decode()
367 hi[elm + chunk * 7], lo[elm + chunk * 7]); in ia_css_s3a_vmem_decode()
[all …]
/drivers/gpu/drm/panel/
panel-samsung-s6e63m0-dsi.c
44 int chunk; in s6e63m0_dsi_dcs_write() local
54 chunk = remain; in s6e63m0_dsi_dcs_write()
57 if (chunk > S6E63M0_DSI_MAX_CHUNK) in s6e63m0_dsi_dcs_write()
58 chunk = S6E63M0_DSI_MAX_CHUNK; in s6e63m0_dsi_dcs_write()
64 cmdwritten += chunk; in s6e63m0_dsi_dcs_write()
65 seqp += chunk; in s6e63m0_dsi_dcs_write()
68 chunk = remain - cmdwritten; in s6e63m0_dsi_dcs_write()
69 if (chunk > S6E63M0_DSI_MAX_CHUNK) in s6e63m0_dsi_dcs_write()
70 chunk = S6E63M0_DSI_MAX_CHUNK; in s6e63m0_dsi_dcs_write()
82 cmdwritten += chunk; in s6e63m0_dsi_dcs_write()
[all …]
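
Aside: the s6e63m0 hits are a textbook bounded-chunk transmit loop: clamp each write to the controller maximum, send, and advance by what went out. Stand-alone sketch; MAX_CHUNK's value and xfer() are assumptions (the real limit is S6E63M0_DSI_MAX_CHUNK and the transfer is a DSI DCS write).

#include <stddef.h>

#define MAX_CHUNK 15    /* assumed per-transfer hardware limit */

static int xfer(const unsigned char *buf, size_t len)
{
        (void)buf;
        return (int)len;        /* stand-in: pretend everything was sent */
}

static int write_chunked(const unsigned char *seq, size_t remain)
{
        size_t cmdwritten = 0;

        while (cmdwritten < remain) {
                size_t chunk = remain - cmdwritten;

                if (chunk > MAX_CHUNK)  /* never exceed the limit */
                        chunk = MAX_CHUNK;
                if (xfer(seq + cmdwritten, chunk) < 0)
                        return -1;
                cmdwritten += chunk;    /* advance by what was sent */
        }
        return 0;
}
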
/drivers/gpu/drm/qxl/
qxl_image.c
38 struct qxl_drm_chunk *chunk; in qxl_allocate_chunk() local
42 if (!chunk) in qxl_allocate_chunk()
47 kfree(chunk); in qxl_allocate_chunk()
91 qxl_bo_unref(&chunk->bo); in qxl_image_free_objects()
92 kfree(chunk); in qxl_image_free_objects()
110 struct qxl_data_chunk *chunk; in qxl_image_init_helper() local
128 chunk = ptr; in qxl_image_init_helper()
130 chunk->prev_chunk = 0; in qxl_image_init_helper()
131 chunk->next_chunk = 0; in qxl_image_init_helper()
149 chunk = ptr; in qxl_image_init_helper()
[all …]
/drivers/net/ethernet/mellanox/mlx5/core/steering/hws/
pool.c
145 chunk->offset = mlx5hws_buddy_alloc_mem(buddy, chunk->order); in hws_pool_buddy_db_get_chunk()
146 if (chunk->offset >= 0) in hws_pool_buddy_db_get_chunk()
153 struct mlx5hws_pool_chunk *chunk) in hws_pool_buddy_db_put_chunk() argument
163 mlx5hws_buddy_free_mem(buddy, chunk->offset, chunk->order); in hws_pool_buddy_db_put_chunk()
230 struct mlx5hws_pool_chunk *chunk) in hws_pool_bitmap_db_get_chunk() argument
234 if (chunk->order != 0) { in hws_pool_bitmap_db_get_chunk()
248 if (chunk->offset >= size) in hws_pool_bitmap_db_get_chunk()
257 struct mlx5hws_pool_chunk *chunk) in hws_pool_bitmap_db_put_chunk() argument
267 bitmap_set(bitmap, chunk->offset, 1); in hws_pool_bitmap_db_put_chunk()
326 ret = pool->p_get_chunk(pool, chunk); in mlx5hws_pool_chunk_alloc()
[all …]
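
Aside: pool.c above keeps two chunk databases: a buddy allocator for order > 0 and a plain bitmap for order-0 chunks, one bit per slot. Sketch of the bitmap variant (get = find-set-bit-and-clear, put = set-bit), limited to 64 slots for brevity:

#include <stdint.h>

static int bitmap_get_chunk(uint64_t *bitmap, unsigned int size)
{
        unsigned int off;

        for (off = 0; off < size && off < 64; off++) {
                if (*bitmap & (1ULL << off)) {  /* free slot found */
                        *bitmap &= ~(1ULL << off);  /* mark allocated */
                        return (int)off;
                }
        }
        return -1;                              /* pool exhausted */
}

static void bitmap_put_chunk(uint64_t *bitmap, unsigned int off)
{
        *bitmap |= 1ULL << off;  /* mirrors bitmap_set(bitmap, offset, 1) */
}
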
/drivers/firmware/qcom/
qcom_tzmem.c
263 struct qcom_tzmem_chunk *chunk; in qcom_tzmem_pool_free() local
278 if (chunk->owner == pool) in qcom_tzmem_pool_free()
376 struct qcom_tzmem_chunk *chunk __free(kfree) = kzalloc(sizeof(*chunk), in qcom_tzmem_alloc()
378 if (!chunk) in qcom_tzmem_alloc()
390 chunk->size = size; in qcom_tzmem_alloc()
391 chunk->owner = pool; in qcom_tzmem_alloc()
400 chunk = NULL; in qcom_tzmem_alloc()
415 struct qcom_tzmem_chunk *chunk; in qcom_tzmem_free() local
421 if (!chunk) { in qcom_tzmem_free()
429 chunk->size); in qcom_tzmem_free()
[all …]
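
Aside: qcom_tzmem_alloc() uses the kernel's __free(kfree) scope-based cleanup: the chunk is freed automatically on any early return unless the pointer is nulled to transfer ownership, which is what the "chunk = NULL" hit at line 400 does. Userspace sketch of the same idea with the underlying GCC/Clang cleanup attribute:

#include <stdlib.h>

static void auto_free(void *p) { free(*(void **)p); }  /* cleanup hook */
#define __autofree __attribute__((cleanup(auto_free)))

struct chunk { size_t size; void *owner; };

static struct chunk *alloc_tracked(size_t size, void *pool)
{
        struct chunk *chunk __autofree = calloc(1, sizeof(*chunk));
        struct chunk *ret;

        if (!chunk)
                return NULL;
        chunk->size = size;
        chunk->owner = pool;

        /* success: hand ownership out and suppress the auto-free */
        ret = chunk;
        chunk = NULL;
        return ret;
}
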
/drivers/infiniband/hw/usnic/
usnic_vnic.c
120 res = chunk->res[j]; in usnic_vnic_dump()
280 if (chunk->cnt > 0) { in usnic_vnic_put_resources()
291 kfree(chunk->res); in usnic_vnic_put_resources()
292 kfree(chunk); in usnic_vnic_put_resources()
313 chunk->cnt = chunk->free_cnt = cnt; in usnic_vnic_alloc_res_chunk()
314 chunk->res = kcalloc(cnt, sizeof(*(chunk->res)), GFP_KERNEL); in usnic_vnic_alloc_res_chunk()
315 if (!chunk->res) in usnic_vnic_alloc_res_chunk()
329 chunk->res[i] = res; in usnic_vnic_alloc_res_chunk()
332 chunk->vnic = vnic; in usnic_vnic_alloc_res_chunk()
337 kfree(chunk->res); in usnic_vnic_alloc_res_chunk()
[all …]
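
Aside: usnic_vnic_alloc_res_chunk() is a two-level allocation: the chunk itself, then a counted pointer array inside it, with everything unwound if populating fails. Hedged sketch; struct res is a stand-in for the real resource type.

#include <stdlib.h>

struct res { int id; };
struct chunk { int cnt, free_cnt; struct res **res; };

static struct chunk *alloc_res_chunk(int cnt)
{
        struct chunk *chunk = calloc(1, sizeof(*chunk));
        int i;

        if (!chunk)
                return NULL;
        chunk->cnt = chunk->free_cnt = cnt;
        chunk->res = calloc(cnt, sizeof(*chunk->res));  /* kcalloc analogue */
        if (!chunk->res)
                goto fail;

        for (i = 0; i < cnt; i++) {
                chunk->res[i] = calloc(1, sizeof(struct res));
                if (!chunk->res[i])
                        goto fail_res;
        }
        return chunk;

fail_res:
        while (--i >= 0)
                free(chunk->res[i]);
        free(chunk->res);
fail:
        free(chunk);
        return NULL;
}
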
usnic_uiom.c
74 for_each_sg(chunk->page_list, sg, chunk->nents, i) { in usnic_uiom_put_pages()
80 kfree(chunk); in usnic_uiom_put_pages()
91 struct usnic_uiom_chunk *chunk; in usnic_uiom_get_pages() local
152 chunk = kmalloc(struct_size(chunk, page_list, in usnic_uiom_get_pages()
155 if (!chunk) { in usnic_uiom_get_pages()
161 sg_init_table(chunk->page_list, chunk->nents); in usnic_uiom_get_pages()
162 for_each_sg(chunk->page_list, sg, chunk->nents, i) { in usnic_uiom_get_pages()
170 ret -= chunk->nents; in usnic_uiom_get_pages()
171 off += chunk->nents; in usnic_uiom_get_pages()
310 if (i == chunk->nents) { in usnic_uiom_map_sorted_intervals()
[all …]
usnic_ib_verbs.c
115 if (IS_ERR(chunk)) { in usnic_ib_fill_create_qp_resp()
119 PTR_ERR(chunk)); in usnic_ib_fill_create_qp_resp()
120 return PTR_ERR(chunk); in usnic_ib_fill_create_qp_resp()
124 resp.rq_cnt = chunk->cnt; in usnic_ib_fill_create_qp_resp()
129 if (IS_ERR(chunk)) { in usnic_ib_fill_create_qp_resp()
133 PTR_ERR(chunk)); in usnic_ib_fill_create_qp_resp()
134 return PTR_ERR(chunk); in usnic_ib_fill_create_qp_resp()
138 resp.wq_cnt = chunk->cnt; in usnic_ib_fill_create_qp_resp()
143 if (IS_ERR(chunk)) { in usnic_ib_fill_create_qp_resp()
147 PTR_ERR(chunk)); in usnic_ib_fill_create_qp_resp()
[all …]
/drivers/char/hw_random/
arm_smccc_trng.c
39 unsigned int chunk, copied; in copy_from_registers() local
44 chunk = min(bytes, sizeof(long)); in copy_from_registers()
45 memcpy(buf, &res->a3, chunk); in copy_from_registers()
46 copied = chunk; in copy_from_registers()
50 chunk = min((bytes - copied), sizeof(long)); in copy_from_registers()
51 memcpy(&buf[copied], &res->a2, chunk); in copy_from_registers()
52 copied += chunk; in copy_from_registers()
56 chunk = min((bytes - copied), sizeof(long)); in copy_from_registers()
57 memcpy(&buf[copied], &res->a1, chunk); in copy_from_registers()
59 return copied + chunk; in copy_from_registers()
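
Aside: copy_from_registers() gathers up to three machine words of entropy returned in registers a3, a2 and a1 into a byte buffer, clamping each chunk to the bytes still wanted. A close stand-alone rendering (struct regs stands in for struct arm_smccc_res; register bytes are copied in storage order, as in the driver):

#include <string.h>

struct regs { unsigned long a1, a2, a3; };

static unsigned int copy_from_regs(unsigned char *buf, unsigned int bytes,
                                   const struct regs *res)
{
        unsigned int chunk, copied;

        chunk = bytes < sizeof(long) ? bytes : sizeof(long);
        memcpy(buf, &res->a3, chunk);   /* a3 carries the first word */
        copied = chunk;
        if (copied == bytes)
                return copied;

        chunk = (bytes - copied) < sizeof(long) ? bytes - copied : sizeof(long);
        memcpy(buf + copied, &res->a2, chunk);
        copied += chunk;
        if (copied == bytes)
                return copied;

        chunk = (bytes - copied) < sizeof(long) ? bytes - copied : sizeof(long);
        memcpy(buf + copied, &res->a1, chunk);
        return copied + chunk;
}
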
/drivers/media/usb/go7007/
go7007-fw.c
382 size += chunk - 80; in gen_mjpeghdr_to_package()
392 chunk = 28; in gen_mjpeghdr_to_package()
398 if (chunk < 28) { in gen_mjpeghdr_to_package()
637 int i, off = 0, chunk; in gen_mpeg1hdr_to_package() local
681 chunk = 28; in gen_mpeg1hdr_to_package()
687 if (chunk < 28) { in gen_mpeg1hdr_to_package()
823 int i, off = 0, chunk; in gen_mpeg4hdr_to_package() local
849 chunk = 28; in gen_mpeg4hdr_to_package()
855 if (chunk < 28) { in gen_mpeg4hdr_to_package()
892 chunk = 28; in gen_mpeg4hdr_to_package()
[all …]
/drivers/media/usb/usbtv/
usbtv.h
65 #define USBTV_MAGIC_OK(chunk) ((be32_to_cpu(chunk[0]) & 0xff000000) \ argument
67 #define USBTV_FRAME_ID(chunk) ((be32_to_cpu(chunk[0]) & 0x00ff0000) >> 16) argument
68 #define USBTV_ODD(chunk) ((be32_to_cpu(chunk[0]) & 0x0000f000) >> 15) argument
69 #define USBTV_CHUNK_NO(chunk) (be32_to_cpu(chunk[0]) & 0x00000fff) argument
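
Aside: the usbtv.h macros unpack one big-endian header word into magic, frame id, odd-field flag and chunk number. A sketch of the same field extraction on an already byte-swapped word; the USBTV_MAGIC_OK comparison constant is truncated in these results, so it is omitted, and the odd-field shift of 15 is kept exactly as the header has it:

#include <stdint.h>

struct chunk_hdr { uint32_t frame_id, odd, chunk_no; };

static struct chunk_hdr parse_hdr(uint32_t w)  /* w = be32_to_cpu(chunk[0]) */
{
        struct chunk_hdr h;

        /* the magic check against (w & 0xff000000) is elided here */
        h.frame_id = (w & 0x00ff0000) >> 16;
        h.odd      = (w & 0x0000f000) >> 15;  /* shift as in usbtv.h */
        h.chunk_no =  w & 0x00000fff;
        return h;
}
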
/drivers/net/ethernet/sfc/
efx_reflash.c
276 size_t chunk, offset, next_update; in efx_reflash_erase_partition() local
293 chunk = align; in efx_reflash_erase_partition()
305 chunk = min_t(size_t, partition_size - offset, chunk); in efx_reflash_erase_partition()
310 type, offset, offset + chunk - 1); in efx_reflash_erase_partition()
327 size_t write_max, chunk, offset, next_update; in efx_reflash_write_partition() local
343 chunk = rounddown(write_max, align); in efx_reflash_write_partition()
345 for (offset = 0, next_update = 0; offset + chunk <= data_size; offset += chunk) { in efx_reflash_write_partition()
357 type, offset, offset + chunk - 1); in efx_reflash_write_partition()
372 chunk = roundup(remaining, align); in efx_reflash_write_partition()
373 buf = kmalloc(chunk, GFP_KERNEL); in efx_reflash_write_partition()
[all …]
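
Aside: efx_reflash_write_partition() rounds its write unit down to the flash alignment, streams full chunks, then pads the short tail up to alignment in a scratch buffer. Sketch of that offset arithmetic; what the driver pads the tail with is not visible in these results, so the sketch zero-fills.

#include <stdlib.h>
#include <string.h>

static int flash_write(size_t off, const void *buf, size_t len)
{
        (void)off; (void)buf; (void)len;
        return 0;       /* stand-in for the device write call */
}

static int write_partition(const unsigned char *data, size_t data_size,
                           size_t write_max, size_t align)
{
        size_t chunk, offset;

        if (!align || write_max < align)
                return -1;                      /* guard the rounddown below */
        chunk = (write_max / align) * align;    /* rounddown(write_max, align) */

        for (offset = 0; offset + chunk <= data_size; offset += chunk)
                if (flash_write(offset, data + offset, chunk))
                        return -1;

        if (offset < data_size) {               /* short tail: pad to align */
                size_t remaining = data_size - offset;
                size_t padded = ((remaining + align - 1) / align) * align;
                unsigned char *buf = calloc(1, padded);
                int ret;

                if (!buf)
                        return -1;
                memcpy(buf, data + offset, remaining);  /* zero-padded copy */
                ret = flash_write(offset, buf, padded);
                free(buf);
                if (ret)
                        return -1;
        }
        return 0;
}
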
