
Searched refs:chunks (Results 1 – 25 of 51) sorted by relevance

/drivers/gpu/drm/radeon/
radeon_cs.c
301 if (p->chunks == NULL) { in radeon_cs_parser_init()
316 p->chunk_relocs = &p->chunks[i]; in radeon_cs_parser_init()
319 p->chunk_ib = &p->chunks[i]; in radeon_cs_parser_init()
331 p->chunk_flags = &p->chunks[i]; in radeon_cs_parser_init()
337 size = p->chunks[i].length_dw; in radeon_cs_parser_init()
339 p->chunks[i].user_ptr = cdata; in radeon_cs_parser_init()
358 if (p->chunks[i].length_dw > 1) in radeon_cs_parser_init()
359 ring = p->chunks[i].kdata[1]; in radeon_cs_parser_init()
360 if (p->chunks[i].length_dw > 2) in radeon_cs_parser_init()
458 kvfree(parser->chunks[i].kdata); in radeon_cs_parser_fini()
[all …]
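
The radeon hits show the command-submission ingestion pattern used by several drivers below: a descriptor array is copied in from userspace, dispatched on chunk_id, and the per-chunk kdata buffers are kvfree()d again in the _fini path. A minimal sketch of that shape, assuming an invented demo_* ABI rather than radeon's actual structures:

#include <linux/overflow.h>
#include <linux/slab.h>
#include <linux/uaccess.h>

struct demo_chunk {
	u64 user_ptr;	/* payload location in userspace */
	u32 length_dw;
	u32 chunk_id;
	void *kdata;	/* kernel copy; userspace leaves this zero */
};

static int demo_parser_init(struct demo_chunk **out, u32 nchunks,
			    const void __user *uptr)
{
	struct demo_chunk *chunks;
	u32 i;

	chunks = kvcalloc(nchunks, sizeof(*chunks), GFP_KERNEL);
	if (!chunks)
		return -ENOMEM;
	/* pull the whole descriptor array in from userspace in one go */
	if (copy_from_user(chunks, uptr,
			   array_size(nchunks, sizeof(*chunks)))) {
		kvfree(chunks);
		return -EFAULT;
	}
	for (i = 0; i < nchunks; i++)
		chunks[i].kdata = NULL;	/* payloads copied in later */
	*out = chunks;
	return 0;
}

static void demo_parser_fini(struct demo_chunk *chunks, u32 nchunks)
{
	u32 i;

	for (i = 0; i < nchunks; i++)
		kvfree(chunks[i].kdata);	/* per-chunk payloads */
	kvfree(chunks);				/* descriptor array */
}
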
/drivers/net/ethernet/netronome/nfp/nfpcore/
nfp_nsp.c
505 } *chunks; in nfp_nsp_command_buf_dma_sg() local
517 chunks = kcalloc(nseg, sizeof(*chunks), GFP_KERNEL); in nfp_nsp_command_buf_dma_sg()
518 if (!chunks) in nfp_nsp_command_buf_dma_sg()
526 chunks[i].chunk = kmalloc(chunk_size, in nfp_nsp_command_buf_dma_sg()
528 if (!chunks[i].chunk) in nfp_nsp_command_buf_dma_sg()
540 off += chunks[i].len; in nfp_nsp_command_buf_dma_sg()
548 addr = dma_map_single(dev, chunks[i].chunk, chunks[i].len, in nfp_nsp_command_buf_dma_sg()
550 chunks[i].dma_addr = addr; in nfp_nsp_command_buf_dma_sg()
609 dma_unmap_single(dev, chunks[i].dma_addr, chunks[i].len, in nfp_nsp_command_buf_dma_sg()
614 kfree(chunks[i].chunk); in nfp_nsp_command_buf_dma_sg()
[all …]
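
nfp_nsp_command_buf_dma_sg() shows per-chunk DMA bookkeeping: a kcalloc()ed descriptor array, one kmalloc()ed buffer per chunk, one dma_map_single() per buffer, and a symmetric unmap-and-free unwind. A hedged sketch of the same shape; the demo_* names are invented and the error handling is simplified:

#include <linux/dma-mapping.h>
#include <linux/slab.h>

struct demo_dma_chunk {
	void *buf;
	dma_addr_t dma_addr;
	size_t len;
};

static int demo_map_chunks(struct device *dev, struct demo_dma_chunk *c,
			   unsigned int nseg, size_t chunk_size)
{
	unsigned int i;

	for (i = 0; i < nseg; i++) {
		c[i].buf = kmalloc(chunk_size, GFP_KERNEL);
		if (!c[i].buf)
			goto err_unwind;
		c[i].len = chunk_size;
		c[i].dma_addr = dma_map_single(dev, c[i].buf, c[i].len,
					       DMA_BIDIRECTIONAL);
		if (dma_mapping_error(dev, c[i].dma_addr)) {
			kfree(c[i].buf);
			goto err_unwind;
		}
	}
	return 0;

err_unwind:
	/* unwind in reverse: unmap then free every chunk mapped so far */
	while (i--) {
		dma_unmap_single(dev, c[i].dma_addr, c[i].len,
				 DMA_BIDIRECTIONAL);
		kfree(c[i].buf);
	}
	return -ENOMEM;
}
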
/drivers/comedi/drivers/ni_routing/tools/
convert_csv_to_c.py
228 chunks = [ self.output_file_top,
244 chunks.append('\t&{},'.format(dev_table_name))
273 chunks.append('\tNULL,') # terminate list
274 chunks.append('};')
275 return '\n'.join(chunks)
416 chunks = [ self.output_file_top,
432 chunks.append('\t&{},'.format(fam_table_name))
462 chunks.append('\tNULL,') # terminate list
463 chunks.append('};')
464 return '\n'.join(chunks)
/drivers/net/mctp/
mctp-serial.c
534 u8 chunks[MAX_CHUNKS]; member
567 .chunks = { 3, 1, 1, 0},
572 .chunks = { 3, 1, 1, 0},
577 .chunks = { 1, 2, 0},
582 .chunks = { 1, 1, 1, 0},
587 .chunks = { 1, 1, 1, 1, 0},
597 .chunks = { 1, 0 },
602 .chunks = { 1, 0 },
607 .chunks = { 3, 0 },
612 .chunks = { 7, 0 },
[all …]
/drivers/infiniband/hw/usnic/
usnic_vnic.c
44 struct usnic_vnic_res_chunk chunks[USNIC_VNIC_RES_TYPE_MAX]; member
117 for (i = 0; i < ARRAY_SIZE(vnic->chunks); i++) { in usnic_vnic_dump()
118 chunk = &vnic->chunks[i]; in usnic_vnic_dump()
222 return vnic->chunks[type].cnt; in usnic_vnic_res_cnt()
228 return vnic->chunks[type].free_cnt; in usnic_vnic_res_free_cnt()
254 src = &vnic->chunks[type]; in usnic_vnic_get_resources()
286 vnic->chunks[res->type].free_cnt++; in usnic_vnic_put_resources()
382 &vnic->chunks[res_type]); in usnic_vnic_discover_resources()
391 usnic_vnic_free_res_chunk(&vnic->chunks[res_type]); in usnic_vnic_discover_resources()
427 usnic_vnic_free_res_chunk(&vnic->chunks[res_type]); in usnic_vnic_release_resources()
/drivers/gpu/drm/panthor/
panthor_heap.c
54 struct list_head chunks; member
168 if (initial_chunk && !list_empty(&heap->chunks)) { in panthor_alloc_heap_chunk()
172 prev_chunk = list_first_entry(&heap->chunks, in panthor_alloc_heap_chunk()
184 list_add(&chunk->node, &heap->chunks); in panthor_alloc_heap_chunk()
206 list_for_each_entry_safe(chunk, tmp, &heap->chunks, node) in panthor_free_heap_chunks()
312 INIT_LIST_HEAD(&heap->chunks); in panthor_heap_create()
321 first_chunk = list_first_entry(&heap->chunks, in panthor_heap_create()
392 list_for_each_entry_safe(chunk, tmp, &heap->chunks, node) { in panthor_heap_return_chunk()
479 chunk = list_first_entry(&heap->chunks, in panthor_heap_grow()
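
panthor_heap.c (like nouveau_dmem.c further down) keeps its chunks on a plain list_head: list_add() on growth, list_first_entry() to reach the newest chunk, and list_for_each_entry_safe() for teardown. A minimal sketch of that bookkeeping with invented demo_* types:

#include <linux/list.h>
#include <linux/slab.h>

struct demo_chunk {
	struct list_head node;
	/* the real drivers track a GPU VA range per chunk here */
};

struct demo_heap {
	struct list_head chunks;	/* INIT_LIST_HEAD() at create time */
};

static int demo_heap_grow(struct demo_heap *heap)
{
	struct demo_chunk *chunk = kzalloc(sizeof(*chunk), GFP_KERNEL);

	if (!chunk)
		return -ENOMEM;
	list_add(&chunk->node, &heap->chunks);	/* newest chunk is first */
	return 0;
}

static void demo_heap_destroy(struct demo_heap *heap)
{
	struct demo_chunk *chunk, *tmp;

	list_for_each_entry_safe(chunk, tmp, &heap->chunks, node) {
		list_del(&chunk->node);
		kfree(chunk);
	}
}
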
/drivers/net/ethernet/intel/idpf/
idpf_virtchnl.c
1205 chunk = &chunks->chunks[num_chunks]; in idpf_vport_get_q_reg()
1304 chunks = &vc_aq->chunks; in idpf_queue_reg_init()
1307 chunks = &vport_params->chunks; in idpf_queue_reg_init()
1944 buf_sz = struct_size(eq, chunks.chunks, num_chunks); in idpf_send_ena_dis_queues_msg()
2190 chunks = &vport_config->req_qs_chunks->chunks; in idpf_send_delete_queues_msg()
2193 chunks = &vport_params->chunks; in idpf_send_delete_queues_msg()
2206 idpf_convert_reg_to_queue_chunks(eq->chunks.chunks, chunks->chunks, in idpf_send_delete_queues_msg()
2289 size = struct_size(vc_msg, chunks.chunks, in idpf_send_add_queues_msg()
3438 chunk = &chunks->chunks[num_chunks]; in idpf_vport_get_queue_ids()
3554 chunks = &vc_aq->chunks; in idpf_vport_queue_ids_init()
[all …]
virtchnl2.h
599 struct virtchnl2_queue_reg_chunk chunks[] __counted_by_le(num_chunks);
692 struct virtchnl2_queue_reg_chunks chunks; member
884 struct virtchnl2_queue_reg_chunks chunks; member
1204 struct virtchnl2_queue_chunk chunks[] __counted_by_le(num_chunks);
1225 struct virtchnl2_queue_chunks chunks; member
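
The idpf messages size their trailing chunk arrays with struct_size(), and virtchnl2.h marks those flexible arrays __counted_by_le() so accesses can be bounds-checked against the little-endian count member. A small sketch of the allocation side, again with invented demo_* types:

#include <linux/overflow.h>
#include <linux/slab.h>

struct demo_chunk {
	__le32 start_queue_id;
	__le32 num_queues;
};

struct demo_msg {
	__le16 num_chunks;
	struct demo_chunk chunks[] __counted_by_le(num_chunks);
};

static struct demo_msg *demo_msg_alloc(u16 num_chunks)
{
	/* header plus num_chunks trailing elements, overflow-checked */
	struct demo_msg *msg = kzalloc(struct_size(msg, chunks, num_chunks),
				       GFP_KERNEL);

	if (msg)
		msg->num_chunks = cpu_to_le16(num_chunks);
	return msg;
}
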
/drivers/infiniband/ulp/rtrs/
README
28 session. A session is associated with a set of memory chunks reserved on the
36 chunks reserved for him on the server side. Their number, size and addresses
45 which of the memory chunks has been accessed and at which offset the message
80 the server (number of memory chunks which are going to be allocated for that
122 1. When processing a write request client selects one of the memory chunks
139 1. When processing a write request client selects one of the memory chunks
144 using the IMM field, Server invalidate rkey associated to the memory chunks
162 1. When processing a read request client selects one of the memory chunks
181 1. When processing a read request client selects one of the memory chunks
186 Server invalidate rkey associated to the memory chunks first, when it finishes,
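
The README describes how the client tells the server, through the RDMA IMM field, which memory chunk was accessed and at which offset the message starts. Below is one plausible 16/16-bit packing for illustration; the actual RTRS wire encoding may differ:

#include <linux/types.h>

/* pack: high 16 bits = chunk id, low 16 bits = offset into the chunk */
static inline u32 demo_imm_pack(u32 chunk_id, u32 offset)
{
	return (chunk_id << 16) | (offset & 0xffff);
}

static inline void demo_imm_unpack(u32 imm, u32 *chunk_id, u32 *offset)
{
	*chunk_id = imm >> 16;
	*offset = imm & 0xffff;
}
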
rtrs-srv.c
604 int nr, nr_sgt, chunks; in map_cont_bufs() local
610 srv->queue_depth - chunks); in map_cont_bufs()
617 sg_set_page(s, srv->chunks[chunks + i], in map_cont_bufs()
1051 data = page_address(srv->chunks[buf_id]); in process_read()
1104 data = page_address(srv->chunks[buf_id]); in process_write()
1383 kfree(srv->chunks); in free_srv()
1429 srv->chunks = kcalloc(srv->queue_depth, sizeof(*srv->chunks), in get_or_create_srv()
1431 if (!srv->chunks) in get_or_create_srv()
1435 srv->chunks[i] = alloc_pages(GFP_KERNEL, in get_or_create_srv()
1437 if (!srv->chunks[i]) in get_or_create_srv()
[all …]
/drivers/infiniband/hw/efa/
efa_verbs.c
116 struct pbl_chunk *chunks; member
1384 sizeof(*chunk_list->chunks), in pbl_chunk_list_create()
1386 if (!chunk_list->chunks) in pbl_chunk_list_create()
1396 if (!chunk_list->chunks[i].buf) in pbl_chunk_list_create()
1408 cur_chunk_buf = chunk_list->chunks[0].buf; in pbl_chunk_list_create()
1424 chunk_list->chunks[i].buf, in pbl_chunk_list_create()
1425 chunk_list->chunks[i].length, in pbl_chunk_list_create()
1460 kfree(chunk_list->chunks[i].buf); in pbl_chunk_list_create()
1462 kfree(chunk_list->chunks); in pbl_chunk_list_create()
1474 kfree(chunk_list->chunks[i].buf); in pbl_chunk_list_destroy()
[all …]
/drivers/gpu/drm/amd/amdgpu/
amdgpu_cs.c
204 if (!p->chunks) { in amdgpu_cs_pass1()
221 p->chunks[i].chunk_id = user_chunk.chunk_id; in amdgpu_cs_pass1()
224 size = p->chunks[i].length_dw; in amdgpu_cs_pass1()
229 if (p->chunks[i].kdata == NULL) { in amdgpu_cs_pass1()
242 switch (p->chunks[i].chunk_id) { in amdgpu_cs_pass1()
340 kvfree(p->chunks[i].kdata); in amdgpu_cs_pass1()
341 kvfree(p->chunks); in amdgpu_cs_pass1()
342 p->chunks = NULL; in amdgpu_cs_pass1()
624 chunk = &p->chunks[i]; in amdgpu_cs_pass2()
1431 kvfree(parser->chunks[i].kdata); in amdgpu_cs_parser_fini()
[all …]
amdgpu_cs.h
56 struct amdgpu_cs_chunk *chunks; member
/drivers/md/
md-bitmap.c
173 unsigned long chunks; member
1028 bytes = DIV_ROUND_UP(chunks, 8); in md_bitmap_storage_alloc()
1333 unsigned long chunks = bitmap->counts.chunks; in md_bitmap_init_from_disk() local
1345 for (i = 0; i < chunks ; i++) { in md_bitmap_init_from_disk()
1415 for (i = 0; i < chunks; i++) { in md_bitmap_init_from_disk()
1441 bit_cnt, chunks); in md_bitmap_init_from_disk()
2404 unsigned long chunks; in __bitmap_resize() local
2440 bytes = DIV_ROUND_UP(chunks, 8); in __bitmap_resize()
2488 bitmap->counts.chunks = chunks; in __bitmap_resize()
2493 chunks << chunkshift); in __bitmap_resize()
[all …]
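
md-bitmap.c sizes its storage as DIV_ROUND_UP(chunks, 8) bytes, i.e. one dirty bit per chunk rounded up to whole bytes. A one-line worked example:

#include <linux/kernel.h>

/* one dirty bit per chunk, rounded up to whole bytes */
static unsigned long demo_bitmap_bytes(unsigned long chunks)
{
	return DIV_ROUND_UP(chunks, 8);	/* e.g. 1000 chunks -> 125 bytes */
}
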
/drivers/net/wireless/ti/wlcore/
boot.c
237 u32 chunks, addr, len; in wlcore_boot_upload_firmware() local
242 chunks = be32_to_cpup((__be32 *) fw); in wlcore_boot_upload_firmware()
245 wl1271_debug(DEBUG_BOOT, "firmware chunks to be uploaded: %u", chunks); in wlcore_boot_upload_firmware()
247 while (chunks--) { in wlcore_boot_upload_firmware()
258 chunks, addr, len); in wlcore_boot_upload_firmware()
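
wlcore_boot_upload_firmware() reads a big-endian chunk count from the head of the firmware image and then walks the image with while (chunks--). A sketch of such a walk, assuming a simple (addr, len, data) record per chunk; this layout is illustrative, not the exact wlcore image format:

#include <linux/kernel.h>
#include <linux/types.h>

static void demo_write_mem(u32 addr, const u8 *data, u32 len)
{
	/* stub: a real driver would push this chunk to device memory */
}

static void demo_walk_fw(const u8 *fw)
{
	u32 chunks = be32_to_cpup((const __be32 *)fw);

	fw += sizeof(__be32);
	while (chunks--) {
		u32 addr = be32_to_cpup((const __be32 *)fw);
		u32 len = be32_to_cpup((const __be32 *)(fw + 4));

		fw += 2 * sizeof(__be32);
		demo_write_mem(addr, fw, len);
		fw += len;	/* next chunk header follows the data */
	}
}
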
/drivers/net/wireless/intel/iwlwifi/pcie/
ctxt-info-v2.c
365 len0 = pnvm_data->chunks[0].len; in iwl_pcie_load_payloads_contig()
366 len1 = pnvm_data->chunks[1].len; in iwl_pcie_load_payloads_contig()
381 memcpy(dram->block, pnvm_data->chunks[0].data, len0); in iwl_pcie_load_payloads_contig()
382 memcpy((u8 *)dram->block + len0, pnvm_data->chunks[1].data, len1); in iwl_pcie_load_payloads_contig()
415 len = pnvm_data->chunks[i].len; in iwl_pcie_load_payloads_segments()
416 data = pnvm_data->chunks[i].data; in iwl_pcie_load_payloads_segments()
/drivers/gpu/drm/sprd/
sprd_dsi.c
460 u32 chunks = 0; in sprd_dsi_dpi_video() local
539 chunks = vm->hactive / video_size; in sprd_dsi_dpi_video()
542 if (total_bytes >= (bytes_per_chunk * chunks)) { in sprd_dsi_dpi_video()
544 bytes_per_chunk * chunks; in sprd_dsi_dpi_video()
550 if (bytes_left > (pkt_header * chunks)) { in sprd_dsi_dpi_video()
552 pkt_header * chunks) / chunks; in sprd_dsi_dpi_video()
560 chunks = 1; in sprd_dsi_dpi_video()
571 dsi_reg_wr(ctx, VIDEO_PKT_CONFIG, VIDEO_LINE_CHUNK_NUM, 16, chunks); in sprd_dsi_dpi_video()
/drivers/virt/vboxguest/
vboxguest_core.c
361 u32 i, chunks; in vbg_balloon_work() local
389 chunks = req->balloon_chunks; in vbg_balloon_work()
390 if (chunks > gdev->mem_balloon.max_chunks) { in vbg_balloon_work()
392 __func__, chunks, gdev->mem_balloon.max_chunks); in vbg_balloon_work()
396 if (chunks > gdev->mem_balloon.chunks) { in vbg_balloon_work()
398 for (i = gdev->mem_balloon.chunks; i < chunks; i++) { in vbg_balloon_work()
403 gdev->mem_balloon.chunks++; in vbg_balloon_work()
407 for (i = gdev->mem_balloon.chunks; i-- > chunks;) { in vbg_balloon_work()
412 gdev->mem_balloon.chunks--; in vbg_balloon_work()
1667 balloon_info->u.out.balloon_chunks = gdev->mem_balloon.chunks; in vbg_ioctl_check_balloon()
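
vbg_balloon_work() walks the current chunk count toward the host-requested target: one loop inflates while below the target, the other deflates while above it, and each bails out on the first failure so the count stays consistent. A sketch with invented demo_* helpers:

#include <linux/types.h>

struct demo_balloon {
	u32 chunks;	/* chunks currently handed to the host */
};

/* stubs for the hypothetical per-chunk allocate/release helpers */
static int demo_inflate_chunk(struct demo_balloon *b, u32 idx) { return 0; }
static int demo_deflate_chunk(struct demo_balloon *b, u32 idx) { return 0; }

static void demo_balloon_resize(struct demo_balloon *b, u32 target)
{
	u32 i;

	if (target > b->chunks) {
		/* inflate: hand more chunks to the host */
		for (i = b->chunks; i < target; i++) {
			if (demo_inflate_chunk(b, i))
				return;
			b->chunks++;
		}
	} else {
		/* deflate: take chunks back, highest index first */
		for (i = b->chunks; i-- > target;) {
			if (demo_deflate_chunk(b, i))
				return;
			b->chunks--;
		}
	}
}
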
/drivers/dma/sh/
shdma-base.c
97 if (chunk->chunks == 1) { in shdma_tx_submit()
356 if (desc->mark == DESC_COMPLETED && desc->chunks == 1) { in __ld_cleanup()
372 BUG_ON(desc->chunks != 1); in __ld_cleanup()
567 int chunks = 0; in shdma_prep_sg() local
572 chunks += DIV_ROUND_UP(sg_dma_len(sg), schan->max_xfer_len); in shdma_prep_sg()
612 new->chunks = 1; in shdma_prep_sg()
614 new->chunks = chunks--; in shdma_prep_sg()
rcar-dmac.c
79 struct list_head chunks; member
107 DECLARE_FLEX_ARRAY(struct rcar_dmac_xfer_chunk, chunks);
115 ((PAGE_SIZE - offsetof(struct rcar_dmac_desc_page, chunks)) / \
389 list_first_entry(&desc->chunks, in rcar_dmac_chan_start_xfer()
513 desc->running = list_first_entry(&desc->chunks, in rcar_dmac_tx_submit()
546 INIT_LIST_HEAD(&desc->chunks); in rcar_dmac_desc_alloc()
673 struct rcar_dmac_xfer_chunk *chunk = &page->chunks[i]; in rcar_dmac_xfer_chunk_alloc()
771 list_for_each_entry(chunk, &desc->chunks, node) { in rcar_dmac_fill_hwdesc()
1025 list_add_tail(&chunk->node, &desc->chunks); in rcar_dmac_chan_prep_sg()
1409 list_for_each_entry_reverse(chunk, &desc->chunks, node) { in rcar_dmac_chan_get_residue()
[all …]
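
shdma_prep_sg() first counts how many hardware chunks a scatterlist needs, since each entry may exceed the controller's maximum transfer length. A sketch of that counting pass:

#include <linux/kernel.h>
#include <linux/scatterlist.h>

static int demo_count_chunks(struct scatterlist *sgl, unsigned int sg_len,
			     size_t max_xfer_len)
{
	struct scatterlist *sg;
	int i, chunks = 0;

	/* each entry splits into ceil(len / max_xfer_len) hw chunks */
	for_each_sg(sgl, sg, sg_len, i)
		chunks += DIV_ROUND_UP(sg_dma_len(sg), max_xfer_len);
	return chunks;
}
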
/drivers/gpu/drm/nouveau/
nouveau_dmem.c
83 struct list_head chunks; member
271 list_add(&chunk->list, &drm->dmem->chunks); in nouveau_dmem_chunk_alloc()
342 list_for_each_entry(chunk, &drm->dmem->chunks, list) { in nouveau_dmem_resume()
359 list_for_each_entry(chunk, &drm->dmem->chunks, list) in nouveau_dmem_suspend()
420 list_for_each_entry_safe(chunk, tmp, &drm->dmem->chunks, list) { in nouveau_dmem_fini()
596 INIT_LIST_HEAD(&drm->dmem->chunks); in nouveau_dmem_init()
/drivers/mtd/nand/raw/
davinci_nand.c
647 int chunks = mtd->writesize / 512; in davinci_nand_attach_chip() local
649 if (!chunks || mtd->oobsize < 16) { in davinci_nand_attach_chip()
687 if (chunks == 1) { in davinci_nand_attach_chip()
690 } else if (chunks == 4 || chunks == 8) { in davinci_nand_attach_chip()
/drivers/dma/ioat/
dma.c
378 int i, chunks; in ioat_alloc_ring() local
385 chunks = (total_descs * IOAT_DESC_SZ) / IOAT_CHUNK_SIZE; in ioat_alloc_ring()
386 ioat_chan->desc_chunks = chunks; in ioat_alloc_ring()
388 for (i = 0; i < chunks; i++) { in ioat_alloc_ring()
448 if (chunks == 1) in ioat_alloc_ring()
/drivers/i3c/master/mipi-i3c-hci/
dma.c
616 unsigned int chunks; in hci_dma_process_ibi() local
633 chunks = FIELD_GET(IBI_CHUNKS, ibi_status); in hci_dma_process_ibi()
634 ibi_chunks += chunks; in hci_dma_process_ibi()
636 ibi_size += chunks * rh->ibi_chunk_sz; in hci_dma_process_ibi()
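
hci_dma_process_ibi() pulls the chunk count out of the IBI status word with FIELD_GET(IBI_CHUNKS, ibi_status). A sketch of that decode; the mask below is an assumed position, not the real MIPI HCI register layout:

#include <linux/bitfield.h>
#include <linux/bits.h>
#include <linux/types.h>

#define DEMO_IBI_CHUNKS	GENMASK(9, 0)	/* assumed field position */

static unsigned int demo_ibi_chunks(u32 ibi_status)
{
	return FIELD_GET(DEMO_IBI_CHUNKS, ibi_status);
}
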
/drivers/accel/habanalabs/common/
command_submission.c
1409 void __user *chunks, u32 num_chunks, in hl_cs_copy_chunk_array() argument
1435 if (copy_from_user(*cs_chunk_array, chunks, size_to_copy)) { in hl_cs_copy_chunk_array()
1510 rc = hl_cs_copy_chunk_array(hdev, &cs_chunk_array, chunks, num_chunks, in cs_ioctl_default()
1694 void __user *chunks; in hl_cs_ctx_switch() local
1729 chunks = (void __user *) (uintptr_t) args->in.chunks_restore; in hl_cs_ctx_switch()
1737 rc = cs_ioctl_default(hpriv, chunks, num_chunks, in hl_cs_ctx_switch()
2199 void __user *chunks, u32 num_chunks, in cs_ioctl_signal_wait() argument
2563 void __user *chunks; in hl_cs_ioctl() local
2579 chunks = (void __user *) (uintptr_t) args->in.chunks_execute; in hl_cs_ioctl()
2596 rc = cs_ioctl_signal_wait(hpriv, cs_type, chunks, num_chunks, in hl_cs_ioctl()
[all …]

Completed in 90 milliseconds