
Searched refs:num_chunks (Results 1 – 24 of 24) sorted by relevance

/linux-6.3-rc2/drivers/staging/media/atomisp/pci/
sh_css_defs.h 291 #define __ISP_MIN_INTERNAL_WIDTH(num_chunks, pipelining, mode) \ argument
292 ((num_chunks) * (pipelining) * (1 << _ISP_LOG_VECTOR_STEP(mode)) * \
301 #define __ISP_CHUNK_STRIDE_DDR(c_subsampling, num_chunks) \ argument
302 ((c_subsampling) * (num_chunks) * HIVE_ISP_DDR_WORD_BYTES)
308 num_chunks, \ argument
313 __ISP_MIN_INTERNAL_WIDTH(num_chunks, \
319 __ISP_CHUNK_STRIDE_DDR(c_subsampling, num_chunks) \
ia_css_acc_types.h 120 u32 num_chunks; member
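Both macros scale linearly with num_chunks: the minimum internal width multiplies num_chunks by the pipelining depth and the vector step for the mode, and the chunk stride in DDR is c_subsampling * num_chunks * HIVE_ISP_DDR_WORD_BYTES bytes. As a hypothetical worked example, if HIVE_ISP_DDR_WORD_BYTES were 32 (a 256-bit DDR word), a plane with c_subsampling = 2 split into num_chunks = 2 would get a chunk stride of 2 * 2 * 32 = 128 bytes.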
/linux-6.3-rc2/drivers/crypto/qat/qat_common/
icp_qat_uclo.h 252 unsigned short num_chunks; member
267 short num_chunks; member
507 unsigned short num_chunks; member
533 unsigned short num_chunks; member
549 unsigned short num_chunks; member
qat_uclo.c 112 if (suof_hdr->num_chunks <= 0x1) { in qat_uclo_check_suof_format()
455 for (i = 0; i < obj_hdr->num_chunks; i++) { in qat_uclo_find_chunk()
502 for (i = 0; i < file_hdr->num_chunks; i++) { in qat_uclo_map_chunk()
1179 suof_handle->img_table.num_simgs = suof_ptr->num_chunks - 1; in qat_uclo_map_suof()
1743 uobj_chunk_num = uobj_hdr->num_chunks; in qat_uclo_map_objs_from_mof()
1745 sobj_chunk_num = sobj_hdr->num_chunks; in qat_uclo_map_objs_from_mof()
1825 if (mof_hdr->num_chunks <= 0x1) { in qat_uclo_check_mof_format()
1869 chunks_num = mof_ptr->num_chunks; in qat_uclo_map_mof_obj()
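Taken together, the qat_uclo hits follow one firmware-container pattern: a header carries num_chunks, the loader rejects containers with at most one chunk (the num_chunks <= 0x1 checks), then walks the chunk headers that follow to locate or map each chunk. A rough user-space sketch of that walk; the struct layouts below are placeholders, not the real icp_qat_uclo.h definitions:

#include <stddef.h>
#include <string.h>

/* Placeholder layouts standing in for the icp_qat_uclo.h structures. */
struct chunk_hdr {
        char chunk_id[8];       /* name of the chunk */
        unsigned int offset;    /* offset of the chunk body in the image */
        unsigned int size;
};

struct container_hdr {
        unsigned short num_chunks;      /* chunk headers follow this header */
};

/* Find a chunk by name, in the spirit of qat_uclo_find_chunk(). */
static const struct chunk_hdr *find_chunk(const struct container_hdr *hdr,
                                          const struct chunk_hdr *chunks,
                                          const char *id)
{
        unsigned short i;

        if (hdr->num_chunks <= 1)       /* mirrors the num_chunks <= 0x1 format checks */
                return NULL;

        for (i = 0; i < hdr->num_chunks; i++)
                if (!strncmp(chunks[i].chunk_id, id, sizeof(chunks[i].chunk_id)))
                        return &chunks[i];

        return NULL;
}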
/linux-6.3-rc2/drivers/platform/x86/intel/ifs/
load.c 94 int i, num_chunks, chunk_size; in copy_hashes_authenticate_chunks() local
105 num_chunks = hashes_status.num_chunks; in copy_hashes_authenticate_chunks()
123 for (i = 0; i < num_chunks; i++) { in copy_hashes_authenticate_chunks()
ifs.h 145 u32 num_chunks :8; member
/linux-6.3-rc2/drivers/accel/habanalabs/common/
command_submission.c 1317 u32 cs_type_flags, num_chunks; in hl_cs_sanity_checks() local
1349 num_chunks = args->in.num_chunks_execute; in hl_cs_sanity_checks()
1360 if (!num_chunks) { in hl_cs_sanity_checks()
1376 void __user *chunks, u32 num_chunks, in hl_cs_copy_chunk_array() argument
1381 if (num_chunks > HL_MAX_JOBS_PER_CS) { in hl_cs_copy_chunk_array()
1510 for (i = 0 ; i < num_chunks ; i++) { in cs_ioctl_default()
1662 u32 num_chunks, tmp; in hl_cs_ctx_switch() local
1699 if (!num_chunks) { in hl_cs_ctx_switch()
1718 if (num_chunks) { in hl_cs_ctx_switch()
2166 void __user *chunks, u32 num_chunks, in cs_ioctl_signal_wait() argument
[all …]
/linux-6.3-rc2/drivers/gpu/drm/lima/
lima_sched.c 334 dt->num_chunks++; in lima_sched_build_error_task_list()
341 dt->num_chunks++; in lima_sched_build_error_task_list()
347 dt->num_chunks++; in lima_sched_build_error_task_list()
386 dt->num_chunks++; in lima_sched_build_error_task_list()
lima_dump.h 47 __u32 num_chunks; member
/linux-6.3-rc2/drivers/infiniband/sw/siw/
siw_mem.c 372 int num_pages, num_chunks, i, rv = 0; in siw_umem_get() local
382 num_chunks = (num_pages >> CHUNK_SHIFT) + 1; in siw_umem_get()
408 kcalloc(num_chunks, sizeof(struct siw_page_chunk), GFP_KERNEL); in siw_umem_get()
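Here num_chunks is the number of fixed-size siw_page_chunk arrays needed to cover num_pages: (num_pages >> CHUNK_SHIFT) + 1 rounds the division up, at the cost of one spare chunk whenever num_pages is an exact multiple of the chunk size, and the kcalloc() hit sizes the chunk table from it.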
/linux-6.3-rc2/drivers/crypto/marvell/octeontx/
otx_cptvf.h 37 u32 num_chunks; /* Number of command chunks */ member
otx_cptvf_main.c 179 queue->num_chunks = 0; in free_command_queues()
226 i, queue->num_chunks); in alloc_command_queues()
231 if (queue->num_chunks == 0) { in alloc_command_queues()
238 queue->num_chunks++; in alloc_command_queues()
/linux-6.3-rc2/drivers/gpu/drm/radeon/
radeon_cs.c 279 if (!cs->num_chunks) { in radeon_cs_parser_init()
291 p->chunks_array = kvmalloc_array(cs->num_chunks, sizeof(uint64_t), GFP_KERNEL); in radeon_cs_parser_init()
297 sizeof(uint64_t)*cs->num_chunks)) { in radeon_cs_parser_init()
301 p->nchunks = cs->num_chunks; in radeon_cs_parser_init()
/linux-6.3-rc2/drivers/net/dsa/sja1105/
sja1105_spi.c 44 int num_chunks; in sja1105_xfer() local
47 num_chunks = DIV_ROUND_UP(len, priv->max_xfer_len); in sja1105_xfer()
56 for (i = 0; i < num_chunks; i++) { in sja1105_xfer()
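The sja1105 hits show the generic pattern for splitting a long buffer into bounded transfers: num_chunks = DIV_ROUND_UP(len, max_xfer_len), then one transfer per chunk, with the final chunk possibly shorter. A hedged sketch of that loop; process_chunk() is a placeholder callback, not something the sja1105 driver defines, and the real sja1105_xfer() builds SPI transfers instead:

#include <linux/kernel.h>       /* DIV_ROUND_UP() */
#include <linux/minmax.h>       /* min_t() */
#include <linux/types.h>

static int transfer_in_chunks(u8 *buf, size_t len, size_t max_xfer_len,
                              int (*process_chunk)(u8 *chunk, size_t chunk_len))
{
        size_t num_chunks = DIV_ROUND_UP(len, max_xfer_len);
        size_t i, offset = 0;
        int rc;

        for (i = 0; i < num_chunks; i++) {
                /* The last chunk may be shorter than max_xfer_len. */
                size_t chunk_len = min_t(size_t, len - offset, max_xfer_len);

                rc = process_chunk(buf + offset, chunk_len);
                if (rc)
                        return rc;
                offset += chunk_len;
        }

        return 0;
}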
/linux-6.3-rc2/drivers/gpu/drm/amd/amdgpu/
amdgpu_cs.c 51 if (cs->in.num_chunks == 0) in amdgpu_cs_parser_init()
199 chunk_array = kvmalloc_array(cs->in.num_chunks, sizeof(uint64_t), in amdgpu_cs_pass1()
207 sizeof(uint64_t)*cs->in.num_chunks)) { in amdgpu_cs_pass1()
212 p->nchunks = cs->in.num_chunks; in amdgpu_cs_pass1()
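The habanalabs, radeon and amdgpu hits above all share the same ioctl-entry shape: read num_chunks from the submission args, reject zero (habanalabs also caps it at HL_MAX_JOBS_PER_CS), allocate the array with kvmalloc_array(), and copy_from_user() the chunk descriptors. A stripped-down sketch of that shape; MAX_CHUNKS is a placeholder bound, and the u64-per-chunk element mirrors the radeon/amdgpu pointer array rather than any specific driver struct:

#include <linux/err.h>
#include <linux/slab.h>         /* kvmalloc_array(), kvfree() */
#include <linux/types.h>
#include <linux/uaccess.h>      /* copy_from_user() */

#define MAX_CHUNKS 512          /* placeholder, cf. HL_MAX_JOBS_PER_CS */

static u64 *copy_chunk_array(const void __user *uptr, u32 num_chunks)
{
        u64 *chunks;

        if (!num_chunks || num_chunks > MAX_CHUNKS)
                return ERR_PTR(-EINVAL);

        chunks = kvmalloc_array(num_chunks, sizeof(*chunks), GFP_KERNEL);
        if (!chunks)
                return ERR_PTR(-ENOMEM);

        if (copy_from_user(chunks, uptr, num_chunks * sizeof(*chunks))) {
                kvfree(chunks);
                return ERR_PTR(-EFAULT);
        }

        return chunks;
}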
/linux-6.3-rc2/drivers/mtd/nand/raw/
mxc_nand.c 253 u16 num_chunks = mtd->writesize / 512; in copy_spare() local
260 oob_chunk_size = (host->used_oobsize / num_chunks) & ~1; in copy_spare()
263 for (i = 0; i < num_chunks - 1; i++) in copy_spare()
273 for (i = 0; i < num_chunks - 1; i++) in copy_spare()
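copy_spare() divides the page into 512-byte chunks, so num_chunks = writesize / 512, and each chunk's share of the spare area is (used_oobsize / num_chunks) & ~1, i.e. rounded down to an even size. As a hypothetical worked example, a 2048-byte page with 64 usable OOB bytes gives num_chunks = 4 and an oob_chunk_size of (64 / 4) & ~1 = 16 bytes.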
/linux-6.3-rc2/net/sctp/
socket.c 6953 u32 num_chunks = 0; in sctp_getsockopt_peer_auth_chunks() local
6975 num_chunks = ntohs(ch->param_hdr.length) - sizeof(struct sctp_paramhdr); in sctp_getsockopt_peer_auth_chunks()
6976 if (len < num_chunks) in sctp_getsockopt_peer_auth_chunks()
6979 if (copy_to_user(to, ch->chunks, num_chunks)) in sctp_getsockopt_peer_auth_chunks()
6982 len = sizeof(struct sctp_authchunks) + num_chunks; in sctp_getsockopt_peer_auth_chunks()
6985 if (put_user(num_chunks, &p->gauth_number_of_chunks)) in sctp_getsockopt_peer_auth_chunks()
6998 u32 num_chunks = 0; in sctp_getsockopt_local_auth_chunks() local
7026 if (len < sizeof(struct sctp_authchunks) + num_chunks) in sctp_getsockopt_local_auth_chunks()
7029 if (copy_to_user(to, ch->chunks, num_chunks)) in sctp_getsockopt_local_auth_chunks()
7032 len = sizeof(struct sctp_authchunks) + num_chunks; in sctp_getsockopt_local_auth_chunks()
[all …]
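In both SCTP getsockopt paths, num_chunks is really a byte count: the AUTH parameter's length minus the sctp_paramhdr, i.e. the size of the chunk-type list copied out to user space. Since each listed chunk type occupies one byte, the same value doubles as the chunk count reported through gauth_number_of_chunks.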
/linux-6.3-rc2/include/uapi/drm/
amdgpu_drm.h 607 __u32 num_chunks; member
radeon_drm.h 984 __u32 num_chunks; member
/linux-6.3-rc2/drivers/net/wireless/intel/ipw2x00/
ipw2200.c 3794 if (le32_to_cpu(bd->u.data.num_chunks) > NUM_TFD_CHUNKS) { in ipw_queue_tx_free_tfd()
3796 le32_to_cpu(bd->u.data.num_chunks)); in ipw_queue_tx_free_tfd()
3802 for (i = 0; i < le32_to_cpu(bd->u.data.num_chunks); i++) { in ipw_queue_tx_free_tfd()
10190 tfd->u.data.num_chunks = cpu_to_le32(min((u8) (NUM_TFD_CHUNKS - 2), in ipw_tx_skb()
10193 txb->nr_frags, le32_to_cpu(tfd->u.data.num_chunks)); in ipw_tx_skb()
10194 for (i = 0; i < le32_to_cpu(tfd->u.data.num_chunks); i++) { in ipw_tx_skb()
10196 i, le32_to_cpu(tfd->u.data.num_chunks), in ipw_tx_skb()
10199 i, tfd->u.data.num_chunks, in ipw_tx_skb()
10243 le32_add_cpu(&tfd->u.data.num_chunks, 1); in ipw_tx_skb()
ipw2200.h 488 __le32 num_chunks; member
/linux-6.3-rc2/fs/btrfs/
inode.c 520 atomic_t num_chunks; member
1526 if (atomic_dec_and_test(&async_cow->num_chunks)) in async_cow_free()
1542 u64 num_chunks = DIV_ROUND_UP(end - start, SZ_512K); in cow_file_range_async() local
1552 num_chunks = 1; in cow_file_range_async()
1559 ctx = kvmalloc(struct_size(ctx, chunks, num_chunks), GFP_KERNEL); in cow_file_range_async()
1575 atomic_set(&ctx->num_chunks, num_chunks); in cow_file_range_async()
1577 for (i = 0; i < num_chunks; i++) { in cow_file_range_async()
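The btrfs hits outline the async-COW fan-out: the byte range is cut into SZ_512K chunks with DIV_ROUND_UP(), one context holding all chunk descriptors is allocated via struct_size(), and an atomic num_chunks acts as a refcount so the context is freed when the last chunk's work completes. A condensed sketch of that shape; async_ctx, chunk_work and the commented-out queueing call are placeholders, not btrfs names:

#include <linux/atomic.h>
#include <linux/kernel.h>       /* DIV_ROUND_UP() */
#include <linux/minmax.h>       /* min() */
#include <linux/overflow.h>     /* struct_size() */
#include <linux/sizes.h>        /* SZ_512K */
#include <linux/slab.h>         /* kvmalloc(), kvfree() */
#include <linux/types.h>

struct async_ctx;

struct chunk_work {
        u64 start;
        u64 end;
        struct async_ctx *ctx;
};

struct async_ctx {
        atomic_t num_chunks;            /* outstanding chunks; last one frees ctx */
        struct chunk_work chunks[];     /* flexible array sized by struct_size() */
};

/* Called when one chunk's work finishes, cf. async_cow_free(). */
static void chunk_done(struct chunk_work *w)
{
        if (atomic_dec_and_test(&w->ctx->num_chunks))
                kvfree(w->ctx);
}

static struct async_ctx *start_async_range(u64 start, u64 end)
{
        u64 num_chunks = DIV_ROUND_UP(end - start, SZ_512K);
        struct async_ctx *ctx;
        u64 i;

        ctx = kvmalloc(struct_size(ctx, chunks, num_chunks), GFP_KERNEL);
        if (!ctx)
                return NULL;

        atomic_set(&ctx->num_chunks, num_chunks);
        for (i = 0; i < num_chunks; i++) {
                ctx->chunks[i].ctx = ctx;
                ctx->chunks[i].start = start + i * SZ_512K;
                ctx->chunks[i].end = min(ctx->chunks[i].start + SZ_512K - 1, end);
                /* queue_chunk_work(&ctx->chunks[i]);  placeholder for queueing the work */
        }

        return ctx;
}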
/linux-6.3-rc2/drivers/staging/media/atomisp/pci/runtime/binary/src/
binary.c 107 info->output.num_chunks, info->pipeline.pipelining); in ia_css_binary_internal_res()
/linux-6.3-rc2/drivers/staging/media/ipu3/
ipu3-abi.h 1559 u32 num_chunks; member
