Searched refs:buff_size (Results 1 – 25 of 45) sorted by relevance

/drivers/crypto/intel/qat/qat_common/
adf_pm_dbgfs_utils.c
19 u32 *pm_info_regs, size_t buff_size, int table_len, in pm_scnprint_table() argument
32 wr += scnprintf(&buff[wr], buff_size - wr, "%s: %#x\n", key, in pm_scnprint_table()
41 u32 *pm_info_regs, size_t buff_size, int table_len) in adf_pm_scnprint_table_upper_keys() argument
43 return pm_scnprint_table(buff, table, pm_info_regs, buff_size, in adf_pm_scnprint_table_upper_keys()
48 u32 *pm_info_regs, size_t buff_size, int table_len) in adf_pm_scnprint_table_lower_keys() argument
50 return pm_scnprint_table(buff, table, pm_info_regs, buff_size, in adf_pm_scnprint_table_lower_keys()
adf_pm_dbgfs_utils.h
31 u32 *pm_info_regs, size_t buff_size, int table_len);
34 u32 *pm_info_regs, size_t buff_size, int table_len);
adf_admin.h
24 int adf_get_pm_info(struct adf_accel_dev *accel_dev, dma_addr_t p_state_addr, size_t buff_size);
adf_admin.c
455 size_t buff_size) in adf_get_pm_info() argument
470 req.init_cfg_sz = buff_size; in adf_get_pm_info()
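
The adf_pm_dbgfs_utils.c matches show the bounded-formatting idiom behind pm_scnprint_table(): every scnprintf() call is limited to the space left (buff_size - wr) and returns the number of characters actually stored, so the running offset can never move past the end of the buffer. A minimal standalone sketch of that idiom, with an invented key/value table:

#include <linux/kernel.h>
#include <linux/types.h>

/* Illustrative only: print a key/value table into a bounded buffer the
 * way pm_scnprint_table() does; keys/vals are made-up inputs.
 */
static int demo_scnprint_table(char *buff, size_t buff_size,
                               const char * const *keys,
                               const u32 *vals, int table_len)
{
        int wr = 0;
        int i;

        for (i = 0; i < table_len; i++)
                /* scnprintf() writes at most buff_size - wr - 1 characters
                 * plus the terminating NUL and returns what it stored, so
                 * wr stays inside the buffer even once it fills up.
                 */
                wr += scnprintf(&buff[wr], buff_size - wr, "%s: %#x\n",
                                keys[i], vals[i]);

        return wr;
}
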
/drivers/net/ethernet/intel/i40e/
i40e_adminq.c
755 u16 buff_size, in i40e_asq_send_command_atomic_exec() argument
806 if (buff_size > hw->aq.asq_buf_size) { in i40e_asq_send_command_atomic_exec()
810 buff_size); in i40e_asq_send_command_atomic_exec()
848 memcpy(dma_buff->va, buff, buff_size); in i40e_asq_send_command_atomic_exec()
849 desc_on_ring->datalen = cpu_to_le16(buff_size); in i40e_asq_send_command_atomic_exec()
863 buff, buff_size); in i40e_asq_send_command_atomic_exec()
896 memcpy(buff, dma_buff->va, buff_size); in i40e_asq_send_command_atomic_exec()
959 u16 buff_size, in i40e_asq_send_command_atomic() argument
976 void *buff, /* can be NULL */ u16 buff_size, in i40e_asq_send_command() argument
1001 u16 buff_size, in i40e_asq_send_command_atomic_v2() argument
[all …]
i40e_prototype.h
27 void *buff, /* can be NULL */ u16 buff_size,
31 void *buff, /* can be NULL */ u16 buff_size,
38 u16 buff_size,
195 void *buff, u16 buff_size, u16 *data_size,
203 u8 mib_type, void *buff, u16 buff_size,
208 u8 mib_type, void *buff, u16 buff_size,
223 void *buff, u16 buff_size,
393 u8 table_id, u32 start_index, u16 buff_size,
436 u16 buff_size, u32 track_id,
441 u16 buff_size, u8 flags,
i40e_common.c
2881 if (buff_size > I40E_AQ_LARGE_BUF) in i40e_aq_discover_capabilities()
2978 if (buff_size == 0 || !buff) in i40e_aq_get_lldp_mib()
2993 if (buff_size > I40E_AQ_LARGE_BUF) in i40e_aq_get_lldp_mib()
3027 if (buff_size == 0 || !buff) in i40e_aq_set_lldp_mib()
3034 if (buff_size > I40E_AQ_LARGE_BUF) in i40e_aq_set_lldp_mib()
3191 if (buff_size == 0 || !buff) in i40e_aq_get_cee_dcb_config()
3326 void *buff, u16 buff_size, in i40e_aq_tx_sched_cmd() argument
3363 if (buff_size > I40E_AQ_LARGE_BUF) in i40e_aq_tx_sched_cmd()
3938 if (buff_size == 0 || !buff) in i40e_aq_debug_dump()
3947 if (buff_size > I40E_AQ_LARGE_BUF) in i40e_aq_debug_dump()
[all …]
i40e_nvm.c
1001 u32 buff_size = 0; in i40e_nvmupd_exec_aq() local
1029 buff_size = max_t(u32, aq_data_len, le16_to_cpu(aq_desc->datalen)); in i40e_nvmupd_exec_aq()
1030 if (buff_size) { in i40e_nvmupd_exec_aq()
1051 buff_size, &cmd_details); in i40e_nvmupd_exec_aq()
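
The i40e_adminq.c and i40e_common.c matches follow one pattern: buff_size is validated against the queue's configured buffer size (hw->aq.asq_buf_size) and, in the i40e_common.c callers, compared with I40E_AQ_LARGE_BUF; the payload is then memcpy()'d into a pre-mapped DMA buffer and the length is stored little-endian in the descriptor's datalen field, with the response copied back out the same way. The sketch below is a simplified, hypothetical rendering of that flow; the struct and field names are stand-ins, not the driver's real API.

#include <asm/byteorder.h>
#include <linux/errno.h>
#include <linux/string.h>
#include <linux/types.h>

/* Hypothetical stand-ins; the real driver works with struct i40e_hw,
 * struct i40e_aq_desc and a ring of pre-mapped DMA buffers.
 */
struct demo_aq_desc {
        __le16 datalen;
        /* ... flags, opcode, cookie, buffer address ... */
};

struct demo_asq {
        void *bounce_va;        /* pre-mapped DMA bounce buffer */
        u16 bounce_size;        /* its capacity */
};

static int demo_asq_send(struct demo_asq *asq, struct demo_aq_desc *desc,
                         void *buff, u16 buff_size)
{
        /* Mirror the "buff_size > hw->aq.asq_buf_size" rejection. */
        if (buff && buff_size > asq->bounce_size)
                return -EINVAL;

        if (buff) {
                /* Stage the payload in DMA-able memory and record its
                 * length little-endian for the hardware.
                 */
                memcpy(asq->bounce_va, buff, buff_size);
                desc->datalen = cpu_to_le16(buff_size);
        }

        /* ... submit the descriptor, wait for completion, then copy the
         * response back from the bounce buffer into buff ...
         */
        return 0;
}
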
/drivers/accel/ivpu/
ivpu_ms.c
82 ms->buff_size = ivpu_bo_size(ms->bo) / MS_NUM_BUFFERS; in ivpu_ms_start_ioctl()
84 ms->inactive_buff_vpu_addr = ms->bo->vpu_addr + ms->buff_size; in ivpu_ms_start_ioctl()
86 ms->inactive_buff_ptr = ivpu_bo_vaddr(ms->bo) + ms->buff_size; in ivpu_ms_start_ioctl()
89 ms->active_buff_vpu_addr, ms->buff_size); in ivpu_ms_start_ioctl()
145 ms->buff_size, &bytes_written); in copy_samples_to_user()
ivpu_ms.h
20 u64 buff_size; member
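
ivpu_ms.c derives buff_size by splitting one buffer object evenly across MS_NUM_BUFFERS sample buffers and points the inactive buffer's VPU address and CPU pointer one buff_size past the active ones. A small, hypothetical sketch of that double-buffer layout:

#include <linux/types.h>

#define DEMO_NUM_BUFFERS 2      /* stands in for MS_NUM_BUFFERS */

struct demo_ms_stream {
        u64 buff_size;
        u64 active_vpu_addr, inactive_vpu_addr;
        void *active_ptr, *inactive_ptr;
};

/* Illustrative only: split one mapping (base_addr/base_ptr, total_size
 * bytes) into an active and an inactive half of buff_size bytes each.
 */
static void demo_ms_setup(struct demo_ms_stream *ms, u64 base_addr,
                          void *base_ptr, u64 total_size)
{
        ms->buff_size = total_size / DEMO_NUM_BUFFERS;
        ms->active_vpu_addr = base_addr;
        ms->inactive_vpu_addr = base_addr + ms->buff_size;
        ms->active_ptr = base_ptr;
        ms->inactive_ptr = base_ptr + ms->buff_size;
}
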
/drivers/net/ethernet/intel/iavf/
iavf_adminq.c
620 u16 buff_size, in iavf_asq_send_command() argument
672 if (buff_size > hw->aq.asq_buf_size) { in iavf_asq_send_command()
676 buff_size); in iavf_asq_send_command()
714 memcpy(dma_buff->va, buff, buff_size); in iavf_asq_send_command()
715 desc_on_ring->datalen = cpu_to_le16(buff_size); in iavf_asq_send_command()
729 buff, buff_size); in iavf_asq_send_command()
757 memcpy(buff, dma_buff->va, buff_size); in iavf_asq_send_command()
780 iavf_debug_aq(hw, IAVF_DEBUG_AQ_COMMAND, (void *)desc, buff, buff_size); in iavf_asq_send_command()
iavf_prototype.h
27 u16 buff_size,
/drivers/net/ethernet/ibm/
ibmveth.c
157 pool->buff_size = buff_size; in ibmveth_init_buffer_pool()
251 pool->buff_size); in ibmveth_replenish_buffer_pool()
259 pool->buff_size, in ibmveth_replenish_buffer_pool()
278 len = min(pool->buff_size, len); in ibmveth_replenish_buffer_pool()
284 pool->buff_size; in ibmveth_replenish_buffer_pool()
347 pool->buff_size, in ibmveth_replenish_buffer_pool()
427 pool->buff_size, in ibmveth_free_buffer_pool()
1727 buff_size, tbl); in ibmveth_get_desired_dma()
1998 oldbuff_size = pool->buff_size; in veth_pool_store()
2058 pool->buff_size = newbuff_size; in veth_pool_store()
[all …]
ibmvnic.c
589 int buff_size) in alloc_ltb_set() argument
603 buff_size); in alloc_ltb_set()
606 tot_size = num_buffs * buff_size; in alloc_ltb_set()
802 pool->buff_size); in replenish_rx_pool()
816 memset(dst, 0, pool->buff_size); in replenish_rx_pool()
1065 u64 buff_size; in init_rx_pools() local
1098 i, pool_size, buff_size); in init_rx_pools()
1102 rx_pool->buff_size = ALIGN(buff_size, L1_CACHE_BYTES); in init_rx_pools()
1305 u64 buff_size; in init_tx_pools() local
1347 buff_size = ALIGN(buff_size, L1_CACHE_BYTES); in init_tx_pools()
[all …]
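
The ibmvnic.c matches show the pool sizing arithmetic: the per-buffer size is padded to a cache line with ALIGN(buff_size, L1_CACHE_BYTES) and the long-term buffer set is sized as num_buffs * buff_size. A hedged sketch of that calculation (the helper name is invented):

#include <linux/cache.h>
#include <linux/kernel.h>
#include <linux/types.h>

/* Illustrative only: total bytes needed for a pool of num_buffs buffers,
 * each padded to a cache-line multiple as in init_rx_pools()/alloc_ltb_set().
 */
static u64 demo_pool_bytes(u64 buff_size, int num_buffs)
{
        u64 padded = ALIGN(buff_size, L1_CACHE_BYTES);

        return (u64)num_buffs * padded;
}
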
/drivers/net/ethernet/aquantia/atlantic/hw_atl/
hw_atl_a0.c
128 u32 buff_size = 0U; in hw_atl_a0_hw_qos_set() local
148 buff_size = HW_ATL_A0_TXBUF_MAX; in hw_atl_a0_hw_qos_set()
150 hw_atl_tpb_tx_pkt_buff_size_per_tc_set(self, buff_size, tc); in hw_atl_a0_hw_qos_set()
152 (buff_size * in hw_atl_a0_hw_qos_set()
156 (buff_size * in hw_atl_a0_hw_qos_set()
163 buff_size = HW_ATL_A0_RXBUF_MAX; in hw_atl_a0_hw_qos_set()
165 hw_atl_rpb_rx_pkt_buff_size_per_tc_set(self, buff_size, tc); in hw_atl_a0_hw_qos_set()
167 (buff_size * in hw_atl_a0_hw_qos_set()
171 (buff_size * in hw_atl_a0_hw_qos_set()
/drivers/net/ethernet/aquantia/atlantic/
aq_nic.c
615 unsigned int buff_size = 0U; in aq_nic_map_xdp() local
622 buff_size = frag_len; in aq_nic_map_xdp()
634 dx_buff->len = buff_size; in aq_nic_map_xdp()
639 frag_len -= buff_size; in aq_nic_map_xdp()
640 buff_offset += buff_size; in aq_nic_map_xdp()
759 unsigned int buff_size = 0U; in aq_nic_map_skb() local
769 buff_size = frag_len; in aq_nic_map_skb()
774 buff_size, in aq_nic_map_skb()
785 dx_buff->len = buff_size; in aq_nic_map_skb()
790 frag_len -= buff_size; in aq_nic_map_skb()
[all …]
/drivers/usb/renesas_usbhs/
pipe.c
474 u16 buff_size; in usbhsp_setup_pipebuff() local
489 buff_size = pipe_config->bufsize; in usbhsp_setup_pipebuff()
493 bufnmb_cnt = (buff_size / 64) - 1; in usbhsp_setup_pipebuff()
496 pipe_num, buff_size, bufnmb); in usbhsp_setup_pipebuff()
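
pipe.c line 493 turns the pipe buffer size into what looks like a count of 64-byte blocks minus one: bufnmb_cnt = (buff_size / 64) - 1, so a 512-byte buffer, for example, encodes as (512 / 64) - 1 = 7. A tiny illustrative helper:

#include <linux/types.h>

/* Illustrative only: encode a pipe buffer size as 64-byte blocks minus
 * one, as computed in usbhsp_setup_pipebuff().
 */
static u16 demo_bufnmb_cnt(u16 buff_size)
{
        return (buff_size / 64) - 1;    /* e.g. 512 bytes -> 7 */
}
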
/drivers/net/ethernet/qlogic/qed/
qed_iwarp.c
1584 (u16)buf->buff_size, buf, 1); in qed_iwarp_ll2_post_rx()
1906 buf->buff_size, fpdu->mpa_frag_len, in qed_iwarp_cp_pkt()
1942 buf->buff_size, fpdu->mpa_frag_len, tcp_payload_size, in qed_iwarp_cp_pkt()
2517 buffer->piggy_buf->buff_size, in qed_iwarp_ll2_rel_tx_pkt()
2598 int num_rx_bufs, int buff_size, u8 ll2_handle) in qed_iwarp_ll2_alloc_buffers() argument
2612 buff_size, in qed_iwarp_ll2_alloc_buffers()
2621 buffer->buff_size = buff_size; in qed_iwarp_ll2_alloc_buffers()
2642 u32 buff_size; in qed_iwarp_ll2_start() local
2694 buff_size = QED_IWARP_MAX_BUF_SIZE(params->max_mtu); in qed_iwarp_ll2_start()
2697 buff_size, in qed_iwarp_ll2_start()
[all …]
qed_iwarp.h
35 u32 buff_size; member
/drivers/ntb/
ntb_transport.c
209 size_t buff_size; member
805 mw->buff_size = 0; in ntb_free_mw()
847 if (mw->alloc_size > mw->buff_size) { in ntb_alloc_mw_buffer()
873 size_t xlat_size, buff_size; in ntb_set_mw() local
887 buff_size = round_up(size, xlat_align); in ntb_set_mw()
893 if (mw->buff_size) in ntb_set_mw()
898 mw->buff_size = buff_size; in ntb_set_mw()
899 mw->alloc_size = buff_size; in ntb_set_mw()
909 mw->buff_size = 0; in ntb_set_mw()
1345 mw->buff_size = 0; in ntb_transport_probe()
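
ntb_transport.c computes buff_size by rounding the requested window size up to the translation alignment (buff_size = round_up(size, xlat_align)) and clears mw->buff_size again in ntb_free_mw() and on error paths. A minimal sketch of the rounding step, with hypothetical names:

#include <linux/kernel.h>
#include <linux/types.h>

/* Illustrative only: mirror the round_up(size, xlat_align) step from
 * ntb_set_mw(); round_up() expects a power-of-two alignment.
 */
static size_t demo_mw_buff_size(size_t size, size_t xlat_align)
{
        return round_up(size, xlat_align);
}
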
/drivers/infiniband/hw/hns/
hns_roce_qp.c
677 hr_qp->buff_size = 0; in set_wqe_buf_attr()
687 hr_qp->buff_size += buf_size; in set_wqe_buf_attr()
691 hr_qp->sge.offset = hr_qp->buff_size; in set_wqe_buf_attr()
698 hr_qp->buff_size += buf_size; in set_wqe_buf_attr()
702 hr_qp->rq.offset = hr_qp->buff_size; in set_wqe_buf_attr()
709 hr_qp->buff_size += buf_size; in set_wqe_buf_attr()
712 if (hr_qp->buff_size < 1) in set_wqe_buf_attr()
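
set_wqe_buf_attr() in hns_roce_qp.c accumulates a single buffer layout: buff_size starts at zero, each region's offset (hr_qp->sge.offset, hr_qp->rq.offset) is taken from the running total, each region's size is added on, and an empty layout is rejected (buff_size < 1). A hedged sketch of that running-offset scheme, with made-up region names:

#include <linux/errno.h>
#include <linux/types.h>

/* Illustrative only: place three regions consecutively in one buffer,
 * recording each offset and accumulating the total, in the style of
 * set_wqe_buf_attr().
 */
struct demo_buf_layout {
        u32 sq_offset, sge_offset, rq_offset;
        u32 buff_size;
};

static int demo_set_buf_layout(struct demo_buf_layout *l,
                               u32 sq_size, u32 sge_size, u32 rq_size)
{
        l->buff_size = 0;

        l->sq_offset = l->buff_size;
        l->buff_size += sq_size;

        l->sge_offset = l->buff_size;
        l->buff_size += sge_size;

        l->rq_offset = l->buff_size;
        l->buff_size += rq_size;

        /* Reject an empty layout, as the driver does with buff_size < 1. */
        if (l->buff_size < 1)
                return -EINVAL;

        return 0;
}
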
/drivers/dma/
mv_xor_v2.c
129 u32 buff_size; member
421 hw_descriptor->buff_size = len; in mv_xor_v2_prep_dma_memcpy()
480 hw_descriptor->buff_size = len; in mv_xor_v2_prep_dma_xor()
/drivers/firmware/efi/libstub/
mem.c
54 m->buff_size = m->map_size = size; in efi_get_memory_map()
/drivers/gpu/drm/bridge/cadence/
cdns-mhdp8546-hdcp.c
83 u8 *buff, u16 buff_size) in cdns_mhdp_secure_mailbox_recv_data() argument
88 for (i = 0; i < buff_size; i++) { in cdns_mhdp_secure_mailbox_recv_data()
/drivers/crypto/ccree/
cc_buffer_mgr.c
117 u32 buff_size, u32 *curr_nents, in cc_render_buff_to_mlli() argument
124 new_nents = (*curr_nents + buff_size / CC_MAX_MLLI_ENTRY_SIZE + 1); in cc_render_buff_to_mlli()
132 while (buff_size > CC_MAX_MLLI_ENTRY_SIZE) { in cc_render_buff_to_mlli()
139 buff_size -= CC_MAX_MLLI_ENTRY_SIZE; in cc_render_buff_to_mlli()
145 cc_lli_set_size(mlli_entry_p, buff_size); in cc_render_buff_to_mlli()
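
cc_render_buff_to_mlli() in cc_buffer_mgr.c splits one contiguous DMA region into MLLI entries: full CC_MAX_MLLI_ENTRY_SIZE chunks are emitted while buff_size exceeds the maximum, the remainder becomes the final entry, and the entry count is estimated up front as buff_size / CC_MAX_MLLI_ENTRY_SIZE + 1. A generic sketch of that chunking loop, with hypothetical types standing in for the driver's MLLI table:

#include <linux/types.h>

#define DEMO_MAX_ENTRY_SIZE 0x10000     /* stand-in for CC_MAX_MLLI_ENTRY_SIZE */

struct demo_entry {
        dma_addr_t addr;
        u32 size;
};

/* Illustrative only: split [buff_dma, buff_dma + buff_size) into entries
 * of at most DEMO_MAX_ENTRY_SIZE bytes; returns how many were written.
 */
static u32 demo_render_buff(dma_addr_t buff_dma, u32 buff_size,
                            struct demo_entry *tbl)
{
        u32 nents = 0;

        while (buff_size > DEMO_MAX_ENTRY_SIZE) {
                tbl[nents].addr = buff_dma;
                tbl[nents].size = DEMO_MAX_ENTRY_SIZE;
                nents++;
                buff_dma += DEMO_MAX_ENTRY_SIZE;
                buff_size -= DEMO_MAX_ENTRY_SIZE;
        }

        /* Last (or only) entry carries the remainder. */
        tbl[nents].addr = buff_dma;
        tbl[nents].size = buff_size;
        nents++;

        return nents;
}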

Completed in 84 milliseconds