/linux-6.3-rc2/drivers/infiniband/ulp/isert/
  ib_isert.c
     806  struct iser_tx_desc *tx_desc)  in __isert_create_send_desc() (argument)
     812  tx_desc->num_sge = 1;  in __isert_create_send_desc()
     849  tx_desc->dma_addr = dma_addr;  in isert_init_tx_hdrs()
     850  tx_desc->tx_sg[0].addr = tx_desc->dma_addr;  in isert_init_tx_hdrs()
     855  tx_desc->tx_sg[0].addr, tx_desc->tx_sg[0].length,  in isert_init_tx_hdrs()
     856  tx_desc->tx_sg[0].lkey);  in isert_init_tx_hdrs()
     865  struct iser_tx_desc *tx_desc = &isert_cmd->tx_desc;  in isert_init_send_wr() (local)
     943  tx_desc->num_sge = 2;  in isert_put_login_tx()
    1504  if (tx_desc->dma_addr != 0) {  in isert_unmap_tx_desc()
    1508  tx_desc->dma_addr = 0;  in isert_unmap_tx_desc()
    [all …]
/linux-6.3-rc2/drivers/crypto/ccp/
  ccp-dmaengine.c
     148  desc->tx_desc.cookie, cmd);  in ccp_issue_next_cmd()
     212  tx_desc = &desc->tx_desc;  in ccp_handle_active_desc()
     214  tx_desc = NULL;  in ccp_handle_active_desc()
     227  dma_cookie_complete(tx_desc);  in ccp_handle_active_desc()
     228  dma_descriptor_unmap(tx_desc);  in ccp_handle_active_desc()
     235  if (tx_desc) {  in ccp_handle_active_desc()
     238  dma_run_dependencies(tx_desc);  in ccp_handle_active_desc()
     300  tx_desc);  in ccp_tx_submit()
     341  desc->tx_desc.flags = flags;  in ccp_alloc_dma_desc()
     503  return &desc->tx_desc;  in ccp_prep_dma_memcpy()
    [all …]
/linux-6.3-rc2/drivers/net/ethernet/mellanox/mlx4/
  en_tx.c
     242  __be32 *ptr = (__be32 *)tx_desc;  in mlx4_en_stamp_wqe()
     246  if (likely((void *)tx_desc +  in mlx4_en_stamp_wqe()
     310  if (likely((void *)tx_desc +  in mlx4_en_free_tx_desc()
     403  struct mlx4_en_tx_desc *tx_desc;  in mlx4_en_handle_err_cqe() (local)
     778  tx_desc->ctrl.qpn_vlan = qpn_vlan;  in mlx4_en_tx_write_desc()
     879  struct mlx4_en_tx_desc *tx_desc;  in mlx4_en_xmit() (local)
     957  data = &tx_desc->data;  in mlx4_en_xmit()
    1162  tx_desc->data.lkey = ring->mr_key;  in mlx4_en_init_tx_xdp_ring_descs()
    1173  struct mlx4_en_tx_desc *tx_desc;  in mlx4_en_xmit_frame() (local)
    1193  data = &tx_desc->data;  in mlx4_en_xmit_frame()
    [all …]
/linux-6.3-rc2/drivers/net/ethernet/sunplus/
  spl2sw_desc.c
      37  if (!comm->tx_desc)  in spl2sw_tx_descs_clean()
      41  comm->tx_desc[i].cmd1 = 0;  in spl2sw_tx_descs_clean()
      43  comm->tx_desc[i].cmd2 = 0;  in spl2sw_tx_descs_clean()
      44  comm->tx_desc[i].addr1 = 0;  in spl2sw_tx_descs_clean()
      45  comm->tx_desc[i].addr2 = 0;  in spl2sw_tx_descs_clean()
     103  comm->tx_desc = NULL;  in spl2sw_descs_free()
     119  memset(comm->tx_desc, '\0', sizeof(struct spl2sw_mac_desc) *  in spl2sw_tx_descs_init()
     187  comm->tx_desc = comm->desc_base;  in spl2sw_descs_alloc()
     190  comm->rx_desc[0] = &comm->tx_desc[TX_DESC_NUM + MAC_GUARD_DESC_NUM];  in spl2sw_descs_alloc()
     214  comm->tx_desc = NULL;  in spl2sw_descs_init()
/linux-6.3-rc2/drivers/infiniband/ulp/iser/
  iser_initiator.c
     144  struct iser_tx_desc *tx_desc)  in iser_create_send_desc() (argument)
     152  tx_desc->iser_header.flags = ISER_VER;  in iser_create_send_desc()
     153  tx_desc->num_sge = 1;  in iser_create_send_desc()
     352  tx_desc->type = ISCSI_TX_SCSI_COMMAND;  in iser_send_command()
     353  tx_desc->cqe.done = iser_cmd_comp;  in iser_send_command()
     414  struct iser_tx_desc *tx_desc;  in iser_send_data_out() (local)
     430  if (!tx_desc)  in iser_send_data_out()
     433  tx_desc->type = ISCSI_TX_DATAOUT;  in iser_send_data_out()
     434  tx_desc->cqe.done = iser_dataout_comp;  in iser_send_data_out()
     444  tx_dsg = &tx_desc->tx_sg[1];  in iser_send_data_out()
    [all …]

  iscsi_iser.c
     196  struct iser_tx_desc *tx_desc)  in iser_initialize_task_headers() (argument)
     211  tx_desc->inv_wr.next = NULL;  in iser_initialize_task_headers()
     212  tx_desc->reg_wr.wr.next = NULL;  in iser_initialize_task_headers()
     213  tx_desc->mapped = true;  in iser_initialize_task_headers()
     214  tx_desc->dma_addr = dma_addr;  in iser_initialize_task_headers()
     215  tx_desc->tx_sg[0].addr = tx_desc->dma_addr;  in iser_initialize_task_headers()
     216  tx_desc->tx_sg[0].length = ISER_HEADERS_LEN;  in iser_initialize_task_headers()
     217  tx_desc->tx_sg[0].lkey = device->pd->local_dma_lkey;  in iser_initialize_task_headers()
     367  struct iser_tx_desc *tx_desc = &iser_task->desc;  in iscsi_iser_cleanup_task() (local)
     375  if (likely(tx_desc->mapped)) {  in iscsi_iser_cleanup_task()
    [all …]

  iser_memory.c
     253  struct iser_tx_desc *tx_desc = &iser_task->desc;  in iser_reg_sig_mr() (local)
     257  struct ib_reg_wr *wr = &tx_desc->reg_wr;  in iser_reg_sig_mr()
     268  iser_inv_rkey(&tx_desc->inv_wr, mr, cqe, &wr->wr);  in iser_reg_sig_mr()
     281  wr->wr.next = &tx_desc->send_wr;  in iser_reg_sig_mr()
     310  struct iser_tx_desc *tx_desc = &iser_task->desc;  in iser_fast_reg_mr() (local)
     313  struct ib_reg_wr *wr = &tx_desc->reg_wr;  in iser_fast_reg_mr()
     317  iser_inv_rkey(&tx_desc->inv_wr, mr, cqe, &wr->wr);  in iser_fast_reg_mr()
     328  wr->wr.next = &tx_desc->send_wr;  in iser_fast_reg_mr()
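The iscsi_iser.c hits above (iser_initialize_task_headers()) illustrate a common verbs pattern: the task's header buffer is DMA-mapped once and described by the first ib_sge, using the PD's local_dma_lkey so the headers need no memory registration. A minimal sketch of that pattern follows; the struct, the function name and MY_HEADERS_LEN are assumptions made for the example, while ib_dma_map_single(), ib_dma_mapping_error() and struct ib_sge are the real verbs API.

/*
 * Sketch only: map a per-task header buffer and describe it with one SGE,
 * in the style of the iser_initialize_task_headers() hits above. The struct
 * and function names here are illustrative, not taken from the iser code.
 */
#include <rdma/ib_verbs.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>

#define MY_HEADERS_LEN 76	/* assumed header size, for illustration only */

struct my_tx_desc {
	u8 headers[MY_HEADERS_LEN];
	u64 dma_addr;
	struct ib_sge tx_sg[2];
	bool mapped;
};

static int my_init_tx_headers(struct ib_device *ib_dev, struct ib_pd *pd,
			      struct my_tx_desc *tx_desc)
{
	u64 dma_addr;

	dma_addr = ib_dma_map_single(ib_dev, tx_desc->headers,
				     MY_HEADERS_LEN, DMA_TO_DEVICE);
	if (ib_dma_mapping_error(ib_dev, dma_addr))
		return -ENOMEM;

	tx_desc->mapped = true;
	tx_desc->dma_addr = dma_addr;
	/* First SGE covers the headers; data SGEs would follow in tx_sg[1]. */
	tx_desc->tx_sg[0].addr = tx_desc->dma_addr;
	tx_desc->tx_sg[0].length = MY_HEADERS_LEN;
	tx_desc->tx_sg[0].lkey = pd->local_dma_lkey;
	return 0;
}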
/linux-6.3-rc2/drivers/net/ethernet/intel/ice/
  ice_txrx_lib.c
     258  struct ice_tx_desc *tx_desc;  in ice_clean_xdp_irq() (local)
     267  tx_desc = ICE_TX_DESC(xdp_ring, idx);  in ice_clean_xdp_irq()
     268  if (tx_desc->cmd_type_offset_bsz &  in ice_clean_xdp_irq()
     314  tx_desc->cmd_type_offset_bsz = 0;  in ice_clean_xdp_irq()
     335  struct ice_tx_desc *tx_desc;  in __ice_xmit_xdp_ring() (local)
     358  tx_desc = ICE_TX_DESC(xdp_ring, ntu);  in __ice_xmit_xdp_ring()
     380  tx_desc->buf_addr = cpu_to_le64(dma);  in __ice_xmit_xdp_ring()
     381  tx_desc->cmd_type_offset_bsz = ice_build_ctob(0, 0, size, 0);  in __ice_xmit_xdp_ring()
     390  tx_desc = ICE_TX_DESC(xdp_ring, ntu);  in __ice_xmit_xdp_ring()
     408  tx_desc->cmd_type_offset_bsz |=  in __ice_xmit_xdp_ring()

  ice_txrx.c
      38  struct ice_tx_desc *tx_desc;  in ice_prgm_fdir_fltr() (local)
     223  struct ice_tx_desc *tx_desc;  in ice_clean_tx_irq() (local)
     277  tx_desc++;  in ice_clean_tx_irq()
     298  tx_desc++;  in ice_clean_tx_irq()
     306  prefetch(tx_desc);  in ice_clean_tx_irq()
    1696  tx_desc++;  in ice_tx_map()
    1717  tx_desc++;  in ice_tx_map()
    2533  tx_desc->buf_addr = 0;  in ice_clean_ctrl_tx_irq()
    2538  tx_desc++;  in ice_clean_ctrl_tx_irq()
    2560  tx_desc->buf_addr = 0;  in ice_clean_ctrl_tx_irq()
    [all …]

  ice_xsk.c
     607  struct ice_tx_desc *tx_desc;  in ice_clean_xdp_irq_zc() (local)
     617  if (tx_desc->cmd_type_offset_bsz &  in ice_clean_xdp_irq_zc()
     650  tx_desc->cmd_type_offset_bsz = 0;  in ice_clean_xdp_irq_zc()
     675  struct ice_tx_desc *tx_desc;  in ice_xmit_xdp_tx_zc() (local)
     693  tx_desc = ICE_TX_DESC(xdp_ring, ntu);  in ice_xmit_xdp_tx_zc()
     694  tx_desc->buf_addr = cpu_to_le64(dma);  in ice_xmit_xdp_tx_zc()
     893  struct ice_tx_desc *tx_desc;  in ice_xmit_pkt() (local)
     900  tx_desc->buf_addr = cpu_to_le64(dma);  in ice_xmit_pkt()
     917  struct ice_tx_desc *tx_desc;  in ice_xmit_pkt_batch() (local)
     926  tx_desc = ICE_TX_DESC(xdp_ring, ntu++);  in ice_xmit_pkt_batch()
    [all …]

  ice_txrx_lib.h
     134  struct ice_tx_desc *tx_desc;  in ice_set_rs_bit() (local)
     136  tx_desc = ICE_TX_DESC(xdp_ring, rs_idx);  in ice_set_rs_bit()
     137  tx_desc->cmd_type_offset_bsz |=  in ice_set_rs_bit()
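The ice XDP hits above (__ice_xmit_xdp_ring(), ice_set_rs_bit(), ice_clean_xdp_irq()) follow the usual Intel-style Tx ring protocol: write the DMA address and a packed command/size quadword into the descriptor at next_to_use, set the RS bit on the last descriptor of a batch so the hardware writes back a done indication, and let the cleanup path poll that indication. The toy model below shows the idea with invented bit positions and struct layouts; it is not the real ice_tx_desc or ice_tx_ring definition.

/*
 * Toy model of the descriptor-fill pattern visible in the ice XDP hits
 * above. All layouts, bit positions and names are invented for illustration.
 */
#include <linux/types.h>
#include <linux/bits.h>
#include <asm/byteorder.h>

#define MY_RING_SIZE	256
#define MY_CMD_EOP	BIT_ULL(0)	/* end of packet */
#define MY_CMD_RS	BIT_ULL(1)	/* report status: HW writes back DD */
#define MY_CMD_DD	BIT_ULL(2)	/* descriptor done (written by HW) */

struct my_tx_desc {
	__le64 buf_addr;
	__le64 cmd_type_offset_bsz;	/* command bits | buffer size, packed */
};

struct my_tx_ring {
	struct my_tx_desc desc[MY_RING_SIZE];
	u16 next_to_use;
};

static void my_xmit_one(struct my_tx_ring *ring, dma_addr_t dma, u32 size)
{
	struct my_tx_desc *tx_desc = &ring->desc[ring->next_to_use];

	tx_desc->buf_addr = cpu_to_le64(dma);
	/* Size in the upper bits, EOP + RS so the HW reports completion. */
	tx_desc->cmd_type_offset_bsz =
		cpu_to_le64(((u64)size << 32) | MY_CMD_EOP | MY_CMD_RS);

	ring->next_to_use = (ring->next_to_use + 1) & (MY_RING_SIZE - 1);
}

/* Cleaning side: a descriptor is reusable once HW has set the DD bit. */
static bool my_desc_done(const struct my_tx_desc *tx_desc)
{
	return le64_to_cpu(tx_desc->cmd_type_offset_bsz) & MY_CMD_DD;
}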
/linux-6.3-rc2/drivers/net/ethernet/intel/fm10k/
  fm10k_main.c
     747  struct fm10k_tx_desc *tx_desc;  in fm10k_tso() (local)
     777  tx_desc->hdrlen = hdrlen;  in fm10k_tso()
     794  struct fm10k_tx_desc *tx_desc;  in fm10k_tx_csum() (local)
     871  tx_desc->hdrlen = 0;  in fm10k_tx_csum()
     872  tx_desc->mss = 0;  in fm10k_tx_csum()
     902  tx_desc->flags = desc_flags;  in fm10k_tx_desc_push()
     938  struct fm10k_tx_desc *tx_desc;  in fm10k_tx_map() (local)
     953  tx_desc->vlan = 0;  in fm10k_tx_map()
    1233  tx_desc++;  in fm10k_clean_tx_irq()
    1253  tx_desc++;  in fm10k_clean_tx_irq()
    [all …]
/linux-6.3-rc2/drivers/dma/ptdma/
  ptdma-dmaengine.c
      81  struct dma_async_tx_descriptor *tx_desc;  in pt_handle_active_desc() (local)
      94  tx_desc = &desc->vd.tx;  in pt_handle_active_desc()
      97  tx_desc = NULL;  in pt_handle_active_desc()
     107  dma_cookie_complete(tx_desc);  in pt_handle_active_desc()
     108  dma_descriptor_unmap(tx_desc);  in pt_handle_active_desc()
     112  tx_desc = NULL;  in pt_handle_active_desc()
     120  if (tx_desc) {  in pt_handle_active_desc()
     121  dmaengine_desc_get_callback_invoke(tx_desc, NULL);  in pt_handle_active_desc()
     122  dma_run_dependencies(tx_desc);  in pt_handle_active_desc()
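Both DMA engine drivers above (ccp_handle_active_desc() and pt_handle_active_desc()) finish a descriptor with the same four steps: complete the cookie, drop the DMA-API mappings, invoke the client callback, then run dependent descriptors. A minimal sketch of that sequence, assuming a driver-private wrapper struct; the dmaengine helpers themselves are the in-tree ones.

/*
 * Sketch of the completion sequence shown in the hits above. The wrapper
 * struct and function name are illustrative; the helpers are real.
 */
#include <linux/dmaengine.h>
#include "../dmaengine.h"	/* dma_cookie_complete(), dmaengine_desc_get_callback_invoke() */

struct my_dma_desc {
	struct dma_async_tx_descriptor tx_desc;
	bool issued_to_hw;
};

static void my_complete_desc(struct my_dma_desc *desc)
{
	struct dma_async_tx_descriptor *tx_desc = &desc->tx_desc;

	/* Advance the channel's completed cookie and unmap the buffers. */
	dma_cookie_complete(tx_desc);
	dma_descriptor_unmap(tx_desc);

	/* Notify the client and kick anything chained behind this descriptor. */
	dmaengine_desc_get_callback_invoke(tx_desc, NULL);
	dma_run_dependencies(tx_desc);
}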
/linux-6.3-rc2/drivers/net/ethernet/intel/ixgbe/
  ixgbe_xsk.c
     399  union ixgbe_adv_tx_desc *tx_desc = NULL;  in ixgbe_xmit_zc() (local)
     426  tx_desc = IXGBE_TX_DESC(xdp_ring, xdp_ring->next_to_use);  in ixgbe_xmit_zc()
     427  tx_desc->read.buffer_addr = cpu_to_le64(dma);  in ixgbe_xmit_zc()
     434  tx_desc->read.cmd_type_len = cpu_to_le32(cmd_type);  in ixgbe_xmit_zc()
     435  tx_desc->read.olinfo_status =  in ixgbe_xmit_zc()
     443  if (tx_desc) {  in ixgbe_xmit_zc()
     467  union ixgbe_adv_tx_desc *tx_desc;  in ixgbe_clean_xdp_tx_irq() (local)
     472  tx_desc = IXGBE_TX_DESC(tx_ring, ntc);  in ixgbe_clean_xdp_tx_irq()
     489  tx_desc++;  in ixgbe_clean_xdp_tx_irq()
     494  tx_desc = IXGBE_TX_DESC(tx_ring, 0);  in ixgbe_clean_xdp_tx_irq()
    [all …]
/linux-6.3-rc2/drivers/net/ethernet/wangxun/libwx/
  wx_lib.c
     525  union wx_tx_desc *tx_desc;  in wx_clean_tx_irq() (local)
     568  while (tx_desc != eop_desc) {  in wx_clean_tx_irq()
     570  tx_desc++;  in wx_clean_tx_irq()
     590  tx_desc++;  in wx_clean_tx_irq()
     599  prefetch(tx_desc);  in wx_clean_tx_irq()
     717  union wx_tx_desc *tx_desc;  in wx_tx_map() (local)
     744  tx_desc->read.cmd_type_len =  in wx_tx_map()
     748  tx_desc++;  in wx_tx_map()
     767  tx_desc++;  in wx_tx_map()
    1641  while (tx_desc != eop_desc) {  in wx_clean_tx_ring()
    [all …]
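The wx_clean_tx_irq() hits show the standard NIC Tx-cleanup walk: start at next_to_clean, advance buffer and descriptor pointers together until the frame's EOP descriptor (recorded at map time) is reached, unmapping as you go and wrapping at the end of the ring. A sketch under assumed types; these are not the libwx structures.

/*
 * Toy model of the Tx-cleanup walk shown in the hits above. Types, field
 * names and the "one unmap per buffer" simplification are illustrative.
 */
#include <linux/dma-mapping.h>
#include <linux/types.h>

struct my_tx_desc {
	__le64 qw0;
	__le64 qw1;
};

struct my_tx_buffer {
	dma_addr_t dma;
	unsigned int len;
	struct my_tx_desc *eop_desc;	/* last descriptor of this frame */
};

struct my_ring {
	struct my_tx_desc *desc;
	struct my_tx_buffer *tx_buf;
	u16 count;
	u16 next_to_clean;
};

static void my_clean_one_frame(struct device *dev, struct my_ring *ring)
{
	u16 i = ring->next_to_clean;
	struct my_tx_buffer *tx_buf = &ring->tx_buf[i];
	struct my_tx_desc *tx_desc = &ring->desc[i];
	struct my_tx_desc *eop_desc = tx_buf->eop_desc;

	if (!eop_desc)
		return;				/* nothing pending */

	/* Unmap every buffer of the frame, wrapping at the end of the ring. */
	while (tx_desc != eop_desc) {
		dma_unmap_page(dev, tx_buf->dma, tx_buf->len, DMA_TO_DEVICE);
		tx_buf++;
		tx_desc++;
		if (++i == ring->count) {
			i = 0;
			tx_buf = ring->tx_buf;
			tx_desc = ring->desc;
		}
	}
	/* Unmap the EOP buffer itself and record progress. */
	dma_unmap_page(dev, tx_buf->dma, tx_buf->len, DMA_TO_DEVICE);
	tx_buf->eop_desc = NULL;
	ring->next_to_clean = (++i == ring->count) ? 0 : i;
}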
/linux-6.3-rc2/drivers/staging/rtl8712/
  rtl8712_xmit.h
      39  #define tx_cmd tx_desc
      78  struct tx_desc {  (struct)
      91  struct tx_desc txdesc;  (argument)

  rtl8712_xmit.c
     251  struct tx_desc *ptx_desc = (struct tx_desc *)pxmitbuf->pbuf;  in r8712_construct_txaggr_cmd_desc()
     285  struct tx_desc *ptx_desc = (struct tx_desc *)pxmitbuf->pbuf;  in r8712_append_mpdu_unit()
     344  (((struct tx_desc *)pxmitbuf->pbuf)->txdw0 & 0x0000ffff);  in r8712_xmitframe_aggr_next()
     351  (((struct tx_desc *)pxmitbuf->pbuf)->txdw0 & 0x0000ffff);  in r8712_xmitframe_aggr_next()
     359  struct tx_desc *ptxdesc = pxmitbuf->pbuf;  in r8712_dump_aggr_xframe()
     404  struct tx_desc *ptxdesc = (struct tx_desc *)pmem;  in update_txdesc()
     412  struct tx_desc txdesc_mp;  in update_txdesc()
     414  memcpy(&txdesc_mp, ptxdesc, sizeof(struct tx_desc));  in update_txdesc()
     415  memset(ptxdesc, 0, sizeof(struct tx_desc));  in update_txdesc()
     536  struct tx_desc *ptxdesc_mp;  in update_txdesc()
/linux-6.3-rc2/drivers/net/ethernet/intel/i40e/
  i40e_xsk.c
     469  struct i40e_tx_desc *tx_desc;  in i40e_xmit_pkt() (local)
     475  tx_desc = I40E_TX_DESC(xdp_ring, xdp_ring->next_to_use++);  in i40e_xmit_pkt()
     476  tx_desc->buffer_addr = cpu_to_le64(dma);  in i40e_xmit_pkt()
     477  tx_desc->cmd_type_offset_bsz = build_ctob(I40E_TX_DESC_CMD_ICRC | I40E_TX_DESC_CMD_EOP,  in i40e_xmit_pkt()
     487  struct i40e_tx_desc *tx_desc;  in i40e_xmit_pkt_batch() (local)
     495  tx_desc = I40E_TX_DESC(xdp_ring, ntu++);  in i40e_xmit_pkt_batch()
     496  tx_desc->buffer_addr = cpu_to_le64(dma);  in i40e_xmit_pkt_batch()
     497  tx_desc->cmd_type_offset_bsz = build_ctob(I40E_TX_DESC_CMD_ICRC |  in i40e_xmit_pkt_batch()
     523  struct i40e_tx_desc *tx_desc;  in i40e_set_rs_bit() (local)
     525  tx_desc = I40E_TX_DESC(xdp_ring, ntu);  in i40e_set_rs_bit()
    [all …]

  i40e_txrx.c
      91  struct i40e_tx_desc *tx_desc;  in i40e_program_fdir_filter() (local)
     143  tx_desc->cmd_type_offset_bsz =  in i40e_program_fdir_filter()
     152  first->next_to_watch = tx_desc;  in i40e_program_fdir_filter()
     937  struct i40e_tx_desc *tx_desc;  in i40e_clean_tx_irq() (local)
     959  if (tx_head == tx_desc)  in i40e_clean_tx_irq()
     986  while (tx_desc != eop_desc) {  in i40e_clean_tx_irq()
     991  tx_desc++;  in i40e_clean_tx_irq()
    1011  tx_desc++;  in i40e_clean_tx_irq()
    1019  prefetch(tx_desc);  in i40e_clean_tx_irq()
    3566  tx_desc++;  in i40e_tx_map()
    [all …]
/linux-6.3-rc2/drivers/net/ethernet/seeq/
  sgiseeq.c
      96  struct sgiseeq_tx_desc *tx_desc;  (member)
     197  dma_sync_desc_dev(dev, &sp->tx_desc[i]);  in seeq_init_ring()
     230  if (sp->tx_desc[i].skb) {  in seeq_purge_ring()
     231  dev_kfree_skb(sp->tx_desc[i].skb);  in seeq_purge_ring()
     232  sp->tx_desc[i].skb = NULL;  in seeq_purge_ring()
     253  struct sgiseeq_tx_desc *t = gpriv->tx_desc;  in sgiseeq_dump_rings()
     443  td = &sp->tx_desc[i];  in kick_tx()
     448  td = &sp->tx_desc[i];  in kick_tx()
     480  td = &sp->tx_desc[j];  in sgiseeq_tx()
     613  td = &sp->tx_desc[entry];  in sgiseeq_start_xmit()
    [all …]
/linux-6.3-rc2/drivers/net/wireless/ath/ath12k/
  dp_tx.c
      76  struct ath12k_tx_desc_info *tx_desc,  in ath12k_dp_tx_release_txbuf() (argument)
     130  struct ath12k_tx_desc_info *tx_desc;  in ath12k_dp_tx() (local)
     174  if (!tx_desc)  in ath12k_dp_tx()
     248  tx_desc->skb = skb;  in ath12k_dp_tx()
     249  tx_desc->mac_id = ar->pdev_idx;  in ath12k_dp_tx()
     250  ti.desc_id = tx_desc->desc_id;  in ath12k_dp_tx()
     530  struct ath12k_tx_desc_info *tx_desc = NULL;  in ath12k_dp_tx_completion_handler() (local)
     584  if (!tx_desc) {  in ath12k_dp_tx_completion_handler()
     589  msdu = tx_desc->skb;  in ath12k_dp_tx_completion_handler()
     590  mac_id = tx_desc->mac_id;  in ath12k_dp_tx_completion_handler()
    [all …]
/linux-6.3-rc2/drivers/net/ethernet/intel/iavf/
  iavf_txrx.c
     198  struct iavf_tx_desc *tx_desc;  in iavf_clean_tx_irq() (local)
     243  while (tx_desc != eop_desc) {  in iavf_clean_tx_irq()
     245  tx_ring, tx_desc, tx_buf);  in iavf_clean_tx_irq()
     248  tx_desc++;  in iavf_clean_tx_irq()
     268  tx_desc++;  in iavf_clean_tx_irq()
     276  prefetch(tx_desc);  in iavf_clean_tx_irq()
    2287  struct iavf_tx_desc *tx_desc;  in iavf_tx_map() (local)
    2324  tx_desc++;  in iavf_tx_map()
    2345  tx_desc++;  in iavf_tx_map()
    2374  tx_desc->cmd_type_offset_bsz =  in iavf_tx_map()
    [all …]
/linux-6.3-rc2/drivers/spi/
  spi-pxa2xx-dma.c
     136  struct dma_async_tx_descriptor *tx_desc, *rx_desc;  in pxa2xx_spi_dma_prepare() (local)
     139  tx_desc = pxa2xx_spi_dma_prepare_one(drv_data, DMA_MEM_TO_DEV, xfer);  in pxa2xx_spi_dma_prepare()
     140  if (!tx_desc) {  in pxa2xx_spi_dma_prepare()
     158  dmaengine_submit(tx_desc);  in pxa2xx_spi_dma_prepare()
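spi-pxa2xx-dma.c is a dmaengine client: it prepares a slave descriptor and then hands it to the framework with dmaengine_submit() followed by dma_async_issue_pending(). A hedged sketch of that client-side sequence; the function names, callback and completion plumbing are assumptions for the example, while dmaengine_prep_slave_sg(), dmaengine_submit(), dma_submit_error() and dma_async_issue_pending() are the real API.

/*
 * Sketch only: prepare, submit and kick a mem-to-device slave transfer.
 */
#include <linux/dmaengine.h>
#include <linux/scatterlist.h>
#include <linux/completion.h>
#include <linux/errno.h>

static void my_dma_done(void *arg)
{
	struct completion *done = arg;

	complete(done);
}

static int my_start_tx_dma(struct dma_chan *chan, struct scatterlist *sgl,
			   unsigned int nents, struct completion *done)
{
	struct dma_async_tx_descriptor *tx_desc;
	dma_cookie_t cookie;

	tx_desc = dmaengine_prep_slave_sg(chan, sgl, nents, DMA_MEM_TO_DEV,
					  DMA_PREP_INTERRUPT);
	if (!tx_desc)
		return -EBUSY;

	tx_desc->callback = my_dma_done;
	tx_desc->callback_param = done;

	cookie = dmaengine_submit(tx_desc);
	if (dma_submit_error(cookie))
		return -EIO;

	/* Nothing runs until the pending queue is flushed to the hardware. */
	dma_async_issue_pending(chan);
	return 0;
}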
/linux-6.3-rc2/drivers/net/ethernet/oki-semi/pch_gbe/
  pch_gbe_main.c
    1067  struct pch_gbe_tx_desc *tx_desc;  in pch_gbe_tx_queue() (local)
    1155  tx_desc->length = (tmp_skb->len);  in pch_gbe_tx_queue()
    1426  struct pch_gbe_tx_desc *tx_desc;  in pch_gbe_alloc_tx_buffers() (local)
    1454  struct pch_gbe_tx_desc *tx_desc;  in pch_gbe_clean_tx() (local)
    1468  tx_desc->gbec_status, tx_desc->dma_status);  in pch_gbe_clean_tx()
    1494  tx_desc->gbec_status);  in pch_gbe_clean_tx()
    1501  tx_desc->gbec_status);  in pch_gbe_clean_tx()
    1519  } else if ((tx_desc->gbec_status &  in pch_gbe_clean_tx()
    1543  tx_desc->gbec_status = DSC_INIT16;  in pch_gbe_clean_tx()
    1704  struct pch_gbe_tx_desc *tx_desc;  in pch_gbe_setup_tx_resources() (local)
    [all …]
/linux-6.3-rc2/drivers/net/ethernet/hisilicon/
  hip04_eth.c
     173  struct tx_desc {  (struct)
     226  struct tx_desc *tx_desc;  (member)
     451  struct tx_desc *desc;  in hip04_tx_reclaim()
     461  desc = &priv->tx_desc[tx_tail];  in hip04_tx_reclaim()
     511  struct tx_desc *desc = &priv->tx_desc[tx_head];  in hip04_mac_start_xmit()
     542  offsetof(struct tx_desc, send_addr));  in hip04_mac_start_xmit()
     859  priv->tx_desc = dma_alloc_coherent(d,  in hip04_alloc_ring()
     860  TX_DESC_NUM * sizeof(struct tx_desc),  in hip04_alloc_ring()
     862  if (!priv->tx_desc)  in hip04_alloc_ring()
     889  dma_free_coherent(d, TX_DESC_NUM * sizeof(struct tx_desc),  in hip04_free_ring()
    [all …]
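The hip04_alloc_ring()/hip04_free_ring() hits above allocate the whole tx_desc array with dma_alloc_coherent(), so the CPU and the MAC share one ring without per-descriptor mapping. A small sketch of that allocation pattern; the struct layout, ring size and names are illustrative, only dma_alloc_coherent()/dma_free_coherent() are the real API.

/*
 * Sketch only: one coherent allocation holds the entire Tx descriptor ring.
 */
#include <linux/dma-mapping.h>
#include <linux/gfp.h>
#include <linux/errno.h>
#include <linux/types.h>

#define MY_TX_DESC_NUM 64	/* assumed ring size, for illustration */

struct my_tx_desc {
	__le32 send_addr;
	__le32 send_size;
	__le32 cfg;
	__le32 wb_addr;
};

struct my_priv {
	struct my_tx_desc *tx_desc;
	dma_addr_t tx_desc_dma;
};

static int my_alloc_ring(struct device *d, struct my_priv *priv)
{
	priv->tx_desc = dma_alloc_coherent(d,
					   MY_TX_DESC_NUM * sizeof(struct my_tx_desc),
					   &priv->tx_desc_dma, GFP_KERNEL);
	if (!priv->tx_desc)
		return -ENOMEM;
	return 0;
}

static void my_free_ring(struct device *d, struct my_priv *priv)
{
	dma_free_coherent(d, MY_TX_DESC_NUM * sizeof(struct my_tx_desc),
			  priv->tx_desc, priv->tx_desc_dma);
	priv->tx_desc = NULL;
}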