| /drivers/infiniband/sw/rdmavt/ |
| trace_mr.h |
|     144  TP_PROTO(struct ib_mr *ibmr, int sg_nents, unsigned int *sg_offset),
|     145  TP_ARGS(ibmr, sg_nents, sg_offset),
|     153  __field(uint, sg_offset)
|     162  __entry->sg_offset = sg_offset ? *sg_offset : 0;
|     172  __entry->sg_offset
|
| mr.h |
|      35  int sg_nents, unsigned int *sg_offset);
|
| mr.c |
|     594  int sg_nents, unsigned int *sg_offset)  in rvt_map_mr_sg() argument
|     601  ret = ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, rvt_set_page);  in rvt_map_mr_sg()
|     606  trace_rvt_map_mr_sg(ibmr, sg_nents, sg_offset);  in rvt_map_mr_sg()
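
The rvt_map_mr_sg() hits above show the pattern shared by most of the software and simpler hardware drivers in this listing (pvrdma, mlx4, rxe and cxgb4 below do the same): the driver supplies a per-page callback and lets ib_sg_to_pages() walk the scatterlist, honouring the caller's initial *sg_offset. A minimal sketch of that shape, assuming a made-up example_mr structure for the page bookkeeping; everything prefixed example_ is illustrative, not code from any of these drivers:

    #include <rdma/ib_verbs.h>

    /* Illustrative per-MR bookkeeping; real drivers embed this in their own MR type. */
    struct example_mr {
            struct ib_mr ibmr;
            u64 *pages;             /* page address array sized at MR allocation */
            u32 npages;
            u32 max_pages;
    };

    static int example_set_page(struct ib_mr *ibmr, u64 addr)
    {
            struct example_mr *mr = container_of(ibmr, struct example_mr, ibmr);

            if (mr->npages == mr->max_pages)
                    return -ENOMEM;         /* MR cannot describe another page */
            mr->pages[mr->npages++] = addr;
            return 0;
    }

    static int example_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,
                                 int sg_nents, unsigned int *sg_offset)
    {
            struct example_mr *mr = container_of(ibmr, struct example_mr, ibmr);

            mr->npages = 0;
            /* Starts *sg_offset bytes into the first entry and calls
             * example_set_page() once per MR page-sized chunk. */
            return ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, example_set_page);
    }
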
|
| /drivers/infiniband/hw/mlx5/ |
| mr.c |
|    2679  unsigned int sg_offset = 0;  in mlx5_ib_map_pa_mr_sg_pi() local
|    2687  sg_offset = *data_sg_offset;  in mlx5_ib_map_pa_mr_sg_pi()
|    2694  sg_offset = *meta_sg_offset;  in mlx5_ib_map_pa_mr_sg_pi()
|    2696  sg_offset = 0;  in mlx5_ib_map_pa_mr_sg_pi()
|    2732  sg_offset = 0;  in mlx5_ib_sg_to_klms()
|    2736  *sg_offset_p = sg_offset;  in mlx5_ib_sg_to_klms()
|    2748  sg_offset);  in mlx5_ib_sg_to_klms()
|    2750  sg_offset);  in mlx5_ib_sg_to_klms()
|    2754  sg_offset = 0;  in mlx5_ib_sg_to_klms()
|    2757  *meta_sg_offset_p = sg_offset;  in mlx5_ib_sg_to_klms()
|    [all …]
|
| /drivers/infiniband/core/ |
| rw.c |
|     293  struct scatterlist *sg, u32 sg_cnt, u32 sg_offset,  in rdma_rw_ctx_init() argument
|     314  if (sg_offset < len)  in rdma_rw_ctx_init()
|     318  sg_offset -= len;  in rdma_rw_ctx_init()
|     328  sg_offset, remote_addr, rkey, dir);  in rdma_rw_ctx_init()
|     330  ret = rdma_rw_init_map_wrs(ctx, qp, sg, sg_cnt, sg_offset,  in rdma_rw_ctx_init()
|     333  ret = rdma_rw_init_single_wr(ctx, qp, sg, sg_offset,  in rdma_rw_ctx_init()
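
Lines 314 and 318 of rw.c above are the usual "consume the offset first" idiom: scatterlist entries that fall entirely inside the initial sg_offset are skipped before any work requests are built, and only the residual offset is applied to the first entry that carries payload. A hedged restatement of just that step; the example_ helper is illustrative, not the rw.c code:

    #include <linux/scatterlist.h>

    /* Advance past SG entries wholly covered by *sg_offset; on return,
     * *sg_offset is the residual offset into the returned entry. */
    static struct scatterlist *example_skip_sg_offset(struct scatterlist *sg,
                                                      u32 *sg_cnt, u32 *sg_offset)
    {
            while (*sg_cnt && *sg_offset >= sg_dma_len(sg)) {
                    *sg_offset -= sg_dma_len(sg);
                    sg = sg_next(sg);
                    (*sg_cnt)--;
            }
            return sg;
    }
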
|
| verbs.c |
|    2715  unsigned int *sg_offset, unsigned int page_size)  in ib_map_mr_sg() argument
|    2722  return mr->device->ops.map_mr_sg(mr, sg, sg_nents, sg_offset);  in ib_map_mr_sg()
|    2753  unsigned int sg_offset = sg_offset_p ? *sg_offset_p : 0;  in ib_sg_to_pages() local
|    2758  if (unlikely(sg_nents <= 0 || sg_offset > sg_dma_len(&sgl[0])))  in ib_sg_to_pages()
|    2761  mr->iova = sg_dma_address(&sgl[0]) + sg_offset;  in ib_sg_to_pages()
|    2765  u64 dma_addr = sg_dma_address(sg) + sg_offset;  in ib_sg_to_pages()
|    2767  unsigned int dma_len = sg_dma_len(sg) - sg_offset;  in ib_sg_to_pages()
|    2792  sg_offset = prev_addr - sg_dma_address(sg);  in ib_sg_to_pages()
|    2795  *sg_offset_p = sg_offset;  in ib_sg_to_pages()
|    2796  return i || sg_offset ? i : ret;  in ib_sg_to_pages()
|    [all …]
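
ib_map_mr_sg() at line 2715 is the entry point ULPs call, and ib_sg_to_pages() from line 2753 on is the generic walker behind most of the map_mr_sg hooks elsewhere in this listing. A small caller-side sketch, assuming an already allocated fast-registration MR and a DMA-mapped scatterlist; the example_ wrapper is invented:

    #include <rdma/ib_verbs.h>

    /* On input *sg_offset is the byte offset into the first SG entry; drivers
     * that use ib_sg_to_pages() update it to the first unprocessed byte of the
     * last entry they consumed. */
    static int example_register_sg(struct ib_mr *mr, struct scatterlist *sg,
                                   int sg_nents, unsigned int *sg_offset)
    {
            int n;

            n = ib_map_mr_sg(mr, sg, sg_nents, sg_offset, PAGE_SIZE);
            if (n < 0)
                    return n;
            if (n < sg_nents)
                    return -EINVAL;         /* MR could not cover every entry */
            return 0;
    }
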
|
| /drivers/usb/storage/ |
| jumpshot.c |
|     163  unsigned int sg_offset = 0;  in jumpshot_read_data() local
|     215  &sg, &sg_offset, TO_XFER_BUF);  in jumpshot_read_data()
|     240  unsigned int sg_offset = 0;  in jumpshot_write_data() local
|     271  &sg, &sg_offset, FROM_XFER_BUF);  in jumpshot_write_data()
|
| datafab.c |
|     145  unsigned int sg_offset = 0;  in datafab_read_data() local
|     203  &sg, &sg_offset, TO_XFER_BUF);  in datafab_read_data()
|     229  unsigned int sg_offset = 0;  in datafab_write_data() local
|     266  &sg, &sg_offset, FROM_XFER_BUF);  in datafab_write_data()
|
| shuttle_usbat.c |
|    1126  unsigned int sg_offset = 0;  in usbat_flash_read_data() local
|    1181  &sg, &sg_offset, TO_XFER_BUF);  in usbat_flash_read_data()
|    1217  unsigned int sg_offset = 0;  in usbat_flash_write_data() local
|    1257  &sg, &sg_offset, FROM_XFER_BUF);  in usbat_flash_write_data()
|    1297  unsigned int sg_offset = 0;  in usbat_hp8200e_handle_read10() local
|    1386  &sg, &sg_offset, TO_XFER_BUF);  in usbat_hp8200e_handle_read10()
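
The three usb-storage drivers above (jumpshot.c, datafab.c and shuttle_usbat.c) use sg/sg_offset as a resumable cursor into the SCSI command's transfer buffer: each chunk read from or written to the card is pushed through usb_stor_access_xfer_buf(), which advances the cursor for the next iteration. A rough sketch of one step, assuming buffer, len and srb come from the surrounding transfer loop:

    #include <scsi/scsi_cmnd.h>
    #include "protocol.h"           /* usb_stor_access_xfer_buf(), TO_XFER_BUF */

    static void example_push_chunk(unsigned char *buffer, unsigned int len,
                                   struct scsi_cmnd *srb,
                                   struct scatterlist **sg,
                                   unsigned int *sg_offset)
    {
            /* Copies len bytes from buffer into srb's scatterlist and updates
             * *sg / *sg_offset so the next chunk continues where this ended. */
            usb_stor_access_xfer_buf(buffer, len, srb, sg, sg_offset,
                                     TO_XFER_BUF);
    }
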
|
| /drivers/infiniband/hw/vmw_pvrdma/ |
| pvrdma_mr.c |
|     319  unsigned int *sg_offset)  in pvrdma_map_mr_sg() argument
|     327  ret = ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, pvrdma_set_page);  in pvrdma_map_mr_sg()
|
| pvrdma_verbs.h |
|     375  int sg_nents, unsigned int *sg_offset);
|
| /drivers/target/iscsi/cxgbit/ |
| cxgbit_ddp.c |
|     178  unsigned int sg_offset = sgl->offset;  in cxgbit_ddp_reserve() local
|     205  sgl->offset = sg_offset;  in cxgbit_ddp_reserve()
|
| /drivers/infiniband/hw/ocrdma/ |
| ocrdma_verbs.h |
|     106  unsigned int *sg_offset);
|
| /drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/ |
| tx.c |
|     245  if (!next && cmd_meta->sg_offset) {  in iwl_pcie_free_tso_pages()
|     249  cmd_meta->sg_offset);  in iwl_pcie_free_tso_pages()
|     309  if (meta->sg_offset)  in iwl_txq_gen1_tfd_unmap()
|    1830  unsigned int sg_offset = 0;  in iwl_pcie_get_sgt_tb_phys() local
|    1838  if (offset >= sg_offset &&  in iwl_pcie_get_sgt_tb_phys()
|    1839  offset + len <= sg_offset + sg_dma_len(sg))  in iwl_pcie_get_sgt_tb_phys()
|    1840  return sg_dma_address(sg) + offset - sg_offset;  in iwl_pcie_get_sgt_tb_phys()
|    1842  sg_offset += sg_dma_len(sg);  in iwl_pcie_get_sgt_tb_phys()
|    1902  cmd_meta->sg_offset = (unsigned long) sgt & ~PAGE_MASK;  in iwl_pcie_prep_tso()
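
Two distinct uses of the name show up in tx.c: iwl_pcie_prep_tso() at line 1902 stores only the offset of the sg_table within its TSO page (so the page can be recovered and freed later), while iwl_pcie_get_sgt_tb_phys() walks the mapped scatterlist with a running sg_offset to translate a linear byte offset into a DMA address. A hedged restatement of the second, with an invented example_ name:

    #include <linux/scatterlist.h>
    #include <linux/dma-mapping.h>

    /* Return the DMA address of [offset, offset + len) inside the mapped
     * table, provided a single SG entry covers the whole range. */
    static dma_addr_t example_sgt_offset_to_dma(struct sg_table *sgt,
                                                unsigned int offset,
                                                unsigned int len)
    {
            struct scatterlist *sg;
            unsigned int sg_offset = 0;
            int i;

            for_each_sgtable_dma_sg(sgt, sg, i) {
                    if (offset >= sg_offset &&
                        offset + len <= sg_offset + sg_dma_len(sg))
                            return sg_dma_address(sg) + offset - sg_offset;
                    sg_offset += sg_dma_len(sg);
            }
            return DMA_MAPPING_ERROR;       /* range not covered by one entry */
    }
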
|
| /drivers/infiniband/hw/qedr/ |
| verbs.h |
|      86  int sg_nents, unsigned int *sg_offset);
|
| /drivers/infiniband/hw/mlx4/ |
| mr.c |
|     443  unsigned int *sg_offset)  in mlx4_ib_map_mr_sg() argument
|     453  rc = ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, mlx4_set_page);  in mlx4_ib_map_mr_sg()
|
| /drivers/crypto/ccp/ |
| ccp-ops.c |
|     197  struct scatterlist *sg, unsigned int sg_offset,  in ccp_set_dm_area() argument
|     205  scatterwalk_map_and_copy(wa->address + wa_offset, sg, sg_offset, len,  in ccp_set_dm_area()
|     211  struct scatterlist *sg, unsigned int sg_offset,  in ccp_get_dm_area() argument
|     216  scatterwalk_map_and_copy(wa->address + wa_offset, sg, sg_offset, len,  in ccp_get_dm_area()
|     223  unsigned int sg_offset,  in ccp_reverse_set_dm_area() argument
|     229  rc = ccp_set_dm_area(wa, wa_offset, sg, sg_offset, len);  in ccp_reverse_set_dm_area()
|     248  unsigned int sg_offset,  in ccp_reverse_get_dm_area() argument
|     263  ccp_get_dm_area(wa, wa_offset, sg, sg_offset, len);  in ccp_reverse_get_dm_area()
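
In ccp-ops.c the sg_offset is simply the starting byte handed to scatterwalk_map_and_copy(), which does the page walking itself; the final argument selects the direction. A minimal illustration of the two directions used by ccp_set_dm_area() and ccp_get_dm_area(); the example_ wrappers are invented:

    #include <crypto/scatterwalk.h>

    /* Copy len bytes out of the scatterlist, starting sg_offset bytes in. */
    static void example_sg_to_buf(void *buf, struct scatterlist *sg,
                                  unsigned int sg_offset, unsigned int len)
    {
            scatterwalk_map_and_copy(buf, sg, sg_offset, len, 0);
    }

    /* Copy len bytes from buf into the scatterlist at sg_offset. */
    static void example_buf_to_sg(void *buf, struct scatterlist *sg,
                                  unsigned int sg_offset, unsigned int len)
    {
            scatterwalk_map_and_copy(buf, sg, sg_offset, len, 1);
    }
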
|
| /drivers/scsi/ |
| libiscsi_tcp.c |
|      94  segment->sg_offset = offset;  in iscsi_tcp_segment_init_sg()
|     143  segment->data = segment->sg_mapped + sg->offset + segment->sg_offset;  in iscsi_tcp_segment_map()
|     210  segment->sg_offset +  in iscsi_tcp_segment_done()
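
In libiscsi_tcp.c the segment's sg_offset is relative to the start of the current SG entry's data, which itself begins sg->offset bytes into its page, so both offsets are applied once the page is mapped (line 143 above). A hedged sketch of that address calculation; the mapping helper chosen here (kmap_local_page()) and the example_ name are the sketch's choices, not necessarily the driver's:

    #include <linux/scatterlist.h>
    #include <linux/highmem.h>

    /* Map the entry's page and return a pointer sg_offset bytes into its
     * data; the caller must kunmap_local() the returned pointer. */
    static void *example_segment_data(struct scatterlist *sg,
                                      unsigned int sg_offset)
    {
            void *mapped = kmap_local_page(sg_page(sg));

            return mapped + sg->offset + sg_offset;
    }
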
|
| /drivers/infiniband/sw/rxe/ |
| rxe_mr.c |
|     232  int sg_nents, unsigned int *sg_offset)  in rxe_map_mr_sg() argument
|     242  return ib_sg_to_pages(ibmr, sgl, sg_nents, sg_offset, rxe_set_page);  in rxe_map_mr_sg()
|
| rxe_loc.h |
|      72  int sg_nents, unsigned int *sg_offset);
|
| /drivers/infiniband/hw/bnxt_re/ |
| ib_verbs.h |
|     252  unsigned int *sg_offset);
|
| /drivers/nvme/target/ |
| tcp.c |
|     356  u32 length, offset, sg_offset;  in nvmet_tcp_build_pdu_iovec() local
|     363  sg_offset = offset % PAGE_SIZE;  in nvmet_tcp_build_pdu_iovec()
|     367  u32 iov_len = min_t(u32, length, sg->length - sg_offset);  in nvmet_tcp_build_pdu_iovec()
|     370  sg->offset + sg_offset);  in nvmet_tcp_build_pdu_iovec()
|     375  sg_offset = 0;  in nvmet_tcp_build_pdu_iovec()
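
nvmet_tcp_build_pdu_iovec() above relies on page-sized SG entries, so a byte offset into the payload splits into an entry index (offset / PAGE_SIZE) and an in-entry offset (offset % PAGE_SIZE), and only the first iovec starts mid-entry. A hedged sketch of just that arithmetic, counting iovecs instead of filling them; the example_ name and the flat-array assumption are illustrative:

    #include <linux/scatterlist.h>
    #include <linux/minmax.h>

    static unsigned int example_count_iovecs(struct scatterlist *sgl,
                                             u32 offset, u32 length)
    {
            /* Page-sized entries in a flat array assumed. */
            struct scatterlist *sg = &sgl[offset / PAGE_SIZE];
            u32 sg_offset = offset % PAGE_SIZE;
            unsigned int nr = 0;

            while (length) {
                    u32 iov_len = min_t(u32, length, sg->length - sg_offset);

                    length -= iov_len;
                    sg = sg_next(sg);
                    sg_offset = 0;          /* only the first entry starts mid-way */
                    nr++;
            }
            return nr;
    }
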
|
| /drivers/infiniband/hw/cxgb4/ |
| mem.c |
|     695  unsigned int *sg_offset)  in c4iw_map_mr_sg() argument
|     701  return ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, c4iw_set_page);  in c4iw_map_mr_sg()
|
| /drivers/infiniband/hw/erdma/ |
| erdma_verbs.h |
|     473  unsigned int *sg_offset);
|
| /drivers/infiniband/hw/hns/ |
| hns_roce_mr.c |
|     448  unsigned int sg_offset = sg_offset_p ? *sg_offset_p : 0;  in hns_roce_map_mr_sg() local
|     455  if (!IS_ALIGNED(sg_offset, HNS_ROCE_FRMR_ALIGN_SIZE) ||  in hns_roce_map_mr_sg()
|