/drivers/infiniband/hw/mlx5/

mem.c
    43  struct ib_block_iter biter;  in mlx5_ib_populate_pas() local
    45  rdma_umem_for_each_dma_block (umem, &biter, page_size) {  in mlx5_ib_populate_pas()
    46  *pas = cpu_to_be64(rdma_block_iter_dma_address(&biter) |  in mlx5_ib_populate_pas()

umr.c
   695  struct ib_block_iter biter;  in _mlx5r_umr_update_mr_pas() local
   735  rdma_umem_for_each_dma_block(mr->umem, &biter, BIT(mr->page_shift)) {  in _mlx5r_umr_update_mr_pas()
   763  cur_ksm->va = cpu_to_be64(rdma_block_iter_dma_address(&biter));  in _mlx5r_umr_update_mr_pas()
   774  cpu_to_be64(rdma_block_iter_dma_address(&biter) |  in _mlx5r_umr_update_mr_pas()
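Both mlx5 hits follow the common umem-walk pattern: declare a struct ib_block_iter on the stack, iterate the registered memory in device-page-sized blocks with rdma_umem_for_each_dma_block(), and record each block's DMA address via rdma_block_iter_dma_address(). A minimal sketch of that pattern is below; the helper name example_populate_pas and the access_flags parameter are assumptions for illustration, not the actual mlx5 code.

#include <rdma/ib_umem.h>
#include <rdma/ib_verbs.h>

/*
 * Hypothetical helper in the spirit of mlx5_ib_populate_pas(): emit one
 * big-endian PAS entry per device-sized block of the umem.  The
 * access_flags ORed into the low bits stand in for whatever per-entry
 * bits a device encodes there; page_size must be a power of two.
 */
static void example_populate_pas(struct ib_umem *umem, unsigned long page_size,
				 __be64 *pas, u64 access_flags)
{
	struct ib_block_iter biter;

	rdma_umem_for_each_dma_block(umem, &biter, page_size) {
		/* One aligned block of page_size bytes per iteration. */
		*pas++ = cpu_to_be64(rdma_block_iter_dma_address(&biter) |
				     access_flags);
	}
}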
|
/drivers/dma/

fsl-edma-trace.h
    76  __field(u16, biter)
    90  __entry->biter = fsl_edma_get_tcd_to_cpu(chan, tcd, biter);
   114  __entry->biter)

fsl-edma-common.c
   355  len += nbytes * fsl_edma_get_tcd_to_cpu(fsl_chan, edesc->tcd[i].vtcd, biter);  in fsl_edma_desc_residue()
   378  size = nbytes * fsl_edma_get_tcd_to_cpu(fsl_chan, edesc->tcd[i].vtcd, biter);  in fsl_edma_desc_residue()
   448  edma_cp_tcd_to_reg(fsl_chan, tcd, biter);  in fsl_edma_set_tcd_regs()
   479  u16 biter, u16 doff, dma_addr_t dlast_sga, bool major_int,  in fsl_edma_fill_tcd() argument
   536  fsl_edma_set_tcd_to_le(fsl_chan, tcd, EDMA_TCD_BITER_BITER(biter), biter);  in fsl_edma_fill_tcd()

mpc512x_dma.c
   165  u32 biter:9; /* Beginning "major" iteration count */  member
   663  tcd->biter = 1;  in mpc_dma_prep_memcpy()
   770  tcd->biter = 1;  in mpc_dma_prep_slave_sg()
   784  tcd->biter = iter & 0x1ff;  in mpc_dma_prep_slave_sg()
   786  tcd->citer = tcd->biter;  in mpc_dma_prep_slave_sg()

fsl-edma-common.h
   102  __le16 biter;  member
   116  __le16 biter;  member
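In these DMA-engine drivers, biter is not an iterator but the transfer control descriptor's BITER field, the "beginning major iteration count" noted in the mpc512x_dma.c comment: citer starts out equal to it (mpc512x_dma.c:786) and the total length described by a TCD is nbytes * biter (fsl-edma-common.c:355/378). The sketch below is a simplified, hypothetical model of that relationship only, not the actual driver structures or register layout.

#include <linux/types.h>

/*
 * Simplified model of the eDMA TCD fields involved -- NOT the real
 * fsl_edma_hw_tcd or mpc_dma_tcd layout, just the biter/citer/nbytes
 * relationship the hits above rely on.
 */
struct example_tcd {
	u32 nbytes;	/* bytes moved per minor loop                      */
	u16 citer;	/* current major iteration count (counts down)     */
	u16 biter;	/* beginning major iteration count (as programmed) */
};

/* Program a transfer of total_len bytes split into minor loops of nbytes. */
static void example_fill_tcd(struct example_tcd *tcd, u32 total_len, u32 nbytes)
{
	tcd->nbytes = nbytes;
	tcd->biter = total_len / nbytes;	/* major loop count          */
	tcd->citer = tcd->biter;		/* mirrors mpc512x_dma.c:786 */
}

/* Total bytes described by one TCD, as summed in fsl_edma_desc_residue(). */
static u32 example_tcd_len(const struct example_tcd *tcd)
{
	return tcd->nbytes * tcd->biter;
}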
|
/drivers/infiniband/core/

verbs.c
  3104  biter->__sg = sglist;  in __rdma_block_iter_start()
  3105  biter->__sg_nents = nents;  in __rdma_block_iter_start()
  3117  if (!biter->__sg_nents || !biter->__sg)  in __rdma_block_iter_next()
  3120  biter->__dma_addr = sg_dma_address(biter->__sg) + biter->__sg_advance;  in __rdma_block_iter_next()
  3121  block_offset = biter->__dma_addr & (BIT_ULL(biter->__pg_bit) - 1);  in __rdma_block_iter_next()
  3124  while (biter->__sg_nents && biter->__sg &&  in __rdma_block_iter_next()
  3125  sg_dma_len(biter->__sg) - biter->__sg_advance <= delta) {  in __rdma_block_iter_next()
  3126  delta -= sg_dma_len(biter->__sg) - biter->__sg_advance;  in __rdma_block_iter_next()
  3127  biter->__sg_advance = 0;  in __rdma_block_iter_next()
  3128  biter->__sg = sg_next(biter->__sg);  in __rdma_block_iter_next()
  [all …]
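These verbs.c hits are the core iterator itself: __rdma_block_iter_start() latches the scatterlist, and __rdma_block_iter_next() reports one block of 2^__pg_bit bytes per call, advancing __sg and __sg_advance across scatterlist entries. A minimal sketch of driving it directly through the rdma_for_each_block() macro (the raw form of rdma_umem_for_each_dma_block(), used by the efa driver further down) follows; collect_block is a made-up callback, and the block size is assumed to be a power of two as the iterator requires.

#include <linux/scatterlist.h>
#include <rdma/ib_verbs.h>

/*
 * Sketch: walk a DMA-mapped scatterlist in fixed-size blocks using the
 * generic iterator implemented in verbs.c.  collect_block() is a
 * hypothetical callback for illustration only.
 */
static void example_walk_blocks(struct scatterlist *sgl, unsigned int nents,
				unsigned long block_size,
				void (*collect_block)(dma_addr_t addr, void *ctx),
				void *ctx)
{
	struct ib_block_iter biter;

	rdma_for_each_block(sgl, &biter, nents, block_size) {
		/* DMA address of the next block_size-aligned chunk. */
		collect_block(rdma_block_iter_dma_address(&biter), ctx);
	}
}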
|
/drivers/infiniband/hw/hns/

hns_roce_alloc.c
   159  struct ib_block_iter biter;  in hns_roce_get_umem_bufs() local
   163  rdma_umem_for_each_dma_block(umem, &biter, 1 << page_shift) {  in hns_roce_get_umem_bufs()
   164  bufs[total++] = rdma_block_iter_dma_address(&biter);  in hns_roce_get_umem_bufs()
|
/drivers/infiniband/hw/vmw_pvrdma/

pvrdma_misc.c
   185  struct ib_block_iter biter;  in pvrdma_page_dir_insert_umem() local
   192  rdma_umem_for_each_dma_block (umem, &biter, PAGE_SIZE) {  in pvrdma_page_dir_insert_umem()
   194  pdir, i, rdma_block_iter_dma_address(&biter));  in pvrdma_page_dir_insert_umem()
|
/drivers/infiniband/hw/mlx4/

mr.c
    93  struct ib_block_iter biter;  in mlx4_ib_umem_write_mtt() local
    97  rdma_umem_for_each_dma_block(umem, &biter, BIT(mtt->page_shift)) {  in mlx4_ib_umem_write_mtt()
    98  addr = rdma_block_iter_dma_address(&biter);  in mlx4_ib_umem_write_mtt()
|
/drivers/infiniband/hw/cxgb4/

mem.c
   498  struct ib_block_iter biter;  in c4iw_reg_user_mr() local
   552  rdma_umem_for_each_dma_block(mhp->umem, &biter, 1 << shift) {  in c4iw_reg_user_mr()
   553  pages[i++] = cpu_to_be64(rdma_block_iter_dma_address(&biter));  in c4iw_reg_user_mr()
|
/drivers/infiniband/hw/bnxt_re/

qplib_res.c
    94  struct ib_block_iter biter;  in bnxt_qplib_fill_user_dma_pages() local
    97  rdma_umem_for_each_dma_block(sginfo->umem, &biter, sginfo->pgsize) {  in bnxt_qplib_fill_user_dma_pages()
    98  pbl->pg_map_arr[i] = rdma_block_iter_dma_address(&biter);  in bnxt_qplib_fill_user_dma_pages()
|
/drivers/infiniband/hw/efa/

efa_verbs.c
  1326  struct ib_block_iter biter;  in umem_to_page_list() local
  1332  rdma_umem_for_each_dma_block(umem, &biter, BIT(hp_shift))  in umem_to_page_list()
  1333  page_list[hp_idx++] = rdma_block_iter_dma_address(&biter);  in umem_to_page_list()
  1375  struct ib_block_iter biter;  in pbl_chunk_list_create() local
  1409  rdma_for_each_block(pages_sgl, &biter, sg_dma_cnt,  in pbl_chunk_list_create()
  1412  rdma_block_iter_dma_address(&biter);  in pbl_chunk_list_create()
|
/drivers/infiniband/hw/mthca/

mthca_provider.c
   832  struct ib_block_iter biter;  in mthca_reg_user_mr() local
   884  rdma_umem_for_each_dma_block(mr->umem, &biter, PAGE_SIZE) {  in mthca_reg_user_mr()
   885  pages[i++] = rdma_block_iter_dma_address(&biter);  in mthca_reg_user_mr()
|
/drivers/infiniband/hw/mana/

main.c
   379  struct ib_block_iter biter;  in mana_ib_gd_create_dma_region() local
   425  rdma_umem_for_each_dma_block(umem, &biter, page_sz) {  in mana_ib_gd_create_dma_region()
   428  page_addr_list[tail++] = rdma_block_iter_dma_address(&biter);  in mana_ib_gd_create_dma_region()
|
/drivers/infiniband/hw/erdma/

erdma_verbs.c
   588  struct ib_block_iter biter;  in erdma_fill_bottom_mtt() local
   594  rdma_umem_for_each_dma_block(mem->umem, &biter, mem->page_size)  in erdma_fill_bottom_mtt()
   595  mtt->buf[idx++] = rdma_block_iter_dma_address(&biter);  in erdma_fill_bottom_mtt()
|
/drivers/infiniband/hw/ocrdma/

ocrdma_verbs.c
   819  struct ib_block_iter biter;  in build_user_pbes() local
   830  rdma_umem_for_each_dma_block (mr->umem, &biter, PAGE_SIZE) {  in build_user_pbes()
   832  pg_addr = rdma_block_iter_dma_address(&biter);  in build_user_pbes()
|
/drivers/infiniband/hw/qedr/

verbs.c
   627  struct ib_block_iter biter;  in qedr_populate_pbls() local
   649  rdma_umem_for_each_dma_block (umem, &biter, BIT(pg_shift)) {  in qedr_populate_pbls()
   650  u64 pg_addr = rdma_block_iter_dma_address(&biter);  in qedr_populate_pbls()
|
/drivers/infiniband/hw/irdma/

verbs.c
  2340  struct ib_block_iter biter;  in irdma_copy_user_pgaddrs() local
  2349  rdma_umem_for_each_dma_block(region, &biter, iwmr->page_size) {  in irdma_copy_user_pgaddrs()
  2350  *pbl = rdma_block_iter_dma_address(&biter);  in irdma_copy_user_pgaddrs()
|