
Searched refs:pg_size (Results 1 – 16 of 16) sorted by relevance

/drivers/iommu/amd/
io_pgtable_v2.c
    64  static u64 set_pte_attr(u64 paddr, u64 pg_size, int prot)  [in set_pte_attr(); argument]
    76  if (pg_size == IOMMU_PAGE_SIZE_1G || pg_size == IOMMU_PAGE_SIZE_2M)  [in set_pte_attr()]
    93  static inline int page_size_to_level(u64 pg_size)  [in page_size_to_level(); argument]
    95  if (pg_size == IOMMU_PAGE_SIZE_1G)  [in page_size_to_level()]
    97  if (pg_size == IOMMU_PAGE_SIZE_2M)  [in page_size_to_level()]
   132  unsigned long pg_size, gfp_t gfp, bool *updated)  [in v2_alloc_pte(); argument]
   138  end_level = page_size_to_level(pg_size);  [in v2_alloc_pte()]
   181  if (pg_size == IOMMU_PAGE_SIZE_1G)  [in v2_alloc_pte()]
   183  else if (pg_size == IOMMU_PAGE_SIZE_2M)  [in v2_alloc_pte()]
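
The v2 page-table code above maps a requested page size to the page-table level whose PTE acts as the leaf. A minimal standalone sketch of that mapping; the size constants and numeric levels here are illustrative assumptions, not values copied from the driver (the real code returns PAGE_MODE_* constants):

    #include <stdint.h>
    #include <stdio.h>

    #define IOMMU_PAGE_SIZE_2M (1ULL << 21)   /* assumed: 2 MiB leaf */
    #define IOMMU_PAGE_SIZE_1G (1ULL << 30)   /* assumed: 1 GiB leaf */

    /* Level whose PTE is the leaf for pg_size; level 0 covers 4K pages. */
    static int page_size_to_level(uint64_t pg_size)
    {
        if (pg_size == IOMMU_PAGE_SIZE_1G)
            return 2;
        if (pg_size == IOMMU_PAGE_SIZE_2M)
            return 1;
        return 0;
    }

    int main(void)
    {
        printf("2M -> level %d\n", page_size_to_level(IOMMU_PAGE_SIZE_2M));
        printf("1G -> level %d\n", page_size_to_level(IOMMU_PAGE_SIZE_1G));
        return 0;
    }
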
io_pgtable.c
    33  unsigned long pte_mask, pg_size, cnt;  [in first_pte_l7(); local]
    36  pg_size = PTE_PAGE_SIZE(*pte);  [in first_pte_l7()]
    37  cnt = PAGE_SIZE_PTE_COUNT(pg_size);  [in first_pte_l7()]
    42  *page_size = pg_size;  [in first_pte_l7()]
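
first_pte_l7() reads the page size encoded in a PTE, derives how many consecutive PTEs alias that large page, and rounds back to the first entry of the run. The driver masks the PTE pointer itself; the sketch below rounds an index instead, under the simplifying assumption that the run length is pg_size / 4K and a power of two:

    #include <stdio.h>

    /* Round idx down to the first PTE of a run of cnt entries (cnt a power of two). */
    static unsigned long first_pte_index(unsigned long idx, unsigned long cnt)
    {
        return idx & ~(cnt - 1);
    }

    int main(void)
    {
        /* Assumed: a 2M page mapped as 512 aliasing 4K PTEs. */
        unsigned long cnt = (2UL << 20) / (4UL << 10);
        printf("PTE 517 belongs to the run starting at index %lu\n",
               first_pte_index(517, cnt));
        return 0;
    }
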
/drivers/infiniband/hw/bnxt_re/
qplib_res.h
   158  u32 pg_size;  [member]
   361  u8 pg_size = BNXT_QPLIB_HWRM_PG_SIZE_4K;  [in bnxt_qplib_base_pg_size(); local]
   365  switch (pbl->pg_size) {  [in bnxt_qplib_base_pg_size()]
   367  pg_size = BNXT_QPLIB_HWRM_PG_SIZE_4K;  [in bnxt_qplib_base_pg_size()]
   370  pg_size = BNXT_QPLIB_HWRM_PG_SIZE_8K;  [in bnxt_qplib_base_pg_size()]
   373  pg_size = BNXT_QPLIB_HWRM_PG_SIZE_64K;  [in bnxt_qplib_base_pg_size()]
   376  pg_size = BNXT_QPLIB_HWRM_PG_SIZE_2M;  [in bnxt_qplib_base_pg_size()]
   379  pg_size = BNXT_QPLIB_HWRM_PG_SIZE_8M;  [in bnxt_qplib_base_pg_size()]
   382  pg_size = BNXT_QPLIB_HWRM_PG_SIZE_1G;  [in bnxt_qplib_base_pg_size()]
   388  return pg_size;  [in bnxt_qplib_base_pg_size()]
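
bnxt_qplib_base_pg_size() translates the PBL's page size in bytes into the firmware's page-size code, defaulting to the 4K code for anything unrecognized. A standalone sketch of the same shape; the enum values are placeholders, not the real HWRM codes:

    #include <stdint.h>
    #include <stdio.h>

    /* Placeholder codes; the real values live in the bnxt_re HWRM headers. */
    enum { PG_SIZE_4K, PG_SIZE_8K, PG_SIZE_64K, PG_SIZE_2M, PG_SIZE_8M, PG_SIZE_1G };

    static uint8_t base_pg_size(uint32_t pg_size_bytes)
    {
        switch (pg_size_bytes) {
        case 4 * 1024:           return PG_SIZE_4K;
        case 8 * 1024:           return PG_SIZE_8K;
        case 64 * 1024:          return PG_SIZE_64K;
        case 2 * 1024 * 1024:    return PG_SIZE_2M;
        case 8 * 1024 * 1024:    return PG_SIZE_8M;
        case 1024 * 1024 * 1024: return PG_SIZE_1G;
        default:                 return PG_SIZE_4K; /* fall back to 4K, as the driver does */
        }
    }

    int main(void)
    {
        printf("64K -> code %u\n", base_pg_size(64 * 1024));
        return 0;
    }
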
qplib_res.c
    72  dma_free_coherent(&pdev->dev, pbl->pg_size,  [in __free_pbl()]
    88  pbl->pg_size = 0;  [in __free_pbl()]
   132  pbl->pg_size = sginfo->pgsize;  [in __alloc_pbl()]
   137  pbl->pg_size,  [in __alloc_pbl()]
   196  pg_size = hwq_attr->sginfo->pgsize;  [in bnxt_qplib_alloc_init_hwq()]
   204  aux_pages = (aux_slots * aux_size) / pg_size;  [in bnxt_qplib_alloc_init_hwq()]
   205  if ((aux_slots * aux_size) % pg_size)  [in bnxt_qplib_alloc_init_hwq()]
   211  npages = (depth * stride) / pg_size + aux_pages;  [in bnxt_qplib_alloc_init_hwq()]
   212  if ((depth * stride) % pg_size)  [in bnxt_qplib_alloc_init_hwq()]
   244  sginfo.pgsize = npde * pg_size;  [in bnxt_qplib_alloc_init_hwq()]
        [all …]
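
bnxt_qplib_alloc_init_hwq() computes page counts with a divide followed by a remainder test, which is the open-coded form of the kernel's DIV_ROUND_UP(). A sketch showing the equivalence (DIV_ROUND_UP here is the standard one-liner, written out rather than taken from a kernel header):

    #include <stdio.h>

    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    int main(void)
    {
        unsigned int depth = 1000, stride = 16, pg_size = 4096;

        /* Open-coded round-up, as in bnxt_qplib_alloc_init_hwq(). */
        unsigned int npages = (depth * stride) / pg_size;
        if ((depth * stride) % pg_size)
            npages++;

        printf("open-coded: %u, DIV_ROUND_UP: %u\n",
               npages, DIV_ROUND_UP(depth * stride, pg_size));
        return 0;
    }
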
qplib_sp.c
   627  u32 pg_size;  [in bnxt_qplib_reg_mr(); local]
   664  pg_size = PAGE_SIZE;  [in bnxt_qplib_reg_mr()]
   669  pg_size = buf_pg_size ? buf_pg_size : PAGE_SIZE;  [in bnxt_qplib_reg_mr()]
   671  ((ilog2(pg_size) <<  [in bnxt_qplib_reg_mr()]
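
bnxt_qplib_reg_mr() packs the base-2 log of the page size into a shifted command field (ilog2(pg_size) << shift). A portable sketch of that encoding; the shift value is an assumed placeholder, not the driver's actual field layout, and ilog2 is written out by hand:

    #include <stdint.h>
    #include <stdio.h>

    /* Integer log2 of a power-of-two size (what the kernel's ilog2() yields). */
    static unsigned int ilog2_u32(uint32_t v)
    {
        unsigned int r = 0;
        while (v >>= 1)
            r++;
        return r;
    }

    int main(void)
    {
        uint32_t pg_size = 2 * 1024 * 1024;   /* 2M pages */
        unsigned int shift = 4;               /* assumed field position */
        uint32_t field = ilog2_u32(pg_size) << shift;
        printf("log2(%u) = %u, encoded field = 0x%x\n",
               pg_size, ilog2_u32(pg_size), field);
        return 0;
    }
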
ib_verbs.c
  4327  resp.pg_size = PAGE_SIZE;  [in bnxt_re_alloc_ucontext()]
/drivers/net/ethernet/chelsio/inline_crypto/chtls/
chtls_io.c
  1118  int pg_size = PAGE_SIZE;  [in chtls_sendmsg(); local]
  1123  pg_size = page_size(page);  [in chtls_sendmsg()]
  1124  if (off < pg_size &&  [in chtls_sendmsg()]
  1134  if (page && off == pg_size) {  [in chtls_sendmsg()]
  1137  pg_size = PAGE_SIZE;  [in chtls_sendmsg()]
  1150  pg_size <<= order;  [in chtls_sendmsg()]
  1154  pg_size = PAGE_SIZE;  [in chtls_sendmsg()]
  1161  if (copy > pg_size - off)  [in chtls_sendmsg()]
  1162  copy = pg_size - off;  [in chtls_sendmsg()]
  1183  if (off + copy < pg_size) {  [in chtls_sendmsg()]
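
chtls_sendmsg() fills the current page from offset off, clamps each copy to the space left in the page, and scales pg_size when it allocates a compound page (pg_size <<= order). A minimal sketch of the clamp-and-advance loop over plain counters, standing in for the page machinery:

    #include <stddef.h>
    #include <stdio.h>

    int main(void)
    {
        size_t pg_size = 4096, off = 3000;   /* bytes already used in the page */
        size_t remaining = 5000;             /* bytes left in the message */

        while (remaining) {
            size_t copy = remaining;
            if (copy > pg_size - off)        /* clamp to space left in this page */
                copy = pg_size - off;
            printf("copy %zu bytes at offset %zu\n", copy, off);
            remaining -= copy;
            off += copy;
            if (off == pg_size)              /* page full: move to a fresh one */
                off = 0;
        }
        return 0;
    }
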
/drivers/net/ethernet/huawei/hinic/
hinic_hw_eqs.c
    27  #define GET_EQ_NUM_PAGES(eq, pg_size) \  [macro argument]
    28  (ALIGN((eq)->q_len * (eq)->elem_size, pg_size) / (pg_size))
    30  #define GET_EQ_NUM_ELEMS_IN_PG(eq, pg_size) ((pg_size) / (eq)->elem_size)  [macro argument]
hinic_hw_io.c
    38  #define HINIC_PAGE_SIZE_HW(pg_size) ((u8)ilog2((u32)((pg_size) >> 12)))  [macro argument]
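
The two hinic groups above do related conversions: GET_EQ_NUM_PAGES rounds the queue's byte footprint (q_len * elem_size) up to whole pages via ALIGN, and HINIC_PAGE_SIZE_HW encodes a byte page size as ilog2(pg_size >> 12), so 4K maps to 0, 8K to 1, and so on. A worked sketch; ALIGN is the standard power-of-two rounding macro, written out here:

    #include <stdint.h>
    #include <stdio.h>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

    /* Hardware encoding: ilog2 of the page size expressed in 4K units. */
    static uint8_t page_size_hw(uint32_t pg_size)
    {
        uint8_t r = 0;
        pg_size >>= 12;
        while (pg_size >>= 1)
            r++;
        return r;
    }

    int main(void)
    {
        uint32_t q_len = 300, elem_size = 32, pg_size = 4096;
        printf("num_pages = %u\n", ALIGN(q_len * elem_size, pg_size) / pg_size);
        printf("4K -> %u, 8K -> %u, 64K -> %u\n",
               page_size_hw(4096), page_size_hw(8192), page_size_hw(65536));
        return 0;
    }
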
/drivers/crypto/caam/
ctrl.c
   871  int pg_size;  [in caam_probe(); local]
   995  pg_size = (comp_params & CTPR_MS_PG_SZ_MASK) >> CTPR_MS_PG_SZ_SHIFT;  [in caam_probe()]
   996  if (pg_size == 0)  [in caam_probe()]
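
caam_probe() pulls the page-size field out of the CTPR_MS compile-time parameters register with a mask-and-shift, treating 0 as the default. The mask and shift below are placeholders for the real CTPR_MS_PG_SZ_* definitions; only the extraction pattern is the point:

    #include <stdint.h>
    #include <stdio.h>

    #define PG_SZ_MASK  0x00000F00u   /* placeholder mask */
    #define PG_SZ_SHIFT 8             /* placeholder shift */

    int main(void)
    {
        uint32_t comp_params = 0x00000200u;   /* example register value */
        int pg_size = (comp_params & PG_SZ_MASK) >> PG_SZ_SHIFT;
        printf("pg_size field = %d%s\n", pg_size,
               pg_size == 0 ? " (default)" : "");
        return 0;
    }
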
/drivers/iommu/
iommu.c
  1136  unsigned long pg_size;  [in iommu_create_device_direct_mappings(); local]
  1139  pg_size = domain->pgsize_bitmap ? 1UL << __ffs(domain->pgsize_bitmap) : 0;  [in iommu_create_device_direct_mappings()]
  1142  if (WARN_ON_ONCE(iommu_is_dma_domain(domain) && !pg_size))  [in iommu_create_device_direct_mappings()]
  1160  start = ALIGN(entry->start, pg_size);  [in iommu_create_device_direct_mappings()]
  1161  end = ALIGN(entry->start + entry->length, pg_size);  [in iommu_create_device_direct_mappings()]
  1163  for (addr = start; addr <= end; addr += pg_size) {  [in iommu_create_device_direct_mappings()]
  1171  map_size += pg_size;  [in iommu_create_device_direct_mappings()]
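
iommu_create_device_direct_mappings() derives the domain's smallest supported page size by taking the lowest set bit of pgsize_bitmap (1UL << __ffs(bitmap)), then aligns each reserved region to it before mapping page by page. A sketch of the bit trick; __ffs is emulated here with a compiler builtin rather than the kernel helper:

    #include <stdio.h>

    int main(void)
    {
        /* Example: domain supports 4K, 2M and 1G mappings. */
        unsigned long pgsize_bitmap = (1UL << 12) | (1UL << 21) | (1UL << 30);

        /* Lowest set bit = smallest supported page size. */
        unsigned long pg_size = 1UL << __builtin_ctzl(pgsize_bitmap);

        unsigned long start = 0x1234, len = 0x5000;
        unsigned long aligned_start = (start + pg_size - 1) & ~(pg_size - 1);
        unsigned long aligned_end   = (start + len + pg_size - 1) & ~(pg_size - 1);

        printf("pg_size = %lu\n", pg_size);   /* 4096 */
        printf("map [%#lx, %#lx) page by page\n", aligned_start, aligned_end);
        return 0;
    }
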
/drivers/infiniband/hw/irdma/
verbs.c
  2364  static bool irdma_check_mem_contiguous(u64 *arr, u32 npages, u32 pg_size)  [in irdma_check_mem_contiguous(); argument]
  2369  if ((*arr + (pg_size * pg_idx)) != arr[pg_idx])  [in irdma_check_mem_contiguous()]
  2382  u32 pg_size)  [in irdma_check_mr_contiguous(); argument]
  2394  pg_size);  [in irdma_check_mr_contiguous()]
  2402  if ((*start_addr + (i * pg_size * PBLE_PER_PAGE)) != *arr)  [in irdma_check_mr_contiguous()]
  2404  ret = irdma_check_mem_contiguous(arr, leaf->cnt, pg_size);  [in irdma_check_mr_contiguous()]
  2468  u32 pg_size, total;  [in irdma_handle_q_mem(); local]
  2472  pg_size = iwmr->page_size;  [in irdma_handle_q_mem()]
  2488  pg_size);  [in irdma_handle_q_mem()]
  2492  pg_size);  [in irdma_handle_q_mem()]
        [all …]
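
irdma_check_mem_contiguous() verifies that an array of page addresses is physically contiguous: entry i must equal the first address plus i * pg_size (irdma_check_mr_contiguous applies the same idea across PBLE leaves). A standalone sketch of the base check:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* True if npages addresses step uniformly by pg_size from arr[0]. */
    static bool check_mem_contiguous(const uint64_t *arr, uint32_t npages,
                                     uint32_t pg_size)
    {
        for (uint32_t i = 0; i < npages; i++)
            if (arr[0] + (uint64_t)pg_size * i != arr[i])
                return false;
        return true;
    }

    int main(void)
    {
        uint64_t pages[] = { 0x100000, 0x101000, 0x102000, 0x104000 };
        printf("first 3 contiguous: %d\n", check_mem_contiguous(pages, 3, 4096));
        printf("all 4 contiguous:   %d\n", check_mem_contiguous(pages, 4, 4096));
        return 0;
    }
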
/drivers/misc/bcm-vk/
bcm_vk_dev.c
  1208  unsigned long pg_size;  [in bcm_vk_mmap(); local]
  1213  pg_size = ((pci_resource_len(vk->pdev, VK_MMAPABLE_BAR) - 1)  [in bcm_vk_mmap()]
  1215  if (vma->vm_pgoff + vma_pages(vma) > pg_size)  [in bcm_vk_mmap()]
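
bcm_vk_mmap() converts the BAR length into a page count and rejects any mapping whose page offset plus length would run past the end of the BAR. A sketch of the bounds check with plain integers standing in for the vma and pci_resource_len(); it assumes a page-aligned BAR, whereas the driver rounds the length up to whole pages:

    #include <stdbool.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12

    /* Reject mappings that would extend past the BAR (offsets/lengths in pages). */
    static bool mmap_in_bounds(unsigned long bar_len, unsigned long vm_pgoff,
                               unsigned long vma_pages)
    {
        unsigned long bar_pages = bar_len >> PAGE_SHIFT;
        return vm_pgoff + vma_pages <= bar_pages;
    }

    int main(void)
    {
        unsigned long bar_len = 1UL << 20;   /* 1 MiB BAR = 256 pages */
        printf("offset 200, 56 pages: %d\n", mmap_in_bounds(bar_len, 200, 56));
        printf("offset 200, 57 pages: %d\n", mmap_in_bounds(bar_len, 200, 57));
        return 0;
    }
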
/drivers/net/wireless/realtek/rtw88/
fw.c
  1539  u32 pg_size;  [in rtw_download_drv_rsvd_page(); local]
  1543  pg_size = rtwdev->chip->page_size;  [in rtw_download_drv_rsvd_page()]
  1544  pg_num = size / pg_size + ((size & (pg_size - 1)) ? 1 : 0);  [in rtw_download_drv_rsvd_page()]
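
rtw_download_drv_rsvd_page() rounds the payload size up to whole firmware pages with a mask test, which works only because the chip's page size is a power of two (size & (pg_size - 1) is then the remainder). A sketch with an assumed example page size:

    #include <stdio.h>

    int main(void)
    {
        unsigned int pg_size = 128;          /* assumed example chip page size */
        unsigned int sizes[] = { 256, 300 };

        for (int i = 0; i < 2; i++) {
            unsigned int size = sizes[i];
            unsigned int pg_num = size / pg_size +
                                  ((size & (pg_size - 1)) ? 1 : 0);
            printf("size %u -> %u pages\n", size, pg_num);
        }
        return 0;
    }
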
/drivers/target/sbp/
sbp_target.c
  1242  int tcode, sg_miter_flags, max_payload, pg_size, speed, node_id,  [in sbp_rw_data(); local]
  1261  pg_size = CMDBLK_ORB_PG_SIZE(be32_to_cpu(req->orb.misc));  [in sbp_rw_data()]
  1262  if (pg_size) {  [in sbp_rw_data()]
/drivers/net/ethernet/chelsio/cxgb3/
t3_hw.c
  2522  unsigned int pg_size)  [in pm_num_pages(); argument]
  2524  unsigned int n = mem_size / pg_size;  [in pm_num_pages(); local]

Completed in 70 milliseconds