Home
last modified time | relevance | path

Searched refs:pgtable (Results 1 – 15 of 15) sorted by relevance

/drivers/iommu/amd/
io_pgtable.c:131 *pte = PM_LEVEL_PDE(pgtable->mode, iommu_virt_to_phys(pgtable->root)); in increase_address_space()
133 pgtable->root = pte; in increase_address_space()
134 pgtable->mode += 1; in increase_address_space()
173 level = pgtable->mode - 1; in alloc_pte()
259 level = pgtable->mode - 1; in fetch_pte()
524 if (pgtable->mode == PAGE_MODE_NONE) in v1_free_pgtable()
531 free_sub_pt(pgtable->root, pgtable->mode, &freelist); in v1_free_pgtable()
539 pgtable->root = in v1_alloc_pgtable()
541 if (!pgtable->root) in v1_alloc_pgtable()
543 pgtable->mode = PAGE_MODE_3_LEVEL; in v1_alloc_pgtable()
[all …]
io_pgtable_v2.c:201 pte = &pgtable->pgd[PM_LEVEL_INDEX(level, iova)]; in fetch_pte()
237 struct io_pgtable_cfg *cfg = &pgtable->pgtbl.cfg; in iommu_v2_map_pages()
254 pte = v2_alloc_pte(cfg->amd.nid, pgtable->pgd, in iommu_v2_map_pages()
290 struct io_pgtable_cfg *cfg = &pgtable->pgtbl.cfg; in iommu_v2_unmap_pages()
300 pte = fetch_pte(pgtable, iova, &unmap_size); in iommu_v2_unmap_pages()
319 pte = fetch_pte(pgtable, iova, &pte_pgsize); in iommu_v2_iova_to_phys()
336 if (!pgtable || !pgtable->pgd) in v2_free_pgtable()
340 free_pgtable(pgtable->pgd, get_pgtable_level()); in v2_free_pgtable()
341 pgtable->pgd = NULL; in v2_free_pgtable()
350 if (!pgtable->pgd) in v2_alloc_pgtable()
[all …]
iommu.c:2528 static inline u64 dma_max_address(enum protection_domain_mode pgtable) in dma_max_address() argument
2530 if (pgtable == PD_MODE_V1) in dma_max_address()
2560 enum protection_domain_mode pgtable) in do_iommu_domain_alloc() argument
2571 domain->pd_mode = pgtable; in do_iommu_domain_alloc()
2580 domain->domain.geometry.aperture_end = dma_max_address(pgtable); in do_iommu_domain_alloc()
/drivers/accel/ivpu/
ivpu_mmu_context.c:124 kfree(pgtable->pmd_ptrs[pgd_idx]); in ivpu_mmu_pgtables_free()
125 kfree(pgtable->pte_ptrs[pgd_idx]); in ivpu_mmu_pgtables_free()
129 ivpu_pgtable_free_page(vdev, pgtable->pgd_dma_ptr, pgtable->pgd_dma); in ivpu_mmu_pgtables_free()
130 pgtable->pgd_dma_ptr = NULL; in ivpu_mmu_pgtables_free()
131 pgtable->pgd_dma = 0; in ivpu_mmu_pgtables_free()
137 u64 *pgd_dma_ptr = pgtable->pgd_dma_ptr; in ivpu_mmu_ensure_pgd()
147 pgtable->pgd_dma_ptr = pgd_dma_ptr; in ivpu_mmu_ensure_pgd()
148 pgtable->pgd_dma = pgd_dma; in ivpu_mmu_ensure_pgd()
168 if (!pgtable->pmd_ptrs[pgd_idx]) in ivpu_mmu_ensure_pud()
173 if (!pgtable->pte_ptrs[pgd_idx]) in ivpu_mmu_ensure_pud()
[all …]
ivpu_mmu.h:43 int ivpu_mmu_cd_set(struct ivpu_device *vdev, int ssid, struct ivpu_mmu_pgtable *pgtable);
ivpu_mmu_context.h:28 struct ivpu_mmu_pgtable pgtable; member
ivpu_mmu.c:1021 int ivpu_mmu_cd_set(struct ivpu_device *vdev, int ssid, struct ivpu_mmu_pgtable *pgtable) in ivpu_mmu_cd_set() argument
1023 return ivpu_mmu_cdtab_entry_set(vdev, ssid, pgtable->pgd_dma, true); in ivpu_mmu_cd_set()
/drivers/gpu/drm/etnaviv/
etnaviv_iommu.c:93 u32 pgtable; in etnaviv_iommuv1_restore() local
107 pgtable = (u32)v1_context->pgtable_dma; in etnaviv_iommuv1_restore()
109 gpu_write(gpu, VIVS_MC_MMU_FE_PAGE_TABLE, pgtable); in etnaviv_iommuv1_restore()
110 gpu_write(gpu, VIVS_MC_MMU_TX_PAGE_TABLE, pgtable); in etnaviv_iommuv1_restore()
111 gpu_write(gpu, VIVS_MC_MMU_PE_PAGE_TABLE, pgtable); in etnaviv_iommuv1_restore()
112 gpu_write(gpu, VIVS_MC_MMU_PEZ_PAGE_TABLE, pgtable); in etnaviv_iommuv1_restore()
113 gpu_write(gpu, VIVS_MC_MMU_RA_PAGE_TABLE, pgtable); in etnaviv_iommuv1_restore()
/drivers/iommu/
exynos-iommu.c:183 return pgtable + lv1ent_offset(iova); in section_entry()
907 if (!domain->pgtable) in exynos_iommu_domain_alloc_paging()
916 domain->pgtable[i] = ZERO_LV2LINK; in exynos_iommu_domain_alloc_paging()
940 iommu_free_pages(domain->pgtable); in exynos_iommu_domain_alloc_paging()
960 data->pgtable = 0; in exynos_iommu_domain_free()
972 if (lv1ent_page(domain->pgtable + i)) { in exynos_iommu_domain_free()
981 iommu_free_pages(domain->pgtable); in exynos_iommu_domain_free()
1013 data->pgtable = 0; in exynos_iommu_identity_attach()
1056 data->pgtable = pagetable; in exynos_iommu_attach_device()
1245 BUG_ON(domain->pgtable == NULL); in exynos_iommu_map()
[all …]
Makefile:12 obj-$(CONFIG_IOMMU_IO_PGTABLE) += io-pgtable.o
13 obj-$(CONFIG_IOMMU_IO_PGTABLE_ARMV7S) += io-pgtable-arm-v7s.o
14 obj-$(CONFIG_IOMMU_IO_PGTABLE_LPAE) += io-pgtable-arm.o
15 obj-$(CONFIG_IOMMU_IO_PGTABLE_DART) += io-pgtable-dart.o
omap-iommu.c:1405 iommu->pgtable = kzalloc(IOPGD_TABLE_SIZE, GFP_ATOMIC); in omap_iommu_attach_init()
1406 if (!iommu->pgtable) in omap_iommu_attach_init()
1413 if (WARN_ON(!IS_ALIGNED((long)iommu->pgtable, in omap_iommu_attach_init()
1427 kfree(iommu->pgtable); in omap_iommu_detach_fini()
1469 ret = omap_iommu_attach(oiommu, iommu->pgtable); in omap_iommu_attach_dev()
omap-iommu.h:34 u32 *pgtable; member
/drivers/iommu/arm/arm-smmu/
arm-smmu-qcom.c:216 struct io_pgtable *pgtable = in qcom_adreno_smmu_get_ttbr1_cfg() local
218 return &pgtable->cfg; in qcom_adreno_smmu_get_ttbr1_cfg()
231 struct io_pgtable *pgtable = io_pgtable_ops_to_pgtable(smmu_domain->pgtbl_ops); in qcom_adreno_smmu_set_ttbr0_cfg() local
246 cb->tcr[0] = arm_smmu_lpae_tcr(&pgtable->cfg); in qcom_adreno_smmu_set_ttbr0_cfg()
qcom_iommu.c:475 struct io_pgtable *pgtable = container_of(qcom_domain->pgtbl_ops, in qcom_iommu_flush_iotlb_all() local
481 qcom_iommu_tlb_sync(pgtable->cookie); in qcom_iommu_flush_iotlb_all()
/drivers/iommu/intel/
iommu.c:627 struct dma_pte *pgtable; in dmar_fault_dump_ptes() local
661 pgtable = phys_to_virt(ctx_entry->lo & VTD_PAGE_MASK); in dmar_fault_dump_ptes()
699 pgtable = phys_to_virt(pte->val[2] & VTD_PAGE_MASK); in dmar_fault_dump_ptes()
702 pgtable = phys_to_virt(pte->val[0] & VTD_PAGE_MASK); in dmar_fault_dump_ptes()
706 pgtable_walk(iommu, addr >> VTD_PAGE_SHIFT, bus, devfn, pgtable, level); in dmar_fault_dump_ptes()

Completed in 49 milliseconds