Lines matching references to `dom` in drivers/iommu/mtk_iommu_v1.c (each entry: source line number, matched code, enclosing function):
133 static struct mtk_iommu_v1_domain *to_mtk_domain(struct iommu_domain *dom) in to_mtk_domain() argument
135 return container_of(dom, struct mtk_iommu_v1_domain, domain); in to_mtk_domain()
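The helper at source line 133 is the standard container_of() pattern: the driver-private domain embeds the generic struct iommu_domain, so any callback handed the generic pointer can recover the private state. A minimal sketch of that layout, keeping only the members that appear in the matches below (field order and exact types are assumed):

#include <linux/container_of.h>
#include <linux/iommu.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct mtk_iommu_v1_data;			/* device instance, defined elsewhere in the driver */

/* Sketch of the private domain: only fields referenced by the matched lines. */
struct mtk_iommu_v1_domain {
	spinlock_t			pgtlock;	/* protects the flat page table */
	struct iommu_domain		domain;		/* generic handle seen by the IOMMU core */
	u32				*pgt_va;	/* CPU address of the page table */
	dma_addr_t			pgt_pa;		/* bus address programmed into the M4U */
	struct mtk_iommu_v1_data	*data;		/* back-pointer to the device instance */
};

static struct mtk_iommu_v1_domain *to_mtk_domain(struct iommu_domain *dom)
{
	/* Walk back from the embedded member to its enclosing structure. */
	return container_of(dom, struct mtk_iommu_v1_domain, domain);
}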
197 struct mtk_iommu_v1_domain *dom = data->m4u_dom; in mtk_iommu_v1_isr() local
215 if (report_iommu_fault(&dom->domain, data->dev, fault_iova, in mtk_iommu_v1_isr()
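report_iommu_fault() at source line 215 forwards the decoded fault IOVA to whatever handler the domain owner registered via iommu_set_fault_handler(); if that returns nonzero (or no handler is installed) the driver typically falls back to its own rate-limited logging. A hedged sketch of the consumer side, using the real iommu_set_fault_handler()/iommu_fault_handler_t API; the handler name and the place it is installed are hypothetical:

#include <linux/device.h>
#include <linux/iommu.h>

/* Hypothetical fault handler: called from mtk_iommu_v1_isr() via
 * report_iommu_fault() with the faulting IOVA and IOMMU_FAULT_READ/WRITE
 * flags. Returning 0 tells the core the fault was handled. */
static int my_m4u_fault_handler(struct iommu_domain *domain, struct device *dev,
				unsigned long iova, int flags, void *token)
{
	dev_warn(dev, "M4U %s fault at IOVA %#lx\n",
		 (flags & IOMMU_FAULT_WRITE) ? "write" : "read", iova);
	return 0;
}

/* Somewhere after the domain is obtained (domain pointer assumed available): */
static void install_fault_handler(struct iommu_domain *domain)
{
	iommu_set_fault_handler(domain, my_m4u_fault_handler, NULL);
}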
257 struct mtk_iommu_v1_domain *dom = data->m4u_dom; in mtk_iommu_v1_domain_finalise() local
259 spin_lock_init(&dom->pgtlock); in mtk_iommu_v1_domain_finalise()
261 dom->pgt_va = dma_alloc_coherent(data->dev, M2701_IOMMU_PGT_SIZE, in mtk_iommu_v1_domain_finalise()
262 &dom->pgt_pa, GFP_KERNEL); in mtk_iommu_v1_domain_finalise()
263 if (!dom->pgt_va) in mtk_iommu_v1_domain_finalise()
266 writel(dom->pgt_pa, data->base + REG_MMU_PT_BASE_ADDR); in mtk_iommu_v1_domain_finalise()
268 dom->data = data; in mtk_iommu_v1_domain_finalise()
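Source lines 257-268 are the whole page-table setup: the gen1 M4U uses a single flat one-level table, so finalising a domain amounts to one dma_alloc_coherent() of M2701_IOMMU_PGT_SIZE and a writel() of the resulting bus address into REG_MMU_PT_BASE_ADDR. A sketch of that sequence with the surrounding body filled in (the signature, the -ENOMEM path and the return value are assumed; struct layout as in the sketch above):

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/io.h>
#include <linux/spinlock.h>

static int mtk_iommu_v1_domain_finalise(struct mtk_iommu_v1_data *data)
{
	struct mtk_iommu_v1_domain *dom = data->m4u_dom;

	spin_lock_init(&dom->pgtlock);

	/* One coherent allocation holds every PTE of the flat table. */
	dom->pgt_va = dma_alloc_coherent(data->dev, M2701_IOMMU_PGT_SIZE,
					 &dom->pgt_pa, GFP_KERNEL);
	if (!dom->pgt_va)
		return -ENOMEM;

	/* Tell the M4U where the table lives. */
	writel(dom->pgt_pa, data->base + REG_MMU_PT_BASE_ADDR);

	dom->data = data;
	return 0;
}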
275 struct mtk_iommu_v1_domain *dom; in mtk_iommu_v1_domain_alloc() local
280 dom = kzalloc(sizeof(*dom), GFP_KERNEL); in mtk_iommu_v1_domain_alloc()
281 if (!dom) in mtk_iommu_v1_domain_alloc()
284 return &dom->domain; in mtk_iommu_v1_domain_alloc()
289 struct mtk_iommu_v1_domain *dom = to_mtk_domain(domain); in mtk_iommu_v1_domain_free() local
290 struct mtk_iommu_v1_data *data = dom->data; in mtk_iommu_v1_domain_free()
293 dom->pgt_va, dom->pgt_pa); in mtk_iommu_v1_domain_free()
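Source lines 275-293 pair up: allocation is one kzalloc() whose embedded struct iommu_domain is what gets returned to the core, and freeing must release the coherent table with the same size and DMA handle before dropping the container. A sketch of both callbacks; the alloc signature, the domain-type check and the trailing kfree() are assumptions about lines not shown in the matches:

#include <linux/dma-mapping.h>
#include <linux/iommu.h>
#include <linux/slab.h>

static struct iommu_domain *mtk_iommu_v1_domain_alloc(unsigned int type)
{
	struct mtk_iommu_v1_domain *dom;

	if (type != IOMMU_DOMAIN_UNMANAGED)	/* assumed type check */
		return NULL;

	dom = kzalloc(sizeof(*dom), GFP_KERNEL);
	if (!dom)
		return NULL;

	/* The core only ever sees the embedded generic domain. */
	return &dom->domain;
}

static void mtk_iommu_v1_domain_free(struct iommu_domain *domain)
{
	struct mtk_iommu_v1_domain *dom = to_mtk_domain(domain);
	struct mtk_iommu_v1_data *data = dom->data;

	/* Release the flat table with the same size and DMA handle it was
	 * allocated with, then the container itself. */
	dma_free_coherent(data->dev, M2701_IOMMU_PGT_SIZE,
			  dom->pgt_va, dom->pgt_pa);
	kfree(dom);
}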
300 struct mtk_iommu_v1_domain *dom = to_mtk_domain(domain); in mtk_iommu_v1_attach_device() local
310 data->m4u_dom = dom; in mtk_iommu_v1_attach_device()
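Source line 310 is the "finalise on first attach" step: the hardware has a single page-table base register, so the first domain attached becomes data->m4u_dom and triggers mtk_iommu_v1_domain_finalise(); later attaches only configure the requesting master. A sketch of that logic; dev_iommu_priv_get() is the real core helper, while the error handling and the mtk_iommu_v1_config() call at the end are assumptions about code not shown in the matches:

#include <linux/iommu.h>

static int mtk_iommu_v1_attach_device(struct iommu_domain *domain, struct device *dev)
{
	struct mtk_iommu_v1_data *data = dev_iommu_priv_get(dev);
	struct mtk_iommu_v1_domain *dom = to_mtk_domain(domain);
	int ret;

	if (!data->m4u_dom) {
		/* First attach: adopt this domain as the one hardware domain
		 * and allocate/program its page table. */
		data->m4u_dom = dom;
		ret = mtk_iommu_v1_domain_finalise(data);
		if (ret) {
			data->m4u_dom = NULL;
			return ret;
		}
	}

	mtk_iommu_v1_config(data, dev, true);	/* enable this master's ports (assumed) */
	return 0;
}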
333 struct mtk_iommu_v1_domain *dom = to_mtk_domain(domain); in mtk_iommu_v1_map() local
336 u32 *pgt_base_iova = dom->pgt_va + (iova >> MT2701_IOMMU_PAGE_SHIFT); in mtk_iommu_v1_map()
339 spin_lock_irqsave(&dom->pgtlock, flags); in mtk_iommu_v1_map()
347 spin_unlock_irqrestore(&dom->pgtlock, flags); in mtk_iommu_v1_map()
350 mtk_iommu_v1_tlb_flush_range(dom->data, iova, *mapped); in mtk_iommu_v1_map()
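Source lines 333-350 show how mapping works against the flat table: the IOVA's page-frame number indexes directly into pgt_va, each 32-bit entry is written under pgtlock, and the TLB is flushed only for the range actually populated. A sketch of the PTE-filling loop; the map_pages-style signature, the F_DESC_VALID flag name and the -EEXIST policy for an already-live entry are assumptions, while to_mtk_domain() and mtk_iommu_v1_tlb_flush_range() come from the listing itself:

#include <linux/bits.h>
#include <linux/errno.h>
#include <linux/iommu.h>
#include <linux/spinlock.h>

#define MT2701_IOMMU_PAGE_SHIFT	12			/* 4 KiB pages */
#define MT2701_IOMMU_PAGE_SIZE	(1UL << MT2701_IOMMU_PAGE_SHIFT)
#define F_DESC_VALID		BIT(1)			/* assumed name/position of the valid bit */

static int mtk_iommu_v1_map(struct iommu_domain *domain, unsigned long iova,
			    phys_addr_t paddr, size_t pgsize, size_t pgcount,
			    int prot, gfp_t gfp, size_t *mapped)
{
	struct mtk_iommu_v1_domain *dom = to_mtk_domain(domain);
	/* Index straight into the flat table: one u32 entry per page. */
	u32 *pgt_base_iova = dom->pgt_va + (iova >> MT2701_IOMMU_PAGE_SHIFT);
	u32 pabase = (u32)paddr;
	unsigned long flags;
	unsigned int i;

	spin_lock_irqsave(&dom->pgtlock, flags);
	for (i = 0; i < pgcount; i++) {
		if (pgt_base_iova[i])
			break;				/* refuse to overwrite a live entry */
		pgt_base_iova[i] = pabase | F_DESC_VALID;
		pabase += MT2701_IOMMU_PAGE_SIZE;
	}
	spin_unlock_irqrestore(&dom->pgtlock, flags);

	/* Report and flush only what was actually written. */
	*mapped = (size_t)i << MT2701_IOMMU_PAGE_SHIFT;
	mtk_iommu_v1_tlb_flush_range(dom->data, iova, *mapped);

	return i == pgcount ? 0 : -EEXIST;
}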
359 struct mtk_iommu_v1_domain *dom = to_mtk_domain(domain); in mtk_iommu_v1_unmap() local
361 u32 *pgt_base_iova = dom->pgt_va + (iova >> MT2701_IOMMU_PAGE_SHIFT); in mtk_iommu_v1_unmap()
364 spin_lock_irqsave(&dom->pgtlock, flags); in mtk_iommu_v1_unmap()
366 spin_unlock_irqrestore(&dom->pgtlock, flags); in mtk_iommu_v1_unmap()
368 mtk_iommu_v1_tlb_flush_range(dom->data, iova, size); in mtk_iommu_v1_unmap()
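Unmapping (source lines 359-368) is the mirror image: zero the affected entries under the same lock, then flush the TLB for the whole range. A sketch reusing the constants from the map sketch above; the callback signature is assumed:

#include <linux/string.h>

static size_t mtk_iommu_v1_unmap(struct iommu_domain *domain, unsigned long iova,
				 size_t pgsize, size_t pgcount,
				 struct iommu_iotlb_gather *gather)
{
	struct mtk_iommu_v1_domain *dom = to_mtk_domain(domain);
	u32 *pgt_base_iova = dom->pgt_va + (iova >> MT2701_IOMMU_PAGE_SHIFT);
	size_t size = pgcount << MT2701_IOMMU_PAGE_SHIFT;
	unsigned long flags;

	spin_lock_irqsave(&dom->pgtlock, flags);
	/* Clearing a flat PTE is just writing zero; no levels to walk or free. */
	memset(pgt_base_iova, 0, pgcount * sizeof(u32));
	spin_unlock_irqrestore(&dom->pgtlock, flags);

	mtk_iommu_v1_tlb_flush_range(dom->data, iova, size);

	return size;
}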
375 struct mtk_iommu_v1_domain *dom = to_mtk_domain(domain); in mtk_iommu_v1_iova_to_phys() local
379 spin_lock_irqsave(&dom->pgtlock, flags); in mtk_iommu_v1_iova_to_phys()
380 pa = *(dom->pgt_va + (iova >> MT2701_IOMMU_PAGE_SHIFT)); in mtk_iommu_v1_iova_to_phys()
382 spin_unlock_irqrestore(&dom->pgtlock, flags); in mtk_iommu_v1_iova_to_phys()
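The lookup at source line 380 is a single array read: shift the IOVA down to a table index, read the 32-bit entry under pgtlock, and mask off the low descriptor bits to recover the physical page address. A sketch along those lines (the masking step is not among the matched lines and is assumed):

static phys_addr_t mtk_iommu_v1_iova_to_phys(struct iommu_domain *domain,
					     dma_addr_t iova)
{
	struct mtk_iommu_v1_domain *dom = to_mtk_domain(domain);
	unsigned long flags;
	phys_addr_t pa;

	spin_lock_irqsave(&dom->pgtlock, flags);
	/* One read: the entry holds the physical page number plus flag bits. */
	pa = *(dom->pgt_va + (iova >> MT2701_IOMMU_PAGE_SHIFT));
	pa &= ~(MT2701_IOMMU_PAGE_SIZE - 1);	/* strip the descriptor flag bits */
	spin_unlock_irqrestore(&dom->pgtlock, flags);

	return pa;
}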