Searched refs:dev_iommu_priv_get (Results 1 – 22 of 22) sorted by relevance
265 struct mtk_iommu_data *data = dev_iommu_priv_get(dev); in mtk_iommu_attach_device()
291 struct mtk_iommu_data *data = dev_iommu_priv_get(dev); in mtk_iommu_detach_device()
388 if (!dev_iommu_priv_get(dev)) { in mtk_iommu_create_mapping()
401 data = dev_iommu_priv_get(dev); in mtk_iommu_create_mapping()
445 data = dev_iommu_priv_get(dev); in mtk_iommu_probe_device()
456 data = dev_iommu_priv_get(dev); in mtk_iommu_probe_finalize()
493 struct apple_dart_master_cfg *cfg = dev_iommu_priv_get(dev); in apple_dart_attach_dev()
535 struct apple_dart_master_cfg *cfg = dev_iommu_priv_get(dev); in apple_dart_detach_dev()
548 struct apple_dart_master_cfg *cfg = dev_iommu_priv_get(dev); in apple_dart_probe_device()
565 struct apple_dart_master_cfg *cfg = dev_iommu_priv_get(dev); in apple_dart_release_device()
607 struct apple_dart_master_cfg *cfg = dev_iommu_priv_get(dev); in apple_dart_of_xlate()
671 struct apple_dart_master_cfg *cfg = dev_iommu_priv_get(dev); in apple_dart_device_group()
731 struct apple_dart_master_cfg *cfg = dev_iommu_priv_get(dev); in apple_dart_def_domain_type()
236 struct sprd_iommu_device *sdev = dev_iommu_priv_get(dev); in sprd_iommu_attach_device()
381 sdev = dev_iommu_priv_get(dev); in sprd_iommu_probe_device()
398 struct sprd_iommu_device *sdev = dev_iommu_priv_get(dev); in sprd_iommu_device_group()
407 if (!dev_iommu_priv_get(dev)) { in sprd_iommu_of_xlate()
174 #define has_sysmmu(dev) (dev_iommu_priv_get(dev) != NULL)
668 struct exynos_iommu_owner *owner = dev_iommu_priv_get(master); in exynos_sysmmu_suspend()
686 struct exynos_iommu_owner *owner = dev_iommu_priv_get(master); in exynos_sysmmu_resume()
826 struct exynos_iommu_owner *owner = dev_iommu_priv_get(dev); in exynos_iommu_detach_device()
864 struct exynos_iommu_owner *owner = dev_iommu_priv_get(dev); in exynos_iommu_attach_device()
1225 struct exynos_iommu_owner *owner = dev_iommu_priv_get(dev); in exynos_iommu_probe_device()
1251 struct exynos_iommu_owner *owner = dev_iommu_priv_get(dev); in exynos_iommu_release_device()
1276 struct exynos_iommu_owner *owner = dev_iommu_priv_get(dev); in exynos_iommu_of_xlate()
73 struct omap_iommu_arch_data *arch_data = dev_iommu_priv_get(dev); in omap_iommu_save_ctx()
103 struct omap_iommu_arch_data *arch_data = dev_iommu_priv_get(dev); in omap_iommu_restore_ctx()
1397 struct omap_iommu_arch_data *arch_data = dev_iommu_priv_get(dev); in omap_iommu_count()
1458 struct omap_iommu_arch_data *arch_data = dev_iommu_priv_get(dev); in omap_iommu_attach_dev()
1523 struct omap_iommu_arch_data *arch_data = dev_iommu_priv_get(dev); in _omap_iommu_detach_dev()
1711 struct omap_iommu_arch_data *arch_data = dev_iommu_priv_get(dev); in omap_iommu_release_device()
1723 struct omap_iommu_arch_data *arch_data = dev_iommu_priv_get(dev); in omap_iommu_device_group()
116 } else if (dev_iommu_priv_get(dev) != domain) { in gart_iommu_attach_dev()
134 if (dev_iommu_priv_get(dev) == domain) { in gart_iommu_detach_dev()
454 struct mtk_iommu_data *data = dev_iommu_priv_get(dev); in mtk_iommu_attach_device()
493 struct mtk_iommu_data *data = dev_iommu_priv_get(dev); in mtk_iommu_detach_device()
569 data = dev_iommu_priv_get(dev); in mtk_iommu_probe_device()
618 if (!dev_iommu_priv_get(dev)) { in mtk_iommu_of_xlate()
633 struct mtk_iommu_data *data = dev_iommu_priv_get(dev); in mtk_iommu_get_resv_regions()
147 old_domain_info = dev_iommu_priv_get(dev); in attach_device()
166 if (!dev_iommu_priv_get(dev)) in attach_device()
469 struct viommu_endpoint *vdev = dev_iommu_priv_get(dev); in viommu_probe_endpoint()
655 struct viommu_endpoint *vdev = dev_iommu_priv_get(dev); in viommu_attach_dev()
814 struct viommu_endpoint *vdev = dev_iommu_priv_get(dev); in viommu_get_resv_regions()
916 vdev = dev_iommu_priv_get(dev); in viommu_release_device()
489 struct tegra_smmu *smmu = dev_iommu_priv_get(dev); in tegra_smmu_attach_dev()
863 smmu = dev_iommu_priv_get(dev); in tegra_smmu_probe_device()
898 struct tegra_smmu *smmu = dev_iommu_priv_get(dev); in tegra_smmu_device_group()
928 struct rk_iommudata *data = dev_iommu_priv_get(dev); in rk_iommu_from_dev()
1140 data = dev_iommu_priv_get(dev); in rk_iommu_probe_device()
1154 struct rk_iommudata *data = dev_iommu_priv_get(dev); in rk_iommu_release_device()
131 return dev_iommu_priv_get(dev); in sun50i_iommu_from_dev()
694 struct sun50i_iommu *iommu = dev_iommu_priv_get(dev); in sun50i_iommu_detach_device()
595 struct msm_iommu_ctx_dev *master = dev_iommu_priv_get(dev); in insert_iommu_master()
89 return dev_iommu_priv_get(dev); in to_ipmmu()
16 return dev_iommu_priv_get(dev) != NULL; in exynos_is_iommu_available()
323 if (dev_iommu_priv_get(dev)) in iommu_init_device()
373 dev_data = dev_iommu_priv_get(dev); in amd_iommu_uninit_device()
427 dev_data = dev_iommu_priv_get(&pdev->dev); in amd_iommu_report_rmp_hw_error()
460 dev_data = dev_iommu_priv_get(&pdev->dev); in amd_iommu_report_rmp_fault()
492 dev_data = dev_iommu_priv_get(&pdev->dev); in amd_iommu_report_page_fault()
1644 dev_data = dev_iommu_priv_get(dev); in attach_device()
1708 dev_data = dev_iommu_priv_get(dev); in detach_device()
1754 if (dev_iommu_priv_get(dev)) in amd_iommu_probe_device()
2041 dev_data = dev_iommu_priv_get(dev); in amd_iommu_attach_device()
2254 dev_data = dev_iommu_priv_get(dev); in amd_iommu_def_domain_type()
[all …]
89 return dev_iommu_priv_get(dev); in to_iommu()
574 if (!dev_iommu_priv_get(dev)) { in qcom_iommu_of_xlate()
581 if (WARN_ON(qcom_iommu != dev_iommu_priv_get(dev))) { in qcom_iommu_of_xlate()
1042 struct arm_smmu_master_cfg *cfg = dev_iommu_priv_get(dev); in arm_smmu_master_alloc_smes()
1150 cfg = dev_iommu_priv_get(dev); in arm_smmu_attach_dev()
1442 cfg = dev_iommu_priv_get(dev); in arm_smmu_release_device()
1463 cfg = dev_iommu_priv_get(dev); in arm_smmu_probe_finalize()
1472 struct arm_smmu_master_cfg *cfg = dev_iommu_priv_get(dev); in arm_smmu_device_group()
1574 struct arm_smmu_master_cfg *cfg = dev_iommu_priv_get(dev); in arm_smmu_def_domain_type()
913 struct arm_smmu_master *master = dev_iommu_priv_get(dev); in arm_smmu_page_response()
2397 master = dev_iommu_priv_get(dev); in arm_smmu_attach_dev()
2641 if (WARN_ON_ONCE(dev_iommu_priv_get(dev))) in arm_smmu_probe_device()
2699 master = dev_iommu_priv_get(dev); in arm_smmu_release_device()
2765 struct arm_smmu_master *master = dev_iommu_priv_get(dev); in arm_smmu_dev_has_feature()
2783 struct arm_smmu_master *master = dev_iommu_priv_get(dev); in arm_smmu_dev_feature_enabled()
2801 struct arm_smmu_master *master = dev_iommu_priv_get(dev); in arm_smmu_dev_enable_feature()
2823 struct arm_smmu_master *master = dev_iommu_priv_get(dev); in arm_smmu_dev_disable_feature()
312 struct arm_smmu_master *master = dev_iommu_priv_get(dev); in __arm_smmu_sva_bind()
655 static inline void *dev_iommu_priv_get(struct device *dev) in dev_iommu_priv_get() function
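The hit above is the accessor's definition in the core IOMMU header. As a rough illustration of the pattern most driver hits in this list follow, the sketch below stores per-device data with the paired setter dev_iommu_priv_set() in a probe-style path and reads it back in attach_dev. It is not taken from any one driver; struct foo_iommu_data and the foo_* names are invented for the sketch, and only dev_iommu_priv_get()/dev_iommu_priv_set() are the real kernel helpers.

```c
/*
 * Hypothetical sketch: struct foo_iommu_data and the foo_* callbacks are
 * made-up names; the only real kernel helpers used are
 * dev_iommu_priv_get() and dev_iommu_priv_set().
 */
#include <linux/device.h>
#include <linux/iommu.h>
#include <linux/slab.h>

struct foo_iommu_data {
	u32 stream_id;		/* per-device translation context */
};

/* probe path: allocate per-device data and stash it on the device */
static int foo_iommu_init_device(struct device *dev)
{
	struct foo_iommu_data *data;

	data = kzalloc(sizeof(*data), GFP_KERNEL);
	if (!data)
		return -ENOMEM;

	dev_iommu_priv_set(dev, data);
	return 0;
}

/* attach path: read the pointer back, as the attach_dev() hits above do */
static int foo_iommu_attach_dev(struct iommu_domain *domain, struct device *dev)
{
	struct foo_iommu_data *data = dev_iommu_priv_get(dev);

	if (!data)	/* device was never set up by the probe path */
		return -ENODEV;

	/* ... program the hardware context for data->stream_id ... */
	return 0;
}
```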
358 info = dev_iommu_priv_get(dev); in get_domain_info()
804 return dev_iommu_priv_get(dev) == DEFER_DEVICE_DOMAIN_INFO; in attach_deferred()
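A few hits compare the private pointer against a marker value instead of dereferencing it: attach_deferred() above checks for DEFER_DEVICE_DOMAIN_INFO, and the gart_iommu hits compare it against the attached domain. Below is a hedged sketch of that sentinel-comparison style; BAR_DEFERRED_INFO and the bar_* names are invented for illustration.

```c
/*
 * Hypothetical sketch of the sentinel-comparison pattern: BAR_DEFERRED_INFO
 * and the bar_* helpers are made up; only dev_iommu_priv_get()/_set() are
 * real kernel helpers.
 */
#include <linux/device.h>
#include <linux/iommu.h>

#define BAR_DEFERRED_INFO	((void *)-1)	/* marker value, never dereferenced */

/* mark a device whose IOMMU setup must be completed later */
static void bar_defer_device(struct device *dev)
{
	dev_iommu_priv_set(dev, BAR_DEFERRED_INFO);
}

/* later callbacks test the marker rather than treating priv as driver data */
static bool bar_attach_deferred(struct device *dev)
{
	return dev_iommu_priv_get(dev) == BAR_DEFERRED_INFO;
}
```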