| /drivers/iommu/arm/arm-smmu-v3/ |
| A D | arm-smmu-v3-sva.c |
     53  u16 asid)  in arm_smmu_make_sva_cd() (argument)
     75  FIELD_PREP(CTXDESC_CD_0_ASID, asid));  in arm_smmu_make_sva_cd()
    158  arm_smmu_tlb_inv_asid(smmu_domain->smmu, smmu_domain->cd.asid);  in arm_smmu_mm_arch_invalidate_secondary_tlbs()
    160  arm_smmu_tlb_inv_range_asid(start, size, smmu_domain->cd.asid,  in arm_smmu_mm_arch_invalidate_secondary_tlbs()
    188  smmu_domain->cd.asid);  in arm_smmu_mm_release()
    194  arm_smmu_tlb_inv_asid(smmu_domain->smmu, smmu_domain->cd.asid);  in arm_smmu_mm_release()
    312  xa_erase(&arm_smmu_asid_xa, smmu_domain->cd.asid);  in arm_smmu_sva_domain_free()
    332  u32 asid;  in arm_smmu_sva_domain_alloc() (local)
    351  ret = xa_alloc(&arm_smmu_asid_xa, &asid, smmu_domain,  in arm_smmu_sva_domain_alloc()
    356  smmu_domain->cd.asid = asid;  in arm_smmu_sva_domain_alloc()
    [all …]
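The xa_alloc()/xa_erase() hits above show the SVA code drawing ASIDs from an allocating xarray at domain creation and returning them at domain free. Below is a minimal, hedged sketch of that pattern; the names example_domain, example_asid_xa and EXAMPLE_MAX_ASID are illustrative assumptions, not the driver's own symbols.

```c
#include <linux/xarray.h>
#include <linux/errno.h>

static DEFINE_XARRAY_ALLOC(example_asid_xa);	/* xarray that hands out free indices */

struct example_domain {
	u32 asid;
};

#define EXAMPLE_MAX_ASID 0xffff		/* assume a 16-bit ASID space */

static int example_domain_alloc_asid(struct example_domain *d, gfp_t gfp)
{
	u32 asid;
	int ret;

	/* Pick a free ASID and record which domain owns it. */
	ret = xa_alloc(&example_asid_xa, &asid, d,
		       XA_LIMIT(1, EXAMPLE_MAX_ASID), gfp);
	if (ret)
		return ret;

	d->asid = asid;
	return 0;
}

static void example_domain_free_asid(struct example_domain *d)
{
	/* Release the ASID so a later xa_alloc() can reuse it. */
	xa_erase(&example_asid_xa, d->asid);
}
```

Keeping the owning domain as the xarray entry also gives a reverse lookup (ASID to domain) for free, which is handy when a TLB invalidation arrives tagged only with an ASID.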
|
| A D | arm-smmu-v3-test.c |
    452  static void arm_smmu_test_make_s1_cd(struct arm_smmu_cd *cd, unsigned int asid)  in arm_smmu_test_make_s1_cd() (argument)
    461  .asid = asid,  in arm_smmu_test_make_s1_cd()
    502  static void arm_smmu_test_make_sva_cd(struct arm_smmu_cd *cd, unsigned int asid)  in arm_smmu_test_make_sva_cd() (argument)
    508  arm_smmu_make_sva_cd(cd, &master, &sva_mm, asid);  in arm_smmu_test_make_sva_cd()
    512  unsigned int asid)  in arm_smmu_test_make_sva_release_cd() (argument)
    518  arm_smmu_make_sva_cd(cd, &master, NULL, asid);  in arm_smmu_test_make_sva_release_cd()
|
| A D | arm-smmu-v3.h |
    555  u16 asid;  (member)
    664  u16 asid;  (member)
    924  u16 asid);
    970  void arm_smmu_tlb_inv_asid(struct arm_smmu_device *smmu, u16 asid);
    971  void arm_smmu_tlb_inv_range_asid(unsigned long iova, size_t size, int asid,
|
| /drivers/misc/sgi-gru/ |
| A D | grumain.c |
     96  if (asid >= limit)  in gru_reset_asid_limit()
    109  asid += ASID_INC;  in gru_reset_asid_limit()
    126  gru->gs_asid = asid;  in gru_reset_asid_limit()
    128  asid, limit);  in gru_reset_asid_limit()
    129  return asid;  in gru_reset_asid_limit()
    135  int asid;  in gru_assign_asid() (local)
    140  asid = gru_reset_asid_limit(gru, asid);  in gru_assign_asid()
    143  return asid;  in gru_assign_asid()
    226  int asid;  in gru_load_mm_tracker() (local)
    253  return asid;  in gru_load_mm_tracker()
    [all …]
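The gru_assign_asid()/gru_reset_asid_limit() hits suggest a per-node counter that is bumped by ASID_INC and reset once it reaches a limit. A generic sketch of that wrap-and-reset idea follows; the struct, constants, and policy are assumptions for illustration, not the SGI GRU driver's actual fields.

```c
/* Illustrative only: one counter per GRU-like node, wrapping at a limit. */
struct example_node {
	unsigned int next_asid;
	unsigned int asid_limit;
};

#define EXAMPLE_ASID_INC	1
#define EXAMPLE_ASID_FIRST	1	/* keep 0 reserved as "no ASID" */

static unsigned int example_assign_asid(struct example_node *node)
{
	unsigned int asid;

	if (node->next_asid >= node->asid_limit) {
		/*
		 * The counter wrapped: a real driver would flush stale TLB
		 * entries here before handing old numbers out again.
		 */
		node->next_asid = EXAMPLE_ASID_FIRST;
	}
	asid = node->next_asid;
	node->next_asid += EXAMPLE_ASID_INC;
	return asid;
}
```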
|
| A D | grutlbpurge.c |
    150  int grupagesize, pagesize, pageshift, gid, asid;  in gru_flush_tlb_range() (local)
    167  asid = asids->mt_asid;  in gru_flush_tlb_range()
    168  if (asids->mt_ctxbitmap && asid) {  in gru_flush_tlb_range()
    170  asid = GRUASID(asid, start);  in gru_flush_tlb_range()
    173  gid, asid, start, grupagesize, num, asids->mt_ctxbitmap);  in gru_flush_tlb_range()
    175  tgh_invalidate(tgh, start, ~0, asid, grupagesize, 0,  in gru_flush_tlb_range()
    184  gid, asid, asids->mt_ctxbitmap,  in gru_flush_tlb_range()
|
| A D | gruhandles.c |
    135  int asid, int pagesize, int global, int n,  in tgh_invalidate() (argument)
    139  tgh->asid = asid;  in tgh_invalidate()
    152  unsigned long vaddr, int asid, int dirty,  in tfh_write_only() (argument)
    155  tfh->fillasid = asid;  in tfh_write_only()
    168  unsigned long vaddr, int asid, int dirty,  in tfh_write_restart() (argument)
    171  tfh->fillasid = asid;  in tfh_write_restart()
|
| A D | gruhandles.h |
    201  unsigned int asid:24; /* DW 2 */  (member)
    375  unsigned int asid[8]; /* DW 2 - 5 */  (member)
    508  unsigned long vaddrmask, int asid, int pagesize, int global, int n,
    511  int gaa, unsigned long vaddr, int asid, int dirty, int pagesize);
    513  int gaa, unsigned long vaddr, int asid, int dirty, int pagesize);
|
| A D | grufault.c |
    311  unsigned long fault_vaddr, int asid, int write,  in gru_preload_tlb() (argument)
    333  if (ret || tfh_write_only(tfh, gpa, GAA_RAM, vaddr, asid, write,  in gru_preload_tlb()
    339  vaddr, asid, write, pageshift, gpa);  in gru_preload_tlb()
    362  int pageshift = 0, asid, write, ret, atomic = !cbk, indexway;  in gru_try_dropin() (local)
    399  asid = tfh->missasid;  in gru_try_dropin()
    401  if (asid == 0)  in gru_try_dropin()
    428  gru_preload_tlb(gru, gts, atomic, vaddr, asid, write, tlb_preload_count, tfh, cbe);  in gru_try_dropin()
    434  tfh_write_restart(tfh, gpa, GAA_RAM, vaddr, asid, write,  in gru_try_dropin()
    439  atomic ? "atomic" : "non-atomic", gru->gs_gid, gts, tfh, vaddr, asid,  in gru_try_dropin()
|
| /drivers/vhost/ |
| A D | vdpa.c |
     71  u64 last, u32 asid);
     86  if (as->id == asid)  in asid_to_as()
    107  if (asid_to_as(v, asid))  in vhost_vdpa_alloc_as()
    110  if (asid >= v->vdpa->nas)  in vhost_vdpa_alloc_as()
    118  as->id = asid;  in vhost_vdpa_alloc_as()
    125  u32 asid)  in vhost_vdpa_find_alloc_as() (argument)
    141  ops->reset_map(vdpa, asid);  in vhost_vdpa_reset_map()
   1262  asid);  in vhost_vdpa_process_iotlb_msg()
   1289  v->batch_asid = asid;  in vhost_vdpa_process_iotlb_msg()
   1386  u32 asid;  in vhost_vdpa_cleanup() (local)
    [all …]
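The asid_to_as()/vhost_vdpa_alloc_as() hits point at a "look up the address space for an ASID, create it on first use, reject ASIDs beyond the device's capacity" flow. The sketch below illustrates that flow with a plain list; the types, list-based lookup, and names are assumptions, not vhost-vdpa's actual layout.

```c
#include <linux/list.h>
#include <linux/slab.h>

struct example_as {
	u32 id;
	struct list_head node;
};

struct example_dev {
	struct list_head as_list;
	u32 nas;		/* number of address spaces the device supports */
};

static struct example_as *example_asid_to_as(struct example_dev *v, u32 asid)
{
	struct example_as *as;

	list_for_each_entry(as, &v->as_list, node)
		if (as->id == asid)
			return as;
	return NULL;
}

static struct example_as *example_find_alloc_as(struct example_dev *v, u32 asid)
{
	struct example_as *as = example_asid_to_as(v, asid);

	if (as)
		return as;
	if (asid >= v->nas)	/* reject ASIDs the device cannot back */
		return NULL;

	as = kzalloc(sizeof(*as), GFP_KERNEL);
	if (!as)
		return NULL;

	as->id = asid;
	list_add(&as->node, &v->as_list);
	return as;
}
```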
|
| /drivers/vdpa/vdpa_sim/ |
| A D | vdpa_sim.c |
    603  unsigned int asid)  in vdpasim_set_group_asid() (argument)
    612  if (asid >= vdpasim->dev_attr.nas)  in vdpasim_set_group_asid()
    615  iommu = &vdpasim->iommu[asid];  in vdpasim_set_group_asid()
    643  iommu = &vdpasim->iommu[asid];  in vdpasim_set_map()
    645  vdpasim->iommu_pt[asid] = false;  in vdpasim_set_map()
    671  if (vdpasim->iommu_pt[asid])  in vdpasim_reset_map()
    676  vdpasim->iommu_pt[asid] = true;  in vdpasim_reset_map()
    717  if (vdpasim->iommu_pt[asid]) {  in vdpasim_dma_map()
    719  vdpasim->iommu_pt[asid] = false;  in vdpasim_dma_map()
    736  if (vdpasim->iommu_pt[asid]) {  in vdpasim_dma_unmap()
    [all …]
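The vdpasim hits show each ASID selecting one IOMMU instance plus an iommu_pt[] flag that records whether that ASID is still in its default identity-mapped ("pt") state: the first explicit map clears the flag, reset_map() restores it. The sketch below is an illustrative reduction of that state machine under assumed names, not vdpa_sim's real code.

```c
#include <linux/errno.h>
#include <linux/types.h>

struct example_sim {
	bool *iommu_pt;		/* one "still identity-mapped" flag per ASID */
	unsigned int nas;	/* number of address spaces */
};

static int example_dma_map(struct example_sim *sim, unsigned int asid)
{
	if (asid >= sim->nas)
		return -EINVAL;

	/* Leaving passthrough: from now on only explicit mappings apply. */
	if (sim->iommu_pt[asid])
		sim->iommu_pt[asid] = false;

	/* ... install the requested mapping in the per-ASID IOMMU ... */
	return 0;
}

static int example_reset_map(struct example_sim *sim, unsigned int asid)
{
	if (asid >= sim->nas)
		return -EINVAL;

	if (sim->iommu_pt[asid])
		return 0;	/* already back to the identity default */

	/* ... drop all mappings for this ASID ... */
	sim->iommu_pt[asid] = true;
	return 0;
}
```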
|
| /drivers/accel/habanalabs/gaudi/ |
| A D | gaudi.c |
   6210  asid);  in gaudi_mmu_prepare()
   6212  asid);  in gaudi_mmu_prepare()
   6214  asid);  in gaudi_mmu_prepare()
   6216  asid);  in gaudi_mmu_prepare()
   6218  asid);  in gaudi_mmu_prepare()
   6223  asid);  in gaudi_mmu_prepare()
   6225  asid);  in gaudi_mmu_prepare()
   6227  asid);  in gaudi_mmu_prepare()
   6229  asid);  in gaudi_mmu_prepare()
   6231  asid);  in gaudi_mmu_prepare()
    [all …]
|
| /drivers/accel/habanalabs/common/ |
| A D | asid.c |
     50  void hl_asid_free(struct hl_device *hdev, unsigned long asid)  in hl_asid_free() (argument)
     52  if (asid == HL_KERNEL_ASID_ID || asid >= hdev->asic_prop.max_asid) {  in hl_asid_free()
     53  dev_crit(hdev->dev, "Invalid ASID %lu", asid);  in hl_asid_free()
     57  clear_bit(asid, hdev->asid_bitmap);  in hl_asid_free()
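hl_asid_free() validates the ASID and clears its bit in a bitmap, with ASID 0 reserved for the kernel context (see context.c below, where hl_ctx_init() assigns HL_KERNEL_ASID_ID to the kernel driver). A hedged sketch of a bitmap-backed ASID pool in that style follows; the example_* names, the mutex, and the sizes are assumptions, only the clear_bit()/bitmap pattern is taken from the listing.

```c
#include <linux/bitmap.h>
#include <linux/mutex.h>

#define EXAMPLE_KERNEL_ASID	0
#define EXAMPLE_MAX_ASID	1024

static DEFINE_MUTEX(example_asid_lock);
/* Bit 0 starts set so the kernel ASID can never be handed out. */
static DECLARE_BITMAP(example_asid_bitmap, EXAMPLE_MAX_ASID) = { 1 };

static unsigned long example_asid_alloc(void)
{
	unsigned long asid;

	mutex_lock(&example_asid_lock);
	asid = find_first_zero_bit(example_asid_bitmap, EXAMPLE_MAX_ASID);
	if (asid < EXAMPLE_MAX_ASID)
		set_bit(asid, example_asid_bitmap);
	else
		asid = 0;	/* 0 doubles as "allocation failed" */
	mutex_unlock(&example_asid_lock);

	return asid;
}

static void example_asid_free(unsigned long asid)
{
	if (asid == EXAMPLE_KERNEL_ASID || asid >= EXAMPLE_MAX_ASID)
		return;		/* never release the kernel ASID or an out-of-range one */
	clear_bit(asid, example_asid_bitmap);
}
```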
|
| A D | context.c |
    104  if (ctx->asid != HL_KERNEL_ASID_ID) {  in hl_ctx_fini()
    105  dev_dbg(hdev->dev, "closing user context, asid=%u\n", ctx->asid);  in hl_ctx_fini()
    120  hl_asid_free(hdev, ctx->asid);  in hl_ctx_fini()
    228  ctx->asid = HL_KERNEL_ASID_ID; /* Kernel driver gets ASID 0 */  in hl_ctx_init()
    242  ctx->asid = hl_asid_alloc(hdev);  in hl_ctx_init()
    243  if (!ctx->asid) {  in hl_ctx_init()
    274  current->comm, ctx->asid);  in hl_ctx_init()
    284  if (ctx->asid != HL_KERNEL_ASID_ID)  in hl_ctx_init()
    285  hl_asid_free(hdev, ctx->asid);  in hl_ctx_init()
|
| A D | memory.c |
    135  phys_pg_pack->asid = ctx->asid;  in alloc_device_memory()
    859  phys_pg_pack->asid = ctx->asid;  in init_phys_pg_pack_from_userptr()
   1147  phys_pg_pack->asid != ctx->asid) {  in map_device_va()
   1196  ctx->asid, ret_vaddr, phys_pg_pack->total_size);  in map_device_va()
   1206  rc = hl_mmu_prefetch_cache_range(ctx, *vm_type, ctx->asid, ret_vaddr,  in map_device_va()
   1362  rc = hl_mmu_invalidate_cache_range(hdev, true, *vm_type, ctx->asid, vaddr,  in unmap_device_va()
   2618  dev_err(hdev->dev, "failed to init context %d\n", ctx->asid);  in vm_ctx_init_with_ranges()
   2770  hnode->vaddr, ctx->asid);  in hl_vm_ctx_fini()
   2787  if (phys_pg_list->asid == ctx->asid) {  in hl_vm_ctx_fini()
   2790  phys_pg_list, ctx->asid);  in hl_vm_ctx_fini()
    [all …]
|
| /drivers/iommu/arm/arm-smmu/ |
| A D | qcom_iommu.c |
     87  return qcom_iommu->ctxs[asid];  in to_ctx()
    161  iova |= ctx->asid;  in qcom_iommu_tlb_inv_range_nosync()
    207  fsr, iova, fsynr, ctx->asid);  in qcom_iommu_fault()
    553  unsigned asid = args->args[0];  in qcom_iommu_of_xlate() (local)
    659  int asid;  in get_asid() (local)
    673  asid = val;  in get_asid()
    675  asid = reg / 0x1000;  in get_asid()
    677  return asid;  in get_asid()
    727  ctx->asid = ret;  in qcom_iommu_ctx_probe()
    731  qcom_iommu->ctxs[ctx->asid] = ctx;  in qcom_iommu_ctx_probe()
    [all …]
|
| /drivers/gpu/drm/xe/ |
| A D | xe_gt_pagefault.c |
     29  u32 asid;  (member)
     56  u32 asid;  (member)
    175  vm = xa_load(&xe->usm.asid_to_vm, asid);  in asid_to_vm()
    197  vm = asid_to_vm(xe, pf->asid);  in handle_pagefault()
    283  pf->asid = FIELD_GET(PFD_ASID, desc->dw1);  in get_pagefault()
    315  u32 asid;  in xe_guc_pagefault_handler() (local)
    321  asid = FIELD_GET(PFD_ASID, msg[1]);  in xe_guc_pagefault_handler()
    369  FIELD_PREP(PFR_ASID, pf.asid);  in pf_queue_work_func()
    554  vm = asid_to_vm(xe, acc->asid);  in handle_acc()
    661  u32 asid;  in xe_guc_access_counter_notify_handler() (local)
    [all …]
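The pattern above is: the fault descriptor carries an ASID bit-field extracted with FIELD_GET(), and that ASID indexes an xarray mapping ASID to VM. Here is a hedged sketch of that lookup path; the mask value, struct names, and the absence of locking/refcounting are illustrative assumptions, not the xe driver's definitions.

```c
#include <linux/bitfield.h>
#include <linux/bits.h>
#include <linux/xarray.h>

#define EXAMPLE_PFD_ASID	GENMASK(31, 12)	/* assumed bit position of the ASID field */

struct example_vm;

struct example_usm {
	struct xarray asid_to_vm;	/* entries xa_store()d when a VM is created */
};

static struct example_vm *example_asid_to_vm(struct example_usm *usm, u32 asid)
{
	/* NULL means the ASID is stale or was never registered. */
	return xa_load(&usm->asid_to_vm, asid);
}

static struct example_vm *example_handle_fault(struct example_usm *usm, u32 desc_dw1)
{
	u32 asid = FIELD_GET(EXAMPLE_PFD_ASID, desc_dw1);

	return example_asid_to_vm(usm, asid);
}
```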
|
| A D | xe_trace_bo.h |
     96  __field(u32, asid)
    106  __entry->asid = xe_vma_vm(vma)->usm.asid;
    114  __entry->asid, __entry->start,
    195  __field(u32, asid)
    202  __entry->asid = vm->usm.asid;
    207  __get_str(dev), __entry->vm, __entry->asid,
|
| A D | xe_svm.c |
     56  (operaton__), range_to_vm(&(r__)->base)->usm.asid, \
    188  vm->usm.asid, gpusvm, notifier->notifier.invalidate_seq,  in xe_svm_invalidate()
    861  vm->usm.asid, ERR_PTR(err));  in xe_svm_handle_pagefault()
    866  vm->usm.asid, ERR_PTR(err));  in xe_svm_handle_pagefault()
    880  vm->usm.asid, &vm->svm.gpusvm, ERR_PTR(err));  in xe_svm_handle_pagefault()
    886  vm->usm.asid, &vm->svm.gpusvm, ERR_PTR(err));  in xe_svm_handle_pagefault()
|
| /drivers/iommu/ |
| A D | tegra-smmu.c |
    221  unsigned long asid)  in smmu_flush_tlb_asid() (argument)
    226  value = (asid & 0x3) << 29;  in smmu_flush_tlb_asid()
    228  value = (asid & 0x7f) << 24;  in smmu_flush_tlb_asid()
    235  unsigned long asid,  in smmu_flush_tlb_section() (argument)
    241  value = (asid & 0x3) << 29;  in smmu_flush_tlb_section()
    250  unsigned long asid,  in smmu_flush_tlb_group() (argument)
    256  value = (asid & 0x3) << 29;  in smmu_flush_tlb_group()
    360  unsigned int asid)  in tegra_smmu_enable() (argument)
    393  unsigned int asid)  in tegra_smmu_disable() (argument)
   1043  unsigned int asid;  in tegra_smmu_swgroups_show() (local)
    [all …]
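The smmu_flush_tlb_*() hits show two packings of the ASID into the TLB-flush register value: a 2-bit field at bit 29 versus a 7-bit field at bit 24, depending on the SoC generation. A small hedged sketch of that selection follows; the helper name and the num_asids switch are assumptions, while the masks and shifts come straight from the listing.

```c
#include <linux/types.h>

static u32 example_tlb_flush_asid_value(unsigned long asid, unsigned int num_asids)
{
	if (num_asids == 4)
		return (asid & 0x3) << 29;	/* older SoCs: 2-bit ASID field */

	return (asid & 0x7f) << 24;		/* newer SoCs: 7-bit ASID field */
}
```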
|
| /drivers/accel/habanalabs/common/mmu/ |
| A D | mmu.c |
    657  u32 flags, u32 asid, u64 va, u64 size)  in hl_mmu_invalidate_cache_range() (argument)
    662  asid, va, size);  in hl_mmu_invalidate_cache_range()
    683  hdev->asic_funcs->mmu_prefetch_cache_range(ctx, pfw->flags, pfw->asid, pfw->va, pfw->size);  in hl_mmu_prefetch_work_function()
    696  int hl_mmu_prefetch_cache_range(struct hl_ctx *ctx, u32 flags, u32 asid, u64 va, u64 size)  in hl_mmu_prefetch_cache_range() (argument)
    709  handle_prefetch_work->asid = asid;  in hl_mmu_prefetch_cache_range()
    785  int asid;  in hl_mmu_hr_pool_destroy() (local)
    792  for (asid = 0 ; asid < prop->max_asid ; asid++) {  in hl_mmu_hr_pool_destroy()
    793  hop0_pgt = &hr_priv->mmu_asid_hop0[asid];  in hl_mmu_hr_pool_destroy()
   1252  (ctx->asid * ctx->hdev->asic_prop.dmmu.hop_table_size);  in hl_mmu_dr_get_phys_hop0_addr()
   1258  (ctx->asid * ctx->hdev->asic_prop.dmmu.hop_table_size);  in hl_mmu_dr_get_hop0_addr()
|
| A D | mmu_v1.c |
     36  (ctx->asid == HL_KERNEL_ASID_ID))  in dram_default_mapping_init()
    136  (ctx->asid == HL_KERNEL_ASID_ID))  in dram_default_mapping_fini()
    209  ctx->asid);  in hl_mmu_v1_ctx_fini()
    214  pgt_info->phys_addr, ctx->asid, pgt_info->num_of_ptes);  in hl_mmu_v1_ctx_fini()
|
| A D | mmu_v2_hr.c |
     33  return &ctx->hdev->mmu_priv.hr.mmu_asid_hop0[ctx->asid];  in hl_mmu_v2_hr_get_hop0_pgt_info()
    104  ctx->asid);  in hl_mmu_v2_hr_ctx_fini()
    109  pgt_info->phys_addr, ctx->asid, pgt_info->num_of_ptes);  in hl_mmu_v2_hr_ctx_fini()
|
| /drivers/vdpa/mlx5/core/ |
| A D | mlx5_vdpa.h |
    160  unsigned int asid);
    163  unsigned int asid);
    165  int mlx5_vdpa_reset_mr(struct mlx5_vdpa_dev *mvdev, unsigned int asid);
|
| A D | mr.c |
    732  unsigned int asid)  in mlx5_vdpa_update_mr() (argument)
    734  struct mlx5_vdpa_mr *old_mr = mvdev->mres.mr[asid];  in mlx5_vdpa_update_mr()
    739  mvdev->mres.mr[asid] = new_mr;  in mlx5_vdpa_update_mr()
    842  unsigned int asid)  in mlx5_vdpa_update_cvq_iotlb() (argument)
    846  if (mvdev->mres.group2asid[MLX5_VDPA_CVQ_GROUP] != asid)  in mlx5_vdpa_update_cvq_iotlb()
    872  int mlx5_vdpa_reset_mr(struct mlx5_vdpa_dev *mvdev, unsigned int asid)  in mlx5_vdpa_reset_mr() (argument)
    874  if (asid >= MLX5_VDPA_NUM_AS)  in mlx5_vdpa_reset_mr()
    877  mlx5_vdpa_update_mr(mvdev, NULL, asid);  in mlx5_vdpa_reset_mr()
    879  if (asid == 0 && MLX5_CAP_GEN(mvdev->mdev, umem_uid_0)) {  in mlx5_vdpa_reset_mr()
    883  mlx5_vdpa_update_cvq_iotlb(mvdev, NULL, asid);  in mlx5_vdpa_reset_mr()
|
| /drivers/gpu/drm/msm/ |
| A D | msm_iommu.c |
     33  u32 asid;  (member)
    245  phys_addr_t *ttbr, int *asid)  in msm_iommu_pagetable_params() (argument)
    257  if (asid)  in msm_iommu_pagetable_params()
    258  *asid = pagetable->asid;  in msm_iommu_pagetable_params()
    611  pagetable->asid = 0;  in msm_iommu_pagetable_create()
|