Searched refs:aspace (Results 1 – 25 of 46) sorted by relevance

/linux-6.3-rc2/drivers/gpu/drm/msm/
msm_gem_vma.c
19 if (aspace->mmu) in msm_gem_address_space_destroy()
20 aspace->mmu->funcs->destroy(aspace->mmu); in msm_gem_address_space_destroy()
22 kfree(aspace); in msm_gem_address_space_destroy()
28 if (aspace) in msm_gem_address_space_put()
38 return aspace; in msm_gem_address_space_get()
72 aspace->mmu->funcs->unmap(aspace->mmu, vma->iova, size); in msm_gem_purge_vma()
114 if (aspace && aspace->mmu) in msm_gem_map_vma()
115 ret = aspace->mmu->funcs->map(aspace->mmu, vma->iova, sgt, in msm_gem_map_vma()
178 aspace = kzalloc(sizeof(*aspace), GFP_KERNEL); in msm_gem_address_space_create()
179 if (!aspace) in msm_gem_address_space_create()
[all …]
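
The msm_gem_vma.c hits above outline the msm_gem_address_space lifecycle: _create() allocates the space, _get()/_put() manage a reference count, and the final teardown destroys the backing MMU through its function table before freeing the struct. The following standalone C sketch approximates that ownership pattern; the struct layouts and helper names (aspace_create, aspace_put, a plain int refcount) are simplified stand-ins for the kernel's kref-based code, not the driver's actual definitions.

    #include <stdlib.h>

    /* Hypothetical, simplified stand-ins for the kernel structs. */
    struct msm_mmu;
    struct msm_mmu_funcs {
        void (*destroy)(struct msm_mmu *mmu);
    };
    struct msm_mmu {
        const struct msm_mmu_funcs *funcs;
    };

    struct msm_gem_address_space {
        const char *name;
        struct msm_mmu *mmu;
        int refcount;                 /* the kernel uses a kref */
    };

    /* create: allocate the space and take the initial reference */
    struct msm_gem_address_space *
    aspace_create(struct msm_mmu *mmu, const char *name)
    {
        struct msm_gem_address_space *aspace = calloc(1, sizeof(*aspace));

        if (!aspace)
            return NULL;
        aspace->mmu = mmu;
        aspace->name = name;
        aspace->refcount = 1;
        return aspace;
    }

    /* destroy: tear down the backing MMU through its ops table, then free */
    static void aspace_destroy(struct msm_gem_address_space *aspace)
    {
        if (aspace->mmu)
            aspace->mmu->funcs->destroy(aspace->mmu);
        free(aspace);
    }

    /* get/put: share and release references; the last put destroys */
    struct msm_gem_address_space *
    aspace_get(struct msm_gem_address_space *aspace)
    {
        if (aspace)
            aspace->refcount++;
        return aspace;
    }

    void aspace_put(struct msm_gem_address_space *aspace)
    {
        if (aspace && --aspace->refcount == 0)
            aspace_destroy(aspace);
    }

A caller that shares the space takes a reference with aspace_get() and releases it with aspace_put(); the last put triggers the MMU destroy.
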
msm_gem.h
63 struct msm_gem_address_space *aspace; member
80 int msm_gem_map_vma(struct msm_gem_address_space *aspace,
128 struct msm_gem_address_space *aspace);
130 struct msm_gem_address_space *aspace, uint64_t *iova);
132 struct msm_gem_address_space *aspace, uint64_t iova);
134 struct msm_gem_address_space *aspace, uint64_t *iova,
137 struct msm_gem_address_space *aspace, uint64_t *iova);
139 struct msm_gem_address_space *aspace);
160 uint32_t flags, struct msm_gem_address_space *aspace,
163 struct msm_gem_address_space *aspace);
[all …]
msm_gem.c
316 vma->aspace = aspace; in add_vma()
332 if (vma->aspace == aspace) in lookup_vma()
363 if (vma->aspace) { in put_iova_spaces()
393 vma = lookup_vma(obj, aspace); in get_vma_locked()
398 vma = add_vma(obj, aspace); in get_vma_locked()
570 clear_iova(obj, aspace); in msm_gem_set_iova()
590 vma = lookup_vma(obj, aspace); in msm_gem_unpin_iova()
922 if (vma->aspace) { in msm_gem_describe()
923 struct msm_gem_address_space *aspace = vma->aspace; in msm_gem_describe() local
932 name = aspace->name; in msm_gem_describe()
[all …]
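
msm_gem.c keeps one VMA per (object, address space) pair: lookup_vma() scans the object's VMA list for an entry whose vma->aspace matches, and add_vma() records a new mapping when none exists yet. Below is a minimal sketch of that lookup-or-create pattern using a plain singly linked list; the field and helper names are assumptions for illustration, since the kernel version uses list_head and holds the object lock.

    #include <stdlib.h>

    /* Hypothetical, simplified stand-ins for the kernel structs. */
    struct msm_gem_address_space;

    struct msm_gem_vma {
        struct msm_gem_address_space *aspace;   /* space this mapping lives in */
        unsigned long long iova;
        struct msm_gem_vma *next;
    };

    struct msm_gem_object {
        struct msm_gem_vma *vmas;               /* the kernel uses a list_head */
    };

    /* lookup_vma(): find this object's mapping in the given address space */
    struct msm_gem_vma *
    lookup_vma(struct msm_gem_object *obj, struct msm_gem_address_space *aspace)
    {
        for (struct msm_gem_vma *vma = obj->vmas; vma; vma = vma->next)
            if (vma->aspace == aspace)
                return vma;
        return NULL;
    }

    /* add_vma(): record a new per-aspace mapping for the object */
    struct msm_gem_vma *
    add_vma(struct msm_gem_object *obj, struct msm_gem_address_space *aspace)
    {
        struct msm_gem_vma *vma = calloc(1, sizeof(*vma));

        if (!vma)
            return NULL;
        vma->aspace = aspace;
        vma->next = obj->vmas;
        obj->vmas = vma;
        return vma;
    }

    /* get_vma_locked(): reuse an existing mapping or create a fresh one */
    struct msm_gem_vma *
    get_vma_locked(struct msm_gem_object *obj, struct msm_gem_address_space *aspace)
    {
        struct msm_gem_vma *vma = lookup_vma(obj, aspace);

        return vma ? vma : add_vma(obj, aspace);
    }
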
msm_gpu.c
375 if (submit->aspace) in recover_worker()
376 submit->aspace->faults++; in recover_worker()
485 gpu->aspace->mmu->funcs->resume_translation(gpu->aspace->mmu); in fault_worker()
839 if (!IS_ERR(aspace)) in msm_gpu_create_private_address_space()
843 if (IS_ERR_OR_NULL(aspace)) in msm_gpu_create_private_address_space()
844 aspace = msm_gem_address_space_get(gpu->aspace); in msm_gpu_create_private_address_space()
846 return aspace; in msm_gpu_create_private_address_space()
949 if (gpu->aspace == NULL) in msm_gpu_init()
951 else if (IS_ERR(gpu->aspace)) { in msm_gpu_init()
952 ret = PTR_ERR(gpu->aspace); in msm_gpu_init()
[all …]
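
msm_gpu.c's msm_gpu_create_private_address_space() tries to build a per-process address space and, if that yields an error pointer or NULL, falls back to taking a reference on the shared gpu->aspace. The sketch below reproduces that fallback under simplified assumptions: the ERR_PTR/IS_ERR helpers are re-declared locally and gpu_new_private_aspace() is a hypothetical stand-in for the per-GPU callback.

    #include <stdint.h>
    #include <errno.h>

    /* Local, simplified stand-ins for the kernel's ERR_PTR machinery. */
    static inline void *ERR_PTR(long error)
    {
        return (void *)(uintptr_t)error;
    }
    static inline int IS_ERR(const void *ptr)
    {
        return (uintptr_t)ptr >= (uintptr_t)-4095;    /* MAX_ERRNO */
    }
    static inline int IS_ERR_OR_NULL(const void *ptr)
    {
        return !ptr || IS_ERR(ptr);
    }

    /* Hypothetical, simplified stand-ins for the driver structs. */
    struct msm_gem_address_space { int refcount; };
    struct msm_gpu { struct msm_gem_address_space *aspace; };

    static struct msm_gem_address_space *
    aspace_get(struct msm_gem_address_space *aspace)
    {
        if (aspace)
            aspace->refcount++;
        return aspace;
    }

    /* Hypothetical per-GPU hook: may return a new private space, an error
     * pointer, or NULL when per-process spaces are not supported. */
    static struct msm_gem_address_space *
    gpu_new_private_aspace(struct msm_gpu *gpu)
    {
        (void)gpu;
        return ERR_PTR(-ENODEV);   /* pretend they are unsupported here */
    }

    /* Fallback pattern from msm_gpu_create_private_address_space(): if no
     * usable private space was produced, share the GPU's global one. */
    struct msm_gem_address_space *
    create_private_address_space(struct msm_gpu *gpu)
    {
        struct msm_gem_address_space *aspace = gpu_new_private_aspace(gpu);

        if (IS_ERR_OR_NULL(aspace))
            aspace = aspace_get(gpu->aspace);

        return aspace;
    }
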
msm_fb.c
79 struct msm_gem_address_space *aspace, in msm_framebuffer_prepare() argument
91 ret = msm_gem_get_and_pin_iova(fb->obj[i], aspace, &msm_fb->iova[i]); in msm_framebuffer_prepare()
102 struct msm_gem_address_space *aspace, in msm_framebuffer_cleanup() argument
112 msm_gem_unpin_iova(fb->obj[i], aspace); in msm_framebuffer_cleanup()
119 struct msm_gem_address_space *aspace, int plane) in msm_framebuffer_iova() argument
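
msm_fb.c shows the framebuffer side of the same idea: prepare pins each plane's GEM object into a given address space and caches the resulting IOVA, cleanup unpins them, and msm_framebuffer_iova() hands the cached address to the scanout hardware. A rough sketch of that per-plane pin/unpin flow follows; the helpers gem_pin_iova()/gem_unpin_iova() and the fixed four-plane array are illustrative stand-ins, not the driver's API.

    #include <stdint.h>

    #define MAX_PLANES 4

    /* Hypothetical, simplified stand-ins for the driver structs. */
    struct msm_gem_address_space;
    struct drm_gem_object;

    struct msm_framebuffer {
        struct drm_gem_object *obj[MAX_PLANES];   /* one GEM object per plane */
        uint64_t iova[MAX_PLANES];                /* pinned address per plane */
        int nr_planes;
    };

    /* Stubs standing in for msm_gem_get_and_pin_iova()/msm_gem_unpin_iova(). */
    static int gem_pin_iova(struct drm_gem_object *obj,
                            struct msm_gem_address_space *aspace, uint64_t *iova)
    {
        (void)obj; (void)aspace;
        *iova = 0;    /* a real implementation maps the object and pins it */
        return 0;
    }

    static void gem_unpin_iova(struct drm_gem_object *obj,
                               struct msm_gem_address_space *aspace)
    {
        (void)obj; (void)aspace;
    }

    /* prepare: pin every plane's backing object into the display aspace */
    int framebuffer_prepare(struct msm_framebuffer *fb,
                            struct msm_gem_address_space *aspace)
    {
        for (int i = 0; i < fb->nr_planes; i++) {
            int ret = gem_pin_iova(fb->obj[i], aspace, &fb->iova[i]);

            if (ret)
                return ret;
        }
        return 0;
    }

    /* cleanup: drop the pins taken in prepare */
    void framebuffer_cleanup(struct msm_framebuffer *fb,
                             struct msm_gem_address_space *aspace)
    {
        for (int i = 0; i < fb->nr_planes; i++)
            gem_unpin_iova(fb->obj[i], aspace);
    }

    /* iova: the address the scanout hardware gets programmed with */
    uint64_t framebuffer_iova(const struct msm_framebuffer *fb, int plane)
    {
        return fb->iova[plane];
    }
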
msm_drv.c
282 struct msm_gem_address_space *aspace; in msm_kms_init_aspace() local
306 aspace = msm_gem_address_space_create(mmu, "mdp_kms", in msm_kms_init_aspace()
308 if (IS_ERR(aspace)) { in msm_kms_init_aspace()
309 dev_err(mdp_dev, "aspace create, error %pe\n", aspace); in msm_kms_init_aspace()
313 return aspace; in msm_kms_init_aspace()
585 ctx->aspace = msm_gpu_create_private_address_space(priv->gpu, current); in context_init()
787 return msm_gem_get_iova(obj, ctx->aspace, iova); in msm_ioctl_gem_info_iova()
801 if (priv->gpu->aspace == ctx->aspace) in msm_ioctl_gem_info_set_iova()
807 return msm_gem_set_iova(obj, ctx->aspace, iova); in msm_ioctl_gem_info_set_iova()
msm_ringbuffer.c
78 gpu->aspace, &ring->bo, &ring->iova); in msm_ringbuffer_new()
129 msm_gem_kernel_put(ring->bo, ring->gpu->aspace); in msm_ringbuffer_destroy()
/linux-6.3-rc2/drivers/staging/vme_user/
vme_fake.c
212 bridge->slaves[i].aspace = aspace; in fake_slave_set()
240 *aspace = bridge->slaves[i].aspace; in fake_slave_get()
320 bridge->masters[i].aspace = aspace; in fake_master_set()
352 *aspace = bridge->masters[i].aspace; in __fake_master_get()
428 if (aspace != bridge->slaves[i].aspace) in fake_vmeread8()
458 if (aspace != bridge->slaves[i].aspace) in fake_vmeread16()
491 if (aspace != bridge->slaves[i].aspace) in fake_vmeread32()
533 aspace = priv->masters[i].aspace; in fake_master_read()
622 if (aspace != bridge->slaves[i].aspace) in fake_vmewrite8()
726 aspace = bridge->masters[i].aspace; in fake_master_write()
[all …]
vme.c
167 u32 aspace, cycle, dwidth; in vme_get_size() local
201 switch (aspace) { in vme_check_window()
381 if (!(((image->address_attr & aspace) == aspace) && in vme_slave_set()
392 aspace, cycle); in vme_slave_set()
431 aspace, cycle); in vme_slave_get()
585 if (!(((image->address_attr & aspace) == aspace) && in vme_master_set()
1065 vme_attr->aspace = aspace; in vme_dma_vme_attribute()
1256 if ((aspace == handler->aspace) && in vme_bus_error_handler()
1284 handler->aspace = aspace; in vme_register_error_handler()
1564 u32 aspace, u32 cycle) in vme_lm_set() argument
[all …]
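
In the staging VME code, aspace is not a struct but a u32 bitmask of address spaces (A16/A24/A32/A64 and so on), and vme_slave_set()/vme_master_set() accept a window only if every requested bit is present in the window's address_attr. The self-contained example below demonstrates that compatibility test; the flag values and struct name are illustrative rather than copied from the VME headers.

    #include <stdint.h>
    #include <stdio.h>

    /* Address-space flags in the style of the VME driver's bitmask
     * (values here are illustrative, not copied from the headers). */
    #define VME_A16 0x1u
    #define VME_A24 0x2u
    #define VME_A32 0x4u
    #define VME_A64 0x8u

    /* A window advertises the address spaces it can decode. */
    struct vme_window {
        uint32_t address_attr;       /* supported address spaces (bitmask) */
    };

    /* The compatibility test used by vme_slave_set()/vme_master_set():
     * every bit requested in 'aspace' must be supported by the window. */
    int window_supports(const struct vme_window *image, uint32_t aspace)
    {
        return (image->address_attr & aspace) == aspace;
    }

    int main(void)
    {
        struct vme_window image = { .address_attr = VME_A16 | VME_A24 | VME_A32 };

        printf("A24 supported: %d\n", window_supports(&image, VME_A24)); /* 1 */
        printf("A64 supported: %d\n", window_supports(&image, VME_A64)); /* 0 */
        return 0;
    }
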
vme_tsi148.c
484 switch (aspace) { in tsi148_slave_set()
649 *aspace = 0; in tsi148_slave_get()
657 *aspace |= VME_A16; in tsi148_slave_get()
661 *aspace |= VME_A24; in tsi148_slave_get()
665 *aspace |= VME_A32; in tsi148_slave_get()
669 *aspace |= VME_A64; in tsi148_slave_get()
951 switch (aspace) { in tsi148_master_set()
1075 *aspace = 0; in __tsi148_master_get()
1457 switch (aspace) { in tsi148_dma_set_vme_src_attributes()
1555 switch (aspace) { in tsi148_dma_set_vme_dest_attributes()
[all …]
vme_user.h
14 __u32 aspace; /* Address Space */ member
36 __u32 aspace; /* Address Space */ member
vme_bridge.h
49 u32 aspace; member
86 u32 aspace; /* Address space of error window*/ member
186 struct vme_bridge *bridge, u32 aspace,
/linux-6.3-rc2/drivers/gpu/drm/msm/disp/mdp4/
mdp4_kms.c
127 struct msm_gem_address_space *aspace = kms->aspace; in mdp4_destroy() local
130 msm_gem_unpin_iova(mdp4_kms->blank_cursor_bo, kms->aspace); in mdp4_destroy()
133 if (aspace) { in mdp4_destroy()
134 aspace->mmu->funcs->detach(aspace->mmu); in mdp4_destroy()
135 msm_gem_address_space_put(aspace); in mdp4_destroy()
391 struct msm_gem_address_space *aspace; in mdp4_kms_init() local
509 aspace = NULL; in mdp4_kms_init()
511 aspace = msm_gem_address_space_create(mmu, in mdp4_kms_init()
514 if (IS_ERR(aspace)) { in mdp4_kms_init()
517 ret = PTR_ERR(aspace); in mdp4_kms_init()
[all …]
mdp4_plane.c
106 return msm_framebuffer_prepare(new_state->fb, kms->aspace, false); in mdp4_plane_prepare_fb()
121 msm_framebuffer_cleanup(fb, kms->aspace, false); in mdp4_plane_cleanup_fb()
172 msm_framebuffer_iova(fb, kms->aspace, 0)); in mdp4_plane_set_scanout()
174 msm_framebuffer_iova(fb, kms->aspace, 1)); in mdp4_plane_set_scanout()
176 msm_framebuffer_iova(fb, kms->aspace, 2)); in mdp4_plane_set_scanout()
178 msm_framebuffer_iova(fb, kms->aspace, 3)); in mdp4_plane_set_scanout()
/linux-6.3-rc2/drivers/gpu/drm/msm/adreno/
a5xx_preempt.c
232 MSM_BO_WC | MSM_BO_MAP_PRIV, gpu->aspace, &bo, &iova); in preempt_init_ring()
240 MSM_BO_WC, gpu->aspace, &counters_bo, &counters_iova); in preempt_init_ring()
242 msm_gem_kernel_put(bo, gpu->aspace); in preempt_init_ring()
273 msm_gem_kernel_put(a5xx_gpu->preempt_bo[i], gpu->aspace); in a5xx_preempt_fini()
274 msm_gem_kernel_put(a5xx_gpu->preempt_counters_bo[i], gpu->aspace); in a5xx_preempt_fini()
adreno_gpu.c
208 struct msm_gem_address_space *aspace; in adreno_iommu_create_address_space() local
227 aspace = msm_gem_address_space_create(mmu, "gpu", in adreno_iommu_create_address_space()
230 if (IS_ERR(aspace) && !IS_ERR(mmu)) in adreno_iommu_create_address_space()
233 return aspace; in adreno_iommu_create_address_space()
297 if (ctx->aspace) in adreno_get_param()
298 *value = gpu->global_faults + ctx->aspace->faults; in adreno_get_param()
306 if (ctx->aspace == gpu->aspace) in adreno_get_param()
308 *value = ctx->aspace->va_start; in adreno_get_param()
311 if (ctx->aspace == gpu->aspace) in adreno_get_param()
313 *value = ctx->aspace->va_size; in adreno_get_param()
[all …]
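
adreno_gpu.c exposes per-context address-space details through adreno_get_param(): the fault count combines the GPU-wide counter with the context's own aspace->faults, and the VA start/size queries are refused when the context is still using the shared gpu->aspace. The sketch below mirrors that logic with simplified structs; the function names, the msm_context struct, and the -ENODEV error code are assumptions for illustration.

    #include <stdint.h>
    #include <errno.h>

    /* Hypothetical, simplified stand-ins for the driver structs. */
    struct msm_gem_address_space {
        uint64_t va_start;   /* first GPU virtual address managed by this space */
        uint64_t va_size;    /* size of the managed range */
        int faults;          /* per-address-space fault count */
    };

    struct msm_gpu {
        struct msm_gem_address_space *aspace;   /* shared/global GPU space */
        int global_faults;
    };

    struct msm_context {
        struct msm_gem_address_space *aspace;   /* per-process space, if any */
    };

    /* Fault count: global faults plus this context's own, mirroring the
     * MSM_PARAM_FAULTS handling shown in the adreno_get_param() hits. */
    int get_fault_count(struct msm_gpu *gpu, struct msm_context *ctx, uint64_t *value)
    {
        *value = gpu->global_faults;
        if (ctx->aspace)
            *value += ctx->aspace->faults;
        return 0;
    }

    /* VA range queries only make sense for a private per-process space;
     * a context still sharing the global GPU space gets an error. */
    int get_va_start(struct msm_gpu *gpu, struct msm_context *ctx, uint64_t *value)
    {
        if (!ctx->aspace || ctx->aspace == gpu->aspace)
            return -ENODEV;       /* illustrative errno; the driver may differ */
        *value = ctx->aspace->va_start;
        return 0;
    }

    int get_va_size(struct msm_gpu *gpu, struct msm_context *ctx, uint64_t *value)
    {
        if (!ctx->aspace || ctx->aspace == gpu->aspace)
            return -ENODEV;
        *value = ctx->aspace->va_size;
        return 0;
    }
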
a2xx_gpu.c
116 msm_gpummu_params(gpu->aspace->mmu, &pt_base, &tran_error); in a2xx_hw_init()
473 struct msm_gem_address_space *aspace; in a2xx_create_address_space() local
475 aspace = msm_gem_address_space_create(mmu, "gpu", SZ_16M, in a2xx_create_address_space()
478 if (IS_ERR(aspace) && !IS_ERR(mmu)) in a2xx_create_address_space()
481 return aspace; in a2xx_create_address_space()
554 if (!gpu->aspace) { in a2xx_gpu_init()
a6xx_gmu.c
1165 msm_gem_kernel_put(gmu->hfi.obj, gmu->aspace); in a6xx_gmu_memory_free()
1166 msm_gem_kernel_put(gmu->debug.obj, gmu->aspace); in a6xx_gmu_memory_free()
1167 msm_gem_kernel_put(gmu->icache.obj, gmu->aspace); in a6xx_gmu_memory_free()
1168 msm_gem_kernel_put(gmu->dcache.obj, gmu->aspace); in a6xx_gmu_memory_free()
1169 msm_gem_kernel_put(gmu->dummy.obj, gmu->aspace); in a6xx_gmu_memory_free()
1170 msm_gem_kernel_put(gmu->log.obj, gmu->aspace); in a6xx_gmu_memory_free()
1172 gmu->aspace->mmu->funcs->detach(gmu->aspace->mmu); in a6xx_gmu_memory_free()
1173 msm_gem_address_space_put(gmu->aspace); in a6xx_gmu_memory_free()
1202 ret = msm_gem_get_and_pin_iova_range(bo->obj, gmu->aspace, &bo->iova, in a6xx_gmu_memory_alloc()
1228 if (IS_ERR(gmu->aspace)) in a6xx_gmu_memory_probe()
[all …]
a5xx_debugfs.c
119 msm_gem_unpin_iova(a5xx_gpu->pm4_bo, gpu->aspace); in reset_set()
125 msm_gem_unpin_iova(a5xx_gpu->pfp_bo, gpu->aspace); in reset_set()
a5xx_gpu.c
925 gpu->aspace, &a5xx_gpu->shadow_bo, in a5xx_hw_init()
1036 msm_gem_unpin_iova(a5xx_gpu->pm4_bo, gpu->aspace); in a5xx_destroy()
1041 msm_gem_unpin_iova(a5xx_gpu->pfp_bo, gpu->aspace); in a5xx_destroy()
1046 msm_gem_unpin_iova(a5xx_gpu->gpmu_bo, gpu->aspace); in a5xx_destroy()
1051 msm_gem_unpin_iova(a5xx_gpu->shadow_bo, gpu->aspace); in a5xx_destroy()
1446 SZ_1M, MSM_BO_WC, gpu->aspace, in a5xx_crashdumper_init()
1546 msm_gem_kernel_put(dumper.bo, gpu->aspace); in a5xx_gpu_state_get_hlsq_regs()
1554 msm_gem_kernel_put(dumper.bo, gpu->aspace); in a5xx_gpu_state_get_hlsq_regs()
1772 if (gpu->aspace) in a5xx_gpu_init()
1773 msm_mmu_set_fault_handler(gpu->aspace->mmu, gpu, a5xx_fault_handler); in a5xx_gpu_init()
/linux-6.3-rc2/drivers/gpu/drm/msm/disp/mdp5/
mdp5_kms.c
211 struct msm_gem_address_space *aspace = kms->aspace; in mdp5_kms_destroy() local
220 if (aspace) { in mdp5_kms_destroy()
221 aspace->mmu->funcs->detach(aspace->mmu); in mdp5_kms_destroy()
222 msm_gem_address_space_put(aspace); in mdp5_kms_destroy()
558 struct msm_gem_address_space *aspace; in mdp5_kms_init() local
605 aspace = msm_kms_init_aspace(mdp5_kms->dev); in mdp5_kms_init()
606 if (IS_ERR(aspace)) { in mdp5_kms_init()
607 ret = PTR_ERR(aspace); in mdp5_kms_init()
611 kms->aspace = aspace; in mdp5_kms_init()
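
The display-side KMS drivers (mdp4_kms.c, mdp5_kms.c, and the dpu code further down) pair an aspace created by msm_kms_init_aspace() with a teardown that first detaches the display MMU and only then drops the address-space reference. A minimal sketch of that teardown ordering follows, with simplified stand-in structs and a stubbed aspace_put() in place of msm_gem_address_space_put().

    /* Hypothetical, simplified stand-ins for the driver structs. */
    struct msm_mmu;
    struct msm_mmu_funcs {
        void (*detach)(struct msm_mmu *mmu);
    };
    struct msm_mmu {
        const struct msm_mmu_funcs *funcs;
    };

    struct msm_gem_address_space {
        struct msm_mmu *mmu;
    };

    struct msm_kms {
        struct msm_gem_address_space *aspace;
    };

    /* Stub standing in for msm_gem_address_space_put(). */
    static void aspace_put(struct msm_gem_address_space *aspace)
    {
        (void)aspace;   /* the driver drops a kref here */
    }

    /* Teardown order used by mdp4_destroy()/mdp5_kms_destroy(): detach the
     * display MMU from its domain first, then release the address space. */
    void kms_destroy_aspace(struct msm_kms *kms)
    {
        struct msm_gem_address_space *aspace = kms->aspace;

        if (aspace) {
            aspace->mmu->funcs->detach(aspace->mmu);
            aspace_put(aspace);
        }
    }
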
/linux-6.3-rc2/drivers/gpu/drm/msm/disp/dpu1/
dpu_encoder_phys_wb.c
582 struct msm_gem_address_space *aspace; in dpu_encoder_phys_wb_prepare_wb_job() local
592 aspace = phys_enc->dpu_kms->base.aspace; in dpu_encoder_phys_wb_prepare_wb_job()
598 ret = msm_framebuffer_prepare(job->fb, aspace, false); in dpu_encoder_phys_wb_prepare_wb_job()
614 ret = dpu_format_populate_layout(aspace, job->fb, &wb_cfg->dest); in dpu_encoder_phys_wb_prepare_wb_job()
641 struct msm_gem_address_space *aspace; in dpu_encoder_phys_wb_cleanup_wb_job() local
646 aspace = phys_enc->dpu_kms->base.aspace; in dpu_encoder_phys_wb_cleanup_wb_job()
648 msm_framebuffer_cleanup(job->fb, aspace, false); in dpu_encoder_phys_wb_cleanup_wb_job()
dpu_formats.c
796 struct msm_gem_address_space *aspace, in _dpu_format_populate_addrs_ubwc() argument
808 if (aspace) in _dpu_format_populate_addrs_ubwc()
809 base_addr = msm_framebuffer_iova(fb, aspace, 0); in _dpu_format_populate_addrs_ubwc()
887 struct msm_gem_address_space *aspace, in _dpu_format_populate_addrs_linear() argument
904 if (aspace) in _dpu_format_populate_addrs_linear()
906 msm_framebuffer_iova(fb, aspace, i); in _dpu_format_populate_addrs_linear()
917 struct msm_gem_address_space *aspace, in dpu_format_populate_layout() argument
949 ret = _dpu_format_populate_addrs_ubwc(aspace, fb, layout); in dpu_format_populate_layout()
951 ret = _dpu_format_populate_addrs_linear(aspace, fb, layout); in dpu_format_populate_layout()
dpu_kms.c
976 if (!dpu_kms->base.aspace) in _dpu_kms_mmu_destroy()
979 mmu = dpu_kms->base.aspace->mmu; in _dpu_kms_mmu_destroy()
982 msm_gem_address_space_put(dpu_kms->base.aspace); in _dpu_kms_mmu_destroy()
984 dpu_kms->base.aspace = NULL; in _dpu_kms_mmu_destroy()
989 struct msm_gem_address_space *aspace; in _dpu_kms_mmu_init() local
991 aspace = msm_kms_init_aspace(dpu_kms->dev); in _dpu_kms_mmu_init()
992 if (IS_ERR(aspace)) in _dpu_kms_mmu_init()
993 return PTR_ERR(aspace); in _dpu_kms_mmu_init()
995 dpu_kms->base.aspace = aspace; in _dpu_kms_mmu_init()
dpu_formats.h
84 struct msm_gem_address_space *aspace,

Completed in 56 milliseconds
