/linux-6.3-rc2/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/

vmm.c
    131  it->vmm->func->flush(it->vmm, it->flush);  in nvkm_vmm_flush()
    145  struct nvkm_vmm *vmm = it->vmm;  in nvkm_vmm_unref_pdes() local
    203  struct nvkm_vmm *vmm = it->vmm;  in nvkm_vmm_unref_sptes() local
    302  struct nvkm_vmm *vmm = it->vmm;  in nvkm_vmm_ref_sptes() local
    419  struct nvkm_vmm *vmm = it->vmm;  in nvkm_vmm_ref_hwpt() local
    513  it.vmm = vmm;  in nvkm_vmm_iter()
   1007  vmm->nullp, vmm->null);  in nvkm_vmm_dtor()
   1123  if (vmm->start > vmm->limit || vmm->limit > (1ULL << bits))  in nvkm_vmm_ctor()
   1126  if (!(vma = nvkm_vma_new(vmm->start, vmm->limit - vmm->start)))  in nvkm_vmm_ctor()
   1783  vmm->func->part(vmm, inst);  in nvkm_vmm_part()
    [all …]

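These matches sketch the file's central mechanism: nvkm_vmm_iter() (line 513) threads the vmm pointer through a walk context, "it", into the per-level ref/unref helpers, and nvkm_vmm_ctor() validates the address range up front (line 1123). Below is a minimal userspace model of that shape, not the kernel code; every struct, name, and the 4 KiB step are invented for illustration.

/*
 * Model of the nvkm_vmm_iter() pattern: a walk context carries the
 * vmm into per-entry callbacks, and the constructor rejects a bad
 * range before anything is walked. All layouts here are assumptions.
 */
#include <stdint.h>
#include <stdio.h>

struct vmm {
	uint64_t start, limit;          /* managed VA range */
};

struct iter {
	struct vmm *vmm;                /* context threaded through callbacks */
	uint64_t addr;
};

typedef void (*iter_fn)(struct iter *it);   /* per-entry callback */

static void walk(struct vmm *vmm, uint64_t addr, uint64_t size, iter_fn fn)
{
	struct iter it = { .vmm = vmm, .addr = addr };

	for (; it.addr < addr + size; it.addr += 4096)
		fn(&it);
}

static int vmm_ctor(struct vmm *vmm, uint64_t start, uint64_t limit, int bits)
{
	/* same sanity-check shape as line 1123 above */
	if (start > limit || limit > (1ULL << bits))
		return -1;
	vmm->start = start;
	vmm->limit = limit;
	return 0;
}

static void dump(struct iter *it)
{
	printf("visit %#llx (vmm %p)\n",
	       (unsigned long long)it->addr, (void *)it->vmm);
}

int main(void)
{
	struct vmm vmm;

	if (vmm_ctor(&vmm, 0x1000, 0x10000, 40))
		return 1;
	walk(&vmm, 0x1000, 3 * 4096, dump);
	return 0;
}

Carrying the vmm inside the iterator keeps the callback signature uniform no matter how many page-table levels the walk descends.
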
vmmnv44.c
     66  VMM_WO032(pt, vmm, pteo + 0x0, tmp[0]);  in nv44_vmm_pgt_fill()
     67  VMM_WO032(pt, vmm, pteo + 0x4, tmp[1]);  in nv44_vmm_pgt_fill()
     68  VMM_WO032(pt, vmm, pteo + 0x8, tmp[2]);  in nv44_vmm_pgt_fill()
     82  nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, pten);  in nv44_vmm_pgt_pte()
    147  nv44_vmm_pgt_unmap(struct nvkm_vmm *vmm,  in nv44_vmm_pgt_unmap() argument
    213  struct nvkm_vmm *vmm;  in nv44_vmm_new() local
    217  argv, argc, key, name, &vmm);  in nv44_vmm_new()
    218  *pvmm = vmm;  in nv44_vmm_new()
    223  &vmm->null, GFP_KERNEL);  in nv44_vmm_new()
    224  if (!vmm->nullp) {  in nv44_vmm_new()
    [all …]

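Lines 66-68 show nv44_vmm_pgt_fill() writing three consecutive 32-bit words of a PTE group back to the table. The sketch below models the plausible read-patch-write idea behind such a helper (patch part of a naturally aligned group, then rewrite the whole group); the three-word group size is taken from the listing, and everything else, including every name, is an assumption.

/*
 * Read-modify-write of a small PTE group: read the group, patch the
 * chosen slot, write the group back as consecutive 32-bit words,
 * mirroring the pteo + 0x0/0x4/0x8 writes above.
 */
#include <stdint.h>
#include <string.h>
#include <stdio.h>

static uint32_t pt[16];                 /* stand-in for the page table */

static void pgt_fill(uint32_t pteo, const uint32_t *tmp, unsigned n)
{
	for (unsigned i = 0; i < n; i++)
		pt[pteo / 4 + i] = tmp[i];  /* VMM_WO032-style word writes */
}

int main(void)
{
	uint32_t tmp[3];

	memcpy(tmp, &pt[0], sizeof(tmp)); /* read the group */
	tmp[1] = 0xdeadbeef;              /* patch one slot */
	pgt_fill(0x0, tmp, 3);            /* write the group back */
	printf("%08x %08x %08x\n", pt[0], pt[1], pt[2]);
	return 0;
}
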
uvmm.c
     51  struct nvkm_vmm *vmm = uvmm->vmm;  in nvkm_uvmm_mthd_pfnclr() local
     76  struct nvkm_vmm *vmm = uvmm->vmm;  in nvkm_uvmm_mthd_pfnmap() local
    106  struct nvkm_vmm *vmm = uvmm->vmm;  in nvkm_uvmm_mthd_unmap() local
    116  mutex_lock(&vmm->mutex);  in nvkm_uvmm_mthd_unmap()
    149  struct nvkm_vmm *vmm = uvmm->vmm;  in nvkm_uvmm_mthd_map() local
    225  struct nvkm_vmm *vmm = uvmm->vmm;  in nvkm_uvmm_mthd_put() local
    261  struct nvkm_vmm *vmm = uvmm->vmm;  in nvkm_uvmm_mthd_get() local
    331  return uvmm->vmm->func->mthd(uvmm->vmm,  in nvkm_uvmm_mthd()
    383  if (!mmu->vmm) {  in nvkm_uvmm_new()
    389  uvmm->vmm->debug = max(uvmm->vmm->debug, oclass->client->debug);  in nvkm_uvmm_new()
    [all …]

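Every uvmm method above follows the same shape: resolve the kernel vmm from the user-visible wrapper, then take the vmm's mutex around the actual operation (line 116). A pthread model of that locking discipline follows; the struct layout and the bookkeeping field are assumptions made for the example.

/*
 * User-object method pattern: wrapper -> kernel object, operation
 * performed under the object's own mutex.
 */
#include <pthread.h>
#include <stdio.h>

struct vmm {
	pthread_mutex_t mutex;
	unsigned long mapped;   /* stand-in for the vma tree */
};

struct uvmm {
	struct vmm *vmm;        /* user-visible wrapper -> kernel object */
};

static int uvmm_mthd_unmap(struct uvmm *uvmm, unsigned long addr)
{
	struct vmm *vmm = uvmm->vmm;

	pthread_mutex_lock(&vmm->mutex);
	vmm->mapped--;          /* the real code looks up and unmaps a vma */
	pthread_mutex_unlock(&vmm->mutex);
	return 0;
}

int main(void)
{
	struct vmm vmm = { PTHREAD_MUTEX_INITIALIZER, 1 };
	struct uvmm uvmm = { &vmm };

	uvmm_mthd_unmap(&uvmm, 0x1000);
	printf("mapped=%lu\n", vmm.mapped);
	return 0;
}
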
vmmnv50.c
    148  u32 pdeo = vmm->mmu->func->vmm.pd_offset + (pdei * 8);  in nv50_vmm_pgd_pde()
    187  mutex_lock(&vmm->mmu->mutex);  in nv50_vmm_flush()
    189  if (!atomic_read(&vmm->engref[i]))  in nv50_vmm_flush()
    222  mutex_unlock(&vmm->mmu->mutex);  in nv50_vmm_flush()
    256  VMM_DEBUG(vmm, "args");  in nv50_vmm_valid()
    280  kindm = vmm->mmu->func->kind(vmm->mmu, &kindn, &kind_inv);  in nv50_vmm_valid()
    282  VMM_DEBUG(vmm, "kind %02x", kind);  in nv50_vmm_valid()
    302  VMM_DEBUG(vmm, "comp %d", ret);  in nv50_vmm_valid()
    340  const u32 pd_offset = vmm->mmu->func->vmm.pd_offset;  in nv50_vmm_join()
    352  for (pdei = vmm->start >> 29; pdei <= (vmm->limit - 1) >> 29; pdei++) {  in nv50_vmm_join()
    [all …]

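nv50_vmm_flush() takes mmu->mutex (line 187) and skips any engine whose engref count is zero (line 189), so only units that actually joined this address space get a TLB flush. A compact model of that test; the engine count and ids are invented for the example.

/*
 * Flush only engines that hold a reference on this VMM, the
 * "if (!atomic_read(&vmm->engref[i])) continue;" idea from line 189.
 */
#include <stdatomic.h>
#include <stdio.h>

#define NR_ENGINES 4

static atomic_int engref[NR_ENGINES];

static void vmm_flush(void)
{
	for (int i = 0; i < NR_ENGINES; i++) {
		if (!atomic_load(&engref[i]))
			continue;               /* engine never used this VMM */
		printf("flush TLB for engine %d\n", i);
	}
}

int main(void)
{
	atomic_fetch_add(&engref[2], 1);        /* pretend engine 2 joined */
	vmm_flush();                            /* only engine 2 is flushed */
	return 0;
}
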
vmmgp100.c
     34  gp100_vmm_pfn_unmap(struct nvkm_vmm *vmm,  in gp100_vmm_pfn_unmap() argument
    109  VMM_WO064(pt, vmm, ptei++ * 8, data);  in gp100_vmm_pgt_pfn()
    373  VMM_WO064(pd, vmm, pdei * 8, data);  in gp100_vmm_pd1_pde()
    435  VMM_DEBUG(vmm, "args");  in gp100_vmm_valid()
    439  aper = vmm->func->aper(target);  in gp100_vmm_valid()
    443  kindm = vmm->mmu->func->kind(vmm->mmu, &kindn, &kind_inv);  in gp100_vmm_valid()
    445  VMM_DEBUG(vmm, "kind %02x", kind);  in gp100_vmm_valid()
    460  VMM_DEBUG(vmm, "comp %d", ret);  in gp100_vmm_valid()
    534  gp100_vmm_mthd(struct nvkm_vmm *vmm,  in gp100_vmm_mthd() argument
    563  gf100_vmm_invalidate(vmm, type);  in gp100_vmm_flush()
    [all …]

vmmgf100.c
     44  VMM_WO064(pt, vmm, ptei++ * 8, data);  in gf100_vmm_pgt_pte()
    142  VMM_WO064(pd, vmm, pdei * 8, data);  in gf100_vmm_pgd_pde()
    194  mutex_lock(&vmm->mmu->mutex);  in gf100_vmm_invalidate()
    214  vmm->func->invalidate_pdb(vmm, addr);  in gf100_vmm_invalidate()
    224  mutex_unlock(&vmm->mmu->mutex);  in gf100_vmm_invalidate()
    233  gf100_vmm_invalidate(vmm, type);  in gf100_vmm_flush()
    268  VMM_DEBUG(vmm, "args");  in gf100_vmm_valid()
    272  aper = vmm->func->aper(target);  in gf100_vmm_valid()
    276  kindm = vmm->mmu->func->kind(vmm->mmu, &kindn, &kind_inv);  in gf100_vmm_valid()
    278  VMM_DEBUG(vmm, "kind %02x", kind);  in gf100_vmm_valid()
    [all …]

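gf100_vmm_invalidate() brackets the whole sequence with mmu->mutex (lines 194 and 224) and pushes the page-directory base through a per-generation hook, invalidate_pdb (line 214), before triggering the flush. The model below is heavily simplified and hedged: the fake register file, the bit layout, and the instant "completion" are all placeholders, not real GF100 registers or behavior.

/*
 * Lock, program the PDB through a hook, kick the invalidate, poll
 * for completion, unlock. Completion is simulated here.
 */
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

static pthread_mutex_t mmu_mutex = PTHREAD_MUTEX_INITIALIZER;
static uint32_t regs[2];                /* [0] = pdb, [1] = trigger/status */

static void invalidate_pdb(uint64_t addr)
{
	regs[0] = (uint32_t)(addr >> 8);    /* assumed addr packing */
}

static void vmm_invalidate(uint64_t pdb, uint32_t type)
{
	pthread_mutex_lock(&mmu_mutex);
	invalidate_pdb(pdb);                /* per-generation hook */
	regs[1] = type | 1;                 /* kick the invalidate */
	while (regs[1] & 1)                 /* poll until hw clears it */
		regs[1] &= ~1u;             /* (simulated completion) */
	pthread_mutex_unlock(&mmu_mutex);
}

int main(void)
{
	vmm_invalidate(0x100000, 0x2);
	printf("pdb reg = %#x\n", regs[0]);
	return 0;
}
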
vmmnv04.c
     28  nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in nv04_vmm_pgt_pte() argument
     33  VMM_WO032(pt, vmm, 8 + ptei++ * 4, data);  in nv04_vmm_pgt_pte()
     60  nv04_vmm_pgt_unmap(struct nvkm_vmm *vmm,  in nv04_vmm_pgt_unmap() argument
     63  VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes);  in nv04_vmm_pgt_unmap()
     80  nv04_vmm_valid(struct nvkm_vmm *vmm, void *argv, u32 argc,  in nv04_vmm_valid() argument
     88  VMM_DEBUG(vmm, "args");  in nv04_vmm_valid()
    126  struct nvkm_vmm *vmm;  in nv04_vmm_new() local
    130  argv, argc, key, name, &vmm);  in nv04_vmm_new()
    131  *pvmm = vmm;  in nv04_vmm_new()
    135  mem = vmm->pd->pt[0]->memory;  in nv04_vmm_new()
    [all …]

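Both PTE helpers address the table at byte offset 8 + ptei * 4 (lines 33 and 63), i.e. the nv04 page table carries an 8-byte header before the first entry. A tiny sketch of that addressing; the header contents and the PTE bit layout are left opaque on purpose.

/*
 * nv04-style addressing: two header words, then one 32-bit PTE per
 * page, so PTE i lives at byte offset 8 + i * 4.
 */
#include <stdint.h>
#include <stdio.h>

static uint32_t pgt[2 + 8];     /* 8-byte header + eight 32-bit PTEs */

static void wo032(uint32_t byteoff, uint32_t data)
{
	pgt[byteoff / 4] = data;
}

int main(void)
{
	uint32_t ptei = 3;

	wo032(8 + ptei * 4, 0x00000001 | 0x1000);  /* assumed valid bit | addr */
	printf("pte[3] = %#x\n", pgt[2 + ptei]);
	return 0;
}
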
vmmnv41.c
     27  nv41_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in nv41_vmm_pgt_pte() argument
     32  VMM_WO032(pt, vmm, ptei++ * 4, data);  in nv41_vmm_pgt_pte()
     38  nv41_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,  in nv41_vmm_pgt_sgl() argument
     41  VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);  in nv41_vmm_pgt_sgl()
     52  VMM_WO032(pt, vmm, ptei++ * 4, data);  in nv41_vmm_pgt_dma()
     61  nv41_vmm_pgt_unmap(struct nvkm_vmm *vmm,  in nv41_vmm_pgt_unmap() argument
     64  VMM_FO032(pt, vmm, ptei * 4, 0, ptes);  in nv41_vmm_pgt_unmap()
     81  nv41_vmm_flush(struct nvkm_vmm *vmm, int level)  in nv41_vmm_flush() argument
     83  struct nvkm_device *device = vmm->mmu->subdev.device;  in nv41_vmm_flush()
     85  mutex_lock(&vmm->mmu->mutex);  in nv41_vmm_flush()
    [all …]

vmmtu102.c
     27  tu102_vmm_flush(struct nvkm_vmm *vmm, int depth)  in tu102_vmm_flush() argument
     29  struct nvkm_device *device = vmm->mmu->subdev.device;  in tu102_vmm_flush()
     33  if (atomic_read(&vmm->engref[NVKM_SUBDEV_BAR]))  in tu102_vmm_flush()
     36  mutex_lock(&vmm->mmu->mutex);  in tu102_vmm_flush()
     38  nvkm_wr32(device, 0xb830a0, vmm->pd->pt[0]->addr >> 8);  in tu102_vmm_flush()
     48  mutex_unlock(&vmm->mmu->mutex);  in tu102_vmm_flush()

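Line 38 shows the Turing flush writing the top-level page-directory address, shifted right by 8 bits, into register 0xb830a0. A toy model of that register write; only the ">> 8" packing and the register offset come from the listing, the MMIO helper and the alignment assumption are invented.

/*
 * Program the invalidate register with the PD address >> 8, which
 * implies the PD must be at least 256-byte aligned.
 */
#include <stdint.h>
#include <stdio.h>

static void wr32(uint32_t reg, uint32_t val)
{
	printf("wr32 %#x <- %#x\n", reg, val);  /* stand-in for MMIO */
}

static void tu102_flush(uint64_t pd_addr)
{
	wr32(0xb830a0, (uint32_t)(pd_addr >> 8));
}

int main(void)
{
	tu102_flush(0x1fe00000);        /* 256-byte-aligned PD address */
	return 0;
}
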
vmmgm200.c
     28  gm200_vmm_pgt_sparse(struct nvkm_vmm *vmm,  in gm200_vmm_pgt_sparse() argument
     32  VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(32) /* VOL. */, ptes);  in gm200_vmm_pgt_sparse()
     53  gm200_vmm_pgd_sparse(struct nvkm_vmm *vmm,  in gm200_vmm_pgd_sparse() argument
     57  VMM_FO064(pt, vmm, pdei * 8, BIT_ULL(35) /* VOL_BIG. */, pdes);  in gm200_vmm_pgd_sparse()
     96  gm200_vmm_join_(struct nvkm_vmm *vmm, struct nvkm_memory *inst, u64 base)  in gm200_vmm_join_() argument
     98  if (vmm->func->page[1].shift == 16)  in gm200_vmm_join_()
    100  return gf100_vmm_join_(vmm, inst, base);  in gm200_vmm_join_()
    104  gm200_vmm_join(struct nvkm_vmm *vmm, struct nvkm_memory *inst)  in gm200_vmm_join() argument
    106  return gm200_vmm_join_(vmm, inst, 0);  in gm200_vmm_join()

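The gm200 sparse helpers fill whole ranges of entries with a single "volatile" marker: BIT_ULL(32) per PTE and BIT_ULL(35) per PDE (lines 32 and 57). A sketch of that fill-with-constant pattern; the table size is arbitrary and the fill helper name is invented.

/*
 * VMM_FO064-style range fill: write the same 64-bit value over n
 * consecutive entries, here marking them sparse/volatile.
 */
#include <stdint.h>
#include <stdio.h>

#define BIT_ULL(n) (1ULL << (n))

static uint64_t pt[8];

static void fo064(uint32_t ptei, uint64_t data, uint32_t ptes)
{
	while (ptes--)
		pt[ptei++] = data;
}

int main(void)
{
	fo064(2, BIT_ULL(32), 4);       /* mark PTEs 2..5 sparse */
	printf("pt[2] = %#llx\n", (unsigned long long)pt[2]);
	return 0;
}
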
nv44.c
     35  struct nvkm_memory *pt = mmu->vmm->pd->pt[0]->memory;  in nv44_mmu_init()
     46  nvkm_wr32(device, 0x100818, mmu->vmm->null);  in nv44_mmu_init()
     61  .vmm = {{ -1, -1, NVIF_CLASS_VMM_NV04}, nv44_vmm_new, true },

/linux-6.3-rc2/drivers/gpu/drm/nouveau/

nouveau_vmm.c
     32  nvif_vmm_unmap(&vma->vmm->vmm, vma->addr);  in nouveau_vma_unmap()
     41  int ret = nouveau_mem_map(mem, &vma->vmm->vmm, &tmp);  in nouveau_vma_map()
     54  if (vma->vmm == vmm)  in nouveau_vma_find()
     68  nvif_vmm_put(&vma->vmm->vmm, &tmp);  in nouveau_vma_del()
     92  vma->vmm = vmm;  in nouveau_vma_new()
    101  ret = nvif_vmm_get(&vmm->vmm, LAZY, false, mem->mem.page, 0,  in nouveau_vma_new()
    109  ret = nvif_vmm_get(&vmm->vmm, PTES, false, mem->mem.page, 0,  in nouveau_vma_new()
    124  nvif_vmm_dtor(&vmm->vmm);  in nouveau_vmm_fini()
    125  vmm->cli = NULL;  in nouveau_vmm_fini()
    132  0, NULL, 0, &vmm->vmm);  in nouveau_vmm_init()
    [all …]

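nouveau_vma_find() (line 54) walks a buffer's per-VMM mapping list for the entry whose vmm matches, and nouveau_vma_new() falls back to creating one. A userspace model of that find-or-create lookup with a plain linked list; the structs are illustrative, not the driver's.

/*
 * Find-or-create: one mapping per (buffer, address space) pair.
 */
#include <stdlib.h>
#include <stdio.h>

struct vma {
	struct vma *next;
	void *vmm;              /* which address space this mapping is in */
	unsigned long addr;
};

static struct vma *vma_find(struct vma *head, void *vmm)
{
	for (struct vma *v = head; v; v = v->next)
		if (v->vmm == vmm)
			return v;
	return NULL;
}

static struct vma *vma_new(struct vma **head, void *vmm, unsigned long addr)
{
	struct vma *v = vma_find(*head, vmm);

	if (v)
		return v;       /* already mapped in this VMM */
	v = malloc(sizeof(*v));
	if (!v)
		return NULL;
	v->vmm = vmm;
	v->addr = addr;
	v->next = *head;
	*head = v;
	return v;
}

int main(void)
{
	struct vma *head = NULL;
	int vmm_a;

	vma_new(&head, &vmm_a, 0x1000);
	printf("found: %p\n", (void *)vma_find(head, &vmm_a));
	while (head) { struct vma *n = head->next; free(head); head = n; }
	return 0;
}
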
nouveau_svm.c
    213  mutex_lock(&svmm->vmm->cli->drm->svm->mutex);  in nouveau_svmm_part()
    246  nvif_object_mthd(&svmm->vmm->vmm.object, NVIF_VMM_V0_PFNCLR,  in nouveau_svmm_invalidate()
    269  if (unlikely(!svmm->vmm))  in nouveau_svmm_invalidate_range_start()
    311  svmm->vmm = NULL;  in nouveau_svmm_fini()
    334  svmm->vmm = &cli->svm;  in nouveau_svmm_init()
    353  cli->vmm.vmm.object.oclass, true,  in nouveau_svmm_init()
    387  WARN_ON(nvif_object_mthd(&svm->drm->client.vmm.vmm.object,  in nouveau_svm_fault_replay()
    403  WARN_ON(nvif_object_mthd(&svm->drm->client.vmm.vmm.object,  in nouveau_svm_fault_cancel()
    640  ret = nvif_object_ioctl(&svmm->vmm->vmm.object, args, size, NULL);  in nouveau_atomic_range_fault()
    706  ret = nvif_object_ioctl(&svmm->vmm->vmm.object, args, size, NULL);  in nouveau_range_fault()
    [all …]

nouveau_chan.c
     91  nouveau_svmm_part(chan->vmm->svmm, chan->inst);  in nouveau_channel_del()
    151  chan->vmm = cli->svm.cli ? &cli->svm : &cli->vmm;  in nouveau_channel_prep()
    187  ret = nouveau_vma_new(chan->push.buffer, chan->vmm,  in nouveau_channel_prep()
    202  args.limit = chan->vmm->vmm.limit - 1;  in nouveau_channel_prep()
    232  args.limit = chan->vmm->vmm.limit - 1;  in nouveau_channel_prep()
    308  args.chan.vmm = 0;  in nouveau_channel_ctor()
    313  args.chan.vmm = nvif_handle(&chan->vmm->vmm.object);  in nouveau_channel_ctor()
    396  args.limit = chan->vmm->vmm.limit - 1;  in nouveau_channel_init()
    414  args.limit = chan->vmm->vmm.limit - 1;  in nouveau_channel_init()
    426  args.limit = chan->vmm->vmm.limit - 1;  in nouveau_channel_init()
    [all …]

nouveau_mem.c
     38  struct nvif_vmm *vmm, struct nvif_vma *vma)  in nouveau_mem_map() argument
     46  switch (vmm->object.oclass) {  in nouveau_mem_map()
     75  return nvif_vmm_map(vmm, vma->addr, mem->mem.size, &args, argc, &mem->mem, 0);  in nouveau_mem_map()
     81  nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[1]);  in nouveau_mem_fini()
     82  nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[0]);  in nouveau_mem_fini()

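nouveau_mem_map() switches on the VMM object class (line 46) to decide which map-argument layout to build before the single nvif_vmm_map() call at line 75. A sketch of dispatching on a class id; the enum values below are stand-ins, not the real NVIF_CLASS_VMM_* numbers.

/*
 * Per-generation argument building: one switch on the class, one
 * common map call afterwards.
 */
#include <stdio.h>

enum { VMM_NV04 = 0, VMM_NV50 = 1, VMM_GF100 = 2 };   /* stand-in ids */

static int mem_map(int oclass)
{
	switch (oclass) {
	case VMM_NV50:
		printf("build nv50-style map args\n");
		break;
	case VMM_GF100:
		printf("build gf100-style map args\n");
		break;
	default:
		return -1;      /* class without a map-arg format */
	}
	return 0;               /* then issue the one common map call */
}

int main(void)
{
	return mem_map(VMM_GF100);
}
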
nouveau_gem.c
    106  struct nouveau_vmm *vmm = cli->svm.cli ? &cli->svm : &cli->vmm;  in nouveau_gem_object_open() local
    110  if (vmm->vmm.object.oclass < NVIF_CLASS_VMM_NV50)  in nouveau_gem_object_open()
    183  struct nouveau_vmm *vmm = cli->svm.cli ? &cli->svm : & cli->vmm;  in nouveau_gem_object_close() local
    187  if (vmm->vmm.object.oclass < NVIF_CLASS_VMM_NV50)  in nouveau_gem_object_close()
    194  vma = nouveau_vma_find(nvbo, vmm);  in nouveau_gem_object_close()
    272  struct nouveau_vmm *vmm = cli->svm.cli ? &cli->svm : &cli->vmm;  in nouveau_gem_info() local
    282  if (vmm->vmm.object.oclass >= NVIF_CLASS_VMM_NV50) {  in nouveau_gem_info()
    283  vma = nouveau_vma_find(nvbo, vmm);  in nouveau_gem_info()
    386  if (chan->vmm->vmm.object.oclass >= NVIF_CLASS_VMM_NV50) {  in validate_fini_no_ticket()
    481  if (chan->vmm->vmm.object.oclass >= NVIF_CLASS_VMM_NV50) {  in validate_init()
    [all …]

/linux-6.3-rc2/drivers/gpu/drm/nouveau/nvif/

vmm.c
    110  kfree(vmm->page);  in nvif_vmm_dtor()
    111  nvif_object_dtor(&vmm->object);  in nvif_vmm_dtor()
    122  vmm->object.client = NULL;  in nvif_vmm_ctor()
    123  vmm->page = NULL;  in nvif_vmm_ctor()
    138  vmm->start = args->addr;  in nvif_vmm_ctor()
    139  vmm->limit = args->size;  in nvif_vmm_ctor()
    141  vmm->page_nr = args->page_nr;  in nvif_vmm_ctor()
    142  vmm->page = kmalloc_array(vmm->page_nr, sizeof(*vmm->page),  in nvif_vmm_ctor()
    144  if (!vmm->page) {  in nvif_vmm_ctor()
    159  vmm->page[i].vram = args.vram;  in nvif_vmm_ctor()
    [all …]

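The constructor learns how many page sizes the new VMM supports (args->page_nr, line 141), sizes vmm->page with kmalloc_array (line 142), then fills one entry per page size; the destructor frees the array before the object (lines 110-111). A userspace model of that sized-array construction; the fields and the fake query results are assumptions.

/*
 * Query-then-allocate: the element count comes back from the kernel,
 * the array is sized from it, teardown releases in reverse order.
 */
#include <stdlib.h>
#include <stdio.h>

struct vmm_page { int shift, vram; };

struct vmm {
	int page_nr;
	struct vmm_page *page;
};

static int vmm_ctor(struct vmm *vmm, int page_nr)
{
	vmm->page_nr = page_nr;
	vmm->page = calloc(page_nr, sizeof(*vmm->page));  /* kmalloc_array analogue */
	if (!vmm->page)
		return -1;
	for (int i = 0; i < page_nr; i++)
		vmm->page[i].shift = 12 + i * 4;          /* pretend query result */
	return 0;
}

static void vmm_dtor(struct vmm *vmm)
{
	free(vmm->page);        /* page array first, then the object */
	vmm->page = NULL;
}

int main(void)
{
	struct vmm vmm;

	if (vmm_ctor(&vmm, 3))
		return 1;
	printf("largest page shift: %d\n", vmm.page[vmm.page_nr - 1].shift);
	vmm_dtor(&vmm);
	return 0;
}
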
/linux-6.3-rc2/drivers/gpu/drm/nouveau/nvkm/subdev/bar/

gf100.c
     34  return gf100_bar(base)->bar[1].vmm;  in gf100_bar_bar1_vmm()
     63  return gf100_bar(base)->bar[0].vmm;  in gf100_bar_bar2_vmm()
    108  bar_vm->vmm->debug = bar->base.subdev.debug;  in gf100_bar_oneinit_bar()
    114  ret = nvkm_vmm_boot(bar_vm->vmm);  in gf100_bar_oneinit_bar()
    119  return nvkm_vmm_join(bar_vm->vmm, bar_vm->inst);  in gf100_bar_oneinit_bar()
    153  nvkm_vmm_part(bar->bar[1].vmm, bar->bar[1].inst);  in gf100_bar_dtor()
    154  nvkm_vmm_unref(&bar->bar[1].vmm);  in gf100_bar_dtor()
    157  nvkm_vmm_part(bar->bar[0].vmm, bar->bar[0].inst);  in gf100_bar_dtor()
    158  nvkm_vmm_unref(&bar->bar[0].vmm);  in gf100_bar_dtor()
    183  .bar1.vmm = gf100_bar_bar1_vmm,
    [all …]

/linux-6.3-rc2/drivers/gpu/drm/nouveau/nvkm/engine/fifo/

cgrp.c
    102  nvkm_vmm_put(vctx->vmm, &vctx->vma);  in nvkm_cgrp_vctx_put()
    106  if (vctx->vmm) {  in nvkm_cgrp_vctx_put()
    107  atomic_dec(&vctx->vmm->engref[engn->engine->subdev.type]);  in nvkm_cgrp_vctx_put()
    108  nvkm_vmm_unref(&vctx->vmm);  in nvkm_cgrp_vctx_put()
    128  vctx->ectx->engn == engn && vctx->vmm == chan->vmm);  in nvkm_cgrp_vctx_get()
    150  vctx->vmm = nvkm_vmm_ref(chan->vmm);  in nvkm_cgrp_vctx_get()
    155  if (vctx->vmm)  in nvkm_cgrp_vctx_get()
    156  atomic_inc(&vctx->vmm->engref[engn->engine->subdev.type]);  in nvkm_cgrp_vctx_get()
    181  nvkm_vmm_unref(&cgrp->vmm);  in nvkm_cgrp_del()
    219  nvkm_cgrp_new(struct nvkm_runl *runl, const char *name, struct nvkm_vmm *vmm, bool hw,  in nvkm_cgrp_new() argument
    [all …]

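The vctx get/put pair couples a vmm reference with an atomic per-engine-type counter: get() takes a ref and bumps engref[type] (lines 150-156), put() decrements and drops the ref (lines 106-108). A model of that paired accounting; the refcounting is reduced to plain counters for the example.

/*
 * Paired accounting: every get must be matched by a put so both the
 * object refcount and the per-engine usage counter stay balanced.
 */
#include <stdatomic.h>
#include <stdio.h>

struct vmm {
	atomic_int refs;
	atomic_int engref[4];   /* per-engine-type usage counts */
};

static void vctx_get(struct vmm *vmm, int engine_type)
{
	atomic_fetch_add(&vmm->refs, 1);
	atomic_fetch_add(&vmm->engref[engine_type], 1);
}

static void vctx_put(struct vmm *vmm, int engine_type)
{
	atomic_fetch_sub(&vmm->engref[engine_type], 1);
	atomic_fetch_sub(&vmm->refs, 1);
}

int main(void)
{
	struct vmm vmm = {0};

	vctx_get(&vmm, 1);
	printf("engref[1]=%d\n", atomic_load(&vmm.engref[1]));
	vctx_put(&vmm, 1);
	return 0;
}

The engref counters are what the flush paths above (nv50, tu102) consult to skip engines that never touched the address space.
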
chan.c
    104  cctx->vctx->ectx->engn == engn && cctx->vctx->vmm == chan->vmm);  in nvkm_chan_cctx_get()
    285  if (chan->vmm) {  in nvkm_chan_del()
    286  nvkm_vmm_part(chan->vmm, chan->inst->memory);  in nvkm_chan_del()
    287  nvkm_vmm_unref(&chan->vmm);  in nvkm_chan_del()
    358  (!func->inst->vmm != !vmm) ||  in nvkm_chan_new_()
    365  runl->func->runqs, runq, func->inst->vmm, vmm,  in nvkm_chan_new_()
    401  if (cgrp->runl != runl || cgrp->vmm != vmm) {  in nvkm_chan_new_()
    402  RUNL_DEBUG(runl, "cgrp %d %d", cgrp->runl != runl, cgrp->vmm != vmm);  in nvkm_chan_new_()
    418  if (func->inst->vmm) {  in nvkm_chan_new_()
    419  if (WARN_ON(vmm->mmu != device->mmu))  in nvkm_chan_new_()
    [all …]

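Line 358's "(!func->inst->vmm != !vmm)" is a logical XOR: the call is rejected when exactly one of "the channel type needs a VMM" and "a VMM was supplied" holds. A two-line demonstration of the idiom; the variable names are invented.

/* Logical XOR on pointer presence via double negation. */
#include <stdio.h>

int main(void)
{
	void *required = (void *)1, *supplied = NULL;

	if (!required != !supplied)
		printf("mismatch: VMM required but not supplied, or vice versa\n");
	return 0;
}
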
ucgrp.c
     86  struct nvkm_vmm *vmm;  in nvkm_ucgrp_new() local
    102  vmm = nvkm_uvmm_search(oclass->client, args->v0.vmm);  in nvkm_ucgrp_new()
    103  if (IS_ERR(vmm))  in nvkm_ucgrp_new()
    104  return PTR_ERR(vmm);  in nvkm_ucgrp_new()
    115  ret = nvkm_cgrp_new(runl, args->v0.name, vmm, true, &ucgrp->cgrp);  in nvkm_ucgrp_new()
    123  nvkm_vmm_unref(&vmm);  in nvkm_ucgrp_new()

uchan.c
    326  struct nvkm_vmm *vmm = NULL;  in nvkm_uchan_new() local
    345  if (args->v0.vmm) {  in nvkm_uchan_new()
    346  vmm = nvkm_uvmm_search(oclass->client, args->v0.vmm);  in nvkm_uchan_new()
    347  if (IS_ERR(vmm))  in nvkm_uchan_new()
    348  return PTR_ERR(vmm);  in nvkm_uchan_new()
    378  args->v0.priv != 0, args->v0.devm, vmm, ctxdma, args->v0.offset,  in nvkm_uchan_new()
    407  nvkm_vmm_unref(&vmm);  in nvkm_uchan_new()

/linux-6.3-rc2/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/

nv50.c
    137  while ((ret = nvkm_vmm_get(vmm, 12, size, &bar))) {  in nv50_instobj_kmap()
    158  nvkm_vmm_put(vmm, &ebar);  in nv50_instobj_kmap()
    162  ret = nvkm_memory_map(memory, 0, vmm, bar, NULL, 0);  in nv50_instobj_kmap()
    167  nvkm_vmm_put(vmm, &bar);  in nv50_instobj_kmap()
    178  nvkm_vmm_put(vmm, &iobj->bar);  in nv50_instobj_kmap()
    220  struct nvkm_vmm *vmm;  in nv50_instobj_acquire() local
    237  if ((vmm = nvkm_bar_bar2_vmm(imem->subdev.device))) {  in nv50_instobj_acquire()
    239  nv50_instobj_kmap(iobj, vmm);  in nv50_instobj_acquire()
    274  nv50_instobj_kmap(iobj, vmm);  in nv50_instobj_boot()
    328  if (likely(vmm)) /* Can be NULL during BAR destructor. */  in nv50_instobj_dtor()
    [all …]

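Line 137's "while ((ret = nvkm_vmm_get(...)))" retries the BAR2 window allocation in a loop, evicting an older mapping on each failure until space frees up. A model of that allocate-or-evict loop over a fixed-size pool; the pool and slot bookkeeping are invented for the example.

/*
 * Allocate-or-evict: keep retrying the allocation, reclaiming one
 * existing mapping per failed attempt, and give up only when there
 * is nothing left to evict.
 */
#include <stdio.h>

#define SLOTS 2
static int used[SLOTS];         /* pretend BAR2 window slots */

static int vmm_get(int *slot)
{
	for (int i = 0; i < SLOTS; i++)
		if (!used[i]) { used[i] = 1; *slot = i; return 0; }
	return -1;              /* address space exhausted */
}

static int evict_one(void)
{
	for (int i = 0; i < SLOTS; i++)
		if (used[i]) { used[i] = 0; return 0; }
	return -1;              /* nothing left to evict */
}

int main(void)
{
	int slot, a, b;

	vmm_get(&a);
	vmm_get(&b);            /* pool now full */
	while (vmm_get(&slot)) {        /* same shape as the kmap loop */
		if (evict_one())
			return 1;
	}
	printf("mapped into slot %d after eviction\n", slot);
	return 0;
}
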
/linux-6.3-rc2/drivers/gpu/drm/

drm_gem_vram_helper.c
    193  struct drm_vram_mm *vmm = dev->vram_mm;  in drm_gem_vram_create() local
    197  if (WARN_ONCE(!vmm, "VRAM MM not initialized"))  in drm_gem_vram_create()
    221  bdev = &vmm->bdev;  in drm_gem_vram_create()
    961  struct drm_vram_mm *vmm = entry->dev->vram_mm;  in drm_vram_mm_debugfs() local
    991  vmm->vram_base = vram_base;  in drm_vram_mm_init()
    992  vmm->vram_size = vram_size;  in drm_vram_mm_init()
   1001  ret = ttm_range_man_init(&vmm->bdev, TTM_PL_VRAM,  in drm_vram_mm_init()
   1011  ttm_range_man_fini(&vmm->bdev, TTM_PL_VRAM);  in drm_vram_mm_cleanup()
   1012  ttm_device_fini(&vmm->bdev);  in drm_vram_mm_cleanup()
   1097  struct drm_vram_mm *vmm = dev->vram_mm;  in drm_vram_helper_mode_valid_internal() local
    [all …]

/linux-6.3-rc2/arch/xtensa/kernel/

syscall.c
     60  struct vm_area_struct *vmm;  in arch_get_unmapped_area() local
     84  for_each_vma(vmi, vmm) {  in arch_get_unmapped_area()
     86  if (addr + len <= vm_start_gap(vmm))  in arch_get_unmapped_area()
     89  addr = vmm->vm_end;  in arch_get_unmapped_area()

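This last match is unrelated to the GPU code: here "vmm" is just a vm_area_struct cursor. The xtensa arch_get_unmapped_area() walks the existing VMAs in address order and takes the first gap large enough: if addr + len fits before the next mapping's guarded start, use addr, otherwise continue from that mapping's end (lines 86-89). A userspace model over a sorted array of [start, end) ranges; the guard-gap handling of vm_start_gap() is omitted.

/*
 * First-fit gap search over sorted, non-overlapping ranges.
 */
#include <stdint.h>
#include <stdio.h>

struct range { uint64_t start, end; };

static uint64_t first_fit(const struct range *vmas, int n,
			  uint64_t addr, uint64_t len)
{
	for (int i = 0; i < n; i++) {
		if (addr + len <= vmas[i].start)
			return addr;            /* gap before this VMA fits */
		if (addr < vmas[i].end)
			addr = vmas[i].end;     /* skip past this mapping */
	}
	return addr;                            /* fits after the last VMA */
}

int main(void)
{
	struct range vmas[] = { { 0x1000, 0x3000 }, { 0x4000, 0x8000 } };

	/* request lands in the 0x3000..0x4000 gap */
	printf("%#llx\n", (unsigned long long)
	       first_fit(vmas, 2, 0x1000, 0x1000));
	return 0;
}
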