Searched refs:vmm (Results 1 – 25 of 98) sorted by relevance

/linux/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
vmm.c
145 struct nvkm_vmm *vmm = it->vmm; in nvkm_vmm_unref_pdes() local
203 struct nvkm_vmm *vmm = it->vmm; in nvkm_vmm_unref_sptes() local
302 struct nvkm_vmm *vmm = it->vmm; in nvkm_vmm_ref_sptes() local
419 struct nvkm_vmm *vmm = it->vmm; in nvkm_vmm_ref_hwpt() local
513 it.vmm = vmm; in nvkm_vmm_iter()
1066 vmm->nullp, vmm->null); in nvkm_vmm_dtor()
1190 if (vmm->start > vmm->limit || vmm->limit > (1ULL << bits)) in nvkm_vmm_ctor()
1432 mutex_lock(&vmm->mutex.vmm); in nvkm_vmm_unmap()
1584 mutex_lock(&vmm->mutex.vmm); in nvkm_vmm_map()
1846 mutex_lock(&vmm->mutex.vmm); in nvkm_vmm_get()
[all …]
uvmm.c
51 struct nvkm_vmm *vmm = uvmm->vmm; in nvkm_uvmm_mthd_pfnclr() local
65 mutex_lock(&vmm->mutex.vmm); in nvkm_uvmm_mthd_pfnclr()
67 mutex_unlock(&vmm->mutex.vmm); in nvkm_uvmm_mthd_pfnclr()
98 mutex_lock(&vmm->mutex.vmm); in nvkm_uvmm_mthd_pfnmap()
100 mutex_unlock(&vmm->mutex.vmm); in nvkm_uvmm_mthd_pfnmap()
125 mutex_lock(&vmm->mutex.vmm); in nvkm_uvmm_mthd_unmap()
146 mutex_unlock(&vmm->mutex.vmm); in nvkm_uvmm_mthd_unmap()
180 mutex_lock(&vmm->mutex.vmm); in nvkm_uvmm_mthd_map()
222 mutex_lock(&vmm->mutex.vmm); in nvkm_uvmm_mthd_map()
247 mutex_lock(&vmm->mutex.vmm); in nvkm_uvmm_mthd_put()
[all …]
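
Across the vmm.c and uvmm.c hits above the same bracket repeats: every externally reachable VMM method (pfnclr, pfnmap, map, unmap, get, put) takes vmm->mutex.vmm around the actual page-table work. A minimal sketch of that pattern, assuming a hypothetical locked helper in place of the real method body:

static int
sketch_uvmm_mthd(struct nvkm_vmm *vmm, u64 addr, u64 size)
{
	int ret;

	/* Serialize against concurrent map/unmap/get/put on this VMM. */
	mutex_lock(&vmm->mutex.vmm);
	ret = do_vmm_op_locked(vmm, addr, size); /* hypothetical helper */
	mutex_unlock(&vmm->mutex.vmm);

	return ret;
}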
vmmnv44.c
66 VMM_WO032(pt, vmm, pteo + 0x0, tmp[0]); in nv44_vmm_pgt_fill()
67 VMM_WO032(pt, vmm, pteo + 0x4, tmp[1]); in nv44_vmm_pgt_fill()
68 VMM_WO032(pt, vmm, pteo + 0x8, tmp[2]); in nv44_vmm_pgt_fill()
82 nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, pten); in nv44_vmm_pgt_pte()
147 nv44_vmm_pgt_unmap(struct nvkm_vmm *vmm, in nv44_vmm_pgt_unmap() argument
213 struct nvkm_vmm *vmm; in nv44_vmm_new() local
217 argv, argc, key, name, &vmm); in nv44_vmm_new()
218 *pvmm = vmm; in nv44_vmm_new()
223 &vmm->null, GFP_KERNEL); in nv44_vmm_new()
224 if (!vmm->nullp) { in nv44_vmm_new()
[all …]
r535.c
30 r535_mmu_promote_vmm(struct nvkm_vmm *vmm) in r535_mmu_promote_vmm() argument
36 &vmm->rm.client, &vmm->rm.device); in r535_mmu_promote_vmm()
41 sizeof(*args), &vmm->rm.object); in r535_mmu_promote_vmm()
54 mutex_lock(&vmm->mutex.vmm); in r535_mmu_promote_vmm()
56 &vmm->rm.rsvd); in r535_mmu_promote_vmm()
57 mutex_unlock(&vmm->mutex.vmm); in r535_mmu_promote_vmm()
61 ctrl = nvkm_gsp_rm_ctrl_get(&vmm->rm.object, in r535_mmu_promote_vmm()
68 ctrl->virtAddrLo = vmm->rm.rsvd->addr; in r535_mmu_promote_vmm()
69 ctrl->virtAddrHi = vmm->rm.rsvd->addr + vmm->rm.rsvd->size - 1; in r535_mmu_promote_vmm()
79 if (vmm->pd->pde[0]->pde[0]) { in r535_mmu_promote_vmm()
[all …]
vmmnv50.c
148 u32 pdeo = vmm->mmu->func->vmm.pd_offset + (pdei * 8); in nv50_vmm_pgd_pde()
187 mutex_lock(&vmm->mmu->mutex); in nv50_vmm_flush()
189 if (!atomic_read(&vmm->engref[i])) in nv50_vmm_flush()
222 mutex_unlock(&vmm->mmu->mutex); in nv50_vmm_flush()
256 VMM_DEBUG(vmm, "args"); in nv50_vmm_valid()
280 kindm = vmm->mmu->func->kind(vmm->mmu, &kindn, &kind_inv); in nv50_vmm_valid()
282 VMM_DEBUG(vmm, "kind %02x", kind); in nv50_vmm_valid()
303 VMM_DEBUG(vmm, "comp %d", ret); in nv50_vmm_valid()
343 const u32 pd_offset = vmm->mmu->func->vmm.pd_offset; in nv50_vmm_join()
355 for (pdei = vmm->start >> 29; pdei <= (vmm->limit - 1) >> 29; pdei++) { in nv50_vmm_join()
[all …]
vmmgp100.c
34 gp100_vmm_pfn_unmap(struct nvkm_vmm *vmm, in gp100_vmm_pfn_unmap() argument
109 VMM_WO064(pt, vmm, ptei++ * 8, data); in gp100_vmm_pgt_pfn()
373 VMM_WO064(pd, vmm, pdei * 8, data); in gp100_vmm_pd1_pde()
435 VMM_DEBUG(vmm, "args"); in gp100_vmm_valid()
439 aper = vmm->func->aper(target); in gp100_vmm_valid()
443 kindm = vmm->mmu->func->kind(vmm->mmu, &kindn, &kind_inv); in gp100_vmm_valid()
445 VMM_DEBUG(vmm, "kind %02x", kind); in gp100_vmm_valid()
461 VMM_DEBUG(vmm, "comp %d", ret); in gp100_vmm_valid()
536 gp100_vmm_mthd(struct nvkm_vmm *vmm, in gp100_vmm_mthd() argument
565 gf100_vmm_invalidate(vmm, type); in gp100_vmm_flush()
[all …]
vmmgf100.c
44 VMM_WO064(pt, vmm, ptei++ * 8, data); in gf100_vmm_pgt_pte()
142 VMM_WO064(pd, vmm, pdei * 8, data); in gf100_vmm_pgd_pde()
194 mutex_lock(&vmm->mmu->mutex); in gf100_vmm_invalidate()
214 vmm->func->invalidate_pdb(vmm, addr); in gf100_vmm_invalidate()
224 mutex_unlock(&vmm->mmu->mutex); in gf100_vmm_invalidate()
233 gf100_vmm_invalidate(vmm, type); in gf100_vmm_flush()
268 VMM_DEBUG(vmm, "args"); in gf100_vmm_valid()
272 aper = vmm->func->aper(target); in gf100_vmm_valid()
276 kindm = vmm->mmu->func->kind(vmm->mmu, &kindn, &kind_inv); in gf100_vmm_valid()
278 VMM_DEBUG(vmm, "kind %02x", kind); in gf100_vmm_valid()
[all …]
vmmnv04.c
28 nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv04_vmm_pgt_pte() argument
33 VMM_WO032(pt, vmm, 8 + ptei++ * 4, data); in nv04_vmm_pgt_pte()
60 nv04_vmm_pgt_unmap(struct nvkm_vmm *vmm, in nv04_vmm_pgt_unmap() argument
63 VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes); in nv04_vmm_pgt_unmap()
80 nv04_vmm_valid(struct nvkm_vmm *vmm, void *argv, u32 argc, in nv04_vmm_valid() argument
88 VMM_DEBUG(vmm, "args"); in nv04_vmm_valid()
126 struct nvkm_vmm *vmm; in nv04_vmm_new() local
130 argv, argc, key, name, &vmm); in nv04_vmm_new()
131 *pvmm = vmm; in nv04_vmm_new()
135 mem = vmm->pd->pt[0]->memory; in nv04_vmm_new()
[all …]
vmmnv41.c
27 nv41_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv41_vmm_pgt_pte() argument
32 VMM_WO032(pt, vmm, ptei++ * 4, data); in nv41_vmm_pgt_pte()
38 nv41_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv41_vmm_pgt_sgl() argument
41 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_sgl()
52 VMM_WO032(pt, vmm, ptei++ * 4, data); in nv41_vmm_pgt_dma()
61 nv41_vmm_pgt_unmap(struct nvkm_vmm *vmm, in nv41_vmm_pgt_unmap() argument
64 VMM_FO032(pt, vmm, ptei * 4, 0, ptes); in nv41_vmm_pgt_unmap()
81 nv41_vmm_flush(struct nvkm_vmm *vmm, int level) in nv41_vmm_flush() argument
83 struct nvkm_device *device = vmm->mmu->subdev.device; in nv41_vmm_flush()
85 mutex_lock(&vmm->mmu->mutex); in nv41_vmm_flush()
[all …]
vmmtu102.c
27 tu102_vmm_flush(struct nvkm_vmm *vmm, int depth) in tu102_vmm_flush() argument
29 struct nvkm_device *device = vmm->mmu->subdev.device; in tu102_vmm_flush()
33 if (atomic_read(&vmm->engref[NVKM_SUBDEV_BAR])) in tu102_vmm_flush()
36 mutex_lock(&vmm->mmu->mutex); in tu102_vmm_flush()
38 if (!vmm->rm.bar2_pdb) in tu102_vmm_flush()
39 nvkm_wr32(device, 0xb830a0, vmm->pd->pt[0]->addr >> 8); in tu102_vmm_flush()
41 nvkm_wr32(device, 0xb830a0, vmm->rm.bar2_pdb >> 8); in tu102_vmm_flush()
50 mutex_unlock(&vmm->mmu->mutex); in tu102_vmm_flush()
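
The vmmtu102.c hits outline how the Turing TLB flush picks its page-directory base: under the MMU mutex, register 0xb830a0 is written with either the VMM's own top-level page-directory address or, when GSP-RM owns BAR2, the RM-provided PDB, both shifted right by 8. The else on line 40 is not shown in the listing, but the paired writes at lines 39 and 41 imply it; a hedged reconstruction:

	mutex_lock(&vmm->mmu->mutex);
	if (!vmm->rm.bar2_pdb)
		/* Bare metal: point the flush at our own page directory. */
		nvkm_wr32(device, 0xb830a0, vmm->pd->pt[0]->addr >> 8);
	else
		/* GSP-RM owns BAR2: use the PDB it handed us. */
		nvkm_wr32(device, 0xb830a0, vmm->rm.bar2_pdb >> 8);
	/* ... invalidate trigger and completion wait elided ... */
	mutex_unlock(&vmm->mmu->mutex);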
vmmgm200.c
28 gm200_vmm_pgt_sparse(struct nvkm_vmm *vmm, in gm200_vmm_pgt_sparse() argument
32 VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(32) /* VOL. */, ptes); in gm200_vmm_pgt_sparse()
53 gm200_vmm_pgd_sparse(struct nvkm_vmm *vmm, in gm200_vmm_pgd_sparse() argument
57 VMM_FO064(pt, vmm, pdei * 8, BIT_ULL(35) /* VOL_BIG. */, pdes); in gm200_vmm_pgd_sparse()
96 gm200_vmm_join_(struct nvkm_vmm *vmm, struct nvkm_memory *inst, u64 base) in gm200_vmm_join_() argument
98 if (vmm->func->page[1].shift == 16) in gm200_vmm_join_()
100 return gf100_vmm_join_(vmm, inst, base); in gm200_vmm_join_()
104 gm200_vmm_join(struct nvkm_vmm *vmm, struct nvkm_memory *inst) in gm200_vmm_join() argument
106 return gm200_vmm_join_(vmm, inst, 0); in gm200_vmm_join()
/linux/drivers/gpu/drm/nouveau/
nouveau_vmm.c
32 nvif_vmm_unmap(&vma->vmm->vmm, vma->addr); in nouveau_vma_unmap()
41 int ret = nouveau_mem_map(mem, &vma->vmm->vmm, &tmp); in nouveau_vma_map()
54 if (vma->vmm == vmm) in nouveau_vma_find()
68 nvif_vmm_put(&vma->vmm->vmm, &tmp); in nouveau_vma_del()
92 vma->vmm = vmm; in nouveau_vma_new()
101 ret = nvif_vmm_get(&vmm->vmm, LAZY, false, mem->mem.page, 0, in nouveau_vma_new()
109 ret = nvif_vmm_get(&vmm->vmm, PTES, false, mem->mem.page, 0, in nouveau_vma_new()
127 nvif_vmm_dtor(&vmm->vmm); in nouveau_vmm_fini()
128 vmm->cli = NULL; in nouveau_vmm_fini()
135 PAGE_SIZE, 0, NULL, 0, &vmm->vmm); in nouveau_vmm_init()
[all …]
nouveau_svm.c
210 mutex_lock(&svmm->vmm->cli->drm->svm->mutex); in nouveau_svmm_part()
243 nvif_object_mthd(&svmm->vmm->vmm.object, NVIF_VMM_V0_PFNCLR, in nouveau_svmm_invalidate()
266 if (unlikely(!svmm->vmm)) in nouveau_svmm_invalidate_range_start()
308 svmm->vmm = NULL; in nouveau_svmm_fini()
331 svmm->vmm = &cli->svm; in nouveau_svmm_init()
350 cli->vmm.vmm.object.oclass, MANAGED, in nouveau_svmm_init()
384 WARN_ON(nvif_object_mthd(&svm->drm->client.vmm.vmm.object, in nouveau_svm_fault_replay()
400 WARN_ON(nvif_object_mthd(&svm->drm->client.vmm.vmm.object, in nouveau_svm_fault_cancel()
637 ret = nvif_object_ioctl(&svmm->vmm->vmm.object, args, size, NULL); in nouveau_atomic_range_fault()
703 ret = nvif_object_ioctl(&svmm->vmm->vmm.object, args, size, NULL); in nouveau_range_fault()
[all …]
nouveau_chan.c
97 nouveau_svmm_part(chan->vmm->svmm, chan->inst); in nouveau_channel_del()
158 chan->vmm = nouveau_cli_vmm(cli); in nouveau_channel_prep()
193 ret = nouveau_vma_new(chan->push.buffer, chan->vmm, in nouveau_channel_prep()
208 args.limit = chan->vmm->vmm.limit - 1; in nouveau_channel_prep()
236 args.limit = chan->vmm->vmm.limit - 1; in nouveau_channel_prep()
309 args.chan.vmm = 0; in nouveau_channel_ctor()
314 args.chan.vmm = nvif_handle(&chan->vmm->vmm.object); in nouveau_channel_ctor()
398 args.limit = chan->vmm->vmm.limit - 1; in nouveau_channel_init()
416 args.limit = chan->vmm->vmm.limit - 1; in nouveau_channel_init()
427 args.limit = chan->vmm->vmm.limit - 1; in nouveau_channel_init()
[all …]
/linux/drivers/gpu/drm/nouveau/nvif/
vmm.c
194 kfree(vmm->page); in nvif_vmm_dtor()
195 nvif_object_dtor(&vmm->object); in nvif_vmm_dtor()
201 struct nvif_vmm *vmm) in nvif_vmm_ctor() argument
207 vmm->object.client = NULL; in nvif_vmm_ctor()
208 vmm->page = NULL; in nvif_vmm_ctor()
232 vmm->start = args->addr; in nvif_vmm_ctor()
233 vmm->limit = args->size; in nvif_vmm_ctor()
235 vmm->page_nr = args->page_nr; in nvif_vmm_ctor()
236 vmm->page = kmalloc_array(vmm->page_nr, sizeof(*vmm->page), in nvif_vmm_ctor()
238 if (!vmm->page) { in nvif_vmm_ctor()
[all …]
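
The nvif/vmm.c hits sketch the object's lifecycle: nvif_vmm_ctor() nulls the fields, copies start/limit/page_nr out of the reply args, and allocates the per-page-size array that nvif_vmm_dtor() later kfree()s before destroying the object. A condensed reconstruction of the ctor's tail; the allocation-failure body at line 238 is not in the listing, so the -ENOMEM value and the unwind label are assumptions:

	vmm->start = args->addr;
	vmm->limit = args->size;

	vmm->page_nr = args->page_nr;
	vmm->page = kmalloc_array(vmm->page_nr, sizeof(*vmm->page),
				  GFP_KERNEL);
	if (!vmm->page) {
		ret = -ENOMEM; /* assumed; failure body not shown above */
		goto done;     /* hypothetical unwind label */
	}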
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/bar/
r535.c
77 struct nvkm_vmm *vmm = gf100_bar(bar)->bar[0].vmm; in r535_bar_bar2_init() local
80 WARN_ON(r535_bar_bar2_update_pde(gsp, vmm->pd->pde[0]->pt[0]->addr)); in r535_bar_bar2_init()
81 vmm->rm.bar2_pdb = gsp->bar.rm_bar2_pdb; in r535_bar_bar2_init()
115 struct nvkm_vmm *vmm = gf100_bar(bar)->bar[1].vmm; in r535_bar_bar1_init() local
123 nvkm_memory_unref(&vmm->pd->pt[0]->memory); in r535_bar_bar1_init()
125 ret = nvkm_memory_kmap(pd3, &vmm->pd->pt[0]->memory); in r535_bar_bar1_init()
130 vmm->pd->pt[0]->addr = nvkm_memory_addr(vmm->pd->pt[0]->memory); in r535_bar_bar1_init()
163 rm->bar1.vmm = hw->bar1.vmm; in r535_bar_new_()
167 rm->bar2.vmm = hw->bar2.vmm; in r535_bar_new_()
gf100.c
34 return gf100_bar(base)->bar[1].vmm; in gf100_bar_bar1_vmm()
63 return gf100_bar(base)->bar[0].vmm; in gf100_bar_bar2_vmm()
108 bar_vm->vmm->debug = bar->base.subdev.debug; in gf100_bar_oneinit_bar()
114 ret = nvkm_vmm_boot(bar_vm->vmm); in gf100_bar_oneinit_bar()
119 return nvkm_vmm_join(bar_vm->vmm, bar_vm->inst); in gf100_bar_oneinit_bar()
153 nvkm_vmm_part(bar->bar[1].vmm, bar->bar[1].inst); in gf100_bar_dtor()
154 nvkm_vmm_unref(&bar->bar[1].vmm); in gf100_bar_dtor()
157 nvkm_vmm_part(bar->bar[0].vmm, bar->bar[0].inst); in gf100_bar_dtor()
158 nvkm_vmm_unref(&bar->bar[0].vmm); in gf100_bar_dtor()
183 .bar1.vmm = gf100_bar_bar1_vmm,
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
cgrp.c
102 nvkm_vmm_put(vctx->vmm, &vctx->vma); in nvkm_cgrp_vctx_put()
106 if (vctx->vmm) { in nvkm_cgrp_vctx_put()
107 atomic_dec(&vctx->vmm->engref[engn->engine->subdev.type]); in nvkm_cgrp_vctx_put()
108 nvkm_vmm_unref(&vctx->vmm); in nvkm_cgrp_vctx_put()
128 vctx->ectx->engn == engn && vctx->vmm == chan->vmm); in nvkm_cgrp_vctx_get()
150 vctx->vmm = nvkm_vmm_ref(chan->vmm); in nvkm_cgrp_vctx_get()
155 if (vctx->vmm) in nvkm_cgrp_vctx_get()
156 atomic_inc(&vctx->vmm->engref[engn->engine->subdev.type]); in nvkm_cgrp_vctx_get()
184 nvkm_vmm_unref(&cgrp->vmm); in nvkm_cgrp_del()
222 nvkm_cgrp_new(struct nvkm_runl *runl, const char *name, struct nvkm_vmm *vmm, bool hw, in nvkm_cgrp_new() argument
[all …]
chan.c
103 cctx->vctx->ectx->engn == engn && cctx->vctx->vmm == chan->vmm); in nvkm_chan_cctx_get()
288 if (chan->vmm) { in nvkm_chan_del()
289 nvkm_vmm_part(chan->vmm, chan->inst->memory); in nvkm_chan_del()
290 nvkm_vmm_unref(&chan->vmm); in nvkm_chan_del()
361 (!func->inst->vmm != !vmm) || in nvkm_chan_new_()
368 runl->func->runqs, runq, func->inst->vmm, vmm, in nvkm_chan_new_()
404 if (cgrp->runl != runl || cgrp->vmm != vmm) { in nvkm_chan_new_()
405 RUNL_DEBUG(runl, "cgrp %d %d", cgrp->runl != runl, cgrp->vmm != vmm); in nvkm_chan_new_()
421 if (func->inst->vmm) { in nvkm_chan_new_()
422 if (WARN_ON(vmm->mmu != device->mmu)) in nvkm_chan_new_()
[all …]
ucgrp.c
86 struct nvkm_vmm *vmm; in nvkm_ucgrp_new() local
102 vmm = nvkm_uvmm_search(oclass->client, args->v0.vmm); in nvkm_ucgrp_new()
103 if (IS_ERR(vmm)) in nvkm_ucgrp_new()
104 return PTR_ERR(vmm); in nvkm_ucgrp_new()
115 ret = nvkm_cgrp_new(runl, args->v0.name, vmm, true, &ucgrp->cgrp); in nvkm_ucgrp_new()
123 nvkm_vmm_unref(&vmm); in nvkm_ucgrp_new()
/linux/drivers/gpu/drm/nouveau/nvkm/engine/gr/
r535.c
65 struct nvkm_vmm *vmm; member
115 nvkm_vmm_put(grc->vmm, &grc->vma[i]); in r535_gr_chan_dtor()
119 nvkm_vmm_unref(&grc->vmm); in r535_gr_chan_dtor()
180 mutex_lock(&vmm->mutex.vmm); in r535_gr_promote_ctx()
183 mutex_unlock(&vmm->mutex.vmm); in r535_gr_promote_ctx()
224 grc->vmm = nvkm_vmm_ref(chan->vmm); in r535_gr_chan_new()
254 struct nvkm_vmm *vmm; in r535_gr_oneinit() member
269 ret = mmu->func->promote_vmm(golden.vmm); in r535_gr_oneinit()
307 args->hVASpace = golden.vmm->rm.object.handle; in r535_gr_oneinit()
456 nvkm_vmm_put(golden.vmm, &golden.vma[i]); in r535_gr_oneinit()
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/
nv50.c
138 while ((ret = nvkm_vmm_get(vmm, 12, size, &bar))) { in nv50_instobj_kmap()
159 nvkm_vmm_put(vmm, &ebar); in nv50_instobj_kmap()
163 ret = nvkm_memory_map(memory, 0, vmm, bar, NULL, 0); in nv50_instobj_kmap()
168 nvkm_vmm_put(vmm, &bar); in nv50_instobj_kmap()
179 nvkm_vmm_put(vmm, &iobj->bar); in nv50_instobj_kmap()
221 struct nvkm_vmm *vmm; in nv50_instobj_acquire() local
241 if ((vmm = nvkm_bar_bar2_vmm(imem->subdev.device))) { in nv50_instobj_acquire()
243 nv50_instobj_kmap(iobj, vmm); in nv50_instobj_acquire()
280 nv50_instobj_kmap(iobj, vmm); in nv50_instobj_boot()
334 if (likely(vmm)) /* Can be NULL during BAR destructor. */ in nv50_instobj_dtor()
[all …]
/linux/drivers/gpu/drm/nouveau/include/nvif/
vmm.h
50 int nvif_vmm_raw_get(struct nvif_vmm *vmm, u64 addr, u64 size, u8 shift);
51 int nvif_vmm_raw_put(struct nvif_vmm *vmm, u64 addr, u64 size, u8 shift);
52 int nvif_vmm_raw_map(struct nvif_vmm *vmm, u64 addr, u64 size, u8 shift,
54 int nvif_vmm_raw_unmap(struct nvif_vmm *vmm, u64 addr, u64 size,
56 int nvif_vmm_raw_sparse(struct nvif_vmm *vmm, u64 addr, u64 size, bool ref);
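
Taken together, the raw declarations suggest a get/map ... unmap/put bracket over a caller-managed range. A minimal, hypothetical usage sketch using only the two signatures shown in full above; the trailing parameters of nvif_vmm_raw_map()/_unmap() are truncated in the listing and deliberately left out:

/* Hypothetical caller: reserve a raw range at a fixed address with a
 * given page shift, then release it again.
 */
static int
sketch_raw_reserve(struct nvif_vmm *vmm, u64 addr, u64 size, u8 shift)
{
	int ret;

	ret = nvif_vmm_raw_get(vmm, addr, size, shift);
	if (ret)
		return ret;

	/* nvif_vmm_raw_map()/_unmap() would go here; their remaining
	 * arguments are elided in the listing above.
	 */

	return nvif_vmm_raw_put(vmm, addr, size, shift);
}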
/linux/drivers/gpu/drm/
drm_gem_vram_helper.c
191 struct drm_vram_mm *vmm = dev->vram_mm; in drm_gem_vram_create() local
195 if (WARN_ONCE(!vmm, "VRAM MM not initialized")) in drm_gem_vram_create()
219 bdev = &vmm->bdev; in drm_gem_vram_create()
945 struct drm_vram_mm *vmm = entry->dev->vram_mm; in drm_vram_mm_debugfs() local
975 vmm->vram_base = vram_base; in drm_vram_mm_init()
976 vmm->vram_size = vram_size; in drm_vram_mm_init()
985 ret = ttm_range_man_init(&vmm->bdev, TTM_PL_VRAM, in drm_vram_mm_init()
995 ttm_range_man_fini(&vmm->bdev, TTM_PL_VRAM); in drm_vram_mm_cleanup()
996 ttm_device_fini(&vmm->bdev); in drm_vram_mm_cleanup()
1081 struct drm_vram_mm *vmm = dev->vram_mm; in drm_vram_helper_mode_valid_internal() local
[all …]
/linux/arch/xtensa/kernel/
syscall.c
61 struct vm_area_struct *vmm; in arch_get_unmapped_area() local
85 for_each_vma(vmi, vmm) { in arch_get_unmapped_area()
87 if (addr + len <= vm_start_gap(vmm)) in arch_get_unmapped_area()
90 addr = vmm->vm_end; in arch_get_unmapped_area()
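
The xtensa hit is unrelated to Nouveau: here vmm is a struct vm_area_struct cursor inside arch_get_unmapped_area(). The snippets outline a first-fit walk over the VMA list; a hedged reconstruction of that loop, with the vma_iterator setup, alignment fixups, and TASK_SIZE checks elided:

	/* First-fit search: walk VMAs in address order and stop at the
	 * first gap large enough to hold len bytes.
	 */
	for_each_vma(vmi, vmm) {
		/* Does [addr, addr + len) fit before this VMA's guard gap? */
		if (addr + len <= vm_start_gap(vmm))
			break;
		/* No: keep searching from the end of this VMA. */
		addr = vmm->vm_end;
	}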
