
Searched refs:heap (Results 1 – 25 of 44) sorted by relevance

/drivers/gpu/drm/panthor/
panthor_heap.c
130 heap->chunk_count--; in panthor_free_heap_chunk()
185 heap->chunk_count++; in panthor_alloc_heap_chunk()
232 if (!heap) in panthor_heap_destroy_locked()
237 kfree(heap); in panthor_heap_destroy_locked()
305 heap = kzalloc(sizeof(*heap), GFP_KERNEL); in panthor_heap_create()
306 if (!heap) { in panthor_heap_create()
352 kfree(heap); in panthor_heap_create()
384 if (!heap) { in panthor_heap_return_chunk()
396 heap->chunk_count--; in panthor_heap_return_chunk()
448 if (!heap) { in panthor_heap_grow()
[all …]
/drivers/dma-buf/
dma-heap.c
66 dmabuf = heap->ops->allocate(heap, len, fd_flags, heap_flags); in dma_heap_buffer_alloc()
83 if (!heap) { in dma_heap_open()
203 return heap->priv; in dma_heap_get_drvdata()
215 return heap->name; in dma_heap_get_name()
239 heap = kzalloc(sizeof(*heap), GFP_KERNEL); in dma_heap_add()
240 if (!heap) in dma_heap_add()
260 ret = cdev_add(&heap->heap_cdev, heap->heap_devt, 1); in dma_heap_add()
269 heap->heap_devt, in dma_heap_add()
271 heap->name); in dma_heap_add()
294 return heap; in dma_heap_add()
[all …]
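
The dma_heap_buffer_alloc()/dma_heap_open() hits above are the kernel end of the /dev/dma_heap/<name> character-device interface. A minimal userspace sketch of that path, assuming the standard uapi header and a registered heap named "system":

#include <fcntl.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/dma-heap.h>

int main(void)
{
	struct dma_heap_allocation_data alloc = {
		.len = 4096,			/* one page */
		.fd_flags = O_RDWR | O_CLOEXEC,	/* flags for the returned dma-buf fd */
	};
	int heap_fd = open("/dev/dma_heap/system", O_RDONLY | O_CLOEXEC);

	if (heap_fd < 0 || ioctl(heap_fd, DMA_HEAP_IOCTL_ALLOC, &alloc) < 0) {
		perror("dma-heap alloc");
		return 1;
	}
	printf("dma-buf fd: %d\n", alloc.fd);	/* mmap()able buffer handle */
	close(alloc.fd);
	close(heap_fd);
	return 0;
}
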
/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
base.c
258 mmu->type[mmu->type_nr].type = type | mmu->heap[heap].type; in nvkm_mmu_type()
259 mmu->type[mmu->type_nr].heap = heap; in nvkm_mmu_type()
269 mmu->heap[mmu->heap_nr].type = type; in nvkm_mmu_heap()
270 mmu->heap[mmu->heap_nr].size = size; in nvkm_mmu_heap()
282 int heap; in nvkm_mmu_host() local
285 heap = nvkm_mmu_heap(mmu, NVKM_MEM_HOST, ~0ULL); in nvkm_mmu_host()
286 nvkm_mmu_type(mmu, heap, type); in nvkm_mmu_host()
298 nvkm_mmu_type(mmu, heap, type); in nvkm_mmu_host()
323 u8 heap = NVKM_MEM_VRAM; in nvkm_mmu_vram() local
329 heap |= NVKM_MEM_COMP; in nvkm_mmu_vram()
[all …]
ummu.c
69 args->v0.size = mmu->heap[index].size; in nvkm_ummu_heap()
90 args->v0.heap = mmu->type[index].heap; in nvkm_ummu_type()
/drivers/gpu/drm/nouveau/nvkm/core/
mm.c
99 b->heap = a->heap; in region_head()
111 nvkm_mm_head(struct nvkm_mm *mm, u8 heap, u8 type, u32 size_max, u32 size_min, in nvkm_mm_head() argument
122 if (unlikely(heap != NVKM_MM_HEAP_ANY)) { in nvkm_mm_head()
123 if (this->heap != heap) in nvkm_mm_head()
175 b->heap = a->heap; in region_tail()
186 nvkm_mm_tail(struct nvkm_mm *mm, u8 heap, u8 type, u32 size_max, u32 size_min, in nvkm_mm_tail() argument
198 if (unlikely(heap != NVKM_MM_HEAP_ANY)) { in nvkm_mm_tail()
199 if (this->heap != heap) in nvkm_mm_tail()
240 nvkm_mm_init(struct nvkm_mm *mm, u8 heap, u32 offset, u32 length, u32 block) in nvkm_mm_init() argument
277 node->heap = heap; in nvkm_mm_init()
gpuobj.c
180 ret = nvkm_mm_head(&parent->heap, 0, 1, size, size, in nvkm_gpuobj_ctor()
183 ret = nvkm_mm_tail(&parent->heap, 0, 1, size, size, in nvkm_gpuobj_ctor()
211 return nvkm_mm_init(&gpuobj->heap, 0, 0, gpuobj->size, 1); in nvkm_gpuobj_ctor()
220 nvkm_mm_free(&gpuobj->parent->heap, &gpuobj->node); in nvkm_gpuobj_del()
221 nvkm_mm_fini(&gpuobj->heap); in nvkm_gpuobj_del()
/drivers/gpu/drm/nouveau/include/nvkm/core/
mm.h
12 u8 heap; member
34 int nvkm_mm_init(struct nvkm_mm *, u8 heap, u32 offset, u32 length, u32 block);
36 int nvkm_mm_head(struct nvkm_mm *, u8 heap, u8 type, u32 size_max,
38 int nvkm_mm_tail(struct nvkm_mm *, u8 heap, u8 type, u32 size_max,
44 nvkm_mm_heap_size(struct nvkm_mm *mm, u8 heap) in nvkm_mm_heap_size() argument
49 if (node->heap == heap) in nvkm_mm_heap_size()
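
Read together, the mm.h prototypes above and the gpuobj.c hits (plus the instmem call sites later in these results) outline nvkm's range-allocator lifecycle. A minimal sketch under those signatures; 'reserved', 'size' and 'align' are illustrative, and type 1 mirrors the existing call sites:

#include <core/mm.h>	/* nvkm-internal header used by the files above */

static int example_mm_roundtrip(u32 reserved, u32 size, u32 align)
{
	struct nvkm_mm mm;
	struct nvkm_mm_node *node = NULL;
	int ret;

	/* One heap covering [0, reserved) with 1-byte block granularity. */
	ret = nvkm_mm_init(&mm, NVKM_MM_HEAP_ANY, 0, reserved, 1);
	if (ret)
		return ret;

	/* Fixed-size allocation carved from the bottom of the range;
	 * nvkm_mm_tail() would carve from the top instead. */
	ret = nvkm_mm_head(&mm, NVKM_MM_HEAP_ANY, 1, size, size,
			   align ? align : 1, &node);
	if (ret == 0)
		nvkm_mm_free(&mm, &node);	/* returns the region, clears node */

	return nvkm_mm_fini(&mm);
}
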
/drivers/gpu/drm/nouveau/nvif/
mmu.c
35 kfree(mmu->heap); in nvif_mmu_dtor()
53 mmu->heap = NULL; in nvif_mmu_ctor()
72 mmu->heap = kmalloc_array(mmu->heap_nr, sizeof(*mmu->heap), in nvif_mmu_ctor()
76 if (ret = -ENOMEM, !mmu->heap || !mmu->type) in nvif_mmu_ctor()
92 mmu->heap[i].size = args.size; in nvif_mmu_ctor()
112 mmu->type[i].heap = args.heap; in nvif_mmu_ctor()
/drivers/md/bcache/
movinggc.c
194 return (b = heap_peek(&ca->heap)) ? GC_SECTORS_USED(b) : 0; in bucket_heap_top()
212 ca->heap.used = 0; in bch_moving_gc()
221 if (!heap_full(&ca->heap)) { in bch_moving_gc()
223 heap_add(&ca->heap, b, bucket_cmp); in bch_moving_gc()
224 } else if (bucket_cmp(b, heap_peek(&ca->heap))) { in bch_moving_gc()
228 ca->heap.data[0] = b; in bch_moving_gc()
229 heap_sift(&ca->heap, 0, bucket_cmp); in bch_moving_gc()
234 heap_pop(&ca->heap, b, bucket_cmp); in bch_moving_gc()
238 while (heap_pop(&ca->heap, b, bucket_cmp)) in bch_moving_gc()
util.h
39 #define init_heap(heap, _size, gfp) \ argument
42 (heap)->used = 0; \
43 (heap)->size = (_size); \
44 _bytes = (heap)->size * sizeof(*(heap)->data); \
45 (heap)->data = kvmalloc(_bytes, (gfp) & GFP_KERNEL); \
46 (heap)->data; \
49 #define free_heap(heap) \ argument
51 kvfree((heap)->data); \
52 (heap)->data = NULL; \
alloc.c
182 ca->heap.used = 0; in invalidate_buckets_lru()
188 if (!heap_full(&ca->heap)) in invalidate_buckets_lru()
189 heap_add(&ca->heap, b, bucket_max_cmp); in invalidate_buckets_lru()
190 else if (bucket_max_cmp(b, heap_peek(&ca->heap))) { in invalidate_buckets_lru()
191 ca->heap.data[0] = b; in invalidate_buckets_lru()
192 heap_sift(&ca->heap, 0, bucket_max_cmp); in invalidate_buckets_lru()
196 for (i = ca->heap.used / 2 - 1; i >= 0; --i) in invalidate_buckets_lru()
197 heap_sift(&ca->heap, i, bucket_min_cmp); in invalidate_buckets_lru()
200 if (!heap_pop(&ca->heap, b, bucket_min_cmp)) { in invalidate_buckets_lru()
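
All of the bcache hits use the macro-based binary heap from util.h above. A minimal usage sketch, assuming the DECLARE_HEAP() companion macro from the same header and an illustrative int element type:

#include "util.h"	/* drivers/md/bcache/util.h */

static bool int_cmp(int l, int r)
{
	return l < r;	/* comparator in the style of bucket_cmp() */
}

static void heap_example(void)
{
	DECLARE_HEAP(int, heap);	/* { size_t size, used; int *data; } */
	int v;

	if (!init_heap(&heap, 16, GFP_KERNEL))	/* macro yields the data pointer */
		return;

	if (!heap_full(&heap))
		heap_add(&heap, 42, int_cmp);

	/* Drain the heap, mirroring the bch_moving_gc() loop above. */
	while (heap_pop(&heap, v, int_cmp))
		pr_info("popped %d\n", v);

	free_heap(&heap);
}
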
/drivers/dma-buf/heaps/
Kconfig
5 Choose this option to enable the system dmabuf heap. The system heap
12 Choose this option to enable dma-buf CMA heap. This heap is backed
21 Add a duplicate CMA-backed dma-buf heap with legacy naming derived
cma_heap.c
31 struct dma_heap *heap; member
36 struct cma_heap *heap; member
251 struct cma_heap *cma_heap = buffer->heap; in cma_heap_dma_buf_release()
279 static struct dma_buf *cma_heap_allocate(struct dma_heap *heap, in cma_heap_allocate() argument
284 struct cma_heap *cma_heap = dma_heap_get_drvdata(heap); in cma_heap_allocate()
343 buffer->heap = cma_heap; in cma_heap_allocate()
347 exp_info.exp_name = dma_heap_get_name(heap); in cma_heap_allocate()
387 cma_heap->heap = dma_heap_add(&exp_info); in __add_cma_heap()
388 if (IS_ERR(cma_heap->heap)) { in __add_cma_heap()
389 int ret = PTR_ERR(cma_heap->heap); in __add_cma_heap()
system_heap.c
25 struct dma_heap *heap; member
323 static struct dma_buf *system_heap_allocate(struct dma_heap *heap, in system_heap_allocate() argument
345 buffer->heap = heap; in system_heap_allocate()
382 exp_info.exp_name = dma_heap_get_name(heap); in system_heap_allocate()
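
cma_heap.c and system_heap.c above share one provider pattern: implement dma_heap_ops.allocate() and register with dma_heap_add(). A stripped-down sketch of that registration; the my_heap_* names are hypothetical, and allocate()'s flag argument types have varied slightly across kernel versions:

#include <linux/dma-buf.h>
#include <linux/dma-heap.h>
#include <linux/err.h>
#include <linux/module.h>

static struct dma_buf *my_heap_allocate(struct dma_heap *heap,
					unsigned long len,
					u32 fd_flags, u64 heap_flags)
{
	/* A real heap would back 'len' bytes and export them with
	 * dma_buf_export(), as cma_heap_allocate() does above. */
	return ERR_PTR(-ENOMEM);	/* stub */
}

static const struct dma_heap_ops my_heap_ops = {
	.allocate = my_heap_allocate,
};

static int __init my_heap_init(void)
{
	struct dma_heap_export_info exp_info = {
		.name = "my_heap",	/* exposed as /dev/dma_heap/my_heap */
		.ops  = &my_heap_ops,
		.priv = NULL,		/* retrievable via dma_heap_get_drvdata() */
	};
	struct dma_heap *heap = dma_heap_add(&exp_info);

	return IS_ERR(heap) ? PTR_ERR(heap) : 0;
}
module_init(my_heap_init);
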
/drivers/gpu/drm/nouveau/nvkm/subdev/gsp/
tu102.c
227 meta->gspFwRsvdStart = gsp->fb.heap.addr; in tu102_gsp_wpr_meta_init()
228 meta->nonWprHeapOffset = gsp->fb.heap.addr; in tu102_gsp_wpr_meta_init()
229 meta->nonWprHeapSize = gsp->fb.heap.size; in tu102_gsp_wpr_meta_init()
231 meta->gspFwHeapOffset = gsp->fb.wpr2.heap.addr; in tu102_gsp_wpr_meta_init()
232 meta->gspFwHeapSize = gsp->fb.wpr2.heap.size; in tu102_gsp_wpr_meta_init()
319 gsp->fb.wpr2.heap.size = tu102_gsp_wpr_heap_size(gsp); in tu102_gsp_oneinit()
321 gsp->fb.wpr2.heap.addr = ALIGN_DOWN(gsp->fb.wpr2.elf.addr - gsp->fb.wpr2.heap.size, 0x100000); in tu102_gsp_oneinit()
322 gsp->fb.wpr2.heap.size = ALIGN_DOWN(gsp->fb.wpr2.elf.addr - gsp->fb.wpr2.heap.addr, 0x100000); in tu102_gsp_oneinit()
324 gsp->fb.wpr2.addr = ALIGN_DOWN(gsp->fb.wpr2.heap.addr - sizeof(GspFwWprMeta), 0x100000); in tu102_gsp_oneinit()
327 gsp->fb.heap.size = 0x100000; in tu102_gsp_oneinit()
[all …]
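
The tu102_gsp_oneinit() lines carve the WPR2 heap downward from the GSP ELF image in 1 MiB steps. A worked example of that arithmetic with illustrative values (the real addresses come from the loaded firmware):

#include <linux/align.h>	/* ALIGN_DOWN() */
#include <linux/types.h>

static void wpr2_layout_example(void)
{
	u64 elf_addr  = 0x1f580000ULL;	/* hypothetical wpr2.elf.addr */
	u64 heap_size = 0x8000000ULL;	/* hypothetical heap-size estimate */
	u64 heap_addr, wpr2_addr;

	heap_addr = ALIGN_DOWN(elf_addr - heap_size, 0x100000);	/* 0x17500000 */
	heap_size = ALIGN_DOWN(elf_addr - heap_addr, 0x100000);	/* 0x8000000 */
	/* One more 1 MiB step down leaves room for the GspFwWprMeta header. */
	wpr2_addr = ALIGN_DOWN(heap_addr - 0x1000 /* meta */, 0x100000);	/* 0x17400000 */
	(void)wpr2_addr;
}
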
gh100.c
101 rsvd_size = gsp->fb.heap.size; in gh100_gsp_init()
142 gsp->fb.heap.size = gsp->rm->wpr->heap_size_non_wpr; in gh100_gsp_wpr_meta_init()
161 meta->nonWprHeapSize = gsp->fb.heap.size; in gh100_gsp_wpr_meta_init()
/drivers/accel/amdxdna/
aie2_ctx.c
59 struct amdxdna_gem_obj *heap = hwctx->priv->heap; in aie2_hwctx_restart() local
69 heap->mem.userptr, heap->mem.size); in aie2_hwctx_restart()
532 struct amdxdna_gem_obj *heap; in aie2_hwctx_init() local
542 heap = client->dev_heap; in aie2_hwctx_init()
543 if (!heap) { in aie2_hwctx_init()
549 drm_gem_object_get(to_gobj(heap)); in aie2_hwctx_init()
551 priv->heap = heap; in aie2_hwctx_init()
554 ret = amdxdna_gem_pin(heap); in aie2_hwctx_init()
613 heap->mem.userptr, heap->mem.size); in aie2_hwctx_init()
648 amdxdna_gem_unpin(heap); in aie2_hwctx_init()
[all …]
amdxdna_gem.c
32 struct amdxdna_gem_obj *heap; in amdxdna_gem_heap_alloc() local
39 heap = client->dev_heap; in amdxdna_gem_heap_alloc()
40 if (!heap) { in amdxdna_gem_heap_alloc()
53 mem->size, heap->mem.size); in amdxdna_gem_heap_alloc()
68 offset = mem->dev_addr - heap->mem.dev_addr; in amdxdna_gem_heap_alloc()
69 mem->userptr = heap->mem.userptr + offset; in amdxdna_gem_heap_alloc()
70 mem->kva = heap->mem.kva + offset; in amdxdna_gem_heap_alloc()
72 drm_gem_object_get(to_gobj(heap)); in amdxdna_gem_heap_alloc()
90 struct amdxdna_gem_obj *heap; in amdxdna_gem_heap_free() local
96 heap = abo->client->dev_heap; in amdxdna_gem_heap_free()
[all …]
/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/
nv04.c
32 struct nvkm_mm heap; member
104 nvkm_mm_free(&iobj->imem->heap, &iobj->node); in nv04_instobj_dtor()
137 ret = nvkm_mm_head(&imem->heap, 0, 1, size, size, align ? align : 1, &iobj->node); in nv04_instobj_new()
210 ret = nvkm_mm_init(&imem->heap, 0, 0, imem->base.reserved, 1); in nv04_instmem_oneinit()
248 nvkm_mm_fini(&imem->heap); in nv04_instmem_dtor()
nv40.c
32 struct nvkm_mm heap; member
103 nvkm_mm_free(&iobj->imem->heap, &iobj->node); in nv40_instobj_dtor()
136 ret = nvkm_mm_head(&imem->heap, 0, 1, size, size, align ? align : 1, &iobj->node); in nv40_instobj_new()
179 ret = nvkm_mm_init(&imem->heap, 0, 0, imem->base.reserved, 1); in nv40_instmem_oneinit()
221 nvkm_mm_fini(&imem->heap); in nv40_instmem_dtor()
/drivers/gpu/drm/nouveau/include/nvif/
mmu.h
16 } *heap; member
28 u8 heap; member
/drivers/gpu/drm/lima/
lima_gp.c
148 task->heap = bo; in lima_gp_task_run()
253 if (fail_size == task->heap->heap_size) { in lima_gp_task_recover()
256 ret = lima_heap_alloc(task->heap, task->vm); in lima_gp_task_recover()
266 f[LIMA_GP_PLBU_ALLOC_START_ADDR >> 2] + task->heap->heap_size; in lima_gp_task_recover()
/drivers/gpu/drm/nouveau/nvkm/subdev/fb/
ram.c
148 nvkm_ram_get(struct nvkm_device *device, u8 heap, u8 type, u8 rpage, u64 size, in nvkm_ram_get() argument
177 ret = nvkm_mm_tail(mm, heap, type, max, min, align, &r); in nvkm_ram_get()
179 ret = nvkm_mm_head(mm, heap, type, max, min, align, &r); in nvkm_ram_get()
/drivers/gpu/drm/nouveau/include/nvkm/subdev/
mmu.h
130 } heap[4]; member
139 u8 heap; member
/drivers/gpu/drm/nouveau/
nouveau_abi16.c
166 nvkm_mm_free(&chan->heap, &ntfy->node); in nouveau_abi16_ntfy_fini()
198 if (chan->heap.block_size) in nouveau_abi16_chan_fini()
199 nvkm_mm_fini(&chan->heap); in nouveau_abi16_chan_fini()
489 ret = nvkm_mm_init(&chan->heap, 0, 0, PAGE_SIZE, 1); in nouveau_abi16_ioctl_channel_alloc()
644 ret = nvkm_mm_head(&chan->heap, 0, 1, info->size, info->size, 1, in nouveau_abi16_ioctl_notifierobj_alloc()
