
Searched refs:mm (results 1–25 of 359, sorted by relevance)


/drivers/gpu/drm/
drm_buddy.c
153 mm->clear_avail -= drm_buddy_block_size(mm, buddy); in __drm_buddy_free()
261 mm->free_list = kmalloc_array(mm->max_order + 1, in drm_buddy_init()
272 mm->roots = kmalloc_array(mm->n_roots, in drm_buddy_init()
313 drm_block_free(mm, mm->roots[i]); in drm_buddy_init()
344 drm_block_free(mm, mm->roots[i]); in drm_buddy_fini()
350 WARN_ON(mm->avail != mm->size); in drm_buddy_fini()
461 mm->avail += drm_buddy_block_size(mm, block); in drm_buddy_free_block()
968 mm->avail += drm_buddy_block_size(mm, block); in drm_buddy_block_trim()
980 mm->avail -= drm_buddy_block_size(mm, block); in drm_buddy_block_trim()
1124 mm->avail -= drm_buddy_block_size(mm, block); in drm_buddy_alloc_blocks()
[all …]
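
The drm_buddy.c hits above are DRM's binary buddy allocator, which the i915 and xe TTM managers further down in these results build on to carve up VRAM. For orientation, a minimal init/alloc/free round trip looks roughly like the sketch below; the sizes and the zero flags argument are illustrative, not taken from the hits.

```c
/* Minimal drm_buddy lifecycle sketch; error handling trimmed,
 * sizes illustrative. */
#include <linux/sizes.h>
#include <drm/drm_buddy.h>

static int demo_buddy(void)
{
	struct drm_buddy mm;
	LIST_HEAD(blocks);
	int err;

	/* One 1 GiB pool managed in 4 KiB minimum chunks. */
	err = drm_buddy_init(&mm, SZ_1G, SZ_4K);
	if (err)
		return err;

	/* 64 KiB from anywhere in the range; 0 = no special flags. */
	err = drm_buddy_alloc_blocks(&mm, 0, SZ_1G, SZ_64K, SZ_4K,
				     &blocks, 0);
	if (!err)
		drm_buddy_free_list(&mm, &blocks, 0);

	/* fini WARNs if blocks are still outstanding; that is the
	 * WARN_ON(mm->avail != mm->size) hit at line 350 above. */
	drm_buddy_fini(&mm);
	return err;
}
```
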
drm_mm.c
167 struct drm_mm *mm = hole_node->mm; in drm_mm_interval_tree_add_node() local
268 struct drm_mm *mm = node->mm; in add_hole() local
475 node->mm = mm; in drm_mm_reserve_node()
590 node->mm = mm; in drm_mm_insert_node_in_range()
629 struct drm_mm *mm = node->mm; in drm_mm_remove_node() local
714 scan->mm = mm; in drm_mm_scan_init_with_range()
748 struct drm_mm *mm = scan->mm; in drm_mm_scan_add_block() local
754 DRM_MM_BUG_ON(node->mm != mm); in drm_mm_scan_add_block()
841 DRM_MM_BUG_ON(node->mm != scan->mm); in drm_mm_scan_remove_block()
879 struct drm_mm *mm = scan->mm; in drm_mm_scan_color_evict() local
[all …]
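
drm_mm.c is the older range allocator from the same directory; the hits cover node insertion, removal, and the eviction scan helpers. A minimal sketch of the insert/remove cycle, with made-up range and size values:

```c
/* Minimal drm_mm round trip; the node must be zeroed before insert. */
#include <linux/sizes.h>
#include <drm/drm_mm.h>

static int demo_mm(void)
{
	struct drm_mm mm;
	struct drm_mm_node node = {};
	int err;

	drm_mm_init(&mm, 0, SZ_16M);		/* manage [0, 16 MiB) */

	/* 4 KiB, 4 KiB-aligned, anywhere in the range, no color. */
	err = drm_mm_insert_node_in_range(&mm, &node, SZ_4K, SZ_4K, 0,
					  0, SZ_16M, DRM_MM_INSERT_BEST);
	if (!err)
		drm_mm_remove_node(&node);

	drm_mm_takedown(&mm);	/* WARNs if nodes are still allocated */
	return err;
}
```
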
drm_gpusvm.c
448 gpusvm->mm = mm; in drm_gpusvm_init()
457 mmgrab(mm); in drm_gpusvm_init()
888 struct mm_struct *mm = gpusvm->mm; in drm_gpusvm_find_vma_start() local
902 mmput(mm); in drm_gpusvm_find_vma_start()
930 struct mm_struct *mm = gpusvm->mm; in drm_gpusvm_range_find_or_insert() local
1009 mmput(mm); in drm_gpusvm_range_find_or_insert()
1021 mmput(mm); in drm_gpusvm_range_find_or_insert()
1256 struct mm_struct *mm = gpusvm->mm; in drm_gpusvm_range_get_pages() local
1301 mmput(mm); in drm_gpusvm_range_get_pages()
1496 struct mm_struct *mm = gpusvm->mm; in drm_gpusvm_range_evict() local
[all …]
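
The drm_gpusvm.c hits show the usual mm_struct reference discipline: mmgrab() in drm_gpusvm_init() keeps the structure itself alive (mm_count), while mmget_not_zero()/mmput() bracket actual address-space work so the mappings cannot be torn down mid-walk (mm_users). A generic sketch of that bracket; the helper name is hypothetical:

```c
#include <linux/sched/mm.h>
#include <linux/mmap_lock.h>

/* Hypothetical walker following the drm_gpusvm pattern. */
static int demo_walk(struct mm_struct *mm)
{
	if (!mmget_not_zero(mm))	/* address space already gone? */
		return -EFAULT;

	mmap_read_lock(mm);
	/* ... look up VMAs, fault pages, etc. ... */
	mmap_read_unlock(mm);

	mmput(mm);			/* drop the mm_users pin */
	return 0;
}
```
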
/drivers/gpu/drm/tests/
drm_buddy_test.c
31 struct drm_buddy mm; in drm_test_buddy_alloc_range_bias() local
166 drm_buddy_fini(&mm); in drm_test_buddy_alloc_range_bias()
226 drm_buddy_fini(&mm); in drm_test_buddy_alloc_range_bias()
258 drm_buddy_fini(&mm); in drm_test_buddy_alloc_range_bias()
268 struct drm_buddy mm; in drm_test_buddy_alloc_clear() local
333 drm_buddy_fini(&mm); in drm_test_buddy_alloc_clear()
384 drm_buddy_fini(&mm); in drm_test_buddy_alloc_clear()
410 drm_buddy_fini(&mm); in drm_test_buddy_alloc_clear()
735 size = mm.chunk_size << mm.max_order; in drm_test_buddy_alloc_limit()
747 BIT_ULL(mm.max_order) * mm.chunk_size, in drm_test_buddy_alloc_limit()
[all …]
drm_mm_test.c
108 if (!drm_mm_node_allocated(node) || node->mm != mm) { in assert_node()
138 struct drm_mm mm; in drm_test_mm_init() local
142 memset(&mm, 0, sizeof(mm)); in drm_test_mm_init()
146 memset(&mm, 0xff, sizeof(mm)); in drm_test_mm_init()
186 drm_mm_takedown(&mm); in drm_test_mm_init()
192 struct drm_mm mm; in drm_test_mm_debug() local
213 drm_mm_print(&mm, &p); in drm_test_mm_debug()
243 struct drm_mm mm; in drm_test_mm_align_pot() local
275 drm_mm_takedown(&mm); in drm_test_mm_align_pot()
290 struct drm_mm mm; in drm_test_mm_once() local
[all …]
/drivers/gpu/drm/i915/gem/
i915_gem_shrinker.c
112 { &i915->mm.purge_list, ~0u }, in i915_gem_shrink()
114 &i915->mm.shrink_list, in i915_gem_shrink()
197 mm.link))) { in i915_gem_shrink()
307 i915->mm.shrinker->batch = in i915_gem_shrinker_count()
368 list_for_each_entry(obj, &i915->mm.shrink_list, mm.link) { in i915_gem_shrinker_oom()
431 if (!i915->mm.shrinker) { in i915_gem_driver_register__shrinker()
436 i915->mm.shrinker->batch = 4096; in i915_gem_driver_register__shrinker()
456 shrinker_free(i915->mm.shrinker); in i915_gem_driver_unregister__shrinker()
500 list_del_init(&obj->mm.link); in i915_gem_object_make_unshrinkable()
501 i915->mm.shrink_count--; in i915_gem_object_make_unshrinkable()
[all …]
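
The i915_gem_shrinker.c hits use the allocated-shrinker API (shrinker_alloc()/shrinker_register()/shrinker_free()); registration looks roughly like the sketch below, with stub callbacks standing in for i915's real count/scan logic:

```c
#include <linux/shrinker.h>

static unsigned long demo_count(struct shrinker *s,
				struct shrink_control *sc)
{
	return 0;	/* stub: nothing reclaimable */
}

static unsigned long demo_scan(struct shrinker *s,
			       struct shrink_control *sc)
{
	return SHRINK_STOP;	/* stub: reclaim nothing */
}

static struct shrinker *demo_register(void)
{
	struct shrinker *s = shrinker_alloc(0, "demo");

	if (!s)
		return NULL;
	s->count_objects = demo_count;
	s->scan_objects = demo_scan;
	s->batch = 4096;	/* same batch size the hit at line 436 sets */
	shrinker_register(s);
	return s;		/* paired with shrinker_free() on teardown */
}
```
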
i915_gem_pages.c
44 obj->mm.get_page.sg_idx = 0; in __i915_gem_object_set_pages()
48 obj->mm.pages = pages; in __i915_gem_object_set_pages()
61 obj->mm.page_sizes.sg = 0; in __i915_gem_object_set_pages()
86 i915->mm.shrink_count++; in __i915_gem_object_set_pages()
203 if (!obj->mm.tlb[id]) in flush_tlb_invalidate()
207 obj->mm.tlb[id] = 0; in flush_tlb_invalidate()
228 if (obj->mm.mapping) { in __i915_gem_object_unset_pages()
230 obj->mm.mapping = NULL; in __i915_gem_object_unset_pages()
234 obj->mm.page_sizes.phys = obj->mm.page_sizes.sg = 0; in __i915_gem_object_unset_pages()
625 obj->mm.dirty = true; in __i915_gem_object_flush_map()
[all …]
i915_gem_object.c
108 INIT_LIST_HEAD(&obj->mm.link); in i915_gem_object_init()
126 obj->mm.madv = I915_MADV_WILLNEED; in i915_gem_object_init()
302 if (obj->mm.n_placements > 1) in __i915_gem_free_object_rcu()
303 kfree(obj->mm.placements); in __i915_gem_free_object_rcu()
308 atomic_dec(&i915->mm.free_count); in __i915_gem_free_object_rcu()
626 return obj->mm.n_placements > 1; in i915_gem_object_migratable()
695 mr = i915->mm.regions[id]; in i915_gem_object_can_migrate()
702 if (obj->mm.region == mr) in i915_gem_object_can_migrate()
796 mr = i915->mm.regions[id]; in __i915_gem_object_migrate()
824 if (!obj->mm.n_placements) { in i915_gem_object_placement_possible()
[all …]
/drivers/net/ethernet/mscc/
ocelot_mm.c
55 struct ocelot_mm_state *mm = &ocelot->mm[port]; in ocelot_port_update_active_preemptible_tcs() local
67 val = mm->preemptible_tcs; in ocelot_port_update_active_preemptible_tcs()
82 mm->tx_active ? "active" : "inactive", mm->preemptible_tcs, in ocelot_port_update_active_preemptible_tcs()
93 struct ocelot_mm_state *mm = &ocelot->mm[port]; in ocelot_port_change_fp() local
108 struct ocelot_mm_state *mm = &ocelot->mm[port]; in ocelot_mm_update_port_status() local
112 if (!mm->tx_enabled) in ocelot_mm_update_port_status()
180 mm = &ocelot->mm[port]; in ocelot_port_set_mm()
223 WARN_ON(mm->tx_active); in ocelot_port_set_mm()
244 mm = &ocelot->mm[port]; in ocelot_port_get_mm()
283 if (!ocelot->mm) in ocelot_mm_init()
[all …]
/drivers/gpu/drm/amd/amdkfd/
kfd_mqd_manager_v9.c
49 return mm->mqd_size; in mqd_stride_v9()
238 return mm->dev->kfd2kgd->hqd_load(mm->dev->adev, mqd, pipe_id, queue_id, in load_mqd()
403 checkpoint_mqd(mm, m, in checkpoint_mqd_v9_4_3()
488 mm->update_mqd(mm, m, q, NULL); in init_mqd_sdma()
605 err = mm->dev->kfd2kgd->hiq_mqd_load(mm->dev->adev, xcc_mqd, in hiq_load_mqd_kiq_v9_4_3()
684 uint64_t offset = mm->mqd_stride(mm, q); in init_mqd_v9_4_3()
748 uint64_t size = mm->mqd_stride(mm, q); in update_mqd_v9_4_3()
791 uint64_t offset = mm->mqd_stride(mm, qp); in restore_mqd_v9_4_3()
831 err = mm->dev->kfd2kgd->hqd_destroy(mm->dev->adev, xcc_mqd, in destroy_mqd_v9_4_3()
853 uint64_t mqd_stride_size = mm->mqd_stride(mm, p); in load_mqd_v9_4_3()
[all …]
kfd_mqd_manager.c
109 int inc = cu_inc * NUM_XCC(mm->dev->xcc_mask); in mqd_symmetrically_map_cu_mask()
121 dev_err(mm->dev->adev->dev, in mqd_symmetrically_map_cu_mask()
127 dev_err(mm->dev->adev->dev, in mqd_symmetrically_map_cu_mask()
212 return mm->dev->kfd2kgd->hiq_mqd_load(mm->dev->adev, mqd, pipe_id, in kfd_hiq_load_mqd_kiq()
220 return mm->dev->kfd2kgd->hqd_destroy(mm->dev->adev, mqd, type, timeout, in kfd_destroy_mqd_cp()
231 kfd_gtt_sa_free(mm->dev, mqd_mem_obj); in kfd_free_mqd_cp()
239 return mm->dev->kfd2kgd->hqd_is_occupied(mm->dev->adev, queue_address, in kfd_is_occupied_cp()
247 return mm->dev->kfd2kgd->hqd_sdma_load(mm->dev->adev, mqd, in kfd_load_mqd_sdma()
261 return mm->dev->kfd2kgd->hqd_sdma_destroy(mm->dev->adev, mqd, timeout); in kfd_destroy_mqd_sdma()
268 return mm->dev->kfd2kgd->hqd_sdma_is_occupied(mm->dev->adev, mqd); in kfd_is_occupied_sdma()
[all …]
kfd_mqd_manager.h
74 void (*init_mqd)(struct mqd_manager *mm, void **mqd,
78 int (*load_mqd)(struct mqd_manager *mm, void *mqd,
83 void (*update_mqd)(struct mqd_manager *mm, void *mqd,
87 int (*destroy_mqd)(struct mqd_manager *mm, void *mqd,
92 void (*free_mqd)(struct mqd_manager *mm, void *mqd,
95 bool (*is_occupied)(struct mqd_manager *mm, void *mqd,
107 void (*checkpoint_mqd)(struct mqd_manager *mm,
112 void (*restore_mqd)(struct mqd_manager *mm, void **mqd,
123 uint64_t (*mqd_stride)(struct mqd_manager *mm,
176 void kfd_free_mqd_cp(struct mqd_manager *mm, void *mqd,
[all …]
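
The kfd_mqd_manager.h hits show that struct mqd_manager is an ops table: each GPU generation file in these results (cik, vi, v9, v10, v11, v12) fills the same function pointers with its own MQD handling. A reduced, hypothetical version of the pattern:

```c
/* Illustrative ops-table shape only; not the real struct mqd_manager. */
struct demo_mqd_manager {
	void (*init_mqd)(struct demo_mqd_manager *mm, void **mqd);
	void (*update_mqd)(struct demo_mqd_manager *mm, void *mqd);
};

static void demo_update_mqd(struct demo_mqd_manager *mm, void *mqd)
{
	/* generation-specific register programming goes here */
}

static void demo_init_mqd(struct demo_mqd_manager *mm, void **mqd)
{
	/* ... allocate and fill the MQD image ... */
	mm->update_mqd(mm, *mqd);  /* init ends in update_mqd, as in the hits */
}

static struct demo_mqd_manager demo_ops = {
	.init_mqd   = demo_init_mqd,
	.update_mqd = demo_update_mqd,
};
```
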
kfd_mqd_manager_vi.c
57 mqd_symmetrically_map_cu_mask(mm, in update_cu_mask()
91 static void init_mqd(struct mqd_manager *mm, void **mqd, in init_mqd() argument
154 mm->update_mqd(mm, m, q, NULL); in init_mqd()
157 static int load_mqd(struct mqd_manager *mm, void *mqd, in load_mqd() argument
165 return mm->dev->kfd2kgd->hqd_load(mm->dev->adev, mqd, pipe_id, queue_id, in load_mqd()
234 update_cu_mask(mm, mqd, minfo); in __update_mqd()
251 __update_mqd(mm, mqd, q, minfo, MTYPE_UC, 0); in update_mqd()
325 init_mqd(mm, mqd, mqd_mem_obj, gart_addr, q); in init_mqd_hiq()
337 __update_mqd(mm, mqd, q, minfo, MTYPE_UC, 0); in update_mqd_hiq()
354 mm->update_mqd(mm, m, q, NULL); in init_mqd_sdma()
[all …]
kfd_mqd_manager_cik.c
45 static void update_cu_mask(struct mqd_manager *mm, void *mqd, in update_cu_mask() argument
54 mqd_symmetrically_map_cu_mask(mm, in update_cu_mask()
88 static void init_mqd(struct mqd_manager *mm, void **mqd, in init_mqd() argument
139 mm->update_mqd(mm, m, q, NULL); in init_mqd()
156 mm->update_mqd(mm, m, q, NULL); in init_mqd_sdma()
167 return mm->dev->kfd2kgd->hqd_load(mm->dev->adev, mqd, pipe_id, queue_id, in load_mqd()
203 update_cu_mask(mm, mqd, minfo); in __update_mqd()
216 static void update_mqd(struct mqd_manager *mm, void *mqd, in update_mqd() argument
220 __update_mqd(mm, mqd, q, minfo, 0); in update_mqd()
286 static void checkpoint_mqd_sdma(struct mqd_manager *mm, in checkpoint_mqd_sdma() argument
[all …]
kfd_mqd_manager_v10.c
54 mqd_symmetrically_map_cu_mask(mm, in update_cu_mask()
88 static void init_mqd(struct mqd_manager *mm, void **mqd, in init_mqd() argument
131 if (mm->dev->kfd->cwsr_enabled) { in init_mqd()
147 mm->update_mqd(mm, m, q, NULL); in init_mqd()
150 static int load_mqd(struct mqd_manager *mm, void *mqd, in load_mqd() argument
158 r = mm->dev->kfd2kgd->hqd_load(mm->dev->adev, mqd, pipe_id, queue_id, in load_mqd()
220 if (mm->dev->kfd->cwsr_enabled) in update_mqd()
223 update_cu_mask(mm, mqd, minfo); in update_mqd()
316 init_mqd(mm, mqd, mqd_mem_obj, gart_addr, q); in init_mqd_hiq()
358 mm->update_mqd(mm, m, q, NULL); in init_mqd_sdma()
[all …]
kfd_mqd_manager_v11.c
73 mqd_symmetrically_map_cu_mask(mm, in update_cu_mask()
135 if (mm->dev->kfd->shared_resources.enable_mes) in init_mqd()
185 if (mm->dev->kfd->cwsr_enabled) { in init_mqd()
201 mm->update_mqd(mm, m, q, NULL); in init_mqd()
204 static int load_mqd(struct mqd_manager *mm, void *mqd, in load_mqd() argument
212 r = mm->dev->kfd2kgd->hqd_load(mm->dev->adev, mqd, pipe_id, queue_id, in load_mqd()
273 if (mm->dev->kfd->cwsr_enabled) in update_mqd()
276 update_cu_mask(mm, mqd, minfo); in update_mqd()
369 init_mqd(mm, mqd, mqd_mem_obj, gart_addr, q); in init_mqd_hiq()
406 if (mm->dev->kfd->shared_resources.enable_mes) in init_mqd_sdma()
[all …]
kfd_mqd_manager_v12.c
53 mqd_symmetrically_map_cu_mask(mm, in update_cu_mask()
98 static void init_mqd(struct mqd_manager *mm, void **mqd, in init_mqd() argument
140 if (amdgpu_amdkfd_have_atomics_support(mm->dev->adev)) in init_mqd()
148 if (mm->dev->kfd->cwsr_enabled) { in init_mqd()
164 mm->update_mqd(mm, m, q, NULL); in init_mqd()
167 static int load_mqd(struct mqd_manager *mm, void *mqd, in load_mqd() argument
175 r = mm->dev->kfd2kgd->hqd_load(mm->dev->adev, mqd, pipe_id, queue_id, in load_mqd()
236 if (mm->dev->kfd->cwsr_enabled) in update_mqd()
239 update_cu_mask(mm, mqd, minfo); in update_mqd()
295 init_mqd(mm, mqd, mqd_mem_obj, gart_addr, q); in init_mqd_hiq()
[all …]
/drivers/gpu/drm/nouveau/nvkm/core/
mm.c
131 s = roundup(s, mm->block_size); in nvkm_mm_head()
205 s = roundup(s, mm->block_size); in nvkm_mm_tail()
245 if (nvkm_mm_initialised(mm)) { in nvkm_mm_init()
259 INIT_LIST_HEAD(&mm->nodes); in nvkm_mm_init()
260 INIT_LIST_HEAD(&mm->free); in nvkm_mm_init()
261 mm->block_size = block; in nvkm_mm_init()
262 mm->heap_nodes = 0; in nvkm_mm_init()
278 mm->heap_nodes++; in nvkm_mm_init()
283 nvkm_mm_fini(struct nvkm_mm *mm) in nvkm_mm_fini() argument
288 if (!nvkm_mm_initialised(mm)) in nvkm_mm_fini()
[all …]
/drivers/acpi/acpica/
exregion.c
99 if (!mm || (address < mm->physical_address) || in acpi_ex_system_memory_space_handler()
100 ((u64) address + length > (u64) mm->physical_address + mm->length)) { in acpi_ex_system_memory_space_handler()
108 for (mm = mem_info->first_mm; mm; mm = mm->next_mm) { in acpi_ex_system_memory_space_handler()
116 (u64) mm->physical_address + mm->length) in acpi_ex_system_memory_space_handler()
119 mem_info->cur_mm = mm; in acpi_ex_system_memory_space_handler()
124 mm = ACPI_ALLOCATE_ZEROED(sizeof(*mm)); in acpi_ex_system_memory_space_handler()
125 if (!mm) { in acpi_ex_system_memory_space_handler()
151 ACPI_FREE(mm); in acpi_ex_system_memory_space_handler()
159 mm->length = map_length; in acpi_ex_system_memory_space_handler()
166 mem_info->first_mm = mm; in acpi_ex_system_memory_space_handler()
[all …]
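
The exregion.c hits implement a small cache of kernel mappings for SystemMemory operation regions: try the most recently used mapping first, then walk the list, and only create a new mapping on a miss. The shape of that lookup, with illustrative types and names:

```c
/* Illustrative reduction of the exregion.c mapping cache. */
struct demo_mapping {
	unsigned long long phys;
	unsigned long long length;
	struct demo_mapping *next;
};

static struct demo_mapping *
demo_lookup(struct demo_mapping *first, struct demo_mapping **cur,
	    unsigned long long addr, unsigned long long len)
{
	struct demo_mapping *mm = *cur;

	/* Fast path: still inside the last mapping we used? */
	if (mm && addr >= mm->phys && addr + len <= mm->phys + mm->length)
		return mm;

	for (mm = first; mm; mm = mm->next) {
		if (addr >= mm->phys &&
		    addr + len <= mm->phys + mm->length) {
			*cur = mm;	/* remember for next time */
			return mm;
		}
	}
	return NULL;	/* caller maps a new window and prepends it */
}
```
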
/drivers/iommu/
iommu-sva.c
14 struct mm_struct *mm);
24 if (!arch_pgtable_dma_compat(mm)) in iommu_alloc_mm_data()
27 iommu_mm = mm->iommu_mm; in iommu_alloc_mm_data()
93 if (attach_handle->domain->mm != mm) { in iommu_sva_bind_device()
218 if (!mmget_not_zero(mm)) in iommu_sva_handle_mm()
221 mmap_read_lock(mm); in iommu_sva_handle_mm()
253 mmap_read_unlock(mm); in iommu_sva_handle_mm()
254 mmput(mm); in iommu_sva_handle_mm()
294 struct mm_struct *mm) in iommu_sva_domain_alloc() argument
308 mmgrab(mm); in iommu_sva_domain_alloc()
[all …]
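
iommu-sva.c implements Shared Virtual Addressing; a device driver consumes it roughly as sketched below, binding a device to the current process address space and reading back the PASID (error handling trimmed; dev is assumed to come from the caller):

```c
#include <linux/iommu.h>
#include <linux/sched.h>
#include <linux/err.h>

static int demo_enable_sva(struct device *dev)
{
	struct iommu_sva *handle;
	u32 pasid;

	handle = iommu_sva_bind_device(dev, current->mm);
	if (IS_ERR(handle))
		return PTR_ERR(handle);

	pasid = iommu_sva_get_pasid(handle);
	/* ... program pasid into the device and submit work ... */

	iommu_sva_unbind_device(handle);
	return 0;
}
```
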
/drivers/gpu/drm/i915/gvt/
gtt.c
1770 mm = kzalloc(sizeof(*mm), GFP_KERNEL); in vgpu_alloc_mm()
1771 if (!mm) in vgpu_alloc_mm()
1778 return mm; in vgpu_alloc_mm()
1783 kfree(mm); in vgpu_free_mm()
1805 if (!mm) in intel_vgpu_create_ppgtt_mm()
1837 return mm; in intel_vgpu_create_ppgtt_mm()
1846 if (!mm) in intel_vgpu_create_ggtt_mm()
1875 return mm; in intel_vgpu_create_ggtt_mm()
2581 if (mm) { in intel_vgpu_get_ppgtt_mm()
2588 return mm; in intel_vgpu_get_ppgtt_mm()
[all …]
/drivers/gpu/drm/i915/selftests/
mock_region.c
19 obj->mm.rsgt = NULL; in mock_region_put_pages()
20 intel_region_ttm_resource_free(obj->mm.region, obj->mm.res); in mock_region_put_pages()
28 obj->mm.res = intel_region_ttm_resource_alloc(obj->mm.region, in mock_region_get_pages()
32 if (IS_ERR(obj->mm.res)) in mock_region_get_pages()
33 return PTR_ERR(obj->mm.res); in mock_region_get_pages()
35 obj->mm.rsgt = intel_region_ttm_resource_to_rsgt(obj->mm.region, in mock_region_get_pages()
36 obj->mm.res, in mock_region_get_pages()
38 if (IS_ERR(obj->mm.rsgt)) { in mock_region_get_pages()
39 err = PTR_ERR(obj->mm.rsgt); in mock_region_get_pages()
43 pages = &obj->mm.rsgt->table; in mock_region_get_pages()
[all …]
/drivers/gpu/drm/i915/
i915_ttm_buddy_manager.c
19 struct drm_buddy mm; member
41 struct drm_buddy *mm = &bman->mm; in i915_ttm_buddy_man_alloc() local
57 bman_res->mm = mm; in i915_ttm_buddy_man_alloc()
83 n_pages = size >> ilog2(mm->chunk_size); in i915_ttm_buddy_man_alloc()
159 struct drm_buddy *mm = &bman->mm; in i915_ttm_buddy_man_intersects() local
197 struct drm_buddy *mm = &bman->mm; in i915_ttm_buddy_man_compatible() local
239 drm_buddy_print(&bman->mm, printer); in i915_ttm_buddy_man_debug()
336 struct drm_buddy *mm = &bman->mm; in i915_ttm_buddy_man_fini() local
349 drm_buddy_fini(mm); in i915_ttm_buddy_man_fini()
374 struct drm_buddy *mm = &bman->mm; in i915_ttm_buddy_man_reserve() local
[all …]
/drivers/gpu/drm/xe/
xe_ttm_vram_mgr.c
37 size = drm_buddy_block_size(mm, block); in xe_is_vram_mgr_blocks_contiguous()
55 struct drm_buddy *mm = &mgr->mm; in xe_ttm_vram_mgr_new() local
187 struct drm_buddy *mm = &mgr->mm; in xe_ttm_vram_mgr_del() local
190 drm_buddy_free_list(mm, &vres->blocks, 0); in xe_ttm_vram_mgr_del()
203 struct drm_buddy *mm = &mgr->mm; in xe_ttm_vram_mgr_debug() local
213 drm_buddy_print(mm, printer); in xe_ttm_vram_mgr_debug()
226 struct drm_buddy *mm = &mgr->mm; in xe_ttm_vram_mgr_intersects() local
256 struct drm_buddy *mm = &mgr->mm; in xe_ttm_vram_mgr_compatible() local
299 drm_buddy_fini(&mgr->mm); in ttm_vram_mgr_fini()
457 *used = mgr->mm.size - mgr->mm.avail; in xe_ttm_vram_get_used()
[all …]
/drivers/gpu/drm/amd/amdgpu/
amdgpu_amdkfd_fence.c
64 struct mm_struct *mm, in amdgpu_amdkfd_fence_create() argument
74 mmgrab(mm); in amdgpu_amdkfd_fence_create()
75 fence->mm = mm; in amdgpu_amdkfd_fence_create()
130 if (!kgd2kfd_schedule_evict_and_restore_process(fence->mm, f)) in amdkfd_fence_enable_signaling()
157 mmdrop(fence->mm); in amdkfd_fence_release()
171 bool amdkfd_fence_check_mm(struct dma_fence *f, struct mm_struct *mm) in amdkfd_fence_check_mm() argument
177 else if (fence->mm == mm && !fence->svm_bo) in amdkfd_fence_check_mm()
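
The amdgpu_amdkfd_fence.c hits pair mmgrab() at fence creation with mmdrop() at release, pinning only mm_count so an exiting process is not kept mapped by its fences, and amdkfd_fence_check_mm() identifies fences by owner. The pattern, with a hypothetical container type:

```c
#include <linux/sched/mm.h>

struct demo_fence {
	struct mm_struct *mm;	/* identifies the owning process */
};

static void demo_fence_init(struct demo_fence *f, struct mm_struct *mm)
{
	mmgrab(mm);		/* pins mm_count, not the address space */
	f->mm = mm;
}

static void demo_fence_release(struct demo_fence *f)
{
	mmdrop(f->mm);
}

static bool demo_fence_owned_by(struct demo_fence *f,
				struct mm_struct *mm)
{
	return f->mm == mm;	/* same check as amdkfd_fence_check_mm() */
}
```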
