
Searched refs:xe_vm (Results 1 – 25 of 32) sorted by relevance

/drivers/gpu/drm/xe/

xe_vm.h
34 static inline struct xe_vm *xe_vm_get(struct xe_vm *vm) in xe_vm_get()
40 static inline void xe_vm_put(struct xe_vm *vm) in xe_vm_put()
45 int xe_vm_lock(struct xe_vm *vm, bool intr);
47 void xe_vm_unlock(struct xe_vm *vm);
49 static inline bool xe_vm_is_closed(struct xe_vm *vm) in xe_vm_is_closed()
55 static inline bool xe_vm_is_banned(struct xe_vm *vm) in xe_vm_is_banned()
88 return container_of(gpuvm, struct xe_vm, gpuvm); in gpuvm_to_vm()
195 void xe_vm_close_and_put(struct xe_vm *vm);
215 int xe_vm_userptr_pin(struct xe_vm *vm);
219 int xe_vm_userptr_check_repin(struct xe_vm *vm);
[all …]
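
Read together, these declarations give the VM's reference and lock discipline: xe_vm_get()/xe_vm_put() manage the refcount, xe_vm_lock()/xe_vm_unlock() take and release the VM lock (interruptibly when intr is true), and the is_closed/is_banned predicates gate further work. A minimal usage sketch follows; the helper name touch_vm and the -ENOENT error choice are illustrative, not taken from the driver.

#include "xe_vm.h"

/* Illustrative helper, not driver code: take a reference, lock the VM
 * interruptibly, check it is still usable, then unwind in reverse. */
static int touch_vm(struct xe_vm *vm)
{
        int err;

        vm = xe_vm_get(vm);             /* hold a reference across the work */
        err = xe_vm_lock(vm, true);     /* intr == true: interruptible */
        if (err)
                goto out_put;

        if (xe_vm_is_closed(vm) || xe_vm_is_banned(vm)) {
                err = -ENOENT;          /* illustrative error code */
                goto out_unlock;
        }

        /* ... operate on the VM under its lock ... */

out_unlock:
        xe_vm_unlock(vm);
out_put:
        xe_vm_put(vm);
        return err;
}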

xe_svm.h
19 struct xe_vm;
57 int xe_svm_init(struct xe_vm *vm);
59 void xe_svm_fini(struct xe_vm *vm);
61 void xe_svm_close(struct xe_vm *vm);
87 bool xe_svm_range_validate(struct xe_vm *vm,
161 void xe_svm_flush(struct xe_vm *vm);
171 struct xe_vm;
199 int xe_svm_init(struct xe_vm *vm) in xe_svm_init()
205 void xe_svm_fini(struct xe_vm *vm) in xe_svm_fini()
210 void xe_svm_close(struct xe_vm *vm) in xe_svm_close()
[all …]
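
The entry points above pair into a simple per-VM lifecycle (the stubbed copies at lines 199 onward keep callers building when SVM support is compiled out). A minimal sketch of the ordering, assuming close must precede fini; the wrapper name svm_lifecycle is illustrative.

#include "xe_svm.h"

/* Illustrative, not driver code: bring up SVM state for a VM, use it,
 * then tear it down in reverse order. */
static int svm_lifecycle(struct xe_vm *vm)
{
        int err = xe_svm_init(vm);      /* allocate and register SVM state */

        if (err)
                return err;

        /* ... VM is live; GPU faults can be serviced through SVM ... */

        xe_svm_close(vm);       /* stop accepting new SVM work */
        xe_svm_fini(vm);        /* release SVM state */
        return 0;
}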

xe_trace_bo.h
31 __field(struct xe_vm *, vm)
188 DECLARE_EVENT_CLASS(xe_vm,
189 TP_PROTO(struct xe_vm *vm),
211 DEFINE_EVENT(xe_vm, xe_vm_kill,
212 TP_PROTO(struct xe_vm *vm),
216 DEFINE_EVENT(xe_vm, xe_vm_create,
217 TP_PROTO(struct xe_vm *vm),
221 DEFINE_EVENT(xe_vm, xe_vm_free,
222 TP_PROTO(struct xe_vm *vm),
227 TP_PROTO(struct xe_vm *vm),
[all …]
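
These hits show the standard Linux tracepoint idiom: DECLARE_EVENT_CLASS(xe_vm, ...) defines the prototype and record layout once, and each DEFINE_EVENT (xe_vm_kill, xe_vm_create, xe_vm_free) stamps out a named event that reuses it. A condensed sketch of the pattern; the TP_printk format string is illustrative, not the driver's actual output.

struct xe_vm;

/* One class defines the record layout and assignment... */
DECLARE_EVENT_CLASS(xe_vm,
                    TP_PROTO(struct xe_vm *vm),
                    TP_ARGS(vm),
                    TP_STRUCT__entry(__field(struct xe_vm *, vm)),
                    TP_fast_assign(__entry->vm = vm;),
                    TP_printk("vm=%p", __entry->vm)     /* illustrative */
);

/* ...and each DEFINE_EVENT stamps out a named event that reuses it. */
DEFINE_EVENT(xe_vm, xe_vm_kill,
             TP_PROTO(struct xe_vm *vm),
             TP_ARGS(vm)
);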

xe_exec_queue.h
17 struct xe_exec_queue *xe_exec_queue_create(struct xe_device *xe, struct xe_vm *vm,
22 struct xe_vm *vm,
81 void xe_exec_queue_last_fence_put(struct xe_exec_queue *e, struct xe_vm *vm);
84 struct xe_vm *vm);
86 struct xe_vm *vm);
87 void xe_exec_queue_last_fence_set(struct xe_exec_queue *e, struct xe_vm *vm,
90 struct xe_vm *vm);
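
The last_fence helpers above form a get/set/put protocol around the fence a queue most recently installed for a given VM. Their return types are truncated in these hits, so the sketch below assumes, in line with common dma-fence conventions, that the getter returns a referenced, non-NULL struct dma_fence * (possibly a stub fence) that the caller must put; the wrapper name wait_last_fence is illustrative.

#include <linux/dma-fence.h>
#include "xe_exec_queue.h"

/* Illustrative, not driver code: wait on the queue's last fence for
 * this VM, then drop the reference the getter is assumed to return. */
static long wait_last_fence(struct xe_exec_queue *q, struct xe_vm *vm)
{
        struct dma_fence *fence = xe_exec_queue_last_fence_get(q, vm);
        long ret = dma_fence_wait(fence, true); /* interruptible wait */

        dma_fence_put(fence);
        return ret;
}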

xe_svm.c
42 static struct xe_vm *gpusvm_to_vm(struct drm_gpusvm *gpusvm) in gpusvm_to_vm()
44 return container_of(gpusvm, struct xe_vm, svm.gpusvm); in gpusvm_to_vm()
177 struct xe_vm *vm = gpusvm_to_vm(gpusvm); in xe_svm_invalidate()
255 static int xe_svm_garbage_collector(struct xe_vm *vm) in xe_svm_garbage_collector()
294 struct xe_vm *vm = container_of(w, struct xe_vm, in xe_svm_garbage_collector_work_func()
566 int xe_svm_init(struct xe_vm *vm) in xe_svm_init()
594 void xe_svm_close(struct xe_vm *vm) in xe_svm_close()
606 void xe_svm_fini(struct xe_vm *vm) in xe_svm_fini()
647 bool xe_svm_range_validate(struct xe_vm *vm, in xe_svm_range_validate()
772 struct xe_vm *vm = range_to_vm(&range->base); in xe_svm_range_needs_migrate_to_vram()
[all …]
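
gpusvm_to_vm() at line 42 (like gpuvm_to_vm() in xe_vm.h above) is the container_of() back-pointer idiom: the drm_gpusvm core hands back a pointer to a member embedded in struct xe_vm, and the driver recovers the enclosing structure with pointer arithmetic instead of storing a back-pointer. The idiom in isolation, with hypothetical types:

#include <linux/container_of.h>

struct inner { int x; };

struct outer {
        int id;
        struct inner member;    /* embedded, like svm.gpusvm in struct xe_vm */
};

/* Recover the enclosing struct from a pointer to its embedded member
 * by subtracting the member's compile-time offset. */
static struct outer *inner_to_outer(struct inner *p)
{
        return container_of(p, struct outer, member);
}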

xe_vm.c
72 struct xe_vm *vm = xe_vma_vm(vma); in xe_vma_userptr_pin_pages()
484 struct xe_vm *vm = container_of(w, struct xe_vm, preempt.rebind_work); in preempt_rebind_work_func()
1606 struct xe_vm *vm) in xe_vm_create_scratch()
1646 struct xe_vm *vm; in xe_vm_create()
1974 struct xe_vm *vm = in vm_destroy_work_func()
2006 struct xe_vm *vm = container_of(gpuvm, struct xe_vm, gpuvm); in xe_vm_free()
2014 struct xe_vm *vm; in xe_vm_lookup()
2063 struct xe_vm *vm; in xe_vm_create_ioctl()
2149 struct xe_vm *vm; in xe_vm_destroy_ioctl()
3020 struct xe_vm *vm, in vm_bind_ioctl_ops_lock_and_prep()
[all …]

xe_pt.h
19 struct xe_vm;
31 struct xe_pt *xe_pt_create(struct xe_vm *vm, struct xe_tile *tile,
34 void xe_pt_populate_empty(struct xe_tile *tile, struct xe_vm *vm,
48 bool xe_pt_zap_ptes_range(struct xe_tile *tile, struct xe_vm *vm,
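
Like xe_svm.h, xe_bo_types.h, xe_sync.h, xe_sched_job.h, and xe_migrate.h in these results, xe_pt.h only forward-declares struct xe_vm at line 19 rather than including xe_vm.h: pointer parameters do not need the complete type, so the header avoids the extra dependency. The idiom in isolation, with a hypothetical prototype:

/* Forward declaration: enough for pointer parameters, so this header
 * does not need the full struct xe_vm definition. */
struct xe_vm;

void my_pt_helper(struct xe_vm *vm);    /* hypothetical prototype */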

xe_bo.h
96 struct xe_tile *tile, struct xe_vm *vm,
100 struct xe_vm *vm, size_t size,
103 struct xe_vm *vm, size_t size,
106 struct xe_vm *vm, size_t size,
110 struct xe_vm *vm, size_t size,
113 struct xe_vm *vm, size_t size, u64 offset,
117 struct xe_vm *vm,
205 int xe_bo_validate(struct xe_bo *bo, struct xe_vm *vm, bool allow_res_evict);
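
xe_bo.h pairs a family of BO creation helpers (locked, user, pin_map, and pin_map_at variants, each taking the owning struct xe_vm) with xe_bo_validate() at line 205 for (re)placing a BO's backing store. The creation signatures are truncated in these hits, so the sketch below only exercises the fully visible validate call; the wrapper name is illustrative.

#include "xe_bo.h"

/* Illustrative wrapper: ensure a BO has valid backing for use with its
 * VM; allow_res_evict == false means do not evict other resources to
 * make room. */
static int ensure_bo_resident(struct xe_bo *bo, struct xe_vm *vm)
{
        return xe_bo_validate(bo, vm, false);
}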

xe_pt.c
284 struct xe_vm *vm;
523 struct xe_vm *vm = xe_walk->vm; in xe_pt_stage_bind_entry()
699 struct xe_vm *vm = xe_vma_vm(vma); in xe_pt_stage_bind()
1011 struct xe_vm *vm; in xe_pt_commit_prepare_locks_assert()
1027 struct xe_vm *vm; in xe_pt_commit_locks_assert()
1264 struct xe_vm *vm, in xe_pt_vm_dependencies()
1337 struct xe_vm *vm = vops->vm; in xe_pt_pre_commit()
1710 struct xe_vm *vm, in xe_pt_stage_unbind()
2063 static int op_prepare(struct xe_vm *vm, in op_prepare()
2295 static void op_commit(struct xe_vm *vm, in op_commit()
[all …]

xe_exec_queue.c
54 struct xe_vm *vm, in __xe_exec_queue_alloc()
154 struct xe_exec_queue *xe_exec_queue_create(struct xe_device *xe, struct xe_vm *vm, in xe_exec_queue_create()
196 struct xe_vm *vm, in xe_exec_queue_create_class()
243 struct xe_vm *migrate_vm; in xe_exec_queue_create_bind()
591 struct xe_vm *vm; in xe_exec_queue_create_ioctl()
903 struct xe_vm *vm) in xe_exec_queue_last_fence_lockdep_assert()
918 void xe_exec_queue_last_fence_put(struct xe_exec_queue *q, struct xe_vm *vm) in xe_exec_queue_last_fence_put()
949 struct xe_vm *vm) in xe_exec_queue_last_fence_get()
976 struct xe_vm *vm) in xe_exec_queue_last_fence_get_for_resume()
1000 void xe_exec_queue_last_fence_set(struct xe_exec_queue *q, struct xe_vm *vm, in xe_exec_queue_last_fence_set()
[all …]

xe_bo_types.h
20 struct xe_vm;
38 struct xe_vm *vm;

xe_gt_tlb_invalidation.h
15 struct xe_vm;
22 void xe_gt_tlb_invalidation_vm(struct xe_gt *gt, struct xe_vm *vm);

xe_sync.h
15 struct xe_vm;
32 struct xe_exec_queue *q, struct xe_vm *vm);

xe_pxp_types.h
20 struct xe_vm;
44 struct xe_vm *vm;

xe_vm_types.h
25 struct xe_vm;
152 struct xe_vm {
454 struct xe_vm *vm;

xe_gt_pagefault.c
80 struct xe_vm *vm = xe_vma_vm(vma); in xe_pf_begin()
110 struct xe_vm *vm = xe_vma_vm(vma); in handle_vma_pagefault()
170 static struct xe_vm *asid_to_vm(struct xe_device *xe, u32 asid) in asid_to_vm()
172 struct xe_vm *vm; in asid_to_vm()
188 struct xe_vm *vm; in handle_pagefault()
533 static struct xe_vma *get_acc_vma(struct xe_vm *vm, struct acc *acc) in get_acc_vma()
546 struct xe_vm *vm; in handle_acc()

xe_sched_job.h
12 struct xe_vm;
61 int xe_sched_job_last_fence_add_dep(struct xe_sched_job *job, struct xe_vm *vm);

xe_exec_queue_types.h
22 struct xe_vm;
55 struct xe_vm *vm;

xe_migrate.h
23 struct xe_vm;
128 struct xe_vm *xe_migrate_get_vm(struct xe_migrate *m);

xe_hmm.c
137 struct xe_vm *vm = xe_vma_vm(&uvma->vma); in xe_hmm_userptr_set_mapped()
153 struct xe_vm *vm = xe_vma_vm(vma); in xe_hmm_userptr_unmap()
224 struct xe_vm *vm = xe_vma_vm(vma); in xe_hmm_userptr_populate_range()

xe_exec.c
99 struct xe_vm *vm = container_of(vm_exec->vm, struct xe_vm, gpuvm); in xe_exec_fn()
119 struct xe_vm *vm; in xe_exec_ioctl()

xe_lrc.h
20 struct xe_vm;
47 struct xe_lrc *xe_lrc_create(struct xe_hw_engine *hwe, struct xe_vm *vm,

xe_sched_job.c
247 struct xe_vm *vm = q->vm; in xe_sched_job_arm()
306 int xe_sched_job_last_fence_add_dep(struct xe_sched_job *job, struct xe_vm *vm) in xe_sched_job_last_fence_add_dep()

xe_bo.c
635 struct xe_vm *vm = gpuvm_to_vm(vm_bo->vm); in xe_bo_trigger_rebind()
1994 struct xe_tile *tile, struct xe_vm *vm, in __xe_bo_create_locked()
2077 struct xe_tile *tile, struct xe_vm *vm, in xe_bo_create_locked_range()
2086 struct xe_vm *vm, size_t size, in xe_bo_create_locked()
2094 struct xe_vm *vm, size_t size, in xe_bo_create_user()
2108 struct xe_vm *vm, size_t size, in xe_bo_create()
2120 struct xe_vm *vm, in xe_bo_create_pin_map_at()
2130 struct xe_vm *vm, in xe_bo_create_pin_map_at_aligned()
2171 struct xe_vm *vm, size_t size, in xe_bo_create_pin_map()
2434 int xe_bo_validate(struct xe_bo *bo, struct xe_vm *vm, bool allow_res_evict) in xe_bo_validate()
[all …]

xe_migrate.c
139 static void xe_migrate_program_identity(struct xe_device *xe, struct xe_vm *vm, struct xe_bo *bo, in xe_migrate_program_identity()
185 struct xe_vm *vm) in xe_migrate_prepare_vm()
400 struct xe_vm *vm; in xe_migrate_init()
565 struct xe_vm *vm = m->q->vm; in emit_pte()
1245 struct xe_vm *xe_migrate_get_vm(struct xe_migrate *m) in xe_migrate_get_vm()
1268 struct xe_vm *vm = pt_update->vops->vm; in xe_migrate_update_pgtables_cpu()
1404 struct xe_vm *vm = pt_update->vops->vm; in __xe_migrate_update_pgtables()
