/linux/drivers/gpu/drm/

drm_exec.c
     65  exec->prelocked = NULL;  in drm_exec_unlock_all()
     84  exec->flags = flags;  in drm_exec_init()
     88  exec->max_objects = exec->objects ? nr : 0;  in drm_exec_init()
     89  exec->num_objects = 0;  in drm_exec_init()
     91  exec->prelocked = NULL;  in drm_exec_init()
    105  kvfree(exec->objects);  in drm_exec_fini()
    135  exec->num_objects = 0;  in drm_exec_cleanup()
    144  if (unlikely(exec->num_objects == exec->max_objects)) {  in drm_exec_obj_locked()
    152  exec->objects = tmp;  in drm_exec_obj_locked()
    156  exec->objects[exec->num_objects++] = obj;  in drm_exec_obj_locked()
    [all …]
|
drm_gpuvm.c
   1112  struct drm_exec *exec,  in drm_gpuvm_prepare_vm() argument
   1121  struct drm_exec *exec,  in __drm_gpuvm_prepare_objects() argument
   1142  struct drm_exec *exec,  in drm_gpuvm_prepare_objects_locked() argument
   1186  struct drm_exec *exec,  in drm_gpuvm_prepare_objects() argument
   1249  struct drm_exec *exec = &vm_exec->exec;  in drm_gpuvm_exec_lock() local
   1255  drm_exec_until_all_locked(exec) {  in drm_gpuvm_exec_lock()
   1277  drm_exec_fini(exec);  in drm_gpuvm_exec_lock()
   1341  struct drm_exec *exec = &vm_exec->exec;  in drm_gpuvm_exec_lock_range() local
   1346  drm_exec_until_all_locked(exec) {  in drm_gpuvm_exec_lock_range()
   1357  drm_exec_fini(exec);  in drm_gpuvm_exec_lock_range()
   [all …]
|
/linux/drivers/gpu/drm/tests/

drm_exec_test.c
     47  struct drm_exec exec;  in sanitycheck() local
     50  drm_exec_fini(&exec);  in sanitycheck()
     58  struct drm_exec exec;  in test_lock() local
     71  drm_exec_fini(&exec);  in test_lock()
     78  struct drm_exec exec;  in test_lock_unlock() local
     98  drm_exec_fini(&exec);  in test_lock_unlock()
    105  struct drm_exec exec;  in test_duplicates() local
    125  drm_exec_fini(&exec);  in test_duplicates()
    132  struct drm_exec exec;  in test_prepare() local
    145  drm_exec_fini(&exec);  in test_prepare()
    [all …]
|
/linux/drivers/gpu/drm/vc4/

vc4_validate.c
    135  return vc4_use_bo(exec, exec->bo_index[gem_handles_packet_index]);  in vc4_use_handle()
    278  shader_state = &exec->shader_state[exec->shader_state_count - 1];  in validate_indexed_prim_list()
    287  exec->bin_dep_seqno = max(exec->bin_dep_seqno,  in validate_indexed_prim_list()
    315  shader_state = &exec->shader_state[exec->shader_state_count - 1];  in validate_gl_array_primitive()
    374  tile_count = exec->bin_tiles_x * exec->bin_tiles_y;  in validate_tile_binning_config()
    380  exec->bin_tiles_x, exec->bin_tiles_y);  in validate_tile_binning_config()
    436  memcpy(exec->bo_index, untrusted, sizeof(exec->bo_index));  in validate_gem_handles()
    549  exec->ct0ea = exec->ct0ca + dst_offset;  in vc4_validate_bin_cl()
    742  exec->bin_dep_seqno = max(exec->bin_dep_seqno,  in reloc_tex()
    888  ((uint32_t *)exec->uniforms_v)[o] = exec->uniforms_p;  in validate_gl_shader_rec()
    [all …]
|
vc4_gem.c
    171  if (!exec[0] && !exec[1]) {  in vc4_save_hang_state()
    482  if (!exec)  in vc4_submit_next_bin_job()
    496  if (exec->ct0ca != exec->ct0ea) {  in vc4_submit_next_bin_job()
    499  submit_cl(dev, 0, exec->ct0ca, exec->ct0ea);  in vc4_submit_next_bin_job()
    522  if (!exec)  in vc4_submit_next_render_job()
    536  trace_vc4_submit_cl(dev, true, exec->seqno, exec->ct1ca, exec->ct1ea);  in vc4_submit_next_render_job()
    537  submit_cl(dev, 1, exec->ct1ca, exec->ct1ea);  in vc4_submit_next_render_job()
    763  exec->bo_count, &exec->bo);  in vc4_cl_lookup_bos()
    790  for (i = 0; i < exec->bo_count && exec->bo[i]; i++)  in vc4_cl_lookup_bos()
    878  exec->ct0ca = exec->exec_bo->dma_addr + bin_offset;  in vc4_get_bcl()
    [all …]
|
vc4_irq.c
     67  struct vc4_exec_info *exec;  in vc4_overflow_mem_work() local
     92  exec = vc4_first_bin_job(vc4);  in vc4_overflow_mem_work()
     93  if (!exec)  in vc4_overflow_mem_work()
     95  if (exec) {  in vc4_overflow_mem_work()
    122  if (!exec)  in vc4_irq_finish_bin_job()
    144  if (!exec)  in vc4_cancel_bin_job()
    148  if (exec->perfmon)  in vc4_cancel_bin_job()
    162  if (!exec)  in vc4_irq_finish_render_job()
    192  if (exec->fence) {  in vc4_irq_finish_render_job()
    194  dma_fence_put(exec->fence);  in vc4_irq_finish_render_job()
    [all …]
|
vc4_render_cl.c
    186  (y * exec->bin_tiles_x + x) * 32));  in emit_tile()
    335  &exec->unref_list);  in vc4_create_rcl_bo()
    377  exec->ct1ca = setup->rcl->dma_addr;  in vc4_create_rcl_bo()
    421  *obj = vc4_use_bo(exec, surf->hindex);  in vc4_rcl_msaa_surface_setup()
    425  exec->rcl_write_bo[exec->rcl_write_bo_count++] = *obj;  in vc4_rcl_msaa_surface_setup()
    462  exec->rcl_write_bo[exec->rcl_write_bo_count++] = *obj;  in vc4_rcl_surface_setup()
    465  if (surf == &exec->args->zs_write) {  in vc4_rcl_surface_setup()
    526  exec->args->width, exec->args->height, cpp)) {  in vc4_rcl_surface_setup()
    566  exec->rcl_write_bo[exec->rcl_write_bo_count++] = *obj;  in vc4_rcl_render_config_surface_setup()
    587  exec->args->width, exec->args->height, cpp)) {  in vc4_rcl_render_config_surface_setup()
    [all …]
|
/linux/include/drm/

drm_exec.h
     65  return index < exec->num_objects ? exec->objects[index] : NULL;  in drm_exec_obj()
     91  for ((index) = (exec)->num_objects - 1; \
     92  ((obj) = drm_exec_obj(exec, index)); --(index))
    105  #define drm_exec_until_all_locked(exec) \  argument
    110  drm_exec_cleanup(exec); \
    120  #define drm_exec_retry_on_contention(exec) \  argument
    122  if (unlikely(drm_exec_is_contended(exec))) \
    135  return !!exec->contended;  in drm_exec_is_contended()
    139  void drm_exec_fini(struct drm_exec *exec);
    140  bool drm_exec_cleanup(struct drm_exec *exec);
    [all …]
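The helpers above lock an arbitrary set of GEM objects with built-in deadlock handling. As a rough sketch of how they are usually combined (not lifted from any one driver; the DRM_EXEC_INTERRUPTIBLE_WAIT flag and the obj pointer are assumptions for illustration):

    struct drm_exec exec;
    int ret;

    /* Illustrative sketch only: the init flags and obj are assumed. */
    drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);
    drm_exec_until_all_locked(&exec) {
            ret = drm_exec_lock_obj(&exec, obj);    /* take the object's dma-resv lock */
            drm_exec_retry_on_contention(&exec);    /* on contention, drop all locks and restart the loop */
            if (ret)
                    break;
    }
    /* ... all requested objects are locked here; do the actual work ... */
    drm_exec_fini(&exec);                           /* drop every lock and free the tracking array */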
|
drm_gpuvm.h
    516  struct drm_exec exec;  member
    553  struct drm_exec *exec,
    557  struct drm_exec *exec,
    561  struct drm_exec *exec,
    586  drm_exec_fini(&vm_exec->exec);  in drm_gpuvm_exec_unlock()
    589  int drm_gpuvm_validate(struct drm_gpuvm *gpuvm, struct drm_exec *exec);
    591  struct drm_exec *exec,
    611  drm_gpuvm_resv_add_fence(vm_exec->vm, &vm_exec->exec, fence,  in drm_gpuvm_exec_resv_add_fence()
    626  return drm_gpuvm_validate(vm_exec->vm, &vm_exec->exec);  in drm_gpuvm_exec_validate()
   1155  struct drm_exec *exec);
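drm_gpuvm wraps the same machinery for whole-VM locking: struct drm_gpuvm_exec embeds a struct drm_exec (line 516) and the helpers prepare, validate and fence every object bound to the VM. A hypothetical usage sketch, with the flag, num_fences, fence and usage arguments assumed rather than taken from the header:

    struct drm_gpuvm_exec vm_exec = {
            .vm = gpuvm,                            /* VM whose reservation objects should be locked */
            .flags = DRM_EXEC_INTERRUPTIBLE_WAIT,   /* assumed flag, forwarded to the embedded drm_exec */
            .num_fences = 1,                        /* assumed: fence slots to reserve per object */
    };
    int ret;

    ret = drm_gpuvm_exec_lock(&vm_exec);            /* lock the VM resv and all external object resvs */
    if (ret)
            return ret;

    ret = drm_gpuvm_exec_validate(&vm_exec);        /* revalidate evicted BOs while everything is locked */
    if (!ret)
            drm_gpuvm_exec_resv_add_fence(&vm_exec, fence,          /* fence is a placeholder */
                                          DMA_RESV_USAGE_BOOKKEEP,
                                          DMA_RESV_USAGE_BOOKKEEP);

    drm_gpuvm_exec_unlock(&vm_exec);                /* drm_exec_fini() on the embedded exec (line 586) */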
|
/linux/include/uapi/linux/

a.out.h
     44  #define N_MAGIC(exec) ((exec).a_info & 0xffff)  argument
     46  #define N_MACHTYPE(exec) ((enum machine_type)(((exec).a_info >> 16) & 0xff))  argument
     47  #define N_FLAGS(exec) (((exec).a_info >> 24) & 0xff)  argument
     48  #define N_SET_INFO(exec, magic, type, flags) \  argument
     49  ((exec).a_info = ((magic) & 0xffff) \
     52  #define N_SET_MAGIC(exec, magic) \  argument
     53  ((exec).a_info = (((exec).a_info & 0xffff0000) | ((magic) & 0xffff)))
     55  #define N_SET_MACHTYPE(exec, machtype) \  argument
     56  ((exec).a_info = \
     59  #define N_SET_FLAGS(exec, flags) \  argument
     [all …]
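These macros pack the classic a.out header word: bits 0-15 hold the magic number, bits 16-23 the machine type, bits 24-31 the flags. A small sketch, assuming the ZMAGIC and M_386 constants defined in the same header:

    struct exec ex = { 0 };

    N_SET_INFO(ex, ZMAGIC, M_386, 0);       /* pack magic, machine type and flags into a_info */

    /* N_MAGIC(ex)    == ZMAGIC   ->  ex.a_info & 0xffff          */
    /* N_MACHTYPE(ex) == M_386    ->  (ex.a_info >> 16) & 0xff    */
    /* N_FLAGS(ex)    == 0        ->  (ex.a_info >> 24) & 0xff    */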
|
/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_csa.c
     69  struct drm_exec exec;  in amdgpu_map_static_csa() local
     73  drm_exec_until_all_locked(&exec) {  in amdgpu_map_static_csa()
     74  r = amdgpu_vm_lock_pd(vm, &exec, 0);  in amdgpu_map_static_csa()
     76  r = drm_exec_lock_obj(&exec, &bo->tbo.base);  in amdgpu_map_static_csa()
     77  drm_exec_retry_on_contention(&exec);  in amdgpu_map_static_csa()
    101  drm_exec_fini(&exec);  in amdgpu_map_static_csa()
    109  struct drm_exec exec;  in amdgpu_unmap_static_csa() local
    113  drm_exec_until_all_locked(&exec) {  in amdgpu_unmap_static_csa()
    114  r = amdgpu_vm_lock_pd(vm, &exec, 0);  in amdgpu_unmap_static_csa()
    117  drm_exec_retry_on_contention(&exec);  in amdgpu_unmap_static_csa()
    [all …]
|
amdgpu_seq64.c
     67  struct drm_exec exec;  in amdgpu_seq64_map() local
     76  drm_exec_until_all_locked(&exec) {  in amdgpu_seq64_map()
     77  r = amdgpu_vm_lock_pd(vm, &exec, 0);  in amdgpu_seq64_map()
     79  r = drm_exec_lock_obj(&exec, &bo->tbo.base);  in amdgpu_seq64_map()
     80  drm_exec_retry_on_contention(&exec);  in amdgpu_seq64_map()
    108  drm_exec_fini(&exec);  in amdgpu_seq64_map()
    124  struct drm_exec exec;  in amdgpu_seq64_unmap() local
    137  drm_exec_until_all_locked(&exec) {  in amdgpu_seq64_unmap()
    138  r = amdgpu_vm_lock_pd(vm, &exec, 0);  in amdgpu_seq64_unmap()
    141  drm_exec_retry_on_contention(&exec);  in amdgpu_seq64_unmap()
    [all …]
|
amdgpu_gem.c
    234  struct drm_exec exec;  in amdgpu_gem_object_close() local
    238  drm_exec_until_all_locked(&exec) {  in amdgpu_gem_object_close()
    240  drm_exec_retry_on_contention(&exec);  in amdgpu_gem_object_close()
    244  r = amdgpu_vm_lock_pd(vm, &exec, 0);  in amdgpu_gem_object_close()
    245  drm_exec_retry_on_contention(&exec);  in amdgpu_gem_object_close()
    271  drm_exec_fini(&exec);  in amdgpu_gem_object_close()
    715  struct drm_exec exec;  in amdgpu_gem_va_ioctl() local
    778  drm_exec_until_all_locked(&exec) {  in amdgpu_gem_va_ioctl()
    780  r = drm_exec_lock_obj(&exec, gobj);  in amdgpu_gem_va_ioctl()
    787  drm_exec_retry_on_contention(&exec);  in amdgpu_gem_va_ioctl()
    [all …]
|
/linux/tools/testing/selftests/exec/

null-argv.c
     10  #define FORK(exec) \  argument
     15  exec; /* Some kind of exec */ \
     16  perror("# " #exec); \
     19  check_result(pid, #exec); \
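FORK() forks, runs the given exec expression in the child, reports a failed exec via perror(), and lets the parent verify the outcome through check_result(). A rough usage sketch; the binary path and argument vectors below are placeholders, not the ones the selftest actually passes:

    char *argv[] = { NULL };        /* deliberately empty argv, as the test name suggests */
    char *envp[] = { NULL };

    FORK(execve("/proc/self/exe", argv, envp));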
|
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/timer/

base.c
     73  LIST_HEAD(exec);  in nvkm_timer_alarm_trigger()
     90  list_add(&alarm->exec, &exec);  in nvkm_timer_alarm_trigger()
     99  list_for_each_entry_safe(alarm, atemp, &exec, exec) {  in nvkm_timer_alarm_trigger()
    100  list_del(&alarm->exec);  in nvkm_timer_alarm_trigger()
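The pattern here is a common one: expired alarms are collected onto an on-stack list (also named exec) while the timer lock is held, then walked and fired once the lock can be dropped. A generic sketch of that idea; the lock, pending-list and field names other than the two exec list hooks are assumptions, not the driver's actual identifiers:

    LIST_HEAD(exec);                                /* on-stack list of alarms due to run */
    struct nvkm_alarm *alarm, *atemp;
    unsigned long flags;

    spin_lock_irqsave(&tmr->lock, flags);           /* assumed lock protecting the pending list */
    list_for_each_entry_safe(alarm, atemp, &tmr->alarms, head) {
            if (alarm->timestamp <= now) {
                    list_del_init(&alarm->head);    /* off the pending list ... */
                    list_add(&alarm->exec, &exec);  /* ... onto the local execution list */
            }
    }
    spin_unlock_irqrestore(&tmr->lock, flags);

    list_for_each_entry_safe(alarm, atemp, &exec, exec) {
            list_del(&alarm->exec);                 /* each alarm fires exactly once */
            alarm->func(alarm);
    }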
|
/linux/tools/perf/util/

comm.h
     15  bool exec;  member
     23  struct comm *comm__new(const char *str, u64 timestamp, bool exec);
     26  bool exec);
|
comm.c
    188  struct comm *comm__new(const char *str, u64 timestamp, bool exec)  in comm__new() argument
    196  comm->exec = exec;  in comm__new()
    207  int comm__override(struct comm *comm, const char *str, u64 timestamp, bool exec)  in comm__override() argument
    218  if (exec)  in comm__override()
    219  comm->exec = true;  in comm__override()
|
/linux/drivers/sbus/char/

oradax.c
    861  ctx->result.exec.status = DAX_SUBMIT_ERR_THR_INIT;  in dax_ccb_exec()
    893  if (ctx->result.exec.status != DAX_SUBMIT_OK)  in dax_ccb_exec()
    897  &ctx->result.exec.status_data);  in dax_ccb_exec()
    898  if (ctx->result.exec.status != DAX_SUBMIT_OK)  in dax_ccb_exec()
    906  &accepted_len, &ctx->result.exec.status_data);  in dax_ccb_exec()
    919  ctx->result.exec.status = DAX_SUBMIT_OK;  in dax_ccb_exec()
    935  ctx->result.exec.status = DAX_SUBMIT_ERR_NOMAP;  in dax_ccb_exec()
    954  ctx->result.exec.status = DAX_SUBMIT_ERR_NOACCESS;  in dax_ccb_exec()
    963  ctx->result.exec.status = DAX_SUBMIT_ERR_UNAVAIL;  in dax_ccb_exec()
    966  ctx->result.exec.status = DAX_SUBMIT_ERR_INTERNAL;  in dax_ccb_exec()
    [all …]
|
/linux/tools/testing/selftests/bpf/progs/

test_overhead.c
     13  int BPF_KPROBE(prog1, struct task_struct *tsk, const char *buf, bool exec)  in BPF_KPROBE() argument
     31  int BPF_PROG(prog4, struct task_struct *tsk, const char *buf, bool exec)  in BPF_PROG() argument
     37  int BPF_PROG(prog5, struct task_struct *tsk, const char *buf, bool exec)  in BPF_PROG() argument
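The programs share the signature of the kernel's task-comm update path (struct task_struct *tsk, const char *buf, bool exec), measured once as a kprobe and once via fentry/fexit. A minimal sketch of one such program; the SEC() attach target is an assumption based on that signature, not read from the file:

    #include "vmlinux.h"
    #include <bpf/bpf_helpers.h>
    #include <bpf/bpf_tracing.h>

    SEC("fentry/__set_task_comm")           /* assumed attach point matching the tsk/buf/exec signature */
    int BPF_PROG(prog_rename, struct task_struct *tsk, const char *buf, bool exec)
    {
            return 0;                       /* body left empty: only attach/trigger overhead is measured */
    }

    char LICENSE[] SEC("license") = "GPL";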
|
/linux/drivers/gpu/drm/xe/

xe_gt_pagefault.c
    104  err = xe_vm_lock_vma(exec, vma);  in xe_pf_begin()
    132  struct drm_exec exec;  in handle_vma_pagefault() local
    156  drm_exec_init(&exec, 0, 0);  in handle_vma_pagefault()
    157  drm_exec_until_all_locked(&exec) {  in handle_vma_pagefault()
    159  drm_exec_retry_on_contention(&exec);  in handle_vma_pagefault()
    181  drm_exec_fini(&exec);  in handle_vma_pagefault()
    550  struct drm_exec exec;  in handle_acc() local
    579  drm_exec_init(&exec, 0, 0);  in handle_acc()
    580  drm_exec_until_all_locked(&exec) {  in handle_acc()
    582  drm_exec_retry_on_contention(&exec);  in handle_acc()
    [all …]
|
xe_vm.c
    227  struct drm_exec *exec = &vm_exec.exec;  in xe_vm_add_compute_exec_queue() local
    267  drm_exec_fini(exec);  in xe_vm_add_compute_exec_queue()
    482  struct drm_exec exec;  in preempt_rebind_work_func() local
    516  drm_exec_fini(&exec);  in preempt_rebind_work_func()
    565  drm_exec_fini(&exec);  in preempt_rebind_work_func()
   1118  struct drm_exec exec;  in xe_vma_destroy_unlocked() local
   1131  drm_exec_fini(&exec);  in xe_vma_destroy_unlocked()
   1986  struct drm_exec exec;  in new_vma() local
   2023  drm_exec_fini(&exec);  in new_vma()
   2673  struct drm_exec exec;  in vm_bind_ioctl_ops_execute() local
   [all …]
|
xe_exec.c
    102  return xe_vm_validate_rebind(vm, &vm_exec->exec, 1);  in xe_exec_fn()
    116  struct drm_exec *exec = &vm_exec.exec;  in xe_exec_ioctl() local
    243  drm_exec_init(exec, vm_exec.flags, 0);  in xe_exec_ioctl()
    247  if (xe_vm_validate_should_retry(exec, err, &end))  in xe_exec_ioctl()
    306  drm_gpuvm_resv_add_fence(&vm->gpuvm, exec, &job->drm.s_fence->finished,  in xe_exec_ioctl()
    338  drm_exec_fini(exec);  in xe_exec_ioctl()
|
/linux/arch/sparc/mm/

tlb.c
     70  bool exec, unsigned int hugepage_shift)  in tlb_batch_add_one() argument
     76  if (exec)  in tlb_batch_add_one()
    159  bool exec = pte_exec(*pte);  in tlb_batch_pmd_scan() local
    161  tlb_batch_add_one(mm, vaddr, exec, PAGE_SHIFT);  in tlb_batch_pmd_scan()
    211  bool exec = pte_exec(orig_pte);  in __set_pmd_acct() local
    213  tlb_batch_add_one(mm, addr, exec, REAL_HPAGE_SHIFT);  in __set_pmd_acct()
    214  tlb_batch_add_one(mm, addr + REAL_HPAGE_SIZE, exec,  in __set_pmd_acct()
|
/linux/drivers/gpu/drm/imagination/

pvr_job.c
    538  prepare_fw_obj_resv(struct drm_exec *exec, struct pvr_fw_object *fw_obj)  in prepare_fw_obj_resv() argument
    540  return drm_exec_prepare_obj(exec, gem_from_pvr_gem(fw_obj->gem), 1);  in prepare_fw_obj_resv()
    544  jobs_lock_all_objs(struct drm_exec *exec, struct pvr_job_data *job_data,  in jobs_lock_all_objs() argument
    553  int err = drm_exec_lock_obj(exec,  in jobs_lock_all_objs()
    560  err = prepare_fw_obj_resv(exec,  in jobs_lock_all_objs()
    574  drm_exec_until_all_locked(exec) {  in prepare_job_resvs_for_each()
    575  int err = jobs_lock_all_objs(exec, job_data, job_count);  in prepare_job_resvs_for_each()
    577  drm_exec_retry_on_contention(exec);  in prepare_job_resvs_for_each()
    717  struct drm_exec exec;  in pvr_submit_jobs() local
    758  err = prepare_job_resvs_for_each(&exec, job_data, args->jobs.count);  in pvr_submit_jobs()
    [all …]
|
/linux/drivers/gpu/drm/panthor/

panthor_mmu.h
     47  int panthor_vm_prepare_mapped_bos_resvs(struct drm_exec *exec,
     88  int panthor_vm_bind_job_prepare_resvs(struct drm_exec *exec,
     90  void panthor_vm_bind_job_update_resvs(struct drm_exec *exec, struct drm_sched_job *job);
     92  void panthor_vm_update_resvs(struct panthor_vm *vm, struct drm_exec *exec,
|