/drivers/gpu/drm/

drm_exec.c
      67  exec->prelocked = NULL;  in drm_exec_unlock_all()
      86  exec->flags = flags;  in drm_exec_init()
      90  exec->max_objects = exec->objects ? nr : 0;  in drm_exec_init()
      91  exec->num_objects = 0;  in drm_exec_init()
      93  exec->prelocked = NULL;  in drm_exec_init()
     107  kvfree(exec->objects);  in drm_exec_fini()
     137  exec->num_objects = 0;  in drm_exec_cleanup()
     146  if (unlikely(exec->num_objects == exec->max_objects)) {  in drm_exec_obj_locked()
     154  exec->objects = tmp;  in drm_exec_obj_locked()
     158  exec->objects[exec->num_objects++] = obj;  in drm_exec_obj_locked()
    [all …]
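The hits above trace the lifetime of a struct drm_exec: drm_exec_init() records the flags and pre-allocates the objects array, drm_exec_obj_locked() grows that array as objects get locked, and drm_exec_fini()/drm_exec_unlock_all() drop every lock and free it. A minimal sketch of the retry loop drivers build on these helpers — the caller and the two GEM objects are hypothetical, the drm_exec calls are the real API:

#include <drm/drm_exec.h>

/* Hypothetical caller: lock two GEM objects, reserving one fence slot each. */
static int lock_pair(struct drm_gem_object *a, struct drm_gem_object *b)
{
	struct drm_exec exec;
	int ret = 0;

	drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);
	drm_exec_until_all_locked(&exec) {
		ret = drm_exec_prepare_obj(&exec, a, 1);
		drm_exec_retry_on_contention(&exec);	/* restarts the loop on -EDEADLK */
		if (ret)
			break;

		ret = drm_exec_prepare_obj(&exec, b, 1);
		drm_exec_retry_on_contention(&exec);
		if (ret)
			break;
	}

	/* on success, both reservation locks are held here */

	drm_exec_fini(&exec);	/* unlocks everything, via drm_exec_unlock_all() */
	return ret;
}

On contention drm_exec unlocks everything acquired so far, slow-locks the contended object (cached in exec->prelocked), and reruns the block — which is why the loop body must be restartable.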
|
drm_gpuvm.c
    1113  struct drm_exec *exec,  in drm_gpuvm_prepare_vm() argument
    1122  struct drm_exec *exec,  in __drm_gpuvm_prepare_objects() argument
    1143  struct drm_exec *exec,  in drm_gpuvm_prepare_objects_locked() argument
    1187  struct drm_exec *exec,  in drm_gpuvm_prepare_objects() argument
    1250  struct drm_exec *exec = &vm_exec->exec;  in drm_gpuvm_exec_lock() local
    1256  drm_exec_until_all_locked(exec) {  in drm_gpuvm_exec_lock()
    1278  drm_exec_fini(exec);  in drm_gpuvm_exec_lock()
    1342  struct drm_exec *exec = &vm_exec->exec;  in drm_gpuvm_exec_lock_range() local
    1358  drm_exec_fini(exec);  in drm_gpuvm_exec_lock_range()
    1442  struct drm_exec *exec,  in drm_gpuvm_resv_add_fence() argument
    [all …]
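drm_gpuvm layers on top of drm_exec: drm_gpuvm_prepare_vm() locks the VM's common reservation object, drm_gpuvm_prepare_objects() walks the external-object list, and drm_gpuvm_exec_lock() wraps both in the retry loop. A hedged sketch of the bundled form, assuming an already-initialized gpuvm pointer:

	struct drm_gpuvm_exec vm_exec = {
		.vm = gpuvm,			/* assumed struct drm_gpuvm * */
		.flags = DRM_EXEC_INTERRUPTIBLE_WAIT,
		.num_fences = 1,
	};
	int ret;

	/* Locks the VM resv plus every external object's resv, retrying
	 * internally on contention, as in drm_gpuvm_exec_lock() above. */
	ret = drm_gpuvm_exec_lock(&vm_exec);
	if (ret)
		return ret;

	/* ... submit work, drm_gpuvm_resv_add_fence(), ... */

	drm_gpuvm_exec_unlock(&vm_exec);	/* drm_exec_fini() on &vm_exec.exec */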
|
/drivers/gpu/drm/tests/

drm_exec_test.c
      47  struct drm_exec exec;  in sanitycheck() local
      50  drm_exec_fini(&exec);  in sanitycheck()
      58  struct drm_exec exec;  in test_lock() local
      71  drm_exec_fini(&exec);  in test_lock()
      78  struct drm_exec exec;  in test_lock_unlock() local
      98  drm_exec_fini(&exec);  in test_lock_unlock()
     105  struct drm_exec exec;  in test_duplicates() local
     125  drm_exec_fini(&exec);  in test_duplicates()
     132  struct drm_exec exec;  in test_prepare() local
     145  drm_exec_fini(&exec);  in test_prepare()
    [all …]
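Every KUnit case here has the same shape: a stack-allocated drm_exec, one scenario, then drm_exec_fini(). Roughly what sanitycheck() reduces to (a sketch, not the verbatim test):

static void sanitycheck(struct kunit *test)
{
	struct drm_exec exec;

	/* init/fini with no objects must neither leak nor deadlock */
	drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);
	drm_exec_fini(&exec);
	KUNIT_SUCCEED(test);
}

test_duplicates presumably exercises DRM_EXEC_IGNORE_DUPLICATES, the flag that makes locking the same object twice succeed instead of failing with -EALREADY.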
|
/drivers/gpu/drm/vc4/

vc4_gem.c
     172  if (!exec[0] && !exec[1]) {  in vc4_save_hang_state()
     483  if (!exec)  in vc4_submit_next_bin_job()
     497  if (exec->ct0ca != exec->ct0ea) {  in vc4_submit_next_bin_job()
     500  submit_cl(dev, 0, exec->ct0ca, exec->ct0ea);  in vc4_submit_next_bin_job()
     523  if (!exec)  in vc4_submit_next_render_job()
     537  trace_vc4_submit_cl(dev, true, exec->seqno, exec->ct1ca, exec->ct1ea);  in vc4_submit_next_render_job()
     538  submit_cl(dev, 1, exec->ct1ca, exec->ct1ea);  in vc4_submit_next_render_job()
     696  exec->bo_count, &exec->bo);  in vc4_cl_lookup_bos()
     723  for (i = 0; i < exec->bo_count && exec->bo[i]; i++)  in vc4_cl_lookup_bos()
     811  exec->ct0ca = exec->exec_bo->dma_addr + bin_offset;  in vc4_get_bcl()
    [all …]
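vc4 predates drm_exec; its exec is a struct vc4_exec_info describing one job. ct0ca/ct0ea and ct1ca/ct1ea hold the start ("current address") and end bus addresses of the binner (thread 0) and renderer (thread 1) control lists. submit_cl() then amounts to two register writes, roughly as below (register macros as in vc4_regs.h; treat the body as a sketch):

static void submit_cl(struct drm_device *dev, uint32_t thread,
		      uint32_t start, uint32_t end)
{
	struct vc4_dev *vc4 = to_vc4_dev(dev);

	/* Set the current and end address of the control list;
	 * writing the end address is what starts the job.
	 */
	V3D_WRITE(V3D_CTNCA(thread), start);
	V3D_WRITE(V3D_CTNEA(thread), end);
}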
|
vc4_validate.c
     135  return vc4_use_bo(exec, exec->bo_index[gem_handles_packet_index]);  in vc4_use_handle()
     224  if (!validate_bin_pos(exec, untrusted, exec->args->bin_cl_size - 1)) {  in validate_flush()
     253  if (!validate_bin_pos(exec, untrusted, exec->args->bin_cl_size - 2)) {  in validate_increment_semaphore()
     278  shader_state = &exec->shader_state[exec->shader_state_count - 1];  in validate_indexed_prim_list()
     312  shader_state = &exec->shader_state[exec->shader_state_count - 1];  in validate_gl_array_primitive()
     371  tile_count = exec->bin_tiles_x * exec->bin_tiles_y;  in validate_tile_binning_config()
     377  exec->bin_tiles_x, exec->bin_tiles_y);  in validate_tile_binning_config()
     433  memcpy(exec->bo_index, untrusted, sizeof(exec->bo_index));  in validate_gem_handles()
     546  exec->ct0ea = exec->ct0ca + dst_offset;  in vc4_validate_bin_cl()
     880  ((uint32_t *)exec->uniforms_v)[o] = exec->uniforms_p;  in validate_gl_shader_rec()
    [all …]
|
vc4_render_cl.c
     186  (y * exec->bin_tiles_x + x) * 32));  in emit_tile()
     335  &exec->unref_list);  in vc4_create_rcl_bo()
     377  exec->ct1ca = setup->rcl->dma_addr;  in vc4_create_rcl_bo()
     421  *obj = vc4_use_bo(exec, surf->hindex);  in vc4_rcl_msaa_surface_setup()
     425  exec->rcl_write_bo[exec->rcl_write_bo_count++] = *obj;  in vc4_rcl_msaa_surface_setup()
     462  exec->rcl_write_bo[exec->rcl_write_bo_count++] = *obj;  in vc4_rcl_surface_setup()
     465  if (surf == &exec->args->zs_write) {  in vc4_rcl_surface_setup()
     526  exec->args->width, exec->args->height, cpp)) {  in vc4_rcl_surface_setup()
     566  exec->rcl_write_bo[exec->rcl_write_bo_count++] = *obj;  in vc4_rcl_render_config_surface_setup()
     587  exec->args->width, exec->args->height, cpp)) {  in vc4_rcl_render_config_surface_setup()
    [all …]
|
vc4_irq.c
      67  struct vc4_exec_info *exec;  in vc4_overflow_mem_work() local
      92  exec = vc4_first_bin_job(vc4);  in vc4_overflow_mem_work()
      93  if (!exec)  in vc4_overflow_mem_work()
      95  if (exec) {  in vc4_overflow_mem_work()
     122  if (!exec)  in vc4_irq_finish_bin_job()
     144  if (!exec)  in vc4_cancel_bin_job()
     148  if (exec->perfmon)  in vc4_cancel_bin_job()
     162  if (!exec)  in vc4_irq_finish_render_job()
     192  if (exec->fence) {  in vc4_irq_finish_render_job()
     194  dma_fence_put(exec->fence);  in vc4_irq_finish_render_job()
    [all …]
|
/drivers/gpu/drm/amd/amdgpu/

amdgpu_csa.c
      69  struct drm_exec exec;  in amdgpu_map_static_csa() local
      73  drm_exec_until_all_locked(&exec) {  in amdgpu_map_static_csa()
      74  r = amdgpu_vm_lock_pd(vm, &exec, 0);  in amdgpu_map_static_csa()
      77  drm_exec_retry_on_contention(&exec);  in amdgpu_map_static_csa()
     101  drm_exec_fini(&exec);  in amdgpu_map_static_csa()
     109  struct drm_exec exec;  in amdgpu_unmap_static_csa() local
     112  drm_exec_init(&exec, 0, 0);  in amdgpu_unmap_static_csa()
     113  drm_exec_until_all_locked(&exec) {  in amdgpu_unmap_static_csa()
     114  r = amdgpu_vm_lock_pd(vm, &exec, 0);  in amdgpu_unmap_static_csa()
     117  drm_exec_retry_on_contention(&exec);  in amdgpu_unmap_static_csa()
    [all …]
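All of the amdgpu call sites in this section share one shape: lock the VM's page-directory BO via amdgpu_vm_lock_pd(), lock the target BO, and place drm_exec_retry_on_contention() after the acquisitions. Condensed from amdgpu_map_static_csa() above; the bo variable and the error label are illustrative:

	drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);
	drm_exec_until_all_locked(&exec) {
		r = amdgpu_vm_lock_pd(vm, &exec, 0);		/* page directory */
		if (likely(!r))
			r = drm_exec_lock_obj(&exec, &bo->tbo.base);	/* CSA BO */
		drm_exec_retry_on_contention(&exec);
		if (unlikely(r))
			goto error;	/* hypothetical unwind label */
	}

	/* ... create the mapping ... */

	drm_exec_fini(&exec);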
|
amdgpu_seq64.c
      72  struct drm_exec exec;  in amdgpu_seq64_map() local
      80  drm_exec_until_all_locked(&exec) {  in amdgpu_seq64_map()
      81  r = amdgpu_vm_lock_pd(vm, &exec, 0);  in amdgpu_seq64_map()
      84  drm_exec_retry_on_contention(&exec);  in amdgpu_seq64_map()
     114  drm_exec_fini(&exec);  in amdgpu_seq64_map()
     130  struct drm_exec exec;  in amdgpu_seq64_unmap() local
     142  drm_exec_init(&exec, 0, 0);  in amdgpu_seq64_unmap()
     143  drm_exec_until_all_locked(&exec) {  in amdgpu_seq64_unmap()
     144  r = amdgpu_vm_lock_pd(vm, &exec, 0);  in amdgpu_seq64_unmap()
     147  drm_exec_retry_on_contention(&exec);  in amdgpu_seq64_unmap()
    [all …]
|
amdgpu_userq_fence.c
     427  struct drm_exec exec;  in amdgpu_userq_signal_ioctl() local
     531  drm_exec_until_all_locked(&exec) {  in amdgpu_userq_signal_ioctl()
     533  drm_exec_retry_on_contention(&exec);  in amdgpu_userq_signal_ioctl()
     540  drm_exec_retry_on_contention(&exec);  in amdgpu_userq_signal_ioctl()
     571  drm_exec_fini(&exec);  in amdgpu_userq_signal_ioctl()
     610  struct drm_exec exec;  in amdgpu_userq_wait_ioctl() local
     681  drm_exec_until_all_locked(&exec) {  in amdgpu_userq_wait_ioctl()
     685  drm_exec_fini(&exec);  in amdgpu_userq_wait_ioctl()
     692  drm_exec_fini(&exec);  in amdgpu_userq_wait_ioctl()
     922  drm_exec_fini(&exec);  in amdgpu_userq_wait_ioctl()
    [all …]
|
amdgpu_gem.c
     351  struct drm_exec exec;  in amdgpu_gem_object_close() local
     355  drm_exec_until_all_locked(&exec) {  in amdgpu_gem_object_close()
     357  drm_exec_retry_on_contention(&exec);  in amdgpu_gem_object_close()
     361  r = amdgpu_vm_lock_pd(vm, &exec, 0);  in amdgpu_gem_object_close()
     362  drm_exec_retry_on_contention(&exec);  in amdgpu_gem_object_close()
     392  drm_exec_fini(&exec);  in amdgpu_gem_object_close()
     842  struct drm_exec exec;  in amdgpu_gem_va_ioctl() local
     911  drm_exec_until_all_locked(&exec) {  in amdgpu_gem_va_ioctl()
     913  r = drm_exec_lock_obj(&exec, gobj);  in amdgpu_gem_va_ioctl()
     920  drm_exec_retry_on_contention(&exec);  in amdgpu_gem_va_ioctl()
    [all …]
|
amdgpu_amdkfd_gpuvm.c
    1112  struct drm_exec exec;  member
    1158  drm_exec_fini(&ctx->exec);  in reserve_bo_and_vm()
    1192  &ctx->exec, 2);  in reserve_bo_and_cond_vms()
    1208  drm_exec_fini(&ctx->exec);  in reserve_bo_and_cond_vms()
    1230  drm_exec_fini(&ctx->exec);  in unreserve_bo_and_vms()
    2606  struct drm_exec exec;  in validate_invalid_user_pages() local
    2615  drm_exec_init(&exec, 0, 0);  in validate_invalid_user_pages()
    2617  drm_exec_until_all_locked(&exec) {  in validate_invalid_user_pages()
    2689  drm_exec_fini(&exec);  in validate_invalid_user_pages()
    2866  struct drm_exec exec;  in amdgpu_amdkfd_gpuvm_restore_process_bos() local
    [all …]
|
/drivers/gpu/drm/msm/

msm_gem_submit.c
     278  struct drm_exec *exec = &submit->exec;  in submit_lock_objects() local
     286  drm_exec_until_all_locked (&submit->exec) {  in submit_lock_objects()
     287  ret = drm_gpuvm_prepare_vm(submit->vm, exec, 1);  in submit_lock_objects()
     288  drm_exec_retry_on_contention(exec);  in submit_lock_objects()
     293  drm_exec_retry_on_contention(exec);  in submit_lock_objects()
     303  drm_exec_until_all_locked (&submit->exec) {  in submit_lock_objects()
     304  ret = drm_exec_lock_obj(&submit->exec,  in submit_lock_objects()
     306  drm_exec_retry_on_contention(&submit->exec);  in submit_lock_objects()
     312  drm_exec_retry_on_contention(&submit->exec);  in submit_lock_objects()
     517  if (submit->exec.objects)  in submit_cleanup()
    [all …]
|
msm_gem.c
      53  struct drm_exec exec;  in msm_gem_close() local
      83  drm_exec_fini(&exec); /* drop locks */  in msm_gem_close()
      98  struct drm_exec exec;  in msm_gem_vma_put() local
     579  struct drm_exec exec;  in msm_gem_get_and_pin_iova_range() local
     604  struct drm_exec exec;  in msm_gem_get_iova() local
     643  struct drm_exec exec;  in msm_gem_set_iova() local
     679  struct drm_exec exec;  in msm_gem_unpin_iova() local
    1061  struct drm_exec exec;  in msm_gem_free_object() local
    1089  drm_exec_init(&exec, 0, 0);  in msm_gem_free_object()
    1090  drm_exec_until_all_locked (&exec) {  in msm_gem_free_object()
    [all …]
|
msm_gem_vma.c
     844  struct drm_exec exec;  in msm_gem_vm_close() local
     861  drm_exec_init(&exec, 0, 2);  in msm_gem_vm_close()
     862  drm_exec_until_all_locked (&exec) {  in msm_gem_vm_close()
     864  drm_exec_retry_on_contention(&exec);  in msm_gem_vm_close()
     877  drm_exec_lock_obj(&exec, obj);  in msm_gem_vm_close()
     885  drm_exec_unlock_obj(&exec, obj);  in msm_gem_vm_close()
     889  drm_exec_fini(&exec);  in msm_gem_vm_close()
    1158  drm_exec_until_all_locked (exec) {  in vm_bind_job_lock_objects()
    1160  drm_exec_retry_on_contention(exec);  in vm_bind_job_lock_objects()
    1441  struct drm_exec exec;  in msm_ioctl_vm_bind() local
    [all …]
|
msm_gem.h
     361  msm_gem_lock_vm_and_obj(struct drm_exec *exec,  in msm_gem_lock_vm_and_obj() argument
     367  drm_exec_init(exec, 0, 2);  in msm_gem_lock_vm_and_obj()
     368  drm_exec_until_all_locked (exec) {  in msm_gem_lock_vm_and_obj()
     369  ret = drm_exec_lock_obj(exec, drm_gpuvm_resv_obj(vm));  in msm_gem_lock_vm_and_obj()
     371  ret = drm_exec_lock_obj(exec, obj);  in msm_gem_lock_vm_and_obj()
     372  drm_exec_retry_on_contention(exec);  in msm_gem_lock_vm_and_obj()
     439  struct drm_exec exec;  member
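msm_gem_lock_vm_and_obj() packages the common two-object case: lock the VM's resv object and the BO in one retry loop (the init passes nr = 2 since at most two objects are tracked). A hedged usage sketch, assuming it returns 0 on success:

	struct drm_exec exec;
	int ret;

	ret = msm_gem_lock_vm_and_obj(&exec, obj, vm);
	if (ret)
		return ret;

	/* obj and the VM resv are both locked; update the VMA here */

	drm_exec_fini(&exec);	/* drops both locks */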
|
/drivers/sbus/char/

oradax.c
     861  ctx->result.exec.status = DAX_SUBMIT_ERR_THR_INIT;  in dax_ccb_exec()
     893  if (ctx->result.exec.status != DAX_SUBMIT_OK)  in dax_ccb_exec()
     897  &ctx->result.exec.status_data);  in dax_ccb_exec()
     898  if (ctx->result.exec.status != DAX_SUBMIT_OK)  in dax_ccb_exec()
     906  &accepted_len, &ctx->result.exec.status_data);  in dax_ccb_exec()
     919  ctx->result.exec.status = DAX_SUBMIT_OK;  in dax_ccb_exec()
     935  ctx->result.exec.status = DAX_SUBMIT_ERR_NOMAP;  in dax_ccb_exec()
     954  ctx->result.exec.status = DAX_SUBMIT_ERR_NOACCESS;  in dax_ccb_exec()
     963  ctx->result.exec.status = DAX_SUBMIT_ERR_UNAVAIL;  in dax_ccb_exec()
     966  ctx->result.exec.status = DAX_SUBMIT_ERR_INTERNAL;  in dax_ccb_exec()
    [all …]
|
/drivers/gpu/drm/xe/

xe_gt_pagefault.c
      83  err = xe_vm_lock_vma(exec, vma);  in xe_pf_begin()
     112  struct drm_exec exec;  in handle_vma_pagefault() local
     139  drm_exec_init(&exec, 0, 0);  in handle_vma_pagefault()
     140  drm_exec_until_all_locked(&exec) {  in handle_vma_pagefault()
     142  drm_exec_retry_on_contention(&exec);  in handle_vma_pagefault()
     163  drm_exec_fini(&exec);  in handle_vma_pagefault()
     545  struct drm_exec exec;  in handle_acc() local
     574  drm_exec_init(&exec, 0, 0);  in handle_acc()
     575  drm_exec_until_all_locked(&exec) {  in handle_acc()
     577  drm_exec_retry_on_contention(&exec);  in handle_acc()
    [all …]
|
xe_exec.c
     102  return xe_vm_validate_rebind(vm, &vm_exec->exec, 1);  in xe_exec_fn()
     116  struct drm_exec *exec = &vm_exec.exec;  in xe_exec_ioctl() local
     243  drm_exec_init(exec, vm_exec.flags, 0);  in xe_exec_ioctl()
     247  if (xe_vm_validate_should_retry(exec, err, &end))  in xe_exec_ioctl()
     312  drm_gpuvm_resv_add_fence(&vm->gpuvm, exec, &job->drm.s_fence->finished,  in xe_exec_ioctl()
     344  drm_exec_fini(exec);  in xe_exec_ioctl()
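xe drives the same machinery through drm_gpuvm_exec, supplying an extra callback (xe_exec_fn() above) that runs once everything is locked, so evicted BOs can be revalidated and rebound before the job is armed. A sketch of that wiring; the validate body is a placeholder for what xe does with xe_vm_validate_rebind(), and the wrapper is hypothetical:

static int validate_fn(struct drm_gpuvm_exec *vm_exec)
{
	/* called with every resv held; revalidate evicted BOs here */
	return 0;
}

static int lock_for_exec(struct xe_vm *vm)	/* hypothetical wrapper */
{
	struct drm_gpuvm_exec vm_exec = {
		.vm = &vm->gpuvm,
		.flags = DRM_EXEC_INTERRUPTIBLE_WAIT,
		.num_fences = 1,
		.extra.fn = validate_fn,
	};
	int err;

	err = drm_gpuvm_exec_lock(&vm_exec);	/* lock + validate, with retry */
	if (err)
		return err;

	/* ... create the job, drm_gpuvm_resv_add_fence(), ... */

	drm_gpuvm_exec_unlock(&vm_exec);
	return 0;
}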
|
xe_vm.c
     229  struct drm_exec *exec = &vm_exec.exec;  in xe_vm_add_compute_exec_queue() local
     269  drm_exec_fini(exec);  in xe_vm_add_compute_exec_queue()
     485  struct drm_exec exec;  in preempt_rebind_work_func() local
     519  drm_exec_fini(&exec);  in preempt_rebind_work_func()
     568  drm_exec_fini(&exec);  in preempt_rebind_work_func()
    1384  struct drm_exec exec;  in xe_vma_destroy_unlocked() local
    1397  drm_exec_fini(&exec);  in xe_vma_destroy_unlocked()
    2449  struct drm_exec exec;  in new_vma() local
    2486  drm_exec_fini(&exec);  in new_vma()
    3253  struct drm_exec exec;  in vm_bind_ioctl_ops_execute() local
    [all …]
|
/drivers/gpu/drm/nouveau/nvkm/subdev/timer/

base.c
      73  LIST_HEAD(exec);  in nvkm_timer_alarm_trigger()
      90  list_add(&alarm->exec, &exec);  in nvkm_timer_alarm_trigger()
      99  list_for_each_entry_safe(alarm, atemp, &exec, exec) {  in nvkm_timer_alarm_trigger()
     100  list_del(&alarm->exec);  in nvkm_timer_alarm_trigger()
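This exec is not drm_exec at all: nvkm_timer_alarm_trigger() moves expired alarms onto a local exec list while holding the timer lock, then drops the lock and runs the callbacks, so a callback can safely re-arm its alarm. The idiom, paraphrased (field and lock names follow nvkm's struct nvkm_alarm; treat as a sketch):

	LIST_HEAD(exec);	/* alarms to fire once the lock is dropped */

	spin_lock_irqsave(&tmr->lock, flags);
	list_for_each_entry_safe(alarm, atemp, &tmr->alarms, head) {
		if (alarm->timestamp <= now) {
			list_del_init(&alarm->head);
			list_add(&alarm->exec, &exec);	/* defer the callback */
		}
	}
	spin_unlock_irqrestore(&tmr->lock, flags);

	/* lock released: callbacks may call back into the timer */
	list_for_each_entry_safe(alarm, atemp, &exec, exec) {
		list_del(&alarm->exec);
		alarm->func(alarm);
	}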
|
/drivers/gpu/drm/imagination/

pvr_job.c
     531  prepare_fw_obj_resv(struct drm_exec *exec, struct pvr_fw_object *fw_obj)  in prepare_fw_obj_resv() argument
     533  return drm_exec_prepare_obj(exec, gem_from_pvr_gem(fw_obj->gem), 1);  in prepare_fw_obj_resv()
     537  jobs_lock_all_objs(struct drm_exec *exec, struct pvr_job_data *job_data,  in jobs_lock_all_objs() argument
     546  int err = drm_exec_lock_obj(exec,  in jobs_lock_all_objs()
     553  err = prepare_fw_obj_resv(exec,  in jobs_lock_all_objs()
     567  drm_exec_until_all_locked(exec) {  in prepare_job_resvs_for_each()
     568  int err = jobs_lock_all_objs(exec, job_data, job_count);  in prepare_job_resvs_for_each()
     570  drm_exec_retry_on_contention(exec);  in prepare_job_resvs_for_each()
     711  struct drm_exec exec;  in pvr_submit_jobs() local
     752  err = prepare_job_resvs_for_each(&exec, job_data, args->jobs.count);  in pvr_submit_jobs()
    [all …]
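pvr locks every object for every queued job inside a single drm_exec_until_all_locked() pass, so one contention restarts the whole set; note the retry macro sits after the bulk helper, not inside it. When the objects already live in an array, drm_exec_prepare_array() collapses the inner loop into one call — a sketch, with objs and num_objs assumed:

	drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);
	drm_exec_until_all_locked(&exec) {
		err = drm_exec_prepare_array(&exec, objs, num_objs, 1);
		drm_exec_retry_on_contention(&exec);
		if (err)
			break;
	}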
|
/drivers/gpu/drm/panthor/

panthor_mmu.h
      49  int panthor_vm_prepare_mapped_bos_resvs(struct drm_exec *exec,
      90  int panthor_vm_bind_job_prepare_resvs(struct drm_exec *exec,
      92  void panthor_vm_bind_job_update_resvs(struct drm_exec *exec, struct drm_sched_job *job);
      94  void panthor_vm_update_resvs(struct panthor_vm *vm, struct drm_exec *exec,
|
/drivers/gpu/drm/qxl/

qxl_release.c
     221  drm_exec_init(&release->exec, no_intr ? 0 :  in qxl_release_reserve_list()
     223  drm_exec_until_all_locked(&release->exec) {  in qxl_release_reserve_list()
     225  ret = drm_exec_prepare_obj(&release->exec,  in qxl_release_reserve_list()
     228  drm_exec_retry_on_contention(&release->exec);  in qxl_release_reserve_list()
     241  drm_exec_fini(&release->exec);  in qxl_release_reserve_list()
     252  drm_exec_fini(&release->exec);  in qxl_release_backoff_reserve_list()
     440  drm_exec_fini(&release->exec);  in qxl_release_fence_buffer_objects()
|
/drivers/gpu/drm/i915/gem/

i915_gem_execbuffer.c
      40  struct drm_i915_gem_exec_object2 *exec;  member
     570  ev->exec = entry;  in eb_add_vma()
    1674  err = check_relocations(&eb->exec[i]);  in eb_copy_relocations()
    1733  if (eb->exec[i].relocation_count)  in eb_copy_relocations()
    1889  &eb->exec[i];  in eb_relocate_parse_slow()
    2202  if (exec->num_cliprects || exec->cliprects_ptr)  in i915_gem_check_execbuffer()
    2206  if (exec->DR4 == 0xffffffff) {  in i915_gem_check_execbuffer()
    2208  exec->DR4 = 0;  in i915_gem_check_execbuffer()
    2210  if (exec->DR1 || exec->DR4)  in i915_gem_check_execbuffer()
    2213  if ((exec->batch_start_offset | exec->batch_len) & 0x7)  in i915_gem_check_execbuffer()
    [all …]
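Here exec is the userspace-supplied struct drm_i915_gem_execbuffer2, and these checks are input validation. Two are worth unpacking: DR4 == 0xffffffff is quietly rewritten to 0 because old userspace passed that garbage value, and OR-ing batch_start_offset with batch_len lets a single mask test reject misalignment in either field. In isolation (the error-return form is a sketch):

	/* a set low bit in either field survives the OR, so one test covers both */
	if ((exec->batch_start_offset | exec->batch_len) & 0x7)
		return -EINVAL;	/* both must be 8-byte aligned */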
|