Lines matching refs: fence_drv. Each match shows the source line number, the matching line, and the containing function.
101 struct amdgpu_fence_driver *drv = &ring->fence_drv; in amdgpu_fence_write()
117 struct amdgpu_fence_driver *drv = &ring->fence_drv; in amdgpu_fence_read()
161 seq = ++ring->fence_drv.sync_seq; in amdgpu_fence_emit()
170 &ring->fence_drv.lock, in amdgpu_fence_emit()
176 &ring->fence_drv.lock, in amdgpu_fence_emit()
181 amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr, in amdgpu_fence_emit()
184 ptr = &ring->fence_drv.fences[seq & ring->fence_drv.num_fences_mask]; in amdgpu_fence_emit()
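The emit path above (lines 161-184, apparently from drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c) shows the slot-indexing scheme: sync_seq grows without bound, and its low bits, masked with num_fences_mask, select a slot in the pending-fence array. A minimal userspace sketch of that pattern, assuming an illustrative num_hw_submission of 32 (the constants and output below are for illustration only, not driver values):

    #include <stdint.h>
    #include <stdio.h>

    #define NUM_HW_SUBMISSION 32U                      /* assumed example value */
    #define NUM_FENCES        (NUM_HW_SUBMISSION * 2U) /* power-of-two array size */
    #define NUM_FENCES_MASK   (NUM_FENCES - 1U)        /* mirrors fence_drv.num_fences_mask */

    int main(void)
    {
            uint64_t sync_seq = 0;

            /* Emit a few fences and show which array slot each one maps to. */
            for (int i = 0; i < 5; i++) {
                    uint64_t seq = ++sync_seq;                 /* as at line 161 */
                    unsigned int slot = seq & NUM_FENCES_MASK; /* as at line 184 */

                    printf("seq %llu -> fences[%u]\n",
                           (unsigned long long)seq, slot);
            }
            return 0;
    }

Because the array holds twice num_hw_submission entries and the mask keeps the index inside it, a new fence only reuses a slot after the array has wrapped around, by which point the old occupant is expected to have signaled.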
232 seq = ++ring->fence_drv.sync_seq; in amdgpu_fence_emit_polling()
234 seq - ring->fence_drv.num_fences_mask, in amdgpu_fence_emit_polling()
239 amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr, in amdgpu_fence_emit_polling()
256 mod_timer(&ring->fence_drv.fallback_timer, in amdgpu_fence_schedule_fallback()
273 struct amdgpu_fence_driver *drv = &ring->fence_drv; in amdgpu_fence_process()
278 last_seq = atomic_read(&ring->fence_drv.last_seq); in amdgpu_fence_process()
283 if (del_timer(&ring->fence_drv.fallback_timer) && in amdgpu_fence_process()
284 seq != ring->fence_drv.sync_seq) in amdgpu_fence_process()
326 fence_drv.fallback_timer); in amdgpu_fence_fallback()
342 uint64_t seq = READ_ONCE(ring->fence_drv.sync_seq); in amdgpu_fence_wait_empty()
349 ptr = &ring->fence_drv.fences[seq & ring->fence_drv.num_fences_mask]; in amdgpu_fence_wait_empty()
401 emitted -= atomic_read(&ring->fence_drv.last_seq); in amdgpu_fence_count_emitted()
402 emitted += READ_ONCE(ring->fence_drv.sync_seq); in amdgpu_fence_count_emitted()
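Lines 401-402 amount to emitted = sync_seq - last_seq: how many fences have been handed to the hardware but not yet signaled. A standalone sketch of that bookkeeping with illustrative values (the surrounding, unmatched lines of the real function also guard against last_seq wrapping at 32 bits, which this one-shot example ignores):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            /* Illustrative values standing in for the two counters. */
            uint64_t sync_seq = 1000; /* last sequence number emitted to the ring   */
            uint32_t last_seq = 997;  /* last sequence number the hardware signaled */

            uint64_t emitted = 0;
            emitted -= last_seq;      /* as at line 401 */
            emitted += sync_seq;      /* as at line 402 */

            /* Unsigned arithmetic makes the subtract-then-add order safe; prints 3. */
            printf("fences still in flight: %u\n", (uint32_t)emitted);
            return 0;
    }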
415 struct amdgpu_fence_driver *drv = &ring->fence_drv; in amdgpu_fence_last_unsignaled_time_us()
419 last_seq = atomic_read(&ring->fence_drv.last_seq); in amdgpu_fence_last_unsignaled_time_us()
420 sync_seq = READ_ONCE(ring->fence_drv.sync_seq); in amdgpu_fence_last_unsignaled_time_us()
446 struct amdgpu_fence_driver *drv = &ring->fence_drv; in amdgpu_fence_update_start_timestamp()
478 ring->fence_drv.cpu_addr = ring->fence_cpu_addr; in amdgpu_fence_driver_start_ring()
479 ring->fence_drv.gpu_addr = ring->fence_gpu_addr; in amdgpu_fence_driver_start_ring()
483 ring->fence_drv.cpu_addr = adev->uvd.inst[ring->me].cpu_addr + index; in amdgpu_fence_driver_start_ring()
484 ring->fence_drv.gpu_addr = adev->uvd.inst[ring->me].gpu_addr + index; in amdgpu_fence_driver_start_ring()
486 amdgpu_fence_write(ring, atomic_read(&ring->fence_drv.last_seq)); in amdgpu_fence_driver_start_ring()
488 ring->fence_drv.irq_src = irq_src; in amdgpu_fence_driver_start_ring()
489 ring->fence_drv.irq_type = irq_type; in amdgpu_fence_driver_start_ring()
490 ring->fence_drv.initialized = true; in amdgpu_fence_driver_start_ring()
493 ring->name, ring->fence_drv.gpu_addr); in amdgpu_fence_driver_start_ring()
516 ring->fence_drv.cpu_addr = NULL; in amdgpu_fence_driver_init_ring()
517 ring->fence_drv.gpu_addr = 0; in amdgpu_fence_driver_init_ring()
518 ring->fence_drv.sync_seq = 0; in amdgpu_fence_driver_init_ring()
519 atomic_set(&ring->fence_drv.last_seq, 0); in amdgpu_fence_driver_init_ring()
520 ring->fence_drv.initialized = false; in amdgpu_fence_driver_init_ring()
522 timer_setup(&ring->fence_drv.fallback_timer, amdgpu_fence_fallback, 0); in amdgpu_fence_driver_init_ring()
524 ring->fence_drv.num_fences_mask = ring->num_hw_submission * 2 - 1; in amdgpu_fence_driver_init_ring()
525 spin_lock_init(&ring->fence_drv.lock); in amdgpu_fence_driver_init_ring()
526 ring->fence_drv.fences = kcalloc(ring->num_hw_submission * 2, sizeof(void *), in amdgpu_fence_driver_init_ring()
529 if (!ring->fence_drv.fences) in amdgpu_fence_driver_init_ring()
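Taken together, the init_ring matches above (lines 516-529) and the rest of the listing touch essentially every field of struct amdgpu_fence_driver. The sketch below reconstructs the structure as implied by those accesses; field order, exact types, and comments are assumptions, so treat amdgpu_ring.h as the authoritative definition:

    /* Sketch only: fields inferred from the accesses listed above. */
    #include <linux/types.h>
    #include <linux/atomic.h>
    #include <linux/spinlock.h>
    #include <linux/timer.h>
    #include <linux/dma-fence.h>

    struct amdgpu_irq_src;

    struct amdgpu_fence_driver_sketch {
            uint64_t                gpu_addr;       /* GPU address fences are written to (line 181)    */
            volatile uint32_t       *cpu_addr;      /* CPU view of the same memory (lines 478, 927)    */
            uint64_t                sync_seq;       /* last sequence number emitted (line 161)         */
            atomic_t                last_seq;       /* last sequence number seen signaled (line 278)   */
            bool                    initialized;    /* set once the ring is started (line 490)         */
            struct amdgpu_irq_src   *irq_src;       /* fence interrupt source and type (lines 488-489) */
            unsigned int            irq_type;
            struct timer_list       fallback_timer; /* polling fallback if an IRQ is missed (line 256) */
            unsigned int            num_fences_mask;/* num_hw_submission * 2 - 1 (line 524)            */
            spinlock_t              lock;           /* protects fence emission (lines 170, 525)        */
            struct dma_fence        **fences;       /* per-slot pending fences (line 526)              */
    };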
603 if (!ring || !ring->fence_drv.initialized) in amdgpu_fence_driver_hw_fini()
616 ring->fence_drv.irq_src && in amdgpu_fence_driver_hw_fini()
618 amdgpu_irq_put(adev, ring->fence_drv.irq_src, in amdgpu_fence_driver_hw_fini()
619 ring->fence_drv.irq_type); in amdgpu_fence_driver_hw_fini()
621 del_timer_sync(&ring->fence_drv.fallback_timer); in amdgpu_fence_driver_hw_fini()
633 if (!ring || !ring->fence_drv.initialized || !ring->fence_drv.irq_src) in amdgpu_fence_driver_isr_toggle()
650 if (!ring || !ring->fence_drv.initialized) in amdgpu_fence_driver_sw_fini()
662 for (j = 0; j <= ring->fence_drv.num_fences_mask; ++j) in amdgpu_fence_driver_sw_fini()
663 dma_fence_put(ring->fence_drv.fences[j]); in amdgpu_fence_driver_sw_fini()
664 kfree(ring->fence_drv.fences); in amdgpu_fence_driver_sw_fini()
665 ring->fence_drv.fences = NULL; in amdgpu_fence_driver_sw_fini()
666 ring->fence_drv.initialized = false; in amdgpu_fence_driver_sw_fini()
689 if (!ring || !ring->fence_drv.initialized) in amdgpu_fence_driver_hw_init()
693 if (ring->fence_drv.irq_src && in amdgpu_fence_driver_hw_init()
695 amdgpu_irq_get(adev, ring->fence_drv.irq_src, in amdgpu_fence_driver_hw_init()
696 ring->fence_drv.irq_type); in amdgpu_fence_driver_hw_init()
711 for (i = 0; i <= ring->fence_drv.num_fences_mask; i++) { in amdgpu_fence_driver_clear_job_fences()
712 ptr = &ring->fence_drv.fences[i]; in amdgpu_fence_driver_clear_job_fences()
739 struct amdgpu_fence_driver *drv = &ring->fence_drv; in amdgpu_fence_driver_set_error()
763 amdgpu_fence_write(ring, ring->fence_drv.sync_seq); in amdgpu_fence_driver_force_completion()
798 if (!timer_pending(&to_amdgpu_fence(f)->ring->fence_drv.fallback_timer)) in amdgpu_fence_enable_signaling()
815 if (!timer_pending(&to_amdgpu_ring(job->base.sched)->fence_drv.fallback_timer)) in amdgpu_job_fence_enable_signaling()
903 if (!ring || !ring->fence_drv.initialized) in amdgpu_debugfs_fence_info_show()
910 atomic_read(&ring->fence_drv.last_seq)); in amdgpu_debugfs_fence_info_show()
912 ring->fence_drv.sync_seq); in amdgpu_debugfs_fence_info_show()
927 le32_to_cpu(*(ring->fence_drv.cpu_addr + 2))); in amdgpu_debugfs_fence_info_show()
930 le32_to_cpu(*(ring->fence_drv.cpu_addr + 4))); in amdgpu_debugfs_fence_info_show()
933 le32_to_cpu(*(ring->fence_drv.cpu_addr + 6))); in amdgpu_debugfs_fence_info_show()