
Searched refs:fence_drv (Results 1 – 25 of 54) sorted by relevance


/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_fence.c 161 seq = ++ring->fence_drv.sync_seq; in amdgpu_fence_emit()
170 &ring->fence_drv.lock, in amdgpu_fence_emit()
176 &ring->fence_drv.lock, in amdgpu_fence_emit()
184 ptr = &ring->fence_drv.fences[seq & ring->fence_drv.num_fences_mask]; in amdgpu_fence_emit()
349 ptr = &ring->fence_drv.fences[seq & ring->fence_drv.num_fences_mask]; in amdgpu_fence_wait_empty()
517 ring->fence_drv.gpu_addr = 0; in amdgpu_fence_driver_init_ring()
518 ring->fence_drv.sync_seq = 0; in amdgpu_fence_driver_init_ring()
529 if (!ring->fence_drv.fences) in amdgpu_fence_driver_init_ring()
616 ring->fence_drv.irq_src && in amdgpu_fence_driver_hw_fini()
633 if (!ring || !ring->fence_drv.initialized || !ring->fence_drv.irq_src) in amdgpu_fence_driver_isr_toggle()
[all …]
umsch_mm_v4_0.c 310 set_hw_resources.api_status.api_completion_fence_addr = umsch->ring.fence_drv.gpu_addr; in umsch_mm_v4_0_set_hw_resources()
311 set_hw_resources.api_status.api_completion_fence_value = ++umsch->ring.fence_drv.sync_seq; in umsch_mm_v4_0_set_hw_resources()
360 add_queue.api_status.api_completion_fence_addr = umsch->ring.fence_drv.gpu_addr; in umsch_mm_v4_0_add_queue()
361 add_queue.api_status.api_completion_fence_value = ++umsch->ring.fence_drv.sync_seq; in umsch_mm_v4_0_add_queue()
392 remove_queue.api_status.api_completion_fence_addr = umsch->ring.fence_drv.gpu_addr; in umsch_mm_v4_0_remove_queue()
393 remove_queue.api_status.api_completion_fence_value = ++umsch->ring.fence_drv.sync_seq; in umsch_mm_v4_0_remove_queue()
amdgpu_ring_mux.c 99 last_seq = atomic_read(&e->ring->fence_drv.last_seq); in amdgpu_mux_resubmit_chunks()
109 le32_to_cpu(*(e->ring->fence_drv.cpu_addr + 2))) { in amdgpu_mux_resubmit_chunks()
473 last_seq = atomic_read(&ring->fence_drv.last_seq); in scan_and_remove_signaled_chunk()
536 chunk->sync_seq = READ_ONCE(ring->fence_drv.sync_seq); in amdgpu_ring_mux_end_ib()
569 mux->seqno_to_resubmit = ring->fence_drv.sync_seq; in amdgpu_mcbp_handle_trailing_fence_irq()
amdgpu_debugfs.c 1817 struct amdgpu_fence_driver *drv = &ring->fence_drv; in amdgpu_ib_preempt_fences_swap()
1820 last_seq = atomic_read(&ring->fence_drv.last_seq); in amdgpu_ib_preempt_fences_swap()
1821 sync_seq = ring->fence_drv.sync_seq; in amdgpu_ib_preempt_fences_swap()
1878 struct amdgpu_fence_driver *drv = &ring->fence_drv; in amdgpu_ib_preempt_mark_partial_job()
1932 length = ring->fence_drv.num_fences_mask + 1; in amdgpu_debugfs_ib_preempt()
1954 if (atomic_read(&ring->fence_drv.last_seq) != in amdgpu_debugfs_ib_preempt()
1955 ring->fence_drv.sync_seq) { in amdgpu_debugfs_ib_preempt()
amdgpu_job.c 124 job->base.sched->name, atomic_read(&ring->fence_drv.last_seq), in amdgpu_job_timedout()
125 ring->fence_drv.sync_seq); in amdgpu_job_timedout()
uvd_v6_0.c 1093 uint32_t seq = ring->fence_drv.sync_seq; in uvd_v6_0_ring_emit_pipeline_sync()
1094 uint64_t addr = ring->fence_drv.gpu_addr; in uvd_v6_0_ring_emit_pipeline_sync()
1122 uint32_t seq = ring->fence_drv.sync_seq; in uvd_v6_0_enc_ring_emit_pipeline_sync()
1123 uint64_t addr = ring->fence_drv.gpu_addr; in uvd_v6_0_enc_ring_emit_pipeline_sync()
si_dma.c 415 uint32_t seq = ring->fence_drv.sync_seq; in si_dma_ring_emit_pipeline_sync()
416 uint64_t addr = ring->fence_drv.gpu_addr; in si_dma_ring_emit_pipeline_sync()
vce_v3_0.c 888 uint32_t seq = ring->fence_drv.sync_seq; in vce_v3_0_emit_pipeline_sync()
889 uint64_t addr = ring->fence_drv.gpu_addr; in vce_v3_0_emit_pipeline_sync()
amdgpu_vpe.c 537 uint32_t seq = ring->fence_drv.sync_seq; in vpe_ring_emit_pipeline_sync()
538 uint64_t addr = ring->fence_drv.gpu_addr; in vpe_ring_emit_pipeline_sync()
amdgpu_umsch_mm.c 511 r = amdgpu_fence_wait_polling(ring, ring->fence_drv.sync_seq, adev->usec_timeout); in amdgpu_umsch_mm_query_fence()
514 ring->fence_drv.sync_seq); in amdgpu_umsch_mm_query_fence()
sdma_v2_4.c 758 uint32_t seq = ring->fence_drv.sync_seq; in sdma_v2_4_ring_emit_pipeline_sync()
759 uint64_t addr = ring->fence_drv.gpu_addr; in sdma_v2_4_ring_emit_pipeline_sync()
mes_v12_0.c 186 seq = ++ring->fence_drv.sync_seq; in mes_v12_0_submit_pkt_and_poll_completion()
188 seq - ring->fence_drv.num_fences_mask, in mes_v12_0_submit_pkt_and_poll_completion()
204 ring->fence_drv.gpu_addr; in mes_v12_0_submit_pkt_and_poll_completion()
cik_sdma.c 823 uint32_t seq = ring->fence_drv.sync_seq; in cik_sdma_ring_emit_pipeline_sync()
824 uint64_t addr = ring->fence_drv.gpu_addr; in cik_sdma_ring_emit_pipeline_sync()
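
The amdgpu hits above keep showing the same two-step pattern: a fence is emitted by bumping ring->fence_drv.sync_seq, and its per-ring slot is then found with seq & fence_drv.num_fences_mask, which only works when the fence array length is a power of two. The sketch below is a minimal standalone illustration of that masked ring-buffer indexing; the toy_ names and the NUM_FENCES value are invented for the example and are not the kernel's.

```c
#include <stdint.h>
#include <stdio.h>

#define NUM_FENCES 8u                    /* must be a power of two */

struct toy_fence_driver {
	uint32_t sync_seq;               /* last emitted sequence number */
	uint32_t num_fences_mask;        /* NUM_FENCES - 1 */
	void *fences[NUM_FENCES];        /* one slot per in-flight fence */
};

static void **toy_fence_slot(struct toy_fence_driver *drv, uint32_t seq)
{
	/* For a power-of-two size, seq & (size - 1) == seq % size. */
	return &drv->fences[seq & drv->num_fences_mask];
}

int main(void)
{
	struct toy_fence_driver drv = { .num_fences_mask = NUM_FENCES - 1 };

	for (int i = 0; i < 20; i++) {
		uint32_t seq = ++drv.sync_seq;   /* "emit" a new fence */

		printf("seq %u -> slot %ld\n", (unsigned)seq,
		       (long)(toy_fence_slot(&drv, seq) - drv.fences));
	}
	return 0;
}
```

Because the mask wraps the sequence number, an older fence in a reused slot must already be signaled before the slot is overwritten, which is why the emit paths above keep sync_seq and the slot count tied together.
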
/linux/drivers/gpu/drm/radeon/
radeon_fence.c 119 &rdev->fence_drv[ring].lockup_work, in radeon_fence_schedule_check()
269 struct radeon_fence_driver *fence_drv; in radeon_fence_check_lockup() local
275 rdev = fence_drv->rdev; in radeon_fence_check_lockup()
276 ring = fence_drv - &rdev->fence_drv[0]; in radeon_fence_check_lockup()
287 fence_drv->delayed_irq = false; in radeon_fence_check_lockup()
301 fence_drv->sync_seq[ring], ring); in radeon_fence_check_lockup()
836 rdev->fence_drv[ring].scratch_reg - in radeon_fence_driver_start_ring()
864 rdev->fence_drv[ring].gpu_addr = 0; in radeon_fence_driver_init_ring()
871 rdev->fence_drv[ring].rdev = rdev; in radeon_fence_driver_init_ring()
953 if (!rdev->fence_drv[i].initialized) in radeon_debugfs_fence_info_show()
[all …]
uvd_v2_2.c 43 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v2_2_fence_emit()
evergreen_dma.c 44 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in evergreen_dma_fence_ring_emit()
uvd_v1_0.c 85 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v1_0_fence_emit()
r600_dma.c 290 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in r600_dma_fence_ring_emit()
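
Unlike amdgpu's per-ring struct, radeon keeps an array rdev->fence_drv[ring], and the lockup-check hit at radeon_fence.c line 276 recovers the ring index from a pointer to one element by plain pointer subtraction. Here is a small standalone sketch of that element-index-from-pointer idiom; the toy_ types are hypothetical stand-ins, not the radeon structures (the real worker also finds the device through fence_drv->rdev rather than a parameter).

```c
#include <stddef.h>
#include <stdio.h>

#define TOY_NUM_RINGS 4

struct toy_fence_driver {
	unsigned long gpu_addr;
	int delayed_irq;
};

struct toy_device {
	struct toy_fence_driver fence_drv[TOY_NUM_RINGS];   /* one per ring */
};

/* The worker only receives the per-ring element, yet can recover its ring. */
static void toy_check_lockup(struct toy_device *dev, struct toy_fence_driver *drv)
{
	ptrdiff_t ring = drv - &dev->fence_drv[0];   /* element index by subtraction */

	printf("checking lockup on ring %td\n", ring);
}

int main(void)
{
	struct toy_device dev = { 0 };

	toy_check_lockup(&dev, &dev.fence_drv[2]);   /* prints "ring 2" */
	return 0;
}
```
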
/linux/drivers/gpu/drm/virtio/
virtgpu_fence.c 79 struct virtio_gpu_fence_driver *drv = &vgdev->fence_drv; in virtio_gpu_fence_alloc()
105 struct virtio_gpu_fence_driver *drv = &vgdev->fence_drv; in virtio_gpu_fence_emit()
130 struct virtio_gpu_fence_driver *drv = &vgdev->fence_drv; in virtio_gpu_fence_event_process()
135 atomic64_set(&vgdev->fence_drv.last_fence_id, fence_id); in virtio_gpu_fence_event_process()
virtgpu_debugfs.c 75 (u64)atomic64_read(&vgdev->fence_drv.last_fence_id), in virtio_gpu_debugfs_irq_info()
76 vgdev->fence_drv.current_fence_id); in virtio_gpu_debugfs_irq_info()
virtgpu_kms.c 149 vgdev->fence_drv.context = dma_fence_context_alloc(1); in virtio_gpu_init()
150 spin_lock_init(&vgdev->fence_drv.lock); in virtio_gpu_init()
151 INIT_LIST_HEAD(&vgdev->fence_drv.fences); in virtio_gpu_init()
virtgpu_ioctl.c 172 fence = virtio_gpu_fence_alloc(vgdev, vgdev->fence_drv.context, 0); in virtio_gpu_resource_create_ioctl()
259 fence = virtio_gpu_fence_alloc(vgdev, vgdev->fence_drv.context, 0); in virtio_gpu_transfer_from_host_ioctl()
319 fence = virtio_gpu_fence_alloc(vgdev, vgdev->fence_drv.context, in virtio_gpu_transfer_to_host_ioctl()
virtgpu_drv.h 241 struct virtio_gpu_fence_driver fence_drv; member
virtgpu_plane.c 261 vgfb->fence = virtio_gpu_fence_alloc(vgdev, vgdev->fence_drv.context, in virtio_gpu_plane_prepare_fb()
virtgpu_submit.c 478 u64 fence_ctx = vgdev->fence_drv.context; in virtio_gpu_execbuffer_ioctl()
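
The virtio-gpu hits point at a device-wide fence_drv with one dma_fence context, a list of pending fences, and two counters: current_fence_id for the last fence emitted and last_fence_id for the last one the device reported. The sketch below is a simplified userspace illustration of that bookkeeping under the assumption that the event handler treats every pending fence with an id at or below the reported value as signaled; the toy_ names are invented, and the real driver additionally removes signaled fences from the list under fence_drv.lock and uses the dma_fence machinery.

```c
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct toy_fence {
	uint64_t id;                 /* monotonically increasing per context */
	int signaled;
	struct toy_fence *next;
};

struct toy_fence_driver {
	uint64_t current_fence_id;   /* last id handed out on emit */
	uint64_t last_fence_id;      /* last id the device reported done */
	struct toy_fence *fences;    /* pending fences, newest first */
};

static struct toy_fence *toy_fence_emit(struct toy_fence_driver *drv)
{
	struct toy_fence *f = calloc(1, sizeof(*f));

	f->id = ++drv->current_fence_id;
	f->next = drv->fences;
	drv->fences = f;
	return f;
}

/* Device reported fence_id: mark it and every older pending fence signaled. */
static void toy_fence_event_process(struct toy_fence_driver *drv, uint64_t fence_id)
{
	drv->last_fence_id = fence_id;
	for (struct toy_fence *f = drv->fences; f; f = f->next)
		if (f->id <= fence_id)
			f->signaled = 1;
}

int main(void)
{
	struct toy_fence_driver drv = { 0 };
	struct toy_fence *a = toy_fence_emit(&drv);  /* id 1 */
	struct toy_fence *b = toy_fence_emit(&drv);  /* id 2 */

	toy_fence_event_process(&drv, 1);
	printf("a signaled=%d, b signaled=%d\n", a->signaled, b->signaled);
	return 0;
}
```
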

Completed in 79 milliseconds
