| /linux/drivers/gpu/drm/ |
| drm_writeback.c |
|  376  if (job->out_fence) in drm_writeback_cleanup_job()
|  377  dma_fence_put(job->out_fence); in drm_writeback_cleanup_job()
|  421  struct dma_fence *out_fence; in drm_writeback_signal_completion() local
|  435  out_fence = job->out_fence; in drm_writeback_signal_completion()
|  436  if (out_fence) { in drm_writeback_signal_completion()
|  438  dma_fence_set_error(out_fence, status); in drm_writeback_signal_completion()
|  439  dma_fence_signal(out_fence); in drm_writeback_signal_completion()
|  440  dma_fence_put(out_fence); in drm_writeback_signal_completion()
|  441  job->out_fence = NULL; in drm_writeback_signal_completion()
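
The drm_writeback.c hits trace one lifecycle for the job's out_fence: it is signalled exactly once on completion, carrying an optional error status, and the job's reference is dropped either there or in drm_writeback_cleanup_job(). A minimal sketch of that pattern, assuming only the struct drm_writeback_job member listed under drm_writeback.h further below (an illustration, not the core helper itself):

    #include <linux/dma-fence.h>
    #include <drm/drm_writeback.h>

    /* Propagate an optional error into the out-fence, signal it, drop the
     * job's reference and clear the pointer so cleanup cannot touch it again. */
    static void example_writeback_complete(struct drm_writeback_job *job, int status)
    {
            struct dma_fence *out_fence = job->out_fence;

            if (!out_fence)
                    return;

            if (status)
                    dma_fence_set_error(out_fence, status);
            dma_fence_signal(out_fence);
            dma_fence_put(out_fence);
            job->out_fence = NULL;
    }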
|
| drm_atomic.c |
|  495  if (writeback_job->out_fence) { in drm_atomic_connector_check()
|
| drm_atomic_uapi.c |
| 1284  conn_state->writeback_job->out_fence = fence; in prepare_signaling()
|
| /linux/drivers/gpu/drm/virtio/ |
| virtgpu_submit.c |
|   37  struct virtio_gpu_fence *out_fence; member
|  255  struct dma_fence *fence = &submit->out_fence->f; in virtio_gpu_process_post_deps()
|  344  if (submit->out_fence) in virtio_gpu_cleanup_submit()
|  345  dma_fence_put(&submit->out_fence->f); in virtio_gpu_cleanup_submit()
|  355  submit->out_fence); in virtio_gpu_submit()
|  375  struct virtio_gpu_fence *out_fence; in virtio_gpu_init_submit() local
|  391  out_fence = virtio_gpu_fence_alloc(vgdev, fence_ctx, ring_idx); in virtio_gpu_init_submit()
|  393  out_fence = NULL; in virtio_gpu_init_submit()
|  398  dma_fence_put(&out_fence->f); in virtio_gpu_init_submit()
|  403  submit->out_fence = out_fence; in virtio_gpu_init_submit()
|  [all …]
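
The virtio-gpu hits show a common driver idiom: the submit keeps a driver-specific fence that wraps the core struct dma_fence as member 'f', so all reference counting goes through &out_fence->f, and a failed allocation simply leaves out_fence NULL. A hedged sketch with a stand-in type, since struct virtio_gpu_fence itself is not part of the hits:

    #include <linux/types.h>
    #include <linux/dma-fence.h>

    /* Stand-in for struct virtio_gpu_fence: the core fence is embedded and
     * every dma-fence operation goes through the 'f' member. */
    struct example_gpu_fence {
            struct dma_fence f;
            u64 fence_id;           /* hypothetical driver-private bookkeeping */
    };

    static void example_cleanup_submit(struct example_gpu_fence *out_fence)
    {
            /* Allocation may have failed and left out_fence NULL; the submit
             * then ran without an out-fence and there is nothing to drop. */
            if (out_fence)
                    dma_fence_put(&out_fence->f);
    }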
|
| /linux/drivers/gpu/drm/etnaviv/ |
| etnaviv_sched.c |
|   45  if (dma_fence_is_signaled(submit->out_fence)) in etnaviv_sched_timedout_job()
|  112  submit->out_fence = dma_fence_get(&submit->sched_job.s_fence->finished); in etnaviv_sched_push_job()
|  114  submit->out_fence, xa_limit_32b, in etnaviv_sched_push_job()
|
| etnaviv_gem_submit.c |
|  207  dma_resv_add_fence(obj->resv, submit->out_fence, write ? in submit_attach_object_fences()
|  392  if (submit->out_fence) { in submit_cleanup()
|  398  dma_fence_put(submit->out_fence); in submit_cleanup()
|  602  sync_file = sync_file_create(submit->out_fence); in etnaviv_ioctl_gem_submit()
|
| etnaviv_gem.h |
|   95  struct dma_fence *out_fence; member
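
Taken together, the etnaviv hits show the out_fence being wired into both sync paths: etnaviv_sched_push_job() takes a reference on the scheduler's "finished" fence, submit_attach_object_fences() adds it to the buffer's reservation object for implicit sync, and the submit ioctl can export it to userspace as a sync_file. A sketch of that flow under stated assumptions (the dma_resv lock is held by the caller, error handling is elided, and the reference etnaviv keeps as submit->out_fence is dropped locally here instead):

    #include <linux/dma-fence.h>
    #include <linux/dma-resv.h>
    #include <linux/sync_file.h>
    #include <drm/gpu_scheduler.h>

    static struct sync_file *example_push_and_export(struct drm_sched_job *job,
                                                     struct dma_resv *resv,
                                                     bool write)
    {
            struct dma_fence *out_fence;
            struct sync_file *sync_file;

            /* Completion of the scheduled job is what consumers wait on. */
            out_fence = dma_fence_get(&job->s_fence->finished);

            /* Both helpers below take their own references on the fence. */
            dma_resv_add_fence(resv, out_fence,
                               write ? DMA_RESV_USAGE_WRITE : DMA_RESV_USAGE_READ);
            sync_file = sync_file_create(out_fence);

            dma_fence_put(out_fence);
            return sync_file;       /* NULL if the sync_file could not be allocated */
    }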
|
| /linux/drivers/gpu/drm/vmwgfx/ |
| vmwgfx_kms.h |
|  128  struct vmw_fence_obj **out_fence; member
|  444  struct vmw_fence_obj **out_fence,
|  521  struct vmw_fence_obj **out_fence,
|  529  struct vmw_fence_obj **out_fence,
|  551  struct vmw_fence_obj **out_fence,
|
| vmwgfx_scrn.c |
|  535  struct vmw_fence_obj **out_fence) in vmw_sou_plane_update_bo() argument
|  545  bo_update.base.out_fence = out_fence; in vmw_sou_plane_update_bo()
|  697  struct vmw_fence_obj **out_fence) in vmw_sou_plane_update_surface() argument
|  707  srf_update.base.out_fence = out_fence; in vmw_sou_plane_update_surface()
| 1102  struct vmw_fence_obj **out_fence, in vmw_kms_sou_do_surface_dirty() argument
| 1139  vmw_kms_helper_validation_finish(dev_priv, NULL, &val_ctx, out_fence, in vmw_kms_sou_do_surface_dirty()
| 1216  struct vmw_fence_obj **out_fence, in vmw_kms_sou_do_bo_dirty() argument
| 1247  vmw_kms_helper_validation_finish(dev_priv, NULL, &val_ctx, out_fence, in vmw_kms_sou_do_bo_dirty()
|
| vmwgfx_stdu.c |
|  730  struct vmw_fence_obj **out_fence, in vmw_kms_stdu_surface_dirty() argument
|  765  vmw_kms_helper_validation_finish(dev_priv, NULL, &val_ctx, out_fence, in vmw_kms_stdu_surface_dirty()
| 1247  struct vmw_fence_obj **out_fence) in vmw_stdu_plane_update_bo() argument
| 1257  bo_update.base.out_fence = out_fence; in vmw_stdu_plane_update_bo()
| 1357  struct vmw_fence_obj **out_fence) in vmw_stdu_plane_update_surface() argument
| 1372  srf_update.out_fence = out_fence; in vmw_stdu_plane_update_surface()
|
| vmwgfx_kms.c |
| 2419  struct vmw_fence_obj **out_fence, in vmw_kms_helper_validation_finish() argument
| 2428  out_fence) in vmw_kms_helper_validation_finish()
| 2436  if (out_fence) in vmw_kms_helper_validation_finish()
| 2437  *out_fence = fence; in vmw_kms_helper_validation_finish()
| 2643  update->out_fence, NULL); in vmw_du_helper_plane_update()
|
| vmwgfx_drv.h |
| 1002  struct vmw_fence_obj **out_fence,
|
| vmwgfx_execbuf.c |
| 4095  struct vmw_fence_obj **out_fence, uint32_t flags) in vmw_execbuf_process() argument
| 4273  if (unlikely(out_fence != NULL)) { in vmw_execbuf_process()
| 4274  *out_fence = fence; in vmw_execbuf_process()
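
Across the vmwgfx files, out_fence follows a single convention: it is an optional struct vmw_fence_obj ** output parameter, threaded from the plane/dirty helpers down to vmw_kms_helper_validation_finish() and vmw_execbuf_process(), which store the fence they created through it ("*out_fence = fence") when the pointer is non-NULL. A generic sketch of that out-parameter contract with stand-in types (not the driver's own helpers):

    /* Stand-in for the driver fence object; vmw_fence_obj is not shown here. */
    struct example_fence_obj;

    void example_fence_put(struct example_fence_obj *fence);    /* assumed helper */

    static void example_finish(struct example_fence_obj *fence,
                               struct example_fence_obj **out_fence)
    {
            /* ... the command submission that 'fence' tracks has been queued ... */

            if (out_fence)
                    *out_fence = fence;             /* the caller now owns this reference */
            else
                    example_fence_put(fence);       /* nobody asked for it */
    }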
|
| /linux/drivers/gpu/drm/i915/gem/ |
| i915_gem_execbuffer.c |
| 3235  if (!out_fence) in eb_composite_fence_create()
| 3241  return out_fence; in eb_composite_fence_create()
| 3278  if (IS_ERR(out_fence)) in eb_fences_add()
| 3282  if (!out_fence) in eb_fences_add()
| 3286  return out_fence; in eb_fences_add()
| 3319  return out_fence; in eb_requests_create()
| 3330  if (IS_ERR(out_fence)) in eb_requests_create()
| 3331  return out_fence; in eb_requests_create()
| 3349  return out_fence; in eb_requests_create()
| 3485  out_fence = NULL; in i915_gem_do_execbuffer()
|  [all …]
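
The i915 execbuffer hits follow the usual kernel tri-state convention for an optional fence producer: the helpers return ERR_PTR() on failure, NULL when no composite out-fence is needed, or a valid fence pointer otherwise, so callers test IS_ERR() before testing for NULL. A hedged sketch of the consuming side (only the return-value convention is illustrated; no real i915 function is named):

    #include <linux/err.h>
    #include <linux/dma-fence.h>

    static int example_consume_out_fence(struct dma_fence *out_fence)
    {
            if (IS_ERR(out_fence))
                    return PTR_ERR(out_fence);      /* fence creation failed */

            if (!out_fence)
                    return 0;                       /* nothing to install or export */

            /* ... install the fence into a sync_file or drm_syncobj here ... */

            dma_fence_put(out_fence);
            return 0;
    }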
|
| /linux/include/uapi/drm/ |
| vgem_drm.h |
|   49  __u32 out_fence; member
|
| /linux/include/drm/ |
| drm_writeback.h |
|  135  struct dma_fence *out_fence; member
|
| /linux/drivers/gpu/drm/imagination/ |
| pvr_ccb.c |
|  591  struct dma_fence *out_fence = NULL; in pvr_kccb_reserve_slot() local
|  602  out_fence = dma_fence_get(&fence->base); in pvr_kccb_reserve_slot()
|  611  return out_fence; in pvr_kccb_reserve_slot()
|
| /linux/drivers/gpu/drm/vgem/ |
| vgem_fence.c |
|  175  arg->out_fence = ret; in vgem_fence_attach_ioctl()
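
On the vgem side, the out_fence field in the uapi struct (vgem_drm.h above) is an output: vgem_fence_attach_ioctl() writes the newly created fence handle into it, and userspace later refers to the fence by that handle. A userspace sketch under stated assumptions (an open vgem DRM device fd, an existing GEM handle, libdrm's drmIoctl() wrapper, and kernel uapi headers installed as <drm/vgem_drm.h>):

    #include <stdint.h>
    #include <xf86drm.h>            /* drmIoctl(), from libdrm */
    #include <drm/vgem_drm.h>       /* the uapi header listed above */

    static int example_attach_write_fence(int fd, uint32_t bo_handle,
                                          uint32_t *fence_handle)
    {
            struct drm_vgem_fence_attach attach = {
                    .handle = bo_handle,
                    .flags = VGEM_FENCE_WRITE,      /* fence the write side of the BO */
            };

            if (drmIoctl(fd, DRM_IOCTL_VGEM_FENCE_ATTACH, &attach))
                    return -1;

            *fence_handle = attach.out_fence;       /* filled in by the kernel */
            return 0;
    }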
|
| /linux/Documentation/gpu/ |
| drm-vm-bind-async.rst |
|  120  complete and signal the out_fence. In particular if the operation is
|