Lines matching references to f in drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c; f is the struct dma_fence pointer threaded through the amdgpu sync-object helpers. A short annotated sketch of the surrounding code follows each group of matches.

66 struct dma_fence *f) in amdgpu_sync_same_dev() argument
68 struct drm_sched_fence *s_fence = to_drm_sched_fence(f); in amdgpu_sync_same_dev()
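
The two matches above are the head of amdgpu_sync_same_dev(), which checks whether a fence was produced by the given amdgpu device. A sketch of the whole helper, reconstructed around the matched lines; the container_of() recovery of the ring is the usual pattern and is an assumption here:

        static bool amdgpu_sync_same_dev(struct amdgpu_device *adev,
                                         struct dma_fence *f)
        {
                /* Only scheduler fences carry enough context to find a ring. */
                struct drm_sched_fence *s_fence = to_drm_sched_fence(f);

                if (s_fence) {
                        struct amdgpu_ring *ring;

                        /* The ring embeds its drm_gpu_scheduler. */
                        ring = container_of(s_fence->sched, struct amdgpu_ring,
                                            sched);
                        return ring->adev == adev;
                }

                return false;
        }
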
87 static void *amdgpu_sync_get_owner(struct dma_fence *f) in amdgpu_sync_get_owner() argument
92 if (!f) in amdgpu_sync_get_owner()
95 s_fence = to_drm_sched_fence(f); in amdgpu_sync_get_owner()
99 kfd_fence = to_amdgpu_amdkfd_fence(f); in amdgpu_sync_get_owner()
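
These matches are amdgpu_sync_get_owner(), which classifies a fence by who produced it: a NULL or foreign fence maps to an undefined owner, a scheduler fence reports the owner recorded at submission, and a KFD eviction fence is singled out. A sketch assuming the usual AMDGPU_FENCE_OWNER_* constants as return values:

        static void *amdgpu_sync_get_owner(struct dma_fence *f)
        {
                struct drm_sched_fence *s_fence;
                struct amdgpu_amdkfd_fence *kfd_fence;

                if (!f)
                        return AMDGPU_FENCE_OWNER_UNDEFINED;

                /* Scheduler fences remember their owner at submission time. */
                s_fence = to_drm_sched_fence(f);
                if (s_fence)
                        return s_fence->owner;

                /* KFD eviction fences get their own owner class. */
                kfd_fence = to_amdgpu_amdkfd_fence(f);
                if (kfd_fence)
                        return AMDGPU_FENCE_OWNER_KFD;

                return AMDGPU_FENCE_OWNER_UNDEFINED;
        }
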
133 static bool amdgpu_sync_add_later(struct amdgpu_sync *sync, struct dma_fence *f) in amdgpu_sync_add_later() argument
137 hash_for_each_possible(sync->fences, e, node, f->context) { in amdgpu_sync_add_later()
140 e->fence = dma_fence_get(f); in amdgpu_sync_add_later()
144 if (likely(e->fence->context == f->context)) { in amdgpu_sync_add_later()
145 amdgpu_sync_keep_later(&e->fence, f); in amdgpu_sync_add_later()
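
amdgpu_sync_add_later() exploits that the container hashes entries by dma_fence->context: fences on one context signal in order, so remembering only the latest fence per context is enough. A sketch of the lookup; the signaled-entry replacement branch is reconstructed and partly an assumption:

        static bool amdgpu_sync_add_later(struct amdgpu_sync *sync,
                                          struct dma_fence *f)
        {
                struct amdgpu_sync_entry *e;

                hash_for_each_possible(sync->fences, e, node, f->context) {
                        /* An already signaled entry can simply be replaced. */
                        if (dma_fence_is_signaled(e->fence)) {
                                dma_fence_put(e->fence);
                                e->fence = dma_fence_get(f);
                                return true;
                        }

                        /* Same context: keep whichever fence is later. */
                        if (likely(e->fence->context == f->context)) {
                                amdgpu_sync_keep_later(&e->fence, f);
                                return true;
                        }
                }
                return false;
        }
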
161 int amdgpu_sync_fence(struct amdgpu_sync *sync, struct dma_fence *f, in amdgpu_sync_fence() argument
166 if (!f) in amdgpu_sync_fence()
169 if (amdgpu_sync_add_later(sync, f)) in amdgpu_sync_fence()
176 hash_add(sync->fences, &e->node, f->context); in amdgpu_sync_fence()
177 e->fence = dma_fence_get(f); in amdgpu_sync_fence()
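
amdgpu_sync_fence() is the single entry point for adding a fence: NULL is a no-op, an existing entry for the same context is updated through amdgpu_sync_add_later(), and only otherwise is a new hash entry allocated. Sketch; the dedicated amdgpu_sync_slab cache is an assumption not visible in the matches:

        int amdgpu_sync_fence(struct amdgpu_sync *sync, struct dma_fence *f,
                              gfp_t flags)
        {
                struct amdgpu_sync_entry *e;

                if (!f)
                        return 0;

                /* Try to fold into an existing entry for this context first. */
                if (amdgpu_sync_add_later(sync, f))
                        return 0;

                e = kmem_cache_alloc(amdgpu_sync_slab, flags);
                if (!e)
                        return -ENOMEM;

                hash_add(sync->fences, &e->node, f->context);
                e->fence = dma_fence_get(f);
                return 0;
        }
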
184 void *owner, struct dma_fence *f) in amdgpu_sync_test_fence() argument
186 void *fence_owner = amdgpu_sync_get_owner(f); in amdgpu_sync_test_fence()
211 if (amdgpu_sync_same_dev(adev, f) && in amdgpu_sync_test_fence()
217 if (amdgpu_sync_same_dev(adev, f) && in amdgpu_sync_test_fence()
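
The matches at 211 and 217 are the owner comparisons inside amdgpu_sync_test_fence(): for fences from the same device, the sync mode decides whether matching or non-matching owners may be skipped. A sketch of just that switch; the function's earlier early-out checks are elided:

        /* Decide per sync mode whether a same-device fence can be skipped. */
        switch (mode) {
        case AMDGPU_SYNC_ALWAYS:
                return true;

        case AMDGPU_SYNC_NE_OWNER:
                /* Skip fences from the same owner on the same device. */
                if (amdgpu_sync_same_dev(adev, f) && fence_owner == owner)
                        return false;
                break;

        case AMDGPU_SYNC_EQ_OWNER:
                /* Skip fences from a different owner on the same device. */
                if (amdgpu_sync_same_dev(adev, f) && fence_owner != owner)
                        return false;
                break;

        case AMDGPU_SYNC_EXPLICIT:
                return false;
        }
        return true;
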
247 struct dma_fence *f; in amdgpu_sync_resv() local
253 dma_resv_for_each_fence(&cursor, resv, DMA_RESV_USAGE_READ, f) { in amdgpu_sync_resv()
254 dma_fence_chain_for_each(f, f) { in amdgpu_sync_resv()
255 struct dma_fence *tmp = dma_fence_chain_contained(f); in amdgpu_sync_resv()
258 r = amdgpu_sync_fence(sync, f, GFP_KERNEL); in amdgpu_sync_resv()
259 dma_fence_put(f); in amdgpu_sync_resv()
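
amdgpu_sync_resv() walks every fence of a reservation object and, through dma_fence_chain_for_each() and dma_fence_chain_contained(), descends into fence chains so each contained fence can be tested individually; the first link that needs syncing is added and the chain walk stops. Reconstruction of the loop; the amdgpu_sync_test_fence() call on tmp is implied by the declaration at 255:

        dma_resv_for_each_fence(&cursor, resv, DMA_RESV_USAGE_READ, f) {
                dma_fence_chain_for_each(f, f) {
                        struct dma_fence *tmp = dma_fence_chain_contained(f);

                        if (amdgpu_sync_test_fence(adev, mode, owner, tmp)) {
                                r = amdgpu_sync_fence(sync, f, GFP_KERNEL);
                                /* chain_for_each holds a reference on f. */
                                dma_fence_put(f);
                                if (r)
                                        return r;
                                break;
                        }
                }
        }
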
280 struct dma_fence *f; in amdgpu_sync_kfd() local
284 dma_resv_for_each_fence_unlocked(&cursor, f) { in amdgpu_sync_kfd()
285 void *fence_owner = amdgpu_sync_get_owner(f); in amdgpu_sync_kfd()
290 r = amdgpu_sync_fence(sync, f, GFP_KERNEL); in amdgpu_sync_kfd()
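
amdgpu_sync_kfd() walks a reservation object with the unlocked iterator and keeps only fences whose owner classifies as KFD. Sketch; the DMA_RESV_USAGE_BOOKKEEP usage and the dma_resv_iter_begin()/dma_resv_iter_end() bracketing required by the unlocked walk are assumptions beyond the matched lines:

        struct dma_resv_iter cursor;
        struct dma_fence *f;
        int r = 0;

        dma_resv_iter_begin(&cursor, resv, DMA_RESV_USAGE_BOOKKEEP);
        dma_resv_for_each_fence_unlocked(&cursor, f) {
                void *fence_owner = amdgpu_sync_get_owner(f);

                /* Only KFD eviction fences are of interest here. */
                if (fence_owner != AMDGPU_FENCE_OWNER_KFD)
                        continue;

                r = amdgpu_sync_fence(sync, f, GFP_KERNEL);
                if (r)
                        break;
        }
        dma_resv_iter_end(&cursor);
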
324 struct dma_fence *f = e->fence; in amdgpu_sync_peek_fence() local
325 struct drm_sched_fence *s_fence = to_drm_sched_fence(f); in amdgpu_sync_peek_fence()
327 if (dma_fence_is_signaled(f)) { in amdgpu_sync_peek_fence()
343 return f; in amdgpu_sync_peek_fence()
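
amdgpu_sync_peek_fence() garbage-collects signaled entries while it searches, and for scheduler fences running on the requested ring it returns the lighter "scheduled" fence: work on the same ring only needs its dependency to be scheduled, not finished, which keeps the pipeline full. Sketch of the loop; the entry-freeing details are assumptions:

        hash_for_each_safe(sync->fences, i, tmp, e, node) {
                struct dma_fence *f = e->fence;
                struct drm_sched_fence *s_fence = to_drm_sched_fence(f);

                /* Prune entries that already signaled. */
                if (dma_fence_is_signaled(f)) {
                        hash_del(&e->node);
                        dma_fence_put(f);
                        kmem_cache_free(amdgpu_sync_slab, e);
                        continue;
                }

                /* Same ring: being scheduled is sufficient. */
                if (ring && s_fence && s_fence->sched == &ring->sched) {
                        if (dma_fence_is_signaled(&s_fence->scheduled))
                                continue;
                        return &s_fence->scheduled;
                }

                return f;
        }
        return NULL;
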
360 struct dma_fence *f; in amdgpu_sync_get_fence() local
365 f = e->fence; in amdgpu_sync_get_fence()
370 if (!dma_fence_is_signaled(f)) in amdgpu_sync_get_fence()
371 return f; in amdgpu_sync_get_fence()
373 dma_fence_put(f); in amdgpu_sync_get_fence()
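
amdgpu_sync_get_fence() pops entries: every visited entry is unhashed and freed, the first still-unsignaled fence is returned with its reference handed to the caller, and signaled fences are dropped along the way. Sketch:

        hash_for_each_safe(sync->fences, i, tmp, e, node) {
                f = e->fence;

                hash_del(&e->node);
                kmem_cache_free(amdgpu_sync_slab, e);

                /* Transfer the entry's reference to the caller. */
                if (!dma_fence_is_signaled(f))
                        return f;

                dma_fence_put(f);
        }
        return NULL;
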
391 struct dma_fence *f; in amdgpu_sync_clone() local
395 f = e->fence; in amdgpu_sync_clone()
396 if (!dma_fence_is_signaled(f)) { in amdgpu_sync_clone()
397 r = amdgpu_sync_fence(clone, f, GFP_KERNEL); in amdgpu_sync_clone()
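
amdgpu_sync_clone() copies only unsignaled fences into the target container; the natural counterpart, pruning signaled entries from the source while walking it, is an assumption here:

        hash_for_each_safe(source->fences, i, tmp, e, node) {
                f = e->fence;
                if (!dma_fence_is_signaled(f)) {
                        r = amdgpu_sync_fence(clone, f, GFP_KERNEL);
                        if (r)
                                return r;
                } else {
                        /* Drop signaled entries from the source as we go. */
                        hash_del(&e->node);
                        dma_fence_put(f);
                        kmem_cache_free(amdgpu_sync_slab, e);
                }
        }
        return 0;
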
438 struct dma_fence *f; in amdgpu_sync_push_to_job() local
442 f = e->fence; in amdgpu_sync_push_to_job()
443 if (dma_fence_is_signaled(f)) { in amdgpu_sync_push_to_job()
448 dma_fence_get(f); in amdgpu_sync_push_to_job()
449 r = drm_sched_job_add_dependency(&job->base, f); in amdgpu_sync_push_to_job()
451 dma_fence_put(f); in amdgpu_sync_push_to_job()
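
Finally, amdgpu_sync_push_to_job() drains the container into a scheduler job: signaled entries are freed, every other fence becomes a drm_sched dependency. drm_sched_job_add_dependency() consumes the reference taken by dma_fence_get() on success, so the dma_fence_put() at 451 is the error path. Sketch of the loop:

        hash_for_each_safe(sync->fences, i, tmp, e, node) {
                f = e->fence;
                if (dma_fence_is_signaled(f)) {
                        hash_del(&e->node);
                        dma_fence_put(f);
                        kmem_cache_free(amdgpu_sync_slab, e);
                        continue;
                }

                /* On success the scheduler owns this reference. */
                dma_fence_get(f);
                r = drm_sched_job_add_dependency(&job->base, f);
                if (r) {
                        dma_fence_put(f);
                        return r;
                }
        }
        return 0;
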