Lines matching refs: cursor (all references to the iterator cursor in the dma-resv code, drivers/dma-buf/dma-resv.c)
367 static void dma_resv_iter_restart_unlocked(struct dma_resv_iter *cursor) in dma_resv_iter_restart_unlocked() argument
369 cursor->index = 0; in dma_resv_iter_restart_unlocked()
370 cursor->num_fences = 0; in dma_resv_iter_restart_unlocked()
371 cursor->fences = dma_resv_fences_list(cursor->obj); in dma_resv_iter_restart_unlocked()
372 if (cursor->fences) in dma_resv_iter_restart_unlocked()
373 cursor->num_fences = cursor->fences->num_fences; in dma_resv_iter_restart_unlocked()
374 cursor->is_restarted = true; in dma_resv_iter_restart_unlocked()
378 static void dma_resv_iter_walk_unlocked(struct dma_resv_iter *cursor) in dma_resv_iter_walk_unlocked() argument
380 if (!cursor->fences) in dma_resv_iter_walk_unlocked()
385 dma_fence_put(cursor->fence); in dma_resv_iter_walk_unlocked()
387 if (cursor->index >= cursor->num_fences) { in dma_resv_iter_walk_unlocked()
388 cursor->fence = NULL; in dma_resv_iter_walk_unlocked()
393 dma_resv_list_entry(cursor->fences, cursor->index++, in dma_resv_iter_walk_unlocked()
394 cursor->obj, &cursor->fence, in dma_resv_iter_walk_unlocked()
395 &cursor->fence_usage); in dma_resv_iter_walk_unlocked()
396 cursor->fence = dma_fence_get_rcu(cursor->fence); in dma_resv_iter_walk_unlocked()
397 if (!cursor->fence) { in dma_resv_iter_walk_unlocked()
398 dma_resv_iter_restart_unlocked(cursor); in dma_resv_iter_walk_unlocked()
402 if (!dma_fence_is_signaled(cursor->fence) && in dma_resv_iter_walk_unlocked()
403 cursor->usage >= cursor->fence_usage) in dma_resv_iter_walk_unlocked()
420 struct dma_fence *dma_resv_iter_first_unlocked(struct dma_resv_iter *cursor) in dma_resv_iter_first_unlocked() argument
424 dma_resv_iter_restart_unlocked(cursor); in dma_resv_iter_first_unlocked()
425 dma_resv_iter_walk_unlocked(cursor); in dma_resv_iter_first_unlocked()
426 } while (dma_resv_fences_list(cursor->obj) != cursor->fences); in dma_resv_iter_first_unlocked()
429 return cursor->fence; in dma_resv_iter_first_unlocked()
443 struct dma_fence *dma_resv_iter_next_unlocked(struct dma_resv_iter *cursor) in dma_resv_iter_next_unlocked() argument
448 cursor->is_restarted = false; in dma_resv_iter_next_unlocked()
449 restart = dma_resv_fences_list(cursor->obj) != cursor->fences; in dma_resv_iter_next_unlocked()
452 dma_resv_iter_restart_unlocked(cursor); in dma_resv_iter_next_unlocked()
453 dma_resv_iter_walk_unlocked(cursor); in dma_resv_iter_next_unlocked()
455 } while (dma_resv_fences_list(cursor->obj) != cursor->fences); in dma_resv_iter_next_unlocked()
458 return cursor->fence; in dma_resv_iter_next_unlocked()
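The four routines above form the lockless iteration path: dma_resv_iter_restart_unlocked() rewinds the cursor to the current fence list, dma_resv_iter_walk_unlocked() advances it while skipping signaled fences and fences above the requested usage, and dma_resv_iter_first_unlocked()/dma_resv_iter_next_unlocked() retry both steps until the fence list pointer stops changing underneath them. A minimal usage sketch of that path, assuming an already referenced reservation object named resv (a hypothetical name, not taken from the listing):

#include <linux/dma-resv.h>
#include <linux/dma-fence.h>

/* Sketch only: count the pending fences of a reservation object without
 * taking its lock.  Because the walk is lockless, the iterator may
 * restart when the fence list is replaced concurrently; anything
 * accumulated so far must then be discarded.
 */
static unsigned int count_busy_fences(struct dma_resv *resv)
{
	struct dma_resv_iter cursor;
	struct dma_fence *fence;
	unsigned int count = 0;

	dma_resv_iter_begin(&cursor, resv, DMA_RESV_USAGE_READ);
	dma_resv_for_each_fence_unlocked(&cursor, fence) {
		/* Restart means the fence list changed; drop the partial count */
		if (dma_resv_iter_is_restarted(&cursor))
			count = 0;
		++count;
	}
	dma_resv_iter_end(&cursor);

	return count;
}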
471 struct dma_fence *dma_resv_iter_first(struct dma_resv_iter *cursor) in dma_resv_iter_first() argument
475 dma_resv_assert_held(cursor->obj); in dma_resv_iter_first()
477 cursor->index = 0; in dma_resv_iter_first()
478 cursor->fences = dma_resv_fences_list(cursor->obj); in dma_resv_iter_first()
480 fence = dma_resv_iter_next(cursor); in dma_resv_iter_first()
481 cursor->is_restarted = true; in dma_resv_iter_first()
493 struct dma_fence *dma_resv_iter_next(struct dma_resv_iter *cursor) in dma_resv_iter_next() argument
497 dma_resv_assert_held(cursor->obj); in dma_resv_iter_next()
499 cursor->is_restarted = false; in dma_resv_iter_next()
502 if (!cursor->fences || in dma_resv_iter_next()
503 cursor->index >= cursor->fences->num_fences) in dma_resv_iter_next()
506 dma_resv_list_entry(cursor->fences, cursor->index++, in dma_resv_iter_next()
507 cursor->obj, &fence, &cursor->fence_usage); in dma_resv_iter_next()
508 } while (cursor->fence_usage > cursor->usage); in dma_resv_iter_next()
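dma_resv_iter_first() and dma_resv_iter_next() are the locked counterparts: with the reservation lock held the fence list cannot change, so there is no restart handling and no extra fence references are taken. A sketch of the locked loop, with hypothetical names and without a ww_acquire_ctx:

#include <linux/dma-resv.h>
#include <linux/dma-fence.h>
#include <linux/printk.h>

/* Sketch: dump context/seqno of every fence while holding the
 * reservation lock.  dma_resv_for_each_fence() wraps
 * dma_resv_iter_first()/dma_resv_iter_next().
 */
static void print_fences(struct dma_resv *resv)
{
	struct dma_resv_iter cursor;
	struct dma_fence *fence;

	if (dma_resv_lock(resv, NULL))
		return;		/* locking error handling omitted in this sketch */

	dma_resv_for_each_fence(&cursor, resv, DMA_RESV_USAGE_BOOKKEEP, fence)
		pr_info("fence %llu:%llu\n", fence->context, fence->seqno);

	dma_resv_unlock(resv);
}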
523 struct dma_resv_iter cursor; in dma_resv_copy_fences() local
531 dma_resv_iter_begin(&cursor, src, DMA_RESV_USAGE_BOOKKEEP); in dma_resv_copy_fences()
532 dma_resv_for_each_fence_unlocked(&cursor, f) { in dma_resv_copy_fences()
534 if (dma_resv_iter_is_restarted(&cursor)) { in dma_resv_copy_fences()
537 list = dma_resv_list_alloc(cursor.num_fences); in dma_resv_copy_fences()
539 dma_resv_iter_end(&cursor); in dma_resv_copy_fences()
547 dma_resv_iter_usage(&cursor)); in dma_resv_copy_fences()
549 dma_resv_iter_end(&cursor); in dma_resv_copy_fences()
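dma_resv_copy_fences() above reads the source with the unlocked iterator and reallocates the destination list whenever dma_resv_iter_is_restarted() fires, so only the destination lock is required. A sketch of a caller, with dst_resv and src_resv as made-up names:

#include <linux/dma-resv.h>

/* Sketch: share all fences of src_resv with dst_resv.  Only the
 * destination needs to be locked; the source side is read via the
 * unlocked iterator.
 */
static int share_fences(struct dma_resv *dst_resv, struct dma_resv *src_resv)
{
	int ret;

	ret = dma_resv_lock(dst_resv, NULL);
	if (ret)
		return ret;

	ret = dma_resv_copy_fences(dst_resv, src_resv);
	dma_resv_unlock(dst_resv);

	return ret;
}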
572 struct dma_resv_iter cursor; in dma_resv_get_fences() local
578 dma_resv_iter_begin(&cursor, obj, usage); in dma_resv_get_fences()
579 dma_resv_for_each_fence_unlocked(&cursor, fence) { in dma_resv_get_fences()
581 if (dma_resv_iter_is_restarted(&cursor)) { in dma_resv_get_fences()
588 count = cursor.num_fences + 1; in dma_resv_get_fences()
598 dma_resv_iter_end(&cursor); in dma_resv_get_fences()
606 dma_resv_iter_end(&cursor); in dma_resv_get_fences()
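dma_resv_get_fences() builds on the same unlocked loop to hand the caller a consistent snapshot: it (re)allocates the array on every restart and takes a reference on each fence. A sketch of the calling convention, again with hypothetical names; the caller owns both the references and the array:

#include <linux/dma-resv.h>
#include <linux/dma-fence.h>
#include <linux/slab.h>

/* Sketch: snapshot all fences, use them, then release everything. */
static int snapshot_fences(struct dma_resv *resv)
{
	struct dma_fence **fences;
	unsigned int i, count;
	int ret;

	ret = dma_resv_get_fences(resv, DMA_RESV_USAGE_BOOKKEEP,
				  &count, &fences);
	if (ret)
		return ret;

	for (i = 0; i < count; i++) {
		/* ... use fences[i] ... */
		dma_fence_put(fences[i]);
	}
	kfree(fences);

	return 0;
}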
682 struct dma_resv_iter cursor; in dma_resv_wait_timeout() local
685 dma_resv_iter_begin(&cursor, obj, usage); in dma_resv_wait_timeout()
686 dma_resv_for_each_fence_unlocked(&cursor, fence) { in dma_resv_wait_timeout()
696 dma_resv_iter_end(&cursor); in dma_resv_wait_timeout()
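dma_resv_wait_timeout() drives the same unlocked iteration to sleep on each returned fence in turn. A one-line usage sketch, assuming a 100 ms budget is acceptable; it returns the remaining timeout in jiffies, 0 on timeout, or a negative error such as -ERESTARTSYS when interrupted:

#include <linux/dma-resv.h>
#include <linux/jiffies.h>

/* Sketch: wait interruptibly until all kernel, write and read fences
 * of the (hypothetical) reservation object have signaled.
 */
static long wait_for_idle(struct dma_resv *resv)
{
	return dma_resv_wait_timeout(resv, DMA_RESV_USAGE_READ, true,
				     msecs_to_jiffies(100));
}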
714 struct dma_resv_iter cursor; in dma_resv_set_deadline() local
717 dma_resv_iter_begin(&cursor, obj, usage); in dma_resv_set_deadline()
718 dma_resv_for_each_fence_unlocked(&cursor, fence) { in dma_resv_set_deadline()
721 dma_resv_iter_end(&cursor); in dma_resv_set_deadline()
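dma_resv_set_deadline() forwards a deadline hint to every fence matched by the usage filter. A sketch of asking the signalers to finish within roughly one 60 Hz frame, e.g. ahead of a page flip; the hint is best effort and fences without a deadline callback ignore it:

#include <linux/dma-resv.h>
#include <linux/ktime.h>

/* Sketch: hint that the fences should signal within ~16 ms. */
static void request_frame_deadline(struct dma_resv *resv)
{
	dma_resv_set_deadline(resv, DMA_RESV_USAGE_READ,
			      ktime_add_ms(ktime_get(), 16));
}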
740 struct dma_resv_iter cursor; in dma_resv_test_signaled() local
743 dma_resv_iter_begin(&cursor, obj, usage); in dma_resv_test_signaled()
744 dma_resv_for_each_fence_unlocked(&cursor, fence) { in dma_resv_test_signaled()
745 dma_resv_iter_end(&cursor); in dma_resv_test_signaled()
748 dma_resv_iter_end(&cursor); in dma_resv_test_signaled()
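dma_resv_test_signaled() is the non-blocking variant: as the listing shows, it returns as soon as the unlocked iterator produces a single pending fence, calling dma_resv_iter_end() on both exits. A sketch of a busy check in the style of a GEM_BUSY ioctl, with a hypothetical object name:

#include <linux/dma-resv.h>

/* Sketch: true while any fence up to READ usage is still pending. */
static bool object_is_busy(struct dma_resv *resv)
{
	return !dma_resv_test_signaled(resv, DMA_RESV_USAGE_READ);
}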
764 struct dma_resv_iter cursor; in dma_resv_describe() local
767 dma_resv_for_each_fence(&cursor, obj, DMA_RESV_USAGE_READ, fence) { in dma_resv_describe()
769 usage[dma_resv_iter_usage(&cursor)]); in dma_resv_describe()
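dma_resv_describe() uses the locked dma_resv_for_each_fence() iterator, so its caller must hold the reservation lock. A sketch of hooking it into a debugfs show callback; everything around the dma_resv_describe() call is ordinary seq_file boilerplate, and the private-pointer wiring is an assumption:

#include <linux/dma-resv.h>
#include <linux/seq_file.h>

/* Sketch: dump a reservation object into a debugfs file. */
static int resv_debugfs_show(struct seq_file *m, void *unused)
{
	struct dma_resv *resv = m->private;	/* wiring is an assumption */
	int ret;

	ret = dma_resv_lock_interruptible(resv, NULL);
	if (ret)
		return ret;

	dma_resv_describe(resv, m);	/* locked iterator, see the lines above */
	dma_resv_unlock(resv);

	return 0;
}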