Lines matching references to identifier ev

439 struct eb_vma *ev)  in eb_pin_vma()  argument
441 struct i915_vma *vma = ev->vma; in eb_pin_vma()
451 if (unlikely(ev->flags & EXEC_OBJECT_NEEDS_GTT)) in eb_pin_vma()
467 eb_pin_flags(entry, ev->flags) | in eb_pin_vma()
473 if (unlikely(ev->flags & EXEC_OBJECT_NEEDS_FENCE)) { in eb_pin_vma()
479 ev->flags |= __EXEC_OBJECT_HAS_FENCE; in eb_pin_vma()
482 ev->flags |= __EXEC_OBJECT_HAS_PIN; in eb_pin_vma()
483 if (eb_vma_misplaced(entry, vma, ev->flags)) in eb_pin_vma()
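
Lines 439-483 above trace eb_pin_vma()'s flag protocol: a fence taken for an EXEC_OBJECT_NEEDS_FENCE entry sets __EXEC_OBJECT_HAS_FENCE, a successful pin sets __EXEC_OBJECT_HAS_PIN, the EXEC_OBJECT_NEEDS_GTT test on line 451 folds a global-GTT requirement into the pin, and a vma that pinned but landed in the wrong place (line 483) is reported as an error so the caller can unreserve and rebind it. A minimal userspace model of that protocol, with illustrative flag values and the pin/fence/placement results reduced to booleans:

#include <stdbool.h>

#define EXEC_OBJECT_NEEDS_FENCE (1u << 0)  /* uapi bit; value illustrative */
#define __EXEC_OBJECT_HAS_PIN   (1u << 30) /* internal bookkeeping; illustrative */
#define __EXEC_OBJECT_HAS_FENCE (1u << 31)

struct eb_vma_model { unsigned int flags; };

static int model_pin(struct eb_vma_model *ev,
		     bool pinned, bool fenced, bool misplaced)
{
	if (!pinned)                                /* vma could not be pinned as-is */
		return -1;

	if (ev->flags & EXEC_OBJECT_NEEDS_FENCE) {  /* line 473 */
		if (!fenced)
			return -1;
		ev->flags |= __EXEC_OBJECT_HAS_FENCE;   /* line 479 */
	}

	ev->flags |= __EXEC_OBJECT_HAS_PIN;         /* line 482 */
	return misplaced ? -1 : 0;                  /* line 483 */
}
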
490 eb_unreserve_vma(struct eb_vma *ev) in eb_unreserve_vma() argument
492 if (unlikely(ev->flags & __EXEC_OBJECT_HAS_FENCE)) in eb_unreserve_vma()
493 __i915_vma_unpin_fence(ev->vma); in eb_unreserve_vma()
495 ev->flags &= ~__EXEC_OBJECT_RESERVED; in eb_unreserve_vma()
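
eb_unreserve_vma() (lines 490-495) is the inverse: release the fence if one was taken, then clear the reservation bits in a single mask. A sketch, assuming __EXEC_OBJECT_RESERVED covers both bookkeeping bits (values illustrative):

#define __EXEC_OBJECT_HAS_PIN   (1u << 30) /* illustrative values */
#define __EXEC_OBJECT_HAS_FENCE (1u << 31)
#define __EXEC_OBJECT_RESERVED \
	(__EXEC_OBJECT_HAS_PIN | __EXEC_OBJECT_HAS_FENCE)

struct eb_vma_model { unsigned int flags; };

static unsigned int fences_released;

static void model_unreserve(struct eb_vma_model *ev)
{
	if (ev->flags & __EXEC_OBJECT_HAS_FENCE)   /* line 492 */
		fences_released++;                 /* __i915_vma_unpin_fence(ev->vma) */

	ev->flags &= ~__EXEC_OBJECT_RESERVED;      /* line 495 */
}
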
567 struct eb_vma *ev = &eb->vma[i]; in eb_add_vma() local
569 ev->vma = vma; in eb_add_vma()
570 ev->exec = entry; in eb_add_vma()
571 ev->flags = entry->flags; in eb_add_vma()
574 ev->handle = entry->handle; in eb_add_vma()
575 hlist_add_head(&ev->node, in eb_add_vma()
581 list_add_tail(&ev->reloc_link, &eb->relocs); in eb_add_vma()
594 !(ev->flags & EXEC_OBJECT_PINNED)) in eb_add_vma()
595 ev->flags |= __EXEC_OBJECT_NEEDS_BIAS; in eb_add_vma()
597 ev->flags |= EXEC_OBJECT_NEEDS_FENCE; in eb_add_vma()
599 eb->batches[*current_batch] = ev; in eb_add_vma()
601 if (unlikely(ev->flags & EXEC_OBJECT_WRITE)) { in eb_add_vma()
610 ev->vma->size)) { in eb_add_vma()
616 eb->batch_len[*current_batch] = ev->vma->size - in eb_add_vma()
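
eb_add_vma() (lines 567-616) populates the eb_vma: it caches the vma, the exec entry and its flags, hashes the handle for later eb_get_vma() lookups (line 575), and queues the entry on eb->relocs when it carries relocations (line 581). The batch buffer gets extra treatment: it is biased away from the start of the GTT unless userspace pinned it, may require a fence, is recorded in eb->batches[], and when userspace passed a zero batch length one is derived from the object size (line 616). A compact model of that batch branch (flag values and the error convention are illustrative):

#include <stdbool.h>
#include <stddef.h>

#define EXEC_OBJECT_PINNED       (1u << 0)  /* illustrative values */
#define EXEC_OBJECT_NEEDS_FENCE  (1u << 1)
#define __EXEC_OBJECT_NEEDS_BIAS (1u << 29)

struct eb_vma_model { unsigned int flags; size_t size; };

static int model_add_batch(struct eb_vma_model *ev, size_t start_offset,
			   size_t *batch_len, bool needs_fence)
{
	if (!(ev->flags & EXEC_OBJECT_PINNED))   /* lines 594-595 */
		ev->flags |= __EXEC_OBJECT_NEEDS_BIAS;
	if (needs_fence)                         /* line 597 */
		ev->flags |= EXEC_OBJECT_NEEDS_FENCE;

	if (start_offset >= ev->size)            /* lines 610 ff.: offset sanity */
		return -1;
	if (*batch_len == 0)                     /* line 616: infer from object size */
		*batch_len = ev->size - start_offset;

	return 0;
}
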
655 struct eb_vma *ev, in eb_reserve_vma() argument
658 struct drm_i915_gem_exec_object2 *entry = ev->exec; in eb_reserve_vma()
659 struct i915_vma *vma = ev->vma; in eb_reserve_vma()
663 eb_vma_misplaced(entry, vma, ev->flags)) { in eb_reserve_vma()
671 eb_pin_flags(entry, ev->flags) | pin_flags); in eb_reserve_vma()
680 if (unlikely(ev->flags & EXEC_OBJECT_NEEDS_FENCE)) { in eb_reserve_vma()
686 ev->flags |= __EXEC_OBJECT_HAS_FENCE; in eb_reserve_vma()
689 ev->flags |= __EXEC_OBJECT_HAS_PIN; in eb_reserve_vma()
690 GEM_BUG_ON(eb_vma_misplaced(entry, vma, ev->flags)); in eb_reserve_vma()
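
eb_reserve_vma() (lines 655-690) is the slow-path counterpart: the vma is pinned at a freshly chosen address with eb_pin_flags(entry, ev->flags) plus the caller's pin_flags, a fence is taken if needed, and the function ends by asserting that the entry is no longer misplaced, the GEM_BUG_ON() on line 690. A sketch of that invariant, with the pin and the misplacement predicate reduced to callbacks (flag values illustrative):

#include <assert.h>
#include <stdbool.h>

#define EXEC_OBJECT_NEEDS_FENCE (1u << 1)  /* illustrative values */
#define __EXEC_OBJECT_HAS_PIN   (1u << 30)
#define __EXEC_OBJECT_HAS_FENCE (1u << 31)

struct eb_vma_model { unsigned int flags; };

static int model_reserve(struct eb_vma_model *ev,
			 bool (*pin_at_new_addr)(struct eb_vma_model *),
			 bool (*misplaced)(const struct eb_vma_model *))
{
	if (!pin_at_new_addr(ev))                      /* i915_vma_pin with pin_flags */
		return -1;

	if (ev->flags & EXEC_OBJECT_NEEDS_FENCE)       /* line 680 */
		ev->flags |= __EXEC_OBJECT_HAS_FENCE;  /* line 686, after the fence */

	ev->flags |= __EXEC_OBJECT_HAS_PIN;            /* line 689 */
	assert(!misplaced(ev));                        /* the GEM_BUG_ON on line 690 */
	return 0;
}
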
707 struct eb_vma *ev = &eb->vma[i]; in eb_unbind() local
708 unsigned int flags = ev->flags; in eb_unbind()
715 eb_unreserve_vma(ev); in eb_unbind()
719 list_add(&ev->bind_link, &eb->unbound); in eb_unbind()
722 list_add_tail(&ev->bind_link, &eb->unbound); in eb_unbind()
725 list_add(&ev->bind_link, &last); in eb_unbind()
727 list_add_tail(&ev->bind_link, &last); in eb_unbind()
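
The four list operations on lines 719-727 implement eb_unbind()'s rebind ordering: each entry that had to be unreserved is bucketed by how hard it will be to place again. A descriptive mapping (the class names are mine, not the driver's):

enum rebind_class {
	REBIND_MUST_HAVE_SLOT, /* line 719: list_add(&ev->bind_link, &eb->unbound)      */
	REBIND_PRIORITISED,    /* line 722: list_add_tail(&ev->bind_link, &eb->unbound) */
	REBIND_CONSTRAINED,    /* line 725: list_add(&ev->bind_link, &last)             */
	REBIND_ANYWHERE,       /* line 727: list_add_tail(&ev->bind_link, &last)        */
};

The local list (last) is then appended behind eb->unbound, a splice that does not reference ev and so is absent from this listing; the effect is that the reservation walk visits the most constrained entries first.
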
736 struct eb_vma *ev; in eb_reserve() local
809 list_for_each_entry(ev, &eb->unbound, bind_link) { in eb_reserve()
810 err = eb_reserve_vma(eb, ev, pin_flags); in eb_reserve()
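
eb_reserve() (lines 736-810) then walks eb->unbound in that order, calling eb_reserve_vma() on each entry. A sketch of the retry structure, assuming pin_flags are escalated across passes (the exact escalation and eviction policy is not visible in this listing):

static int model_reserve_all(int nunbound, int npasses,
			     int (*reserve)(int idx, unsigned int pin_flags))
{
	for (int pass = 0; pass < npasses; pass++) {
		unsigned int pin_flags = (unsigned int)pass;  /* stand-in escalation */
		int err = 0;

		for (int i = 0; i < nunbound; i++) {  /* the walk on lines 809-810 */
			err = reserve(i, pin_flags);  /* eb_reserve_vma() */
			if (err)
				break;
		}
		if (!err)
			return 0;
	}
	return -1;  /* no space found on any pass */
}
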
1000 struct eb_vma *ev = &eb->vma[i]; in eb_lock_vmas() local
1001 struct i915_vma *vma = ev->vma; in eb_lock_vmas()
1024 struct eb_vma *ev = &eb->vma[i]; in eb_validate_vmas() local
1025 struct i915_vma *vma = ev->vma; in eb_validate_vmas()
1027 err = eb_pin_vma(eb, entry, ev); in eb_validate_vmas()
1037 eb_unreserve_vma(ev); in eb_validate_vmas()
1039 list_add_tail(&ev->bind_link, &eb->unbound); in eb_validate_vmas()
1053 eb_vma_misplaced(&eb->exec[i], vma, ev->flags)); in eb_validate_vmas()
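
eb_validate_vmas() (lines 1024-1053) is the fast path that feeds the above: each vma is first locked (eb_lock_vmas(), lines 1000-1001), then eb_pin_vma() is attempted; entries that cannot be pinned where they sit are unreserved and queued on eb->unbound for eb_reserve(), and line 1053 asserts that everything that did pin is correctly placed. A model of that triage loop (fatal errors such as a ww-lock deadlock would propagate instead; that handling is elided):

#include <stdbool.h>

struct eb_vma_model { bool queued; };

static void model_validate(struct eb_vma_model *evs, int count,
			   int (*try_pin)(struct eb_vma_model *),
			   void (*unreserve)(struct eb_vma_model *))
{
	for (int i = 0; i < count; i++) {
		if (try_pin(&evs[i])) {       /* eb_pin_vma(), line 1027 */
			unreserve(&evs[i]);   /* eb_unreserve_vma(), line 1037 */
			evs[i].queued = true; /* onto eb->unbound, line 1039 */
		}
	}
}
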
1071 struct eb_vma *ev; in eb_get_vma() local
1074 hlist_for_each_entry(ev, head, node) { in eb_get_vma()
1075 if (ev->handle == handle) in eb_get_vma()
1076 return ev; in eb_get_vma()
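
eb_get_vma() (lines 1071-1076) resolves an execbuf handle back to its eb_vma by hashing into the buckets filled by the hlist_add_head() on line 575 and walking the chain. An equivalent lookup over a simple chained hash (bucket count and hash are illustrative; the driver sizes its table per execbuf):

#include <stddef.h>

#define NBUCKETS 64

struct eb_vma_model {
	unsigned int handle;
	struct eb_vma_model *next;  /* stands in for the hlist node */
};

static struct eb_vma_model *
model_get_vma(struct eb_vma_model *buckets[NBUCKETS], unsigned int handle)
{
	for (struct eb_vma_model *ev = buckets[handle % NBUCKETS]; ev; ev = ev->next)
		if (ev->handle == handle)   /* line 1075 */
			return ev;
	return NULL;
}
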
1088 struct eb_vma *ev = &eb->vma[i]; in eb_release_vmas() local
1089 struct i915_vma *vma = ev->vma; in eb_release_vmas()
1094 eb_unreserve_vma(ev); in eb_release_vmas()
1423 struct eb_vma *ev, in eb_relocate_entry() argument
1478 reloc_cache_remap(&eb->reloc_cache, ev->vma->obj); in eb_relocate_entry()
1494 ev->vma->size - (eb->reloc_cache.use_64bit_reloc ? 8 : 4))) { in eb_relocate_entry()
1499 (int)ev->vma->size); in eb_relocate_entry()
1518 ev->flags &= ~EXEC_OBJECT_ASYNC; in eb_relocate_entry()
1521 return relocate_entry(ev->vma, reloc, eb, target->vma); in eb_relocate_entry()
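
In eb_relocate_entry() (lines 1423-1521), line 1494 bounds-checks the relocation: the slot being patched must lie entirely inside the object, 8 bytes for a 64-bit relocation and 4 for a 32-bit one. Line 1518 then clears EXEC_OBJECT_ASYNC, since the relocation is written with the CPU and therefore must wait for the GPU regardless of what userspace requested. A model of both steps (flag value illustrative; objects are at least a page, so the unsigned subtraction cannot underflow):

#include <stdbool.h>
#include <stdint.h>

#define EXEC_OBJECT_ASYNC (1u << 2)  /* illustrative value */

static int model_check_reloc(uint64_t offset, uint64_t vma_size,
			     bool use_64bit_reloc, unsigned int *flags)
{
	uint64_t slot = use_64bit_reloc ? 8 : 4;  /* line 1494 */

	if (offset > vma_size - slot)
		return -1;                        /* out of bounds, lines 1494-1499 */

	*flags &= ~EXEC_OBJECT_ASYNC;             /* line 1518: CPU write must wait */
	return 0;
}
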
1524 static int eb_relocate_vma(struct i915_execbuffer *eb, struct eb_vma *ev) in eb_relocate_vma() argument
1528 const struct drm_i915_gem_exec_object2 *entry = ev->exec; in eb_relocate_vma()
1568 u64 offset = eb_relocate_entry(eb, ev, r); in eb_relocate_vma()
1609 eb_relocate_vma_slow(struct i915_execbuffer *eb, struct eb_vma *ev) in eb_relocate_vma_slow() argument
1611 const struct drm_i915_gem_exec_object2 *entry = ev->exec; in eb_relocate_vma_slow()
1618 u64 offset = eb_relocate_entry(eb, ev, &relocs[i]); in eb_relocate_vma_slow()
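
eb_relocate_vma() and eb_relocate_vma_slow() (lines 1524-1618) both walk an entry's relocation array and apply eb_relocate_entry() to each element; the fast variant streams the array from the user pointer, while the slow variant iterates a copy made after a fault. The shared loop shape, with an error encoded in the returned offset as on lines 1568 and 1618:

#include <stdint.h>

struct reloc_model { uint64_t offset; };  /* stands in for the uapi reloc entry */

static int model_relocate_all(const struct reloc_model *relocs, unsigned int count,
			      int64_t (*apply)(const struct reloc_model *))
{
	for (unsigned int i = 0; i < count; i++) {
		int64_t offset = apply(&relocs[i]);  /* eb_relocate_entry() */
		if (offset < 0)
			return (int)offset;          /* error encoded in the offset */
	}
	return 0;
}
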
1765 struct eb_vma *ev = &eb->vma[i]; in eb_reinit_userptr() local
1767 if (!i915_gem_object_is_userptr(ev->vma->obj)) in eb_reinit_userptr()
1770 ret = i915_gem_object_userptr_submit_init(ev->vma->obj); in eb_reinit_userptr()
1774 ev->flags |= __EXEC_OBJECT_USERPTR_INIT; in eb_reinit_userptr()
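
eb_reinit_userptr() (lines 1765-1774) re-validates userptr backing before submission: each userptr object gets i915_gem_object_userptr_submit_init(), and the entry is tagged __EXEC_OBJECT_USERPTR_INIT so the release path knows to undo it. A sketch (flag value illustrative; the init call reduced to a callback):

#include <stdbool.h>

#define __EXEC_OBJECT_USERPTR_INIT (1u << 28)  /* illustrative value */

struct eb_vma_model { unsigned int flags; bool is_userptr; };

static int model_reinit_userptr(struct eb_vma_model *evs, int count,
				int (*submit_init)(struct eb_vma_model *))
{
	for (int i = 0; i < count; i++) {
		if (!evs[i].is_userptr)          /* line 1767 */
			continue;

		int ret = submit_init(&evs[i]);  /* submit_init, line 1770 */
		if (ret)
			return ret;

		evs[i].flags |= __EXEC_OBJECT_USERPTR_INIT;  /* line 1774 */
	}
	return 0;
}
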
1783 struct eb_vma *ev; in eb_relocate_parse_slow() local
1838 list_for_each_entry(ev, &eb->relocs, reloc_link) { in eb_relocate_parse_slow()
1840 err = eb_relocate_vma(eb, ev); in eb_relocate_parse_slow()
1844 err = eb_relocate_vma_slow(eb, ev); in eb_relocate_parse_slow()
1928 struct eb_vma *ev; in eb_relocate_parse() local
1930 list_for_each_entry(ev, &eb->relocs, reloc_link) { in eb_relocate_parse()
1931 err = eb_relocate_vma(eb, ev); in eb_relocate_parse()
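
eb_relocate_parse() (lines 1928-1931) walks eb->relocs with the fast eb_relocate_vma(); when that fails, eb_relocate_parse_slow() (lines 1783-1844) retries, using the fast walker until a stable copy of the relocations exists and eb_relocate_vma_slow() afterwards. A sketch of the two-phase structure, assuming a fault is what demotes to the slow path (the precise error policy is not visible in this listing):

#include <errno.h>

static int model_relocate_parse(int (*fast_pass)(void), int (*slow_pass)(void))
{
	int err = fast_pass();     /* eb_relocate_vma() over eb->relocs, line 1931 */
	if (err == -EFAULT)        /* assumption: a fault demotes to the slow path */
		err = slow_pass(); /* eb_relocate_parse_slow(): copy, then retry */
	return err;
}
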
2008 struct eb_vma *ev = &eb->vma[i]; in eb_capture_stage() local
2009 struct i915_vma *vma = ev->vma; in eb_capture_stage()
2010 unsigned int flags = ev->flags; in eb_capture_stage()
2100 struct eb_vma *ev = &eb->vma[i]; in eb_move_to_gpu() local
2101 struct i915_vma *vma = ev->vma; in eb_move_to_gpu()
2102 unsigned int flags = ev->flags; in eb_move_to_gpu()
2161 struct eb_vma *ev = &eb->vma[i]; in eb_move_to_gpu() local
2162 struct drm_i915_gem_object *obj = ev->vma->obj; in eb_move_to_gpu()
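
Finally, eb_move_to_gpu() (lines 2100-2162) fixes the objects into the request: unless userspace set EXEC_OBJECT_ASYNC, the request waits for prior work on each object, and every vma is moved onto the request's active list with its write status; a second loop (lines 2161-2162) revisits the userptr objects initialised earlier. A model of the first loop (flag values illustrative; the await/activate operations reduced to callbacks):

#include <stdbool.h>

#define EXEC_OBJECT_WRITE (1u << 2)  /* illustrative values */
#define EXEC_OBJECT_ASYNC (1u << 3)

struct eb_vma_model { unsigned int flags; };

static void model_move_to_gpu(struct eb_vma_model *evs, int count,
			      void (*await)(int), void (*activate)(int, bool))
{
	for (int i = 0; i < count; i++) {
		unsigned int flags = evs[i].flags;   /* line 2102 */

		if (!(flags & EXEC_OBJECT_ASYNC))
			await(i);                    /* wait for prior users */

		activate(i, !!(flags & EXEC_OBJECT_WRITE));  /* track on the request */
	}
}
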