Lines matching refs:range in drivers/gpu/drm/drm_gpusvm.c

552 struct drm_gpusvm_range *range, *__next;  in drm_gpusvm_fini()  local
560 drm_gpusvm_for_each_range_safe(range, __next, notifier, 0, in drm_gpusvm_fini()
562 drm_gpusvm_range_remove(gpusvm, range); in drm_gpusvm_fini()
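
The drm_gpusvm_fini() hits above (552-562) form a teardown loop. A minimal sketch reconstructed from those lines; the _safe iterator variant is required because drm_gpusvm_range_remove() unlinks the node the loop is standing on. The upper bound is truncated in the hit at 560, so LONG_MAX here is an assumption covering the whole address space:

        struct drm_gpusvm_range *range, *__next;

        /* __next is cached each iteration, so removing range is safe. */
        drm_gpusvm_for_each_range_safe(range, __next, notifier, 0, LONG_MAX)
                drm_gpusvm_range_remove(gpusvm, range);
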
639 struct drm_gpusvm_range *range) in drm_gpusvm_range_insert() argument
645 interval_tree_insert(&range->itree, &notifier->root); in drm_gpusvm_range_insert()
647 node = rb_prev(&range->itree.rb); in drm_gpusvm_range_insert()
653 list_add(&range->entry, head); in drm_gpusvm_range_insert()
665 struct drm_gpusvm_range *range) in __drm_gpusvm_range_remove() argument
667 interval_tree_remove(&range->itree, &notifier->root); in __drm_gpusvm_range_remove()
668 list_del(&range->entry); in __drm_gpusvm_range_remove()
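
Lines 645-653 pair an interval-tree insert with an address-ordered list: rb_prev() fetches the tree predecessor so the new range can be spliced in directly behind it, keeping the list sorted without a walk. Sketch of the insert side (the notifier->range_list fallback for the no-predecessor case is an assumption; the remove side at 667-668 undoes both structures):

        struct rb_node *node;
        struct list_head *head;

        interval_tree_insert(&range->itree, &notifier->root);

        node = rb_prev(&range->itree.rb);
        if (node)
                /* Splice in right behind the tree predecessor. */
                head = &container_of(node, struct drm_gpusvm_range,
                                     itree.rb)->entry;
        else
                /* No predecessor: assumed list head on the notifier. */
                head = &notifier->range_list;

        list_add(&range->entry, head);
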
689 struct drm_gpusvm_range *range; in drm_gpusvm_range_alloc() local
692 range = gpusvm->ops->range_alloc(gpusvm); in drm_gpusvm_range_alloc()
694 range = kzalloc(sizeof(*range), GFP_KERNEL); in drm_gpusvm_range_alloc()
696 if (!range) in drm_gpusvm_range_alloc()
699 kref_init(&range->refcount); in drm_gpusvm_range_alloc()
700 range->gpusvm = gpusvm; in drm_gpusvm_range_alloc()
701 range->notifier = notifier; in drm_gpusvm_range_alloc()
702 range->itree.start = ALIGN_DOWN(fault_addr, chunk_size); in drm_gpusvm_range_alloc()
703 range->itree.last = ALIGN(fault_addr + 1, chunk_size) - 1; in drm_gpusvm_range_alloc()
704 INIT_LIST_HEAD(&range->entry); in drm_gpusvm_range_alloc()
705 range->notifier_seq = LONG_MAX; in drm_gpusvm_range_alloc()
706 range->flags.migrate_devmem = migrate_devmem ? 1 : 0; in drm_gpusvm_range_alloc()
708 return range; in drm_gpusvm_range_alloc()
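
The span computation at 702-703 widens one faulting address to a chunk-aligned, inclusive-ended interval. Worked example with fault_addr = 0x21234 and chunk_size = SZ_64K:

        start = ALIGN_DOWN(0x21234, SZ_64K);        /* 0x20000 */
        last  = ALIGN(0x21234 + 1, SZ_64K) - 1;     /* 0x2ffff, inclusive */

Initializing notifier_seq to LONG_MAX at 705 presumably acts as a "never validated" sentinel, so the first sequence check cannot spuriously succeed.
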
825 struct drm_gpusvm_range *range; in drm_gpusvm_range_chunk_size() local
827 range = drm_gpusvm_range_find(notifier, start, end); in drm_gpusvm_range_chunk_size()
828 if (range) { in drm_gpusvm_range_chunk_size()
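
The lookup at 827 is a collision check while sizing a new chunk: a candidate span that already intersects a tracked range cannot be used at that granularity. A hypothetical sketch of the probe loop (the descending chunk_sizes[] array and the fall-through-to-smaller-chunk behaviour are assumptions, not taken from the hits):

        unsigned long i, start, end;

        for (i = 0; i < num_chunks; ++i) {
                start = ALIGN_DOWN(fault_addr, chunk_sizes[i]);
                end = ALIGN(fault_addr + 1, chunk_sizes[i]);
                /* Reject candidates overlapping an existing range. */
                if (!drm_gpusvm_range_find(notifier, start, end))
                        return chunk_sizes[i];
        }
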
929 struct drm_gpusvm_range *range; in drm_gpusvm_range_find_or_insert() local
976 range = drm_gpusvm_range_find(notifier, fault_addr, fault_addr + 1); in drm_gpusvm_range_find_or_insert()
977 if (range) in drm_gpusvm_range_find_or_insert()
996 range = drm_gpusvm_range_alloc(gpusvm, notifier, fault_addr, chunk_size, in drm_gpusvm_range_find_or_insert()
998 if (IS_ERR(range)) { in drm_gpusvm_range_find_or_insert()
999 err = PTR_ERR(range); in drm_gpusvm_range_find_or_insert()
1003 drm_gpusvm_range_insert(notifier, range); in drm_gpusvm_range_find_or_insert()
1011 return range; in drm_gpusvm_range_find_or_insert()
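
Lines 976-1003 are a find-or-create: the one-byte query [fault_addr, fault_addr + 1) hits any range already containing the address, and only on a miss is a new range allocated and inserted. Simplified flow, with locking and the error unwind behind line 999 elided (the final migrate_devmem argument is assumed from its use at 706):

        range = drm_gpusvm_range_find(notifier, fault_addr, fault_addr + 1);
        if (range)
                goto out;       /* already tracked, reuse it */

        range = drm_gpusvm_range_alloc(gpusvm, notifier, fault_addr,
                                       chunk_size, migrate_devmem);
        if (IS_ERR(range))
                return range;   /* caller unwinds via PTR_ERR() */

        drm_gpusvm_range_insert(notifier, range);
out:
        return range;
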
1036 struct drm_gpusvm_range *range, in __drm_gpusvm_range_unmap_pages() argument
1040 struct drm_pagemap *dpagemap = range->dpagemap; in __drm_gpusvm_range_unmap_pages()
1045 if (range->flags.has_dma_mapping) { in __drm_gpusvm_range_unmap_pages()
1047 .__flags = range->flags.__flags, in __drm_gpusvm_range_unmap_pages()
1051 struct drm_pagemap_device_addr *addr = &range->dma_addr[j]; in __drm_gpusvm_range_unmap_pages()
1067 WRITE_ONCE(range->flags.__flags, flags.__flags); in __drm_gpusvm_range_unmap_pages()
1069 range->dpagemap = NULL; in __drm_gpusvm_range_unmap_pages()
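
The loop behind 1040-1067 walks dma_addr[] in physical-size steps (one entry may cover a huge page) and clears the mapping state last, using WRITE_ONCE() so lockless readers see a coherent flags word. Condensed sketch; the DRM_INTERCONNECT_SYSTEM test separating system-DMA entries from device-private ones follows the drm_pagemap types, and the devmem unmap path is elided:

        unsigned long i, j;
        struct drm_gpusvm_range_flags flags = {
                .__flags = range->flags.__flags,        /* snapshot */
        };

        for (i = 0, j = 0; i < npages; j++) {
                struct drm_pagemap_device_addr *addr = &range->dma_addr[j];

                if (addr->proto == DRM_INTERCONNECT_SYSTEM)
                        dma_unmap_page(gpusvm->drm->dev, addr->addr,
                                       PAGE_SIZE << addr->order, addr->dir);
                i += 1 << addr->order;  /* skip pages this entry covers */
        }

        flags.has_devmem_pages = false;
        flags.has_dma_mapping = false;
        WRITE_ONCE(range->flags.__flags, flags.__flags); /* publish once */
        range->dpagemap = NULL;

The public wrapper at 1455-1463 recomputes npages and calls this under the notifier lock.
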
1081 struct drm_gpusvm_range *range) in drm_gpusvm_range_free_pages() argument
1085 if (range->dma_addr) { in drm_gpusvm_range_free_pages()
1086 kvfree(range->dma_addr); in drm_gpusvm_range_free_pages()
1087 range->dma_addr = NULL; in drm_gpusvm_range_free_pages()
1101 struct drm_gpusvm_range *range) in drm_gpusvm_range_remove() argument
1103 unsigned long npages = npages_in_range(drm_gpusvm_range_start(range), in drm_gpusvm_range_remove()
1104 drm_gpusvm_range_end(range)); in drm_gpusvm_range_remove()
1110 drm_gpusvm_range_start(range)); in drm_gpusvm_range_remove()
1115 __drm_gpusvm_range_unmap_pages(gpusvm, range, npages); in drm_gpusvm_range_remove()
1116 drm_gpusvm_range_free_pages(gpusvm, range); in drm_gpusvm_range_remove()
1117 __drm_gpusvm_range_remove(notifier, range); in drm_gpusvm_range_remove()
1120 drm_gpusvm_range_put(range); in drm_gpusvm_range_remove()
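
Lines 1103-1120 order the teardown: npages is computed while the range geometry is still valid, the owning notifier is looked up by start address (the call truncated at 1110), and unmap, array free, and unlink all happen under the notifier lock before the final reference drop. Sketch (drm_gpusvm_notifier_find() as the name of that lookup, and the drm_gpusvm_notifier_lock()/_unlock() pair, are assumptions; WARN/error handling elided):

        unsigned long npages = npages_in_range(drm_gpusvm_range_start(range),
                                               drm_gpusvm_range_end(range));

        notifier = drm_gpusvm_notifier_find(gpusvm,
                                            drm_gpusvm_range_start(range));

        drm_gpusvm_notifier_lock(gpusvm);
        __drm_gpusvm_range_unmap_pages(gpusvm, range, npages); /* DMA first */
        drm_gpusvm_range_free_pages(gpusvm, range);            /* then array */
        __drm_gpusvm_range_remove(notifier, range);            /* then unlink */
        drm_gpusvm_notifier_unlock(gpusvm);

        drm_gpusvm_range_put(range);    /* drop the insertion reference */
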
1140 drm_gpusvm_range_get(struct drm_gpusvm_range *range) in drm_gpusvm_range_get() argument
1142 kref_get(&range->refcount); in drm_gpusvm_range_get()
1144 return range; in drm_gpusvm_range_get()
1158 struct drm_gpusvm_range *range = in drm_gpusvm_range_destroy() local
1160 struct drm_gpusvm *gpusvm = range->gpusvm; in drm_gpusvm_range_destroy()
1163 gpusvm->ops->range_free(range); in drm_gpusvm_range_destroy()
1165 kfree(range); in drm_gpusvm_range_destroy()
1175 void drm_gpusvm_range_put(struct drm_gpusvm_range *range) in drm_gpusvm_range_put() argument
1177 kref_put(&range->refcount, drm_gpusvm_range_destroy); in drm_gpusvm_range_put()
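
Lines 1140-1177 are a standard kref lifecycle, mirroring allocation: the driver's range_alloc() hook at 692 pairs with range_free() at 1163, and kzalloc() at 694 pairs with kfree() at 1165. Reconstructed from the hits:

        struct drm_gpusvm_range *
        drm_gpusvm_range_get(struct drm_gpusvm_range *range)
        {
                kref_get(&range->refcount);
                return range;           /* convenient for call chaining */
        }

        static void drm_gpusvm_range_destroy(struct kref *refcount)
        {
                struct drm_gpusvm_range *range =
                        container_of(refcount, struct drm_gpusvm_range,
                                     refcount);
                struct drm_gpusvm *gpusvm = range->gpusvm;

                if (gpusvm->ops->range_free)
                        gpusvm->ops->range_free(range); /* driver storage */
                else
                        kfree(range);
        }

        void drm_gpusvm_range_put(struct drm_gpusvm_range *range)
        {
                kref_put(&range->refcount, drm_gpusvm_range_destroy);
        }
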
1196 struct drm_gpusvm_range *range) in drm_gpusvm_range_pages_valid() argument
1200 return range->flags.has_devmem_pages || range->flags.has_dma_mapping; in drm_gpusvm_range_pages_valid()
1216 struct drm_gpusvm_range *range) in drm_gpusvm_range_pages_valid_unlocked() argument
1220 if (!range->dma_addr) in drm_gpusvm_range_pages_valid_unlocked()
1224 pages_valid = drm_gpusvm_range_pages_valid(gpusvm, range); in drm_gpusvm_range_pages_valid_unlocked()
1226 drm_gpusvm_range_free_pages(gpusvm, range); in drm_gpusvm_range_pages_valid_unlocked()
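
The _unlocked variant at 1216-1226 adds an opportunistic fast path around the locked check at 1196-1200: a NULL dma_addr needs no lock at all, and a mapping found invalid under the lock is freed on the spot rather than lingering. Sketch (drm_gpusvm_notifier_lock()/_unlock() as the lock helpers is an assumption):

        bool pages_valid;

        if (!range->dma_addr)
                return false;           /* nothing was ever mapped */

        drm_gpusvm_notifier_lock(gpusvm);
        pages_valid = drm_gpusvm_range_pages_valid(gpusvm, range);
        if (!pages_valid)
                drm_gpusvm_range_free_pages(gpusvm, range); /* reclaim now */
        drm_gpusvm_notifier_unlock(gpusvm);

        return pages_valid;
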
1244 struct drm_gpusvm_range *range, in drm_gpusvm_range_get_pages() argument
1247 struct mmu_interval_notifier *notifier = &range->notifier->notifier; in drm_gpusvm_range_get_pages()
1252 .start = drm_gpusvm_range_start(range), in drm_gpusvm_range_get_pages()
1253 .end = drm_gpusvm_range_end(range), in drm_gpusvm_range_get_pages()
1261 unsigned long npages = npages_in_range(drm_gpusvm_range_start(range), in drm_gpusvm_range_get_pages()
1262 drm_gpusvm_range_end(range)); in drm_gpusvm_range_get_pages()
1273 if (drm_gpusvm_range_pages_valid_unlocked(gpusvm, range)) in drm_gpusvm_range_get_pages()
1313 flags.__flags = range->flags.__flags; in drm_gpusvm_range_get_pages()
1326 if (!range->dma_addr) { in drm_gpusvm_range_get_pages()
1329 range->dma_addr = kvmalloc_array(npages, in drm_gpusvm_range_get_pages()
1330 sizeof(*range->dma_addr), in drm_gpusvm_range_get_pages()
1332 if (!range->dma_addr) { in drm_gpusvm_range_get_pages()
1371 range->dma_addr[j] = in drm_gpusvm_range_get_pages()
1377 range->dma_addr[j].addr)) { in drm_gpusvm_range_get_pages()
1403 range->dma_addr[j] = drm_pagemap_device_addr_encode in drm_gpusvm_range_get_pages()
1414 range->dpagemap = dpagemap; in drm_gpusvm_range_get_pages()
1418 WRITE_ONCE(range->flags.__flags, flags.__flags); in drm_gpusvm_range_get_pages()
1423 range->notifier_seq = hmm_range.notifier_seq; in drm_gpusvm_range_get_pages()
1428 __drm_gpusvm_range_unmap_pages(gpusvm, range, num_dma_mapped); in drm_gpusvm_range_get_pages()
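
Lines 1247-1428 follow the canonical hmm_range_fault() pattern: sample the notifier sequence, fault the pages, take the notifier lock, and retry if the interval was invalidated in between; dma_addr[] is allocated lazily (1326-1332) and a partially built mapping is rolled back on error (1428). Heavily condensed retry skeleton (mmap locking, the timeout, and the mapping loop elided):

retry:
        hmm_range.notifier_seq = mmu_interval_read_begin(notifier);

        err = hmm_range_fault(&hmm_range);
        if (err == -EBUSY)
                goto retry;     /* the real loop bounds this with a timeout */
        if (err)
                return err;

        drm_gpusvm_notifier_lock(gpusvm);
        if (mmu_interval_read_retry(notifier, hmm_range.notifier_seq)) {
                drm_gpusvm_notifier_unlock(gpusvm);
                goto retry;     /* invalidated under us: redo the fault */
        }

        /* ... fill range->dma_addr[], then publish the new state ... */
        WRITE_ONCE(range->flags.__flags, flags.__flags);
        range->notifier_seq = hmm_range.notifier_seq;
        drm_gpusvm_notifier_unlock(gpusvm);
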
1452 struct drm_gpusvm_range *range, in drm_gpusvm_range_unmap_pages() argument
1455 unsigned long npages = npages_in_range(drm_gpusvm_range_start(range), in drm_gpusvm_range_unmap_pages()
1456 drm_gpusvm_range_end(range)); in drm_gpusvm_range_unmap_pages()
1463 __drm_gpusvm_range_unmap_pages(gpusvm, range, npages); in drm_gpusvm_range_unmap_pages()
1480 struct drm_gpusvm_range *range) in drm_gpusvm_range_evict() argument
1482 struct mmu_interval_notifier *notifier = &range->notifier->notifier; in drm_gpusvm_range_evict()
1486 .start = drm_gpusvm_range_start(range), in drm_gpusvm_range_evict()
1487 .end = drm_gpusvm_range_end(range), in drm_gpusvm_range_evict()
1493 unsigned long npages = npages_in_range(drm_gpusvm_range_start(range), in drm_gpusvm_range_evict()
1494 drm_gpusvm_range_end(range)); in drm_gpusvm_range_evict()
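
Eviction at 1480-1494 reuses the same hmm_range machinery: faulting every page back on the CPU side is what pulls the data out of device memory. The setup implied by the hits; taking HMM_PFN_REQ_FAULT as the default flag is an assumption about how the fault-back is forced:

        struct hmm_range hmm_range = {
                .default_flags = HMM_PFN_REQ_FAULT, /* fault each page */
                .notifier = notifier,
                .start = drm_gpusvm_range_start(range),
                .end = drm_gpusvm_range_end(range),
        };
        unsigned long npages = npages_in_range(hmm_range.start,
                                               hmm_range.end);
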
1541 struct drm_gpusvm_range *range = NULL; in drm_gpusvm_has_mapping() local
1543 drm_gpusvm_for_each_range(range, notifier, start, end) in drm_gpusvm_has_mapping()
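
drm_gpusvm_has_mapping() reduces the intersection query to "did the iterator yield anything": the loop body at 1543 is a bare return (the outer per-notifier loop is not in these hits and is elided here):

        drm_gpusvm_for_each_range(range, notifier, start, end)
                return true;    /* first intersecting range is proof */

        return false;
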
1559 void drm_gpusvm_range_set_unmapped(struct drm_gpusvm_range *range, in drm_gpusvm_range_set_unmapped() argument
1562 lockdep_assert_held_write(&range->gpusvm->notifier_lock); in drm_gpusvm_range_set_unmapped()
1564 range->flags.unmapped = true; in drm_gpusvm_range_set_unmapped()
1565 if (drm_gpusvm_range_start(range) < mmu_range->start || in drm_gpusvm_range_set_unmapped()
1566 drm_gpusvm_range_end(range) > mmu_range->end) in drm_gpusvm_range_set_unmapped()
1567 range->flags.partial_unmap = true; in drm_gpusvm_range_set_unmapped()
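
The test at 1565-1567 marks a partial unmap whenever the range sticks out of the invalidated span on either side; both flags are written with the notifier lock held for write (assert at 1562). Worked example, annotated on the check itself:

        /* range [0x20000, 0x30000), invalidation [0x24000, 0x28000) */
        range->flags.unmapped = true;
        if (drm_gpusvm_range_start(range) < mmu_range->start || /* 0x20000 < 0x24000 */
            drm_gpusvm_range_end(range) > mmu_range->end)       /* 0x30000 > 0x28000 */
                range->flags.partial_unmap = true;

A set partial_unmap tells the driver that only part of the span was torn down, so the rest of the range cannot simply be treated as gone.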