Lines matching refs: svm_bo (amdkfd SVM range management; each entry: source line number, matched code, enclosing function)

87 if (prange->svm_bo) { in svm_range_unlink()
88 spin_lock(&prange->svm_bo->list_lock); in svm_range_unlink()
90 spin_unlock(&prange->svm_bo->list_lock); in svm_range_unlink()
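Lines 87-90 are the detach side of the range-to-BO link: svm_range_unlink() must take the shared BO's list_lock before dropping the range from range_list. A minimal sketch of the pattern; line 89 is not in the match output, so the list_del_init() call is an assumption:

	if (prange->svm_bo) {
		spin_lock(&prange->svm_bo->list_lock);
		/* line 89 (elided): assumed to unhook this range
		 * from the BO's range_list */
		list_del_init(&prange->svm_bo_list);
		spin_unlock(&prange->svm_bo->list_lock);
	}
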
174 amdgpu_ttm_adev(prange->svm_bo->bo->tbo.bdev); in svm_range_dma_map_dev()
349 static bool svm_bo_ref_unless_zero(struct svm_range_bo *svm_bo) in svm_bo_ref_unless_zero() argument
351 if (!svm_bo || !kref_get_unless_zero(&svm_bo->kref)) in svm_bo_ref_unless_zero()
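From the matched line, svm_bo_ref_unless_zero() is a thin guard around kref_get_unless_zero(): it refuses to hand out a reference once the count has already dropped to zero, which is what lets the eviction worker race safely against release. A likely reconstruction:

	static bool svm_bo_ref_unless_zero(struct svm_range_bo *svm_bo)
	{
		if (!svm_bo || !kref_get_unless_zero(&svm_bo->kref))
			return false;

		return true;
	}
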
359 struct svm_range_bo *svm_bo; in svm_range_bo_release() local
361 svm_bo = container_of(kref, struct svm_range_bo, kref); in svm_range_bo_release()
362 pr_debug("svm_bo 0x%p\n", svm_bo); in svm_range_bo_release()
364 spin_lock(&svm_bo->list_lock); in svm_range_bo_release()
365 while (!list_empty(&svm_bo->range_list)) { in svm_range_bo_release()
367 list_first_entry(&svm_bo->range_list, in svm_range_bo_release()
373 spin_unlock(&svm_bo->list_lock); in svm_range_bo_release()
378 prange->svm_bo = NULL; in svm_range_bo_release()
381 spin_lock(&svm_bo->list_lock); in svm_range_bo_release()
383 spin_unlock(&svm_bo->list_lock); in svm_range_bo_release()
384 if (!dma_fence_is_signaled(&svm_bo->eviction_fence->base)) { in svm_range_bo_release()
389 dma_fence_signal(&svm_bo->eviction_fence->base); in svm_range_bo_release()
390 cancel_work_sync(&svm_bo->eviction_work); in svm_range_bo_release()
392 dma_fence_put(&svm_bo->eviction_fence->base); in svm_range_bo_release()
393 amdgpu_bo_unref(&svm_bo->bo); in svm_range_bo_release()
394 kfree(svm_bo); in svm_range_bo_release()
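Taken together, lines 359-394 give the teardown order for the final reference: detach every range still on range_list under list_lock, signal the eviction fence if it has not fired yet, cancel any pending eviction work, then drop the fence and BO references and free the wrapper. A condensed sketch; the elided lines (the prange lookup and detach, lines 367-381) are assumptions:

	static void svm_range_bo_release(struct kref *kref)
	{
		struct svm_range_bo *svm_bo;

		svm_bo = container_of(kref, struct svm_range_bo, kref);

		spin_lock(&svm_bo->list_lock);
		while (!list_empty(&svm_bo->range_list)) {
			struct svm_range *prange =
				list_first_entry(&svm_bo->range_list,
						 struct svm_range, svm_bo_list);

			/* detach under the lock, then drop it so the range
			 * can take its own locks (assumed from 373-381) */
			list_del_init(&prange->svm_bo_list);
			spin_unlock(&svm_bo->list_lock);

			prange->svm_bo = NULL;

			spin_lock(&svm_bo->list_lock);
		}
		spin_unlock(&svm_bo->list_lock);

		if (!dma_fence_is_signaled(&svm_bo->eviction_fence->base)) {
			dma_fence_signal(&svm_bo->eviction_fence->base);
			cancel_work_sync(&svm_bo->eviction_work);
		}
		dma_fence_put(&svm_bo->eviction_fence->base);
		amdgpu_bo_unref(&svm_bo->bo);
		kfree(svm_bo);
	}
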
399 struct svm_range_bo *svm_bo; in svm_range_bo_wq_release() local
401 svm_bo = container_of(work, struct svm_range_bo, release_work); in svm_range_bo_wq_release()
402 svm_range_bo_release(&svm_bo->kref); in svm_range_bo_wq_release()
407 struct svm_range_bo *svm_bo; in svm_range_bo_release_async() local
409 svm_bo = container_of(kref, struct svm_range_bo, kref); in svm_range_bo_release_async()
410 pr_debug("svm_bo 0x%p\n", svm_bo); in svm_range_bo_release_async()
411 INIT_WORK(&svm_bo->release_work, svm_range_bo_wq_release); in svm_range_bo_release_async()
412 schedule_work(&svm_bo->release_work); in svm_range_bo_release_async()
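Lines 399-412 implement the asynchronous release variant, presumably because the synchronous release can sleep (it calls cancel_work_sync() on line 390): the kref release callback only queues release_work, and the workqueue handler re-enters svm_range_bo_release() with the reference it already owns. Reconstructed from the matches:

	static void svm_range_bo_wq_release(struct work_struct *work)
	{
		struct svm_range_bo *svm_bo;

		svm_bo = container_of(work, struct svm_range_bo, release_work);
		svm_range_bo_release(&svm_bo->kref);
	}

	static void svm_range_bo_release_async(struct kref *kref)
	{
		struct svm_range_bo *svm_bo;

		svm_bo = container_of(kref, struct svm_range_bo, kref);
		INIT_WORK(&svm_bo->release_work, svm_range_bo_wq_release);
		schedule_work(&svm_bo->release_work);
	}
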
415 void svm_range_bo_unref_async(struct svm_range_bo *svm_bo) in svm_range_bo_unref_async() argument
417 kref_put(&svm_bo->kref, svm_range_bo_release_async); in svm_range_bo_unref_async()
420 static void svm_range_bo_unref(struct svm_range_bo *svm_bo) in svm_range_bo_unref() argument
422 if (svm_bo) in svm_range_bo_unref()
423 kref_put(&svm_bo->kref, svm_range_bo_release); in svm_range_bo_unref()
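Lines 415-423 pair the two put helpers: the async one for callers that must not block, and the NULL-tolerant synchronous one for ordinary paths. These follow directly from the matched lines:

	void svm_range_bo_unref_async(struct svm_range_bo *svm_bo)
	{
		kref_put(&svm_bo->kref, svm_range_bo_release_async);
	}

	static void svm_range_bo_unref(struct svm_range_bo *svm_bo)
	{
		if (svm_bo)
			kref_put(&svm_bo->kref, svm_range_bo_release);
	}
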
432 if (!prange->svm_bo) { in svm_range_validate_svm_bo()
441 if (svm_bo_ref_unless_zero(prange->svm_bo)) { in svm_range_validate_svm_bo()
447 bo_adev = amdgpu_ttm_adev(prange->svm_bo->bo->tbo.bdev); in svm_range_validate_svm_bo()
451 spin_lock(&prange->svm_bo->list_lock); in svm_range_validate_svm_bo()
453 spin_unlock(&prange->svm_bo->list_lock); in svm_range_validate_svm_bo()
455 svm_range_bo_unref(prange->svm_bo); in svm_range_validate_svm_bo()
458 if (READ_ONCE(prange->svm_bo->evicting)) { in svm_range_validate_svm_bo()
460 struct svm_range_bo *svm_bo; in svm_range_validate_svm_bo() local
465 svm_bo = prange->svm_bo; in svm_range_validate_svm_bo()
466 f = dma_fence_get(&svm_bo->eviction_fence->base); in svm_range_validate_svm_bo()
467 svm_range_bo_unref(prange->svm_bo); in svm_range_validate_svm_bo()
481 prange->ttm_res = prange->svm_bo->bo->tbo.resource; in svm_range_validate_svm_bo()
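Lines 432-481 are the reuse-or-wait decision in svm_range_validate_svm_bo(): if the range already points at a BO whose refcount can still be raised (line 441) and the BO lives on the right device (line 447) and is not mid-eviction, the existing allocation is reused and ttm_res republished (line 481); a BO on the wrong device is simply unreferenced (line 455). If the BO is being evicted, the code pins the eviction fence, drops its BO reference, and waits before allocating afresh. A sketch of that branch; everything after line 467 is elided in the matches and assumed here:

	if (READ_ONCE(prange->svm_bo->evicting)) {
		struct dma_fence *f;
		struct svm_range_bo *svm_bo;

		svm_bo = prange->svm_bo;
		f = dma_fence_get(&svm_bo->eviction_fence->base);
		svm_range_bo_unref(prange->svm_bo);
		/* assumed: wait so the fresh allocation below does not
		 * race the eviction still in flight */
		dma_fence_wait(f, false);
		dma_fence_put(f);
	}
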
502 struct svm_range_bo *svm_bo; in svm_range_bo_new() local
504 svm_bo = kzalloc(sizeof(*svm_bo), GFP_KERNEL); in svm_range_bo_new()
505 if (!svm_bo) in svm_range_bo_new()
508 kref_init(&svm_bo->kref); in svm_range_bo_new()
509 INIT_LIST_HEAD(&svm_bo->range_list); in svm_range_bo_new()
510 spin_lock_init(&svm_bo->list_lock); in svm_range_bo_new()
512 return svm_bo; in svm_range_bo_new()
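svm_range_bo_new() (lines 502-512) is almost fully visible in the matches; only the failure return is elided. Completed, with the NULL return as the one assumption:

	static struct svm_range_bo *svm_range_bo_new(void)
	{
		struct svm_range_bo *svm_bo;

		svm_bo = kzalloc(sizeof(*svm_bo), GFP_KERNEL);
		if (!svm_bo)
			return NULL;	/* assumed failure path */

		kref_init(&svm_bo->kref);
		INIT_LIST_HEAD(&svm_bo->range_list);
		spin_lock_init(&svm_bo->list_lock);

		return svm_bo;
	}
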
520 struct svm_range_bo *svm_bo; in svm_range_vram_node_new() local
534 svm_bo = svm_range_bo_new(); in svm_range_vram_node_new()
535 if (!svm_bo) { in svm_range_vram_node_new()
542 kfree(svm_bo); in svm_range_vram_node_new()
545 svm_bo->eviction_fence = in svm_range_vram_node_new()
548 svm_bo); in svm_range_vram_node_new()
550 INIT_WORK(&svm_bo->eviction_work, svm_range_evict_svm_bo_worker); in svm_range_vram_node_new()
551 svm_bo->evicting = 0; in svm_range_vram_node_new()
589 amdgpu_bo_fence(bo, &svm_bo->eviction_fence->base, true); in svm_range_vram_node_new()
593 svm_bo->bo = bo; in svm_range_vram_node_new()
594 prange->svm_bo = svm_bo; in svm_range_vram_node_new()
598 spin_lock(&svm_bo->list_lock); in svm_range_vram_node_new()
599 list_add(&prange->svm_bo_list, &svm_bo->range_list); in svm_range_vram_node_new()
600 spin_unlock(&svm_bo->list_lock); in svm_range_vram_node_new()
607 dma_fence_put(&svm_bo->eviction_fence->base); in svm_range_vram_node_new()
608 kfree(svm_bo); in svm_range_vram_node_new()
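Lines 520-608 give the shape of svm_range_vram_node_new(): allocate the wrapper, create an eviction fence carrying a back-pointer to the svm_bo, prepare the eviction work, create and fence the amdgpu BO, then publish it on the range and link the range onto range_list; if anything fails after the fence exists, the fence reference is dropped before the wrapper is freed (lines 607-608). Note the ordering: the range joins range_list only after svm_bo->bo is valid, so svm_range_bo_release() never sees a half-initialized BO. A condensed sketch; the fence-create arguments other than svm_bo, the BO creation itself, and the label name are not in the matches and are assumptions:

	svm_bo = svm_range_bo_new();
	if (!svm_bo)
		return -ENOMEM;

	/* only the svm_bo back-pointer is visible in the matched
	 * lines; the context/mm arguments are assumptions */
	svm_bo->eviction_fence =
		amdgpu_amdkfd_fence_create(dma_fence_context_alloc(1),
					   mm, svm_bo);
	INIT_WORK(&svm_bo->eviction_work, svm_range_evict_svm_bo_worker);
	svm_bo->evicting = 0;

	/* ... amdgpu BO creation and validation elided ... */

	amdgpu_bo_fence(bo, &svm_bo->eviction_fence->base, true);

	svm_bo->bo = bo;
	prange->svm_bo = svm_bo;

	spin_lock(&svm_bo->list_lock);
	list_add(&prange->svm_bo_list, &svm_bo->range_list);
	spin_unlock(&svm_bo->list_lock);

	return 0;

err:	/* label assumed; lines 607-608 show the cleanup */
	dma_fence_put(&svm_bo->eviction_fence->base);
	kfree(svm_bo);
	return r;
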
616 svm_range_bo_unref(prange->svm_bo); in svm_range_vram_node_free()
932 new->svm_bo = svm_range_bo_ref(old->svm_bo); in svm_range_split_nodes()
935 spin_lock(&new->svm_bo->list_lock); in svm_range_split_nodes()
936 list_add(&new->svm_bo_list, &new->svm_bo->range_list); in svm_range_split_nodes()
937 spin_unlock(&new->svm_bo->list_lock); in svm_range_split_nodes()
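Splitting a range (lines 932-937) does not duplicate VRAM: the new half takes an extra reference on the parent's svm_bo and hooks itself onto the same range_list, so the BO outlives whichever half goes away first. The pattern, as matched (the ttm_res copy is an assumption):

	new->svm_bo = svm_range_bo_ref(old->svm_bo);
	new->ttm_res = old->ttm_res;	/* assumed: both halves share it */

	spin_lock(&new->svm_bo->list_lock);
	list_add(&new->svm_bo_list, &new->svm_bo->range_list);
	spin_unlock(&new->svm_bo->list_lock);
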
1162 bo_adev = amdgpu_ttm_adev(prange->svm_bo->bo->tbo.bdev); in svm_range_get_pte_flags()
1374 if (prange->svm_bo && prange->ttm_res) in svm_range_map_to_gpus()
1375 bo_adev = amdgpu_ttm_adev(prange->svm_bo->bo->tbo.bdev); in svm_range_map_to_gpus()
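Lines 1374-1375 show the defensive form of the device lookup used throughout: the backing adev is derived from the BO only when both svm_bo and ttm_res are set, i.e. when the range is actually resident in VRAM. Line 1162 performs the same lookup in svm_range_get_pte_flags, presumably on a path where residency is already established. The guarded form:

	struct amdgpu_device *bo_adev = NULL;

	/* only VRAM-resident ranges have a backing BO to ask */
	if (prange->svm_bo && prange->ttm_res)
		bo_adev = amdgpu_ttm_adev(prange->svm_bo->bo->tbo.bdev);
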
1885 if (old->svm_bo) { in svm_range_clone()
1888 new->svm_bo = svm_range_bo_ref(old->svm_bo); in svm_range_clone()
1889 spin_lock(&new->svm_bo->list_lock); in svm_range_clone()
1890 list_add(&new->svm_bo_list, &new->svm_bo->range_list); in svm_range_clone()
1891 spin_unlock(&new->svm_bo->list_lock); in svm_range_clone()
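svm_range_clone (lines 1885-1891) repeats the same reference-and-link pattern as svm_range_split_nodes above; the if (old->svm_bo) guard on line 1885 covers clones of ranges that were never resident in VRAM and so have no BO to share.
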
3312 if (fence->svm_bo) { in svm_range_schedule_evict_svm_bo()
3313 WRITE_ONCE(fence->svm_bo->evicting, 1); in svm_range_schedule_evict_svm_bo()
3314 schedule_work(&fence->svm_bo->eviction_work); in svm_range_schedule_evict_svm_bo()
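Eviction is triggered from the fence side (lines 3312-3314): when amdgpu needs the VRAM back, the eviction fence's svm_bo back-pointer lets the code mark the BO as evicting and kick the worker. The WRITE_ONCE before schedule_work() pairs with the READ_ONCE on line 458, so svm_range_validate_svm_bo() observes the flag before deciding to reuse the BO. As matched:

	if (fence->svm_bo) {
		WRITE_ONCE(fence->svm_bo->evicting, 1);
		schedule_work(&fence->svm_bo->eviction_work);
	}
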
3322 struct svm_range_bo *svm_bo; in svm_range_evict_svm_bo_worker() local
3326 svm_bo = container_of(work, struct svm_range_bo, eviction_work); in svm_range_evict_svm_bo_worker()
3327 if (!svm_bo_ref_unless_zero(svm_bo)) in svm_range_evict_svm_bo_worker()
3330 if (mmget_not_zero(svm_bo->eviction_fence->mm)) { in svm_range_evict_svm_bo_worker()
3331 mm = svm_bo->eviction_fence->mm; in svm_range_evict_svm_bo_worker()
3333 svm_range_bo_unref(svm_bo); in svm_range_evict_svm_bo_worker()
3338 spin_lock(&svm_bo->list_lock); in svm_range_evict_svm_bo_worker()
3339 while (!list_empty(&svm_bo->range_list) && !r) { in svm_range_evict_svm_bo_worker()
3341 list_first_entry(&svm_bo->range_list, in svm_range_evict_svm_bo_worker()
3346 spin_unlock(&svm_bo->list_lock); in svm_range_evict_svm_bo_worker()
3362 prange->svm_bo = NULL; in svm_range_evict_svm_bo_worker()
3367 spin_lock(&svm_bo->list_lock); in svm_range_evict_svm_bo_worker()
3369 spin_unlock(&svm_bo->list_lock); in svm_range_evict_svm_bo_worker()
3373 dma_fence_signal(&svm_bo->eviction_fence->base); in svm_range_evict_svm_bo_worker()
3378 WARN_ONCE(!r && kref_read(&svm_bo->kref) != 1, "This was not the last reference\n"); in svm_range_evict_svm_bo_worker()
3379 svm_range_bo_unref(svm_bo); in svm_range_evict_svm_bo_worker()
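The worker (lines 3322-3379) mirrors the release path but migrates instead of freeing: take a reference (bailing out if the BO already died), pin the owning mm, drain range_list under list_lock while detaching each range and, in the elided lines, migrating it back to system memory, then signal the eviction fence and drop the reference, which should be the last one, as the WARN_ONCE on line 3378 asserts. A condensed sketch; the migration call and its locking are not in the matches and are assumptions:

	static void svm_range_evict_svm_bo_worker(struct work_struct *work)
	{
		struct svm_range_bo *svm_bo;
		struct mm_struct *mm;
		int r = 0;

		svm_bo = container_of(work, struct svm_range_bo, eviction_work);
		if (!svm_bo_ref_unless_zero(svm_bo))
			return;		/* release already ran */

		if (mmget_not_zero(svm_bo->eviction_fence->mm)) {
			mm = svm_bo->eviction_fence->mm;
		} else {
			svm_range_bo_unref(svm_bo);
			return;
		}

		spin_lock(&svm_bo->list_lock);
		while (!list_empty(&svm_bo->range_list) && !r) {
			struct svm_range *prange =
				list_first_entry(&svm_bo->range_list,
						 struct svm_range, svm_bo_list);

			list_del_init(&prange->svm_bo_list);
			spin_unlock(&svm_bo->list_lock);

			/* elided: migrate prange back to system memory,
			 * setting r on failure (assumed) */
			prange->svm_bo = NULL;

			spin_lock(&svm_bo->list_lock);
		}
		spin_unlock(&svm_bo->list_lock);

		mmput(mm);
		dma_fence_signal(&svm_bo->eviction_fence->base);

		WARN_ONCE(!r && kref_read(&svm_bo->kref) != 1,
			  "This was not the last reference\n");
		svm_range_bo_unref(svm_bo);
	}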