Lines matching refs:bo_va — cross-references to the bo_va identifier in drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c. Each hit shows the source line number, the matching code, and the enclosing function; "local" and "argument" mark declaration sites.

924 struct amdgpu_bo_va *bo_va, *tmp; in amdgpu_vm_get_memory() local
927 list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status) { in amdgpu_vm_get_memory()
928 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
930 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
933 list_for_each_entry_safe(bo_va, tmp, &vm->evicted, base.vm_status) { in amdgpu_vm_get_memory()
934 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
936 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
939 list_for_each_entry_safe(bo_va, tmp, &vm->relocated, base.vm_status) { in amdgpu_vm_get_memory()
940 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
942 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
945 list_for_each_entry_safe(bo_va, tmp, &vm->moved, base.vm_status) { in amdgpu_vm_get_memory()
946 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
948 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
951 list_for_each_entry_safe(bo_va, tmp, &vm->invalidated, base.vm_status) { in amdgpu_vm_get_memory()
952 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
954 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
957 list_for_each_entry_safe(bo_va, tmp, &vm->done, base.vm_status) { in amdgpu_vm_get_memory()
958 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
960 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
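
Every loop in amdgpu_vm_get_memory() has the same shape: walk one of the per-VM state lists (idle, evicted, relocated, moved, invalidated, done) through the base.vm_status link, skip any bo_va without a backing BO, and let amdgpu_bo_get_memory() add that BO's footprint to the running totals. A condensed sketch of the shared pattern; the helper name is hypothetical, and the trailing gtt_mem/cpu_mem arguments (truncated in the hits above) are assumed:

```c
/* Hypothetical helper: the walk repeated for vm->idle, vm->evicted,
 * vm->relocated, vm->moved, vm->invalidated and vm->done. */
static void vm_sum_list(struct list_head *head, uint64_t *vram_mem,
			uint64_t *gtt_mem, uint64_t *cpu_mem)
{
	struct amdgpu_bo_va *bo_va, *tmp;

	list_for_each_entry_safe(bo_va, tmp, head, base.vm_status) {
		if (!bo_va->base.bo)	/* bo_va without a backing BO */
			continue;
		amdgpu_bo_get_memory(bo_va->base.bo, vram_mem,
				     gtt_mem, cpu_mem);
	}
}
```
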
977 int amdgpu_vm_bo_update(struct amdgpu_device *adev, struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_update() argument
980 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_update()
981 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_update()
999 if (obj->import_attach && bo_va->is_xgmi) { in amdgpu_vm_bo_update()
1032 last_update = &bo_va->last_pt_update; in amdgpu_vm_bo_update()
1034 if (!clear && bo_va->base.moved) { in amdgpu_vm_bo_update()
1036 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1038 } else if (bo_va->cleared != clear) { in amdgpu_vm_bo_update()
1039 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1042 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_update()
1076 amdgpu_vm_bo_evicted(&bo_va->base); in amdgpu_vm_bo_update()
1078 amdgpu_vm_bo_idle(&bo_va->base); in amdgpu_vm_bo_update()
1080 amdgpu_vm_bo_done(&bo_va->base); in amdgpu_vm_bo_update()
1083 list_splice_init(&bo_va->invalids, &bo_va->valids); in amdgpu_vm_bo_update()
1084 bo_va->cleared = clear; in amdgpu_vm_bo_update()
1085 bo_va->base.moved = false; in amdgpu_vm_bo_update()
1088 list_for_each_entry(mapping, &bo_va->valids, list) in amdgpu_vm_bo_update()
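
Read together, the amdgpu_vm_bo_update() hits form a small state machine over the valids/invalids lists: a BO move or a change of the "cleared" state dumps every mapping back onto invalids, the invalids are rewritten into the page tables, and on success everything is spliced back to valids while the flags are reset. A sketch of that bookkeeping, with the page-table writes between the hits at 1042 and 1076 elided:

```c
/* Sketch of the list bookkeeping only; error paths and the actual
 * page-table updates are elided. */
if (!clear && bo_va->base.moved) {
	/* the BO moved: every previously valid mapping is stale */
	list_splice_init(&bo_va->valids, &bo_va->invalids);
} else if (bo_va->cleared != clear) {
	/* switching between cleared and mapped state: same thing */
	list_splice_init(&bo_va->valids, &bo_va->invalids);
}

list_for_each_entry(mapping, &bo_va->invalids, list) {
	/* (re)write this mapping's range into the page tables */
}

/* everything on invalids was just written, so it is valid again */
list_splice_init(&bo_va->invalids, &bo_va->valids);
bo_va->cleared = clear;
bo_va->base.moved = false;
```

The hits at 1076-1080 then file the bo_va back onto the state list matching where its BO now lives (evicted, idle or done); the selecting conditions are not visible in the listing.
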
1292 struct amdgpu_bo_va *bo_va; in amdgpu_vm_handle_moved() local
1299 bo_va = list_first_entry(&vm->moved, struct amdgpu_bo_va, in amdgpu_vm_handle_moved()
1304 r = amdgpu_vm_bo_update(adev, bo_va, false); in amdgpu_vm_handle_moved()
1311 bo_va = list_first_entry(&vm->invalidated, struct amdgpu_bo_va, in amdgpu_vm_handle_moved()
1313 resv = bo_va->base.bo->tbo.base.resv; in amdgpu_vm_handle_moved()
1323 r = amdgpu_vm_bo_update(adev, bo_va, clear); in amdgpu_vm_handle_moved()
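
amdgpu_vm_handle_moved() drains two lists with different locking requirements: bo_vas on vm->moved belong to per-VM BOs that share the root reservation and are updated with clear = false (the hit at 1304), while bo_vas on vm->invalidated carry their own reservation object (1313) that must be taken before their page tables can be rewritten. A sketch under those assumptions; the trylock-or-clear fallback is not visible in the hits and is an assumption:

```c
/* Sketch; per-list locking is elided. */
while (!list_empty(&vm->moved)) {
	bo_va = list_first_entry(&vm->moved, struct amdgpu_bo_va,
				 base.vm_status);
	/* per-VM BO: shares the root PD reservation, never cleared */
	r = amdgpu_vm_bo_update(adev, bo_va, false);
	if (r)
		return r;
}

while (!list_empty(&vm->invalidated)) {
	bo_va = list_first_entry(&vm->invalidated, struct amdgpu_bo_va,
				 base.vm_status);
	resv = bo_va->base.bo->tbo.base.resv;

	/* assumption: fall back to clearing the mappings when the
	 * BO's reservation cannot be taken right now */
	clear = !dma_resv_trylock(resv);

	r = amdgpu_vm_bo_update(adev, bo_va, clear);
	if (!clear)
		dma_resv_unlock(resv);
	if (r)
		return r;
}
```
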
1355 struct amdgpu_bo_va *bo_va; in amdgpu_vm_bo_add() local
1357 bo_va = kzalloc(sizeof(struct amdgpu_bo_va), GFP_KERNEL); in amdgpu_vm_bo_add()
1358 if (bo_va == NULL) { in amdgpu_vm_bo_add()
1361 amdgpu_vm_bo_base_init(&bo_va->base, vm, bo); in amdgpu_vm_bo_add()
1363 bo_va->ref_count = 1; in amdgpu_vm_bo_add()
1364 INIT_LIST_HEAD(&bo_va->valids); in amdgpu_vm_bo_add()
1365 INIT_LIST_HEAD(&bo_va->invalids); in amdgpu_vm_bo_add()
1368 return bo_va; in amdgpu_vm_bo_add()
1372 bo_va->is_xgmi = true; in amdgpu_vm_bo_add()
1377 return bo_va; in amdgpu_vm_bo_add()
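
The amdgpu_vm_bo_add() hits reconstruct almost the whole constructor: allocate a zeroed bo_va, tie it to the VM and BO via amdgpu_vm_bo_base_init(), start the refcount at one, and initialize the two mapping lists. The gaps before the returns at 1368 and 1372 are filled in below as labeled assumptions:

```c
/* Sketch assembled from the hits above; the early return before 1368
 * and the XGMI test before 1372 are assumptions. */
struct amdgpu_bo_va *amdgpu_vm_bo_add(struct amdgpu_device *adev,
				      struct amdgpu_vm *vm,
				      struct amdgpu_bo *bo)
{
	struct amdgpu_bo_va *bo_va;

	bo_va = kzalloc(sizeof(struct amdgpu_bo_va), GFP_KERNEL);
	if (bo_va == NULL)
		return NULL;

	amdgpu_vm_bo_base_init(&bo_va->base, vm, bo);

	bo_va->ref_count = 1;
	INIT_LIST_HEAD(&bo_va->valids);
	INIT_LIST_HEAD(&bo_va->invalids);

	if (!bo)			/* assumed: mapping without a BO */
		return bo_va;		/* the return at 1368 */

	if (amdgpu_dmabuf_is_xgmi_accessible(adev, bo))	/* assumed test */
		bo_va->is_xgmi = true;

	return bo_va;
}
```
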
1391 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_insert_map() argument
1394 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_insert_map()
1395 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_insert_map()
1397 mapping->bo_va = bo_va; in amdgpu_vm_bo_insert_map()
1398 list_add(&mapping->list, &bo_va->invalids); in amdgpu_vm_bo_insert_map()
1405 !bo_va->base.moved) { in amdgpu_vm_bo_insert_map()
1406 amdgpu_vm_bo_moved(&bo_va->base); in amdgpu_vm_bo_insert_map()
1408 trace_amdgpu_vm_bo_map(bo_va, mapping); in amdgpu_vm_bo_insert_map()
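
amdgpu_vm_bo_insert_map() links a new mapping to its owner both ways: mapping->bo_va points back at the bo_va, and the mapping joins the bo_va's invalids list, since it has not been written into page tables yet. A sketch; the interval-tree insertion between the hits at 1398 and 1405, and the exact root-reservation comparison, are assumptions:

```c
mapping->bo_va = bo_va;
list_add(&mapping->list, &bo_va->invalids);
amdgpu_vm_it_insert(mapping, &vm->va);	/* assumed: address lookup tree */

/* a per-VM BO (one sharing the root reservation) must be flagged
 * for revalidation the first time a mapping is added */
if (bo && bo->tbo.base.resv == vm->root.bo->tbo.base.resv &&
    !bo_va->base.moved)
	amdgpu_vm_bo_moved(&bo_va->base);

trace_amdgpu_vm_bo_map(bo_va, mapping);
```
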
1429 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_map() argument
1434 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_map()
1435 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_map()
1471 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_map()
1495 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_replace_map() argument
1500 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_replace_map()
1521 r = amdgpu_vm_bo_clear_mappings(adev, bo_va->base.vm, saddr, size); in amdgpu_vm_bo_replace_map()
1535 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_replace_map()
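
The two mapping entry points differ only in how an occupied range is treated: amdgpu_vm_bo_map() fails on overlap, while amdgpu_vm_bo_replace_map() clears the range first (the hit at 1521) and then inserts. A contrasting sketch; the overlap lookup and the allocation details are elided in the listing and assumed here:

```c
/* amdgpu_vm_bo_map(): an existing mapping in the range is an error */
tmp = amdgpu_vm_it_iter_first(&vm->va, saddr, eaddr);	/* assumed */
if (tmp)
	return -EINVAL;
mapping = kmalloc(sizeof(*mapping), GFP_KERNEL);
if (!mapping)
	return -ENOMEM;
/* fill in start/last/offset/flags, then hand over ownership */
amdgpu_vm_bo_insert_map(adev, bo_va, mapping);

/* amdgpu_vm_bo_replace_map(): make room first, then insert */
r = amdgpu_vm_bo_clear_mappings(adev, bo_va->base.vm, saddr, size);
if (r)
	return r;
amdgpu_vm_bo_insert_map(adev, bo_va, mapping);
```
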
1555 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_unmap() argument
1559 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_unmap()
1564 list_for_each_entry(mapping, &bo_va->valids, list) { in amdgpu_vm_bo_unmap()
1569 if (&mapping->list == &bo_va->valids) { in amdgpu_vm_bo_unmap()
1572 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_unmap()
1577 if (&mapping->list == &bo_va->invalids) in amdgpu_vm_bo_unmap()
1583 mapping->bo_va = NULL; in amdgpu_vm_bo_unmap()
1584 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_unmap()
1590 bo_va->last_pt_update); in amdgpu_vm_bo_unmap()
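
amdgpu_vm_bo_unmap() looks the address up in two passes: first among the valids, then, using the list_for_each_entry() post-loop idiom where the cursor equals the list head when nothing matched, among the invalids; missing both is an error. A sketch of that lookup and the detach that follows; the match criterion and error code are assumptions:

```c
valid = true;
list_for_each_entry(mapping, &bo_va->valids, list) {
	if (mapping->start == saddr)	/* assumed match criterion */
		break;
}
if (&mapping->list == &bo_va->valids) {	/* ran off the valids list */
	valid = false;
	list_for_each_entry(mapping, &bo_va->invalids, list) {
		if (mapping->start == saddr)
			break;
	}
	if (&mapping->list == &bo_va->invalids)
		return -ENOENT;		/* assumed error code */
}

list_del(&mapping->list);
mapping->bo_va = NULL;			/* detach from the owner */
trace_amdgpu_vm_bo_unmap(bo_va, mapping);
/* teardown is fenced against the last page-table update (1590) */
amdgpu_vm_free_mapping(adev, vm, mapping, bo_va->last_pt_update);
```
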
1642 before->bo_va = tmp->bo_va; in amdgpu_vm_bo_clear_mappings()
1643 list_add(&before->list, &tmp->bo_va->invalids); in amdgpu_vm_bo_clear_mappings()
1653 after->bo_va = tmp->bo_va; in amdgpu_vm_bo_clear_mappings()
1654 list_add(&after->list, &tmp->bo_va->invalids); in amdgpu_vm_bo_clear_mappings()
1673 tmp->bo_va = NULL; in amdgpu_vm_bo_clear_mappings()
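
In amdgpu_vm_bo_clear_mappings(), a mapping that straddles the cleared range is split: "before" and "after" remainders inherit tmp->bo_va and rejoin that bo_va's invalids list (1642-1643, 1653-1654), while the covered middle is detached by setting tmp->bo_va = NULL (1673), which is exactly the state the guards elsewhere test for. A sketch; the boundary arithmetic is an assumption:

```c
if (tmp->start < saddr) {		/* keep the low remainder */
	before->start = tmp->start;
	before->last = saddr - 1;
	before->bo_va = tmp->bo_va;
	list_add(&before->list, &tmp->bo_va->invalids);
}
if (tmp->last > eaddr) {		/* keep the high remainder */
	after->start = eaddr + 1;
	after->last = tmp->last;
	after->bo_va = tmp->bo_va;
	list_add(&after->list, &tmp->bo_va->invalids);
}
/* the clipped middle no longer belongs to any bo_va */
tmp->bo_va = NULL;
```
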
1734 if (mapping->bo_va && mapping->bo_va->base.bo) { in amdgpu_vm_bo_trace_cs()
1737 bo = mapping->bo_va->base.bo; in amdgpu_vm_bo_trace_cs()
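
amdgpu_vm_bo_trace_cs() dereferences the backing BO only when the mapping still has an owner, i.e. mapping->bo_va survived any clear/unmap (which would have left it NULL). A minimal sketch of the guard; what the function does with the BO afterwards is assumed from context:

```c
if (mapping->bo_va && mapping->bo_va->base.bo) {
	struct amdgpu_bo *bo = mapping->bo_va->base.bo;

	/* assumption: only BOs locked by this CS ticket matter */
	if (dma_resv_locking_ctx(bo->tbo.base.resv) != ticket)
		continue;
}
```
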
1758 struct amdgpu_bo_va *bo_va) in amdgpu_vm_bo_del() argument
1761 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_del()
1762 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_del()
1772 for (base = &bo_va->base.bo->vm_bo; *base; in amdgpu_vm_bo_del()
1774 if (*base != &bo_va->base) in amdgpu_vm_bo_del()
1777 *base = bo_va->base.next; in amdgpu_vm_bo_del()
1783 list_del(&bo_va->base.vm_status); in amdgpu_vm_bo_del()
1786 list_for_each_entry_safe(mapping, next, &bo_va->valids, list) { in amdgpu_vm_bo_del()
1789 mapping->bo_va = NULL; in amdgpu_vm_bo_del()
1790 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_del()
1793 list_for_each_entry_safe(mapping, next, &bo_va->invalids, list) { in amdgpu_vm_bo_del()
1797 bo_va->last_pt_update); in amdgpu_vm_bo_del()
1800 dma_fence_put(bo_va->last_pt_update); in amdgpu_vm_bo_del()
1802 if (bo && bo_va->is_xgmi) in amdgpu_vm_bo_del()
1805 kfree(bo_va); in amdgpu_vm_bo_del()
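
amdgpu_vm_bo_del() unlinks the bo_va from two places: the VM status list (1783) and the BO's vm_bo chain, a singly linked list threaded through amdgpu_vm_bo_base.next. The hits at 1772-1777 are the classic pointer-to-pointer walk:

```c
/* Walk bo->vm_bo by pointer-to-pointer until *base addresses our
 * entry, then bypass it; no back pointer is needed on a singly
 * linked list. */
struct amdgpu_vm_bo_base **base;

for (base = &bo_va->base.bo->vm_bo; *base;
     base = &(*base)->next) {
	if (*base != &bo_va->base)
		continue;
	*base = bo_va->base.next;	/* unlink bo_va from the chain */
	break;
}
```

After the unlink, the remaining hits show the teardown order: valid mappings are detached (1789) and traced, invalid mappings are freed against bo_va->last_pt_update (1797), the fence reference is dropped (1800), the XGMI state is undone if it was set (1802), and only then is the bo_va itself kfree()d.
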
2543 struct amdgpu_bo_va *bo_va, *tmp; in amdgpu_debugfs_vm_bo_info() local
2560 list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2561 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2563 total_idle += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2569 list_for_each_entry_safe(bo_va, tmp, &vm->evicted, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2570 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2572 total_evicted += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2578 list_for_each_entry_safe(bo_va, tmp, &vm->relocated, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2579 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2581 total_relocated += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2587 list_for_each_entry_safe(bo_va, tmp, &vm->moved, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2588 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2590 total_moved += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2596 list_for_each_entry_safe(bo_va, tmp, &vm->invalidated, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2597 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2599 total_invalidated += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2605 list_for_each_entry_safe(bo_va, tmp, &vm->done, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2606 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2608 total_done += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
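
amdgpu_debugfs_vm_bo_info() mirrors amdgpu_vm_get_memory(): the same six state lists are walked with the same NULL-BO guard, but here each BO is printed and its size added to a per-list total. A sketch of one of the six stanzas; the seq_printf() summary format is an assumption:

```c
total_idle = 0;
list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status) {
	if (!bo_va->base.bo)
		continue;
	total_idle += amdgpu_bo_print_info(id++, bo_va->base.bo, m);
}
seq_printf(m, "\tTotal idle size: %12lld\tobjs:\t%d\n",
	   total_idle, id);	/* assumed summary line */
```
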