Lines matching refs:mapping in drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c (the leading numbers are source line numbers in that file)
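Read top to bottom, the matches trace the life cycle of a struct amdgpu_bo_va_mapping: allocation and insertion (amdgpu_vm_bo_map()/_replace_map() via amdgpu_vm_bo_insert_map()), page-table updates (amdgpu_vm_bo_update()), removal (amdgpu_vm_bo_unmap(), amdgpu_vm_bo_del()), deferred cleanup (amdgpu_vm_clear_freed()), and final teardown (amdgpu_vm_fini()). Short sketches follow each group below; they are reconstructed from the fragments in this listing plus the upstream driver, not verbatim excerpts.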
1250 struct amdgpu_bo_va_mapping *mapping; in amdgpu_vm_bo_update() local
1336 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_update()
1342 if (!(mapping->flags & AMDGPU_PTE_READABLE)) in amdgpu_vm_bo_update()
1344 if (!(mapping->flags & AMDGPU_PTE_WRITEABLE)) in amdgpu_vm_bo_update()
1348 amdgpu_gmc_get_vm_pte(adev, mapping, &update_flags); in amdgpu_vm_bo_update()
1350 trace_amdgpu_vm_bo_update(mapping); in amdgpu_vm_bo_update()
1353 !uncached, &sync, mapping->start, in amdgpu_vm_bo_update()
1354 mapping->last, update_flags, in amdgpu_vm_bo_update()
1355 mapping->offset, vram_base, mem, in amdgpu_vm_bo_update()
1381 list_for_each_entry(mapping, &bo_va->valids, list) in amdgpu_vm_bo_update()
1382 trace_amdgpu_vm_bo_mapping(mapping); in amdgpu_vm_bo_update()
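The first group is the update path: amdgpu_vm_bo_update() walks bo_va->invalids, masks off permissions the mapping was not created with, lets the GMC backend fold in ASIC-specific PTE bits, and writes each range into the page tables. A minimal sketch of that loop, reconstructed from the fragments above (surrounding declarations and the exact amdgpu_vm_update_range() parameter list vary by kernel version):

	list_for_each_entry(mapping, &bo_va->invalids, list) {
		uint64_t update_flags = flags;

		/* Drop permissions this mapping does not grant. */
		if (!(mapping->flags & AMDGPU_PTE_READABLE))
			update_flags &= ~AMDGPU_PTE_READABLE;
		if (!(mapping->flags & AMDGPU_PTE_WRITEABLE))
			update_flags &= ~AMDGPU_PTE_WRITEABLE;

		/* Let the GMC hook add ASIC-specific PTE bits. */
		amdgpu_gmc_get_vm_pte(adev, mapping, &update_flags);

		trace_amdgpu_vm_bo_update(mapping);
		r = amdgpu_vm_update_range(adev, vm, false, false, flush_tlb,
					   !uncached, &sync, mapping->start,
					   mapping->last, update_flags,
					   mapping->offset, vram_base, mem,
					   pages_addr, last_update);
		if (r)
			break;
	}

The two matches on the valids list at the end of the group are tracing only: once the update succeeds, each mapping is reported through trace_amdgpu_vm_bo_mapping().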
1486 struct amdgpu_bo_va_mapping *mapping, in amdgpu_vm_free_mapping() argument
1489 if (mapping->flags & AMDGPU_PTE_PRT_FLAG(adev)) in amdgpu_vm_free_mapping()
1491 kfree(mapping); in amdgpu_vm_free_mapping()
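amdgpu_vm_free_mapping() can be reconstructed almost entirely from its three matches: a PRT mapping defers the PRT teardown behind the given fence before the structure itself is freed. A sketch; the callback helper amdgpu_vm_add_prt_cb() is taken from the upstream driver, not from this listing:

	static void amdgpu_vm_free_mapping(struct amdgpu_device *adev,
					   struct amdgpu_vm *vm,
					   struct amdgpu_bo_va_mapping *mapping,
					   struct dma_fence *fence)
	{
		/* PRT support is refcounted; drop it once the fence signals. */
		if (mapping->flags & AMDGPU_PTE_PRT_FLAG(adev))
			amdgpu_vm_add_prt_cb(adev, fence);
		kfree(mapping);
	}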
1534 struct amdgpu_bo_va_mapping *mapping; in amdgpu_vm_clear_freed() local
1551 mapping = list_first_entry(&vm->freed, in amdgpu_vm_clear_freed()
1553 list_del(&mapping->list); in amdgpu_vm_clear_freed()
1556 &sync, mapping->start, mapping->last, in amdgpu_vm_clear_freed()
1558 amdgpu_vm_free_mapping(adev, vm, mapping, f); in amdgpu_vm_clear_freed()
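amdgpu_vm_clear_freed() drains vm->freed, zapping the PTEs of each range before handing the mapping to amdgpu_vm_free_mapping() along with the resulting fence. A sketch, under the same caveat that the amdgpu_vm_update_range() argument list is version-dependent:

	while (!list_empty(&vm->freed)) {
		mapping = list_first_entry(&vm->freed,
			struct amdgpu_bo_va_mapping, list);
		list_del(&mapping->list);

		/* Overwrite the stale range with the init PTE value. */
		r = amdgpu_vm_update_range(adev, vm, false, false, true, false,
					   &sync, mapping->start, mapping->last,
					   init_pte_value, 0, 0, NULL, NULL, &f);
		amdgpu_vm_free_mapping(adev, vm, mapping, f);
		if (r) {
			dma_fence_put(f);
			return r;
		}
	}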
1759 struct amdgpu_bo_va_mapping *mapping) in amdgpu_vm_bo_insert_map() argument
1764 mapping->bo_va = bo_va; in amdgpu_vm_bo_insert_map()
1765 list_add(&mapping->list, &bo_va->invalids); in amdgpu_vm_bo_insert_map()
1766 amdgpu_vm_it_insert(mapping, &vm->va); in amdgpu_vm_bo_insert_map()
1768 if (mapping->flags & AMDGPU_PTE_PRT_FLAG(adev)) in amdgpu_vm_bo_insert_map()
1774 trace_amdgpu_vm_bo_map(bo_va, mapping); in amdgpu_vm_bo_insert_map()
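amdgpu_vm_bo_insert_map() is the common tail of both map paths: the mapping is linked back to its bo_va, queued on the invalids list (it has no PTEs yet), and inserted into the per-VM interval tree keyed by mapping->start/last. A sketch; amdgpu_vm_prt_get() is the upstream helper, and the moved-BO bookkeeping between the PRT check and the trace is elided:

	struct amdgpu_vm *vm = bo_va->base.vm;

	mapping->bo_va = bo_va;
	list_add(&mapping->list, &bo_va->invalids);
	amdgpu_vm_it_insert(mapping, &vm->va);   /* interval tree, by VA range */

	if (mapping->flags & AMDGPU_PTE_PRT_FLAG(adev))
		amdgpu_vm_prt_get(adev);         /* keep PRT enabled while mapped */

	trace_amdgpu_vm_bo_map(bo_va, mapping);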
1830 struct amdgpu_bo_va_mapping *mapping, *tmp; in amdgpu_vm_bo_map() local
1852 mapping = kmalloc(sizeof(*mapping), GFP_KERNEL); in amdgpu_vm_bo_map()
1853 if (!mapping) in amdgpu_vm_bo_map()
1856 mapping->start = saddr; in amdgpu_vm_bo_map()
1857 mapping->last = eaddr; in amdgpu_vm_bo_map()
1858 mapping->offset = offset; in amdgpu_vm_bo_map()
1859 mapping->flags = flags; in amdgpu_vm_bo_map()
1861 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_map()
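amdgpu_vm_bo_map() allocates the mapping and fills the four fields the rest of this listing keys off. By this point saddr/eaddr have already been converted to GPU page numbers, and eaddr is inclusive; the range and overlap validation preceding the kmalloc() is elided here:

	mapping = kmalloc(sizeof(*mapping), GFP_KERNEL);
	if (!mapping)
		return -ENOMEM;

	mapping->start = saddr;    /* first GPU page of the VA range */
	mapping->last = eaddr;     /* last GPU page, inclusive */
	mapping->offset = offset;  /* byte offset into the BO */
	mapping->flags = flags;    /* requested PTE flags */

	amdgpu_vm_bo_insert_map(adev, bo_va, mapping);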
1889 struct amdgpu_bo_va_mapping *mapping; in amdgpu_vm_bo_replace_map() local
1899 mapping = kmalloc(sizeof(*mapping), GFP_KERNEL); in amdgpu_vm_bo_replace_map()
1900 if (!mapping) in amdgpu_vm_bo_replace_map()
1905 kfree(mapping); in amdgpu_vm_bo_replace_map()
1912 mapping->start = saddr; in amdgpu_vm_bo_replace_map()
1913 mapping->last = eaddr; in amdgpu_vm_bo_replace_map()
1914 mapping->offset = offset; in amdgpu_vm_bo_replace_map()
1915 mapping->flags = flags; in amdgpu_vm_bo_replace_map()
1917 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_replace_map()
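amdgpu_vm_bo_replace_map() differs only in that it tolerates overlaps: it allocates first, then clears any existing mappings in the range, and the lone kfree() match above is the unwind when that clearing fails. A sketch, assuming the upstream amdgpu_vm_bo_clear_mappings() helper:

	mapping = kmalloc(sizeof(*mapping), GFP_KERNEL);
	if (!mapping)
		return -ENOMEM;

	r = amdgpu_vm_bo_clear_mappings(adev, bo_va->base.vm, saddr, size);
	if (r) {
		kfree(mapping);    /* undo the allocation on failure */
		return r;
	}

	/* then the same field setup and amdgpu_vm_bo_insert_map() as above */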
1940 struct amdgpu_bo_va_mapping *mapping; in amdgpu_vm_bo_unmap() local
1946 list_for_each_entry(mapping, &bo_va->valids, list) { in amdgpu_vm_bo_unmap()
1947 if (mapping->start == saddr) in amdgpu_vm_bo_unmap()
1951 if (&mapping->list == &bo_va->valids) { in amdgpu_vm_bo_unmap()
1954 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_unmap()
1955 if (mapping->start == saddr) in amdgpu_vm_bo_unmap()
1959 if (&mapping->list == &bo_va->invalids) in amdgpu_vm_bo_unmap()
1963 list_del(&mapping->list); in amdgpu_vm_bo_unmap()
1964 amdgpu_vm_it_remove(mapping, &vm->va); in amdgpu_vm_bo_unmap()
1965 mapping->bo_va = NULL; in amdgpu_vm_bo_unmap()
1966 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_unmap()
1969 list_add(&mapping->list, &vm->freed); in amdgpu_vm_bo_unmap()
1971 amdgpu_vm_free_mapping(adev, vm, mapping, in amdgpu_vm_bo_unmap()
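amdgpu_vm_bo_unmap() searches valids first and falls back to invalids; the "&mapping->list == &head" tests are the standard idiom for "list_for_each_entry() ran off the end without a match". Whether the mapping was found on valids decides its fate: valid mappings still have live PTEs and go on vm->freed for a later clear, invalid ones never reached the page tables and are freed immediately. A sketch:

	list_for_each_entry(mapping, &bo_va->valids, list) {
		if (mapping->start == saddr)
			break;
	}
	if (&mapping->list == &bo_va->valids) {   /* not found among valids */
		valid = false;
		list_for_each_entry(mapping, &bo_va->invalids, list) {
			if (mapping->start == saddr)
				break;
		}
		if (&mapping->list == &bo_va->invalids)
			return -ENOENT;           /* no such mapping at all */
	}

	list_del(&mapping->list);
	amdgpu_vm_it_remove(mapping, &vm->va);
	mapping->bo_va = NULL;
	trace_amdgpu_vm_bo_unmap(bo_va, mapping);

	if (valid)
		list_add(&mapping->list, &vm->freed);  /* PTEs cleared later */
	else
		amdgpu_vm_free_mapping(adev, vm, mapping,
				       bo_va->last_pt_update);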
2125 struct amdgpu_bo_va_mapping *mapping; in amdgpu_vm_bo_trace_cs() local
2130 for (mapping = amdgpu_vm_it_iter_first(&vm->va, 0, U64_MAX); mapping; in amdgpu_vm_bo_trace_cs()
2131 mapping = amdgpu_vm_it_iter_next(mapping, 0, U64_MAX)) { in amdgpu_vm_bo_trace_cs()
2132 if (mapping->bo_va && mapping->bo_va->base.bo) { in amdgpu_vm_bo_trace_cs()
2135 bo = mapping->bo_va->base.bo; in amdgpu_vm_bo_trace_cs()
2141 trace_amdgpu_vm_bo_cs(mapping); in amdgpu_vm_bo_trace_cs()
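amdgpu_vm_bo_trace_cs() shows the read side of the interval tree: iterating amdgpu_vm_it_iter_first()/_next() over [0, U64_MAX] visits every mapping in the VM. In the upstream driver the body also skips BOs that are not locked under the caller's ww_acquire ticket; a sketch with that check included as an assumption:

	for (mapping = amdgpu_vm_it_iter_first(&vm->va, 0, U64_MAX); mapping;
	     mapping = amdgpu_vm_it_iter_next(mapping, 0, U64_MAX)) {
		if (mapping->bo_va && mapping->bo_va->base.bo) {
			struct amdgpu_bo *bo = mapping->bo_va->base.bo;

			/* Only trace BOs locked as part of this submission. */
			if (dma_resv_locking_ctx(bo->tbo.base.resv) != ticket)
				continue;
		}
		trace_amdgpu_vm_bo_cs(mapping);
	}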
2158 struct amdgpu_bo_va_mapping *mapping, *next; in amdgpu_vm_bo_del() local
2185 list_for_each_entry_safe(mapping, next, &bo_va->valids, list) { in amdgpu_vm_bo_del()
2186 list_del(&mapping->list); in amdgpu_vm_bo_del()
2187 amdgpu_vm_it_remove(mapping, &vm->va); in amdgpu_vm_bo_del()
2188 mapping->bo_va = NULL; in amdgpu_vm_bo_del()
2189 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_del()
2190 list_add(&mapping->list, &vm->freed); in amdgpu_vm_bo_del()
2192 list_for_each_entry_safe(mapping, next, &bo_va->invalids, list) { in amdgpu_vm_bo_del()
2193 list_del(&mapping->list); in amdgpu_vm_bo_del()
2194 amdgpu_vm_it_remove(mapping, &vm->va); in amdgpu_vm_bo_del()
2195 amdgpu_vm_free_mapping(adev, vm, mapping, in amdgpu_vm_bo_del()
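amdgpu_vm_bo_del() applies the same valid/invalid split wholesale when a bo_va is destroyed: everything on valids moves to vm->freed (their PTEs still have to be cleared), while everything on invalids never hit the page tables and is freed on the spot. A sketch:

	list_for_each_entry_safe(mapping, next, &bo_va->valids, list) {
		list_del(&mapping->list);
		amdgpu_vm_it_remove(mapping, &vm->va);
		mapping->bo_va = NULL;
		trace_amdgpu_vm_bo_unmap(bo_va, mapping);
		list_add(&mapping->list, &vm->freed);   /* clear PTEs later */
	}
	list_for_each_entry_safe(mapping, next, &bo_va->invalids, list) {
		list_del(&mapping->list);
		amdgpu_vm_it_remove(mapping, &vm->va);
		amdgpu_vm_free_mapping(adev, vm, mapping,
				       bo_va->last_pt_update);
	}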
2724 struct amdgpu_bo_va_mapping *mapping, *tmp; in amdgpu_vm_fini() local
2743 list_for_each_entry_safe(mapping, tmp, &vm->freed, list) { in amdgpu_vm_fini()
2744 if (mapping->flags & AMDGPU_PTE_PRT_FLAG(adev) && prt_fini_needed) { in amdgpu_vm_fini()
2749 list_del(&mapping->list); in amdgpu_vm_fini()
2750 amdgpu_vm_free_mapping(adev, vm, mapping, NULL); in amdgpu_vm_fini()
2763 rbtree_postorder_for_each_entry_safe(mapping, tmp, in amdgpu_vm_fini()
2768 list_del(&mapping->list); in amdgpu_vm_fini()
2769 kfree(mapping); in amdgpu_vm_fini()
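Finally, amdgpu_vm_fini() flushes whatever is still on vm->freed, tearing PRT down once if any PRT mapping is pending, then frees every remaining mapping straight out of the interval tree's rb_root. The postorder walk deliberately skips amdgpu_vm_it_remove(): rebalancing a tree that is about to be destroyed would be wasted work. A sketch; amdgpu_vm_prt_fini() is the upstream helper:

	list_for_each_entry_safe(mapping, tmp, &vm->freed, list) {
		if (mapping->flags & AMDGPU_PTE_PRT_FLAG(adev) && prt_fini_needed) {
			amdgpu_vm_prt_fini(adev, vm);   /* disable PRT once */
			prt_fini_needed = false;
		}
		list_del(&mapping->list);
		amdgpu_vm_free_mapping(adev, vm, mapping, NULL);
	}

	rbtree_postorder_for_each_entry_safe(mapping, tmp,
					     &vm->va.rb_root, rb) {
		/* No amdgpu_vm_it_remove(): don't rebalance a dying tree. */
		list_del(&mapping->list);
		kfree(mapping);
	}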