Lines matching refs: mem
70 struct kgd_mem *mem) in kfd_mem_is_attached() argument
74 list_for_each_entry(entry, &mem->attachments, list) in kfd_mem_is_attached()
88 uint64_t mem; in amdgpu_amdkfd_gpuvm_init_mem_limits() local
91 mem = si.freeram - si.freehigh; in amdgpu_amdkfd_gpuvm_init_mem_limits()
92 mem *= si.mem_unit; in amdgpu_amdkfd_gpuvm_init_mem_limits()
95 kfd_mem_limit.max_system_mem_limit = mem - (mem >> 4); in amdgpu_amdkfd_gpuvm_init_mem_limits()
96 kfd_mem_limit.max_ttm_mem_limit = (mem >> 1) - (mem >> 3); in amdgpu_amdkfd_gpuvm_init_mem_limits()
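The two limits computed above work out to roughly 15/16 (93.75%) and 3/8 (37.5%) of the machine's free low memory. A minimal standalone sketch of that arithmetic, assuming a hypothetical 16 GiB of free low memory and si.mem_unit == 1 (illustrative only, not part of the kernel source):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* Hypothetical free low memory (si.freeram - si.freehigh), in bytes. */
	uint64_t mem = 16ULL << 30;

	uint64_t max_system_mem_limit = mem - (mem >> 4);        /* 15/16 of mem */
	uint64_t max_ttm_mem_limit    = (mem >> 1) - (mem >> 3); /* 3/8 of mem   */

	printf("max_system_mem_limit = %llu MiB\n",
	       (unsigned long long)(max_system_mem_limit >> 20));
	printf("max_ttm_mem_limit    = %llu MiB\n",
	       (unsigned long long)(max_ttm_mem_limit >> 20));
	return 0;
}

For 16 GiB this prints 15360 MiB and 6144 MiB respectively.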
403 static uint64_t get_pte_flags(struct amdgpu_device *adev, struct kgd_mem *mem) in get_pte_flags() argument
405 struct amdgpu_device *bo_adev = amdgpu_ttm_adev(mem->bo->tbo.bdev); in get_pte_flags()
406 bool coherent = mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_COHERENT; in get_pte_flags()
407 bool uncached = mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_UNCACHED; in get_pte_flags()
413 if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE) in get_pte_flags()
415 if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_EXECUTABLE) in get_pte_flags()
420 if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_VRAM) { in get_pte_flags()
435 !(mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_VRAM)) in get_pte_flags()
438 } else if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_VRAM) { in get_pte_flags()
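The get_pte_flags() lines above test the allocation's WRITABLE and EXECUTABLE flags before the VRAM-specific handling. A simplified sketch of that read/write/execute translation, using placeholder bit values so it compiles on its own; in the kernel the real KFD_IOC_ALLOC_MEM_FLAGS_* and amdgpu VM page-permission definitions are used, and the VRAM/cache-type handling is omitted here:

#include <stdint.h>

/* Placeholder bits; stand-ins for the real UAPI definitions. */
#define ALLOC_WRITABLE    (1u << 0)  /* stands in for KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE */
#define ALLOC_EXECUTABLE  (1u << 1)  /* stands in for KFD_IOC_ALLOC_MEM_FLAGS_EXECUTABLE */

#define MAP_READABLE      (1u << 0)
#define MAP_WRITEABLE     (1u << 1)
#define MAP_EXECUTABLE    (1u << 2)

/* Every mapping is readable; write and execute follow the alloc flags. */
static uint64_t sketch_mapping_flags(uint32_t alloc_flags)
{
	uint64_t mapping_flags = MAP_READABLE;

	if (alloc_flags & ALLOC_WRITABLE)
		mapping_flags |= MAP_WRITEABLE;
	if (alloc_flags & ALLOC_EXECUTABLE)
		mapping_flags |= MAP_EXECUTABLE;

	return mapping_flags;
}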
468 kfd_mem_dmamap_userptr(struct kgd_mem *mem, in kfd_mem_dmamap_userptr() argument
472 mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE ? in kfd_mem_dmamap_userptr()
477 struct ttm_tt *src_ttm = mem->bo->tbo.ttm; in kfd_mem_dmamap_userptr()
532 kfd_mem_dmamap_attachment(struct kgd_mem *mem, in kfd_mem_dmamap_attachment() argument
539 return kfd_mem_dmamap_userptr(mem, attachment); in kfd_mem_dmamap_attachment()
549 kfd_mem_dmaunmap_userptr(struct kgd_mem *mem, in kfd_mem_dmaunmap_userptr() argument
553 mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE ? in kfd_mem_dmaunmap_userptr()
583 kfd_mem_dmaunmap_attachment(struct kgd_mem *mem, in kfd_mem_dmaunmap_attachment() argument
590 kfd_mem_dmaunmap_userptr(mem, attachment); in kfd_mem_dmaunmap_attachment()
601 kfd_mem_attach_userptr(struct amdgpu_device *adev, struct kgd_mem *mem, in kfd_mem_attach_userptr() argument
604 unsigned long bo_size = mem->bo->tbo.base.size; in kfd_mem_attach_userptr()
608 ret = amdgpu_bo_reserve(mem->bo, false); in kfd_mem_attach_userptr()
615 ttm_bo_type_sg, mem->bo->tbo.base.resv, in kfd_mem_attach_userptr()
617 amdgpu_bo_unreserve(mem->bo); in kfd_mem_attach_userptr()
622 (*bo)->parent = amdgpu_bo_ref(mem->bo); in kfd_mem_attach_userptr()
628 kfd_mem_attach_dmabuf(struct amdgpu_device *adev, struct kgd_mem *mem, in kfd_mem_attach_dmabuf() argument
634 if (!mem->dmabuf) { in kfd_mem_attach_dmabuf()
635 mem->dmabuf = amdgpu_gem_prime_export(&mem->bo->tbo.base, in kfd_mem_attach_dmabuf()
636 mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE ? in kfd_mem_attach_dmabuf()
638 if (IS_ERR(mem->dmabuf)) { in kfd_mem_attach_dmabuf()
639 ret = PTR_ERR(mem->dmabuf); in kfd_mem_attach_dmabuf()
640 mem->dmabuf = NULL; in kfd_mem_attach_dmabuf()
645 gobj = amdgpu_gem_prime_import(adev_to_drm(adev), mem->dmabuf); in kfd_mem_attach_dmabuf()
651 (*bo)->parent = amdgpu_bo_ref(mem->bo); in kfd_mem_attach_dmabuf()
669 static int kfd_mem_attach(struct amdgpu_device *adev, struct kgd_mem *mem, in kfd_mem_attach() argument
672 struct amdgpu_device *bo_adev = amdgpu_ttm_adev(mem->bo->tbo.bdev); in kfd_mem_attach()
673 unsigned long bo_size = mem->bo->tbo.base.size; in kfd_mem_attach()
674 uint64_t va = mem->va; in kfd_mem_attach()
694 if (adev == bo_adev || (mem->domain == AMDGPU_GEM_DOMAIN_VRAM && in kfd_mem_attach()
700 bo[i] = mem->bo; in kfd_mem_attach()
707 } else if (amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm)) { in kfd_mem_attach()
710 ret = kfd_mem_attach_userptr(adev, mem, &bo[i]); in kfd_mem_attach()
713 } else if (mem->domain == AMDGPU_GEM_DOMAIN_GTT && in kfd_mem_attach()
714 mem->bo->tbo.type != ttm_bo_type_sg) { in kfd_mem_attach()
720 ret = kfd_mem_attach_dmabuf(adev, mem, &bo[i]); in kfd_mem_attach()
728 bo[i] = mem->bo; in kfd_mem_attach()
747 attachment[i]->pte_flags = get_pte_flags(adev, mem); in kfd_mem_attach()
749 list_add(&attachment[i]->list, &mem->attachments); in kfd_mem_attach()
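The kfd_mem_attach() lines above branch on the BO's origin: reuse mem->bo directly when adev == bo_adev (or for the VRAM peer case whose condition continues past the fragment shown), build a per-GPU SG BO for userptr memory, go through a dma-buf export/import for mappable GTT BOs, and otherwise fall back to mem->bo. A simplified sketch of that branch order; the enum and parameter names are illustrative, not from the source:

/* Illustrative only; mirrors the branch order listed above. */
enum sketch_attach_type {
	ATTACH_SHARE_ORIGINAL,  /* reuse mem->bo directly             */
	ATTACH_USERPTR_SG,      /* per-GPU SG BO over the same pages  */
	ATTACH_DMABUF,          /* export/import mem->bo as a dma-buf */
};

static enum sketch_attach_type
sketch_pick_attach_type(int same_gpu_or_peer_vram, int is_userptr, int is_mappable_gtt)
{
	if (same_gpu_or_peer_vram)
		return ATTACH_SHARE_ORIGINAL;
	if (is_userptr)
		return ATTACH_USERPTR_SG;
	if (is_mappable_gtt)
		return ATTACH_DMABUF;
	return ATTACH_SHARE_ORIGINAL;   /* fallback path at the end of the chain */
}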
785 static void add_kgd_mem_to_kfd_bo_list(struct kgd_mem *mem, in add_kgd_mem_to_kfd_bo_list() argument
789 struct ttm_validate_buffer *entry = &mem->validate_list; in add_kgd_mem_to_kfd_bo_list()
790 struct amdgpu_bo *bo = mem->bo; in add_kgd_mem_to_kfd_bo_list()
803 static void remove_kgd_mem_from_kfd_bo_list(struct kgd_mem *mem, in remove_kgd_mem_from_kfd_bo_list() argument
808 bo_list_entry = &mem->validate_list; in remove_kgd_mem_from_kfd_bo_list()
826 static int init_user_pages(struct kgd_mem *mem, uint64_t user_addr) in init_user_pages() argument
828 struct amdkfd_process_info *process_info = mem->process_info; in init_user_pages()
829 struct amdgpu_bo *bo = mem->bo; in init_user_pages()
859 amdgpu_bo_placement_from_domain(bo, mem->domain); in init_user_pages()
902 static int reserve_bo_and_vm(struct kgd_mem *mem, in reserve_bo_and_vm() argument
906 struct amdgpu_bo *bo = mem->bo; in reserve_bo_and_vm()
913 ctx->sync = &mem->sync; in reserve_bo_and_vm()
952 static int reserve_bo_and_cond_vms(struct kgd_mem *mem, in reserve_bo_and_cond_vms() argument
956 struct amdgpu_bo *bo = mem->bo; in reserve_bo_and_cond_vms()
964 ctx->sync = &mem->sync; in reserve_bo_and_cond_vms()
969 list_for_each_entry(entry, &mem->attachments, list) { in reserve_bo_and_cond_vms()
991 list_for_each_entry(entry, &mem->attachments, list) { in reserve_bo_and_cond_vms()
1045 static void unmap_bo_from_gpuvm(struct kgd_mem *mem, in unmap_bo_from_gpuvm() argument
1059 kfd_mem_dmaunmap_attachment(mem, entry); in unmap_bo_from_gpuvm()
1062 static int update_gpuvm_pte(struct kgd_mem *mem, in update_gpuvm_pte() argument
1071 ret = kfd_mem_dmamap_attachment(mem, entry); in update_gpuvm_pte()
1085 static int map_bo_to_gpuvm(struct kgd_mem *mem, in map_bo_to_gpuvm() argument
1106 ret = update_gpuvm_pte(mem, entry, sync, table_freed); in map_bo_to_gpuvm()
1115 unmap_bo_from_gpuvm(mem, entry, sync); in map_bo_to_gpuvm()
1390 void *drm_priv, struct kgd_mem **mem, in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu() argument
1437 *mem = kzalloc(sizeof(struct kgd_mem), GFP_KERNEL); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1438 if (!*mem) { in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1442 INIT_LIST_HEAD(&(*mem)->attachments); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1443 mutex_init(&(*mem)->lock); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1444 (*mem)->aql_queue = !!(flags & KFD_IOC_ALLOC_MEM_FLAGS_AQL_QUEUE_MEM); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1450 if ((*mem)->aql_queue) in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1453 (*mem)->alloc_flags = flags; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1455 amdgpu_sync_create(&(*mem)->sync); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1483 bo->kfd_bo = *mem; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1484 (*mem)->bo = bo; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1488 (*mem)->va = va; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1489 (*mem)->domain = domain; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1490 (*mem)->mapped_to_gpu_memory = 0; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1491 (*mem)->process_info = avm->process_info; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1492 add_kgd_mem_to_kfd_bo_list(*mem, avm->process_info, user_addr); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1495 ret = init_user_pages(*mem, user_addr); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1506 remove_kgd_mem_from_kfd_bo_list(*mem, avm->process_info); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1514 mutex_destroy(&(*mem)->lock); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1518 kfree(*mem); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1528 struct kgd_dev *kgd, struct kgd_mem *mem, void *drm_priv, in amdgpu_amdkfd_gpuvm_free_memory_of_gpu() argument
1531 struct amdkfd_process_info *process_info = mem->process_info; in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1532 unsigned long bo_size = mem->bo->tbo.base.size; in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1540 mutex_lock(&mem->lock); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1541 mapped_to_gpu_memory = mem->mapped_to_gpu_memory; in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1542 is_imported = mem->is_imported; in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1543 mutex_unlock(&mem->lock); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1550 mem->va, bo_size); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1555 bo_list_entry = &mem->validate_list; in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1561 amdgpu_mn_unregister(mem->bo); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1563 ret = reserve_bo_and_cond_vms(mem, NULL, BO_VM_ALL, &ctx); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1571 amdgpu_amdkfd_remove_eviction_fence(mem->bo, in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1573 pr_debug("Release VA 0x%llx - 0x%llx\n", mem->va, in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1574 mem->va + bo_size * (1 + mem->aql_queue)); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1577 list_for_each_entry_safe(entry, tmp, &mem->attachments, list) in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1583 amdgpu_sync_free(&mem->sync); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1588 if (mem->bo->tbo.sg) { in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1589 sg_free_table(mem->bo->tbo.sg); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1590 kfree(mem->bo->tbo.sg); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1597 if ((mem->bo->preferred_domains == AMDGPU_GEM_DOMAIN_VRAM) && in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1605 drm_vma_node_revoke(&mem->bo->tbo.base.vma_node, drm_priv); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1606 if (mem->dmabuf) in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1607 dma_buf_put(mem->dmabuf); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1608 mutex_destroy(&mem->lock); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1614 drm_gem_object_put(&mem->bo->tbo.base); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1620 struct kgd_dev *kgd, struct kgd_mem *mem, in amdgpu_amdkfd_gpuvm_map_memory_to_gpu() argument
1633 bo = mem->bo; in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1643 mutex_lock(&mem->process_info->lock); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1651 is_invalid_userptr = atomic_read(&mem->invalid); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1655 mutex_lock(&mem->lock); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1657 domain = mem->domain; in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1661 mem->va, in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1662 mem->va + bo_size * (1 + mem->aql_queue), in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1665 if (!kfd_mem_is_attached(avm, mem)) { in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1666 ret = kfd_mem_attach(adev, mem, avm, mem->aql_queue); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1671 ret = reserve_bo_and_vm(mem, avm, &ctx); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1688 if (mem->mapped_to_gpu_memory == 0 && in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1701 list_for_each_entry(entry, &mem->attachments, list) { in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1708 ret = map_bo_to_gpuvm(mem, entry, ctx.sync, in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1722 mem->mapped_to_gpu_memory++; in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1724 mem->mapped_to_gpu_memory); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1744 mutex_unlock(&mem->process_info->lock); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1745 mutex_unlock(&mem->lock); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1750 struct kgd_dev *kgd, struct kgd_mem *mem, void *drm_priv) in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu() argument
1754 unsigned long bo_size = mem->bo->tbo.base.size; in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1759 mutex_lock(&mem->lock); in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1761 ret = reserve_bo_and_cond_vms(mem, avm, BO_VM_MAPPED, &ctx); in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1775 mem->va, in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1776 mem->va + bo_size * (1 + mem->aql_queue), in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1779 list_for_each_entry(entry, &mem->attachments, list) { in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1786 unmap_bo_from_gpuvm(mem, entry, ctx.sync); in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1789 mem->mapped_to_gpu_memory--; in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1791 mem->mapped_to_gpu_memory); in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1797 if (mem->mapped_to_gpu_memory == 0 && in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1798 !amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm) && in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1799 !mem->bo->tbo.pin_count) in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1800 amdgpu_amdkfd_remove_eviction_fence(mem->bo, in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1806 mutex_unlock(&mem->lock); in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1811 struct kgd_dev *kgd, struct kgd_mem *mem, bool intr) in amdgpu_amdkfd_gpuvm_sync_memory() argument
1818 mutex_lock(&mem->lock); in amdgpu_amdkfd_gpuvm_sync_memory()
1819 amdgpu_sync_clone(&mem->sync, &sync); in amdgpu_amdkfd_gpuvm_sync_memory()
1820 mutex_unlock(&mem->lock); in amdgpu_amdkfd_gpuvm_sync_memory()
1828 struct kgd_mem *mem, void **kptr, uint64_t *size) in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel() argument
1831 struct amdgpu_bo *bo = mem->bo; in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1841 mutex_lock(&mem->process_info->lock); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1862 bo, mem->process_info->eviction_fence); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1863 list_del_init(&mem->validate_list.head); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1870 mutex_unlock(&mem->process_info->lock); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1878 mutex_unlock(&mem->process_info->lock); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1883 void amdgpu_amdkfd_gpuvm_unmap_gtt_bo_from_kernel(struct kgd_dev *kgd, struct kgd_mem *mem) in amdgpu_amdkfd_gpuvm_unmap_gtt_bo_from_kernel() argument
1885 struct amdgpu_bo *bo = mem->bo; in amdgpu_amdkfd_gpuvm_unmap_gtt_bo_from_kernel()
1894 struct kfd_vm_fault_info *mem) in amdgpu_amdkfd_gpuvm_get_vm_fault_info() argument
1900 *mem = *adev->gmc.vm_fault_info; in amdgpu_amdkfd_gpuvm_get_vm_fault_info()
1910 struct kgd_mem **mem, uint64_t *size, in amdgpu_amdkfd_gpuvm_import_dmabuf() argument
1934 *mem = kzalloc(sizeof(struct kgd_mem), GFP_KERNEL); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1935 if (!*mem) in amdgpu_amdkfd_gpuvm_import_dmabuf()
1940 kfree(mem); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1950 INIT_LIST_HEAD(&(*mem)->attachments); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1951 mutex_init(&(*mem)->lock); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1953 (*mem)->alloc_flags = in amdgpu_amdkfd_gpuvm_import_dmabuf()
1960 (*mem)->bo = bo; in amdgpu_amdkfd_gpuvm_import_dmabuf()
1961 (*mem)->va = va; in amdgpu_amdkfd_gpuvm_import_dmabuf()
1962 (*mem)->domain = (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM) ? in amdgpu_amdkfd_gpuvm_import_dmabuf()
1964 (*mem)->mapped_to_gpu_memory = 0; in amdgpu_amdkfd_gpuvm_import_dmabuf()
1965 (*mem)->process_info = avm->process_info; in amdgpu_amdkfd_gpuvm_import_dmabuf()
1966 add_kgd_mem_to_kfd_bo_list(*mem, avm->process_info, false); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1967 amdgpu_sync_create(&(*mem)->sync); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1968 (*mem)->is_imported = true; in amdgpu_amdkfd_gpuvm_import_dmabuf()
1984 int amdgpu_amdkfd_evict_userptr(struct kgd_mem *mem, in amdgpu_amdkfd_evict_userptr() argument
1987 struct amdkfd_process_info *process_info = mem->process_info; in amdgpu_amdkfd_evict_userptr()
1991 atomic_inc(&mem->invalid); in amdgpu_amdkfd_evict_userptr()
2014 struct kgd_mem *mem, *tmp_mem; in update_invalid_user_pages() local
2022 list_for_each_entry_safe(mem, tmp_mem, in update_invalid_user_pages()
2025 if (!atomic_read(&mem->invalid)) in update_invalid_user_pages()
2028 bo = mem->bo; in update_invalid_user_pages()
2041 list_move_tail(&mem->validate_list.head, in update_invalid_user_pages()
2049 list_for_each_entry(mem, &process_info->userptr_inval_list, in update_invalid_user_pages()
2051 invalid = atomic_read(&mem->invalid); in update_invalid_user_pages()
2058 bo = mem->bo; in update_invalid_user_pages()
2086 if (atomic_cmpxchg(&mem->invalid, invalid, 0) != invalid) in update_invalid_user_pages()
2107 struct kgd_mem *mem, *tmp_mem; in validate_invalid_user_pages() local
2131 list_for_each_entry(mem, &process_info->userptr_inval_list, in validate_invalid_user_pages()
2133 list_add_tail(&mem->resv_list.head, &resv_list); in validate_invalid_user_pages()
2134 mem->resv_list.bo = mem->validate_list.bo; in validate_invalid_user_pages()
2135 mem->resv_list.num_shared = mem->validate_list.num_shared; in validate_invalid_user_pages()
2151 list_for_each_entry_safe(mem, tmp_mem, in validate_invalid_user_pages()
2156 bo = mem->bo; in validate_invalid_user_pages()
2160 amdgpu_bo_placement_from_domain(bo, mem->domain); in validate_invalid_user_pages()
2168 list_move_tail(&mem->validate_list.head, in validate_invalid_user_pages()
2177 list_for_each_entry(attachment, &mem->attachments, list) { in validate_invalid_user_pages()
2181 kfd_mem_dmaunmap_attachment(mem, attachment); in validate_invalid_user_pages()
2182 ret = update_gpuvm_pte(mem, attachment, &sync, NULL); in validate_invalid_user_pages()
2186 atomic_inc(&mem->invalid); in validate_invalid_user_pages()
2301 struct kgd_mem *mem; in amdgpu_amdkfd_gpuvm_restore_process_bos() local
2329 list_for_each_entry(mem, &process_info->kfd_bo_list, in amdgpu_amdkfd_gpuvm_restore_process_bos()
2332 list_add_tail(&mem->resv_list.head, &ctx.list); in amdgpu_amdkfd_gpuvm_restore_process_bos()
2333 mem->resv_list.bo = mem->validate_list.bo; in amdgpu_amdkfd_gpuvm_restore_process_bos()
2334 mem->resv_list.num_shared = mem->validate_list.num_shared; in amdgpu_amdkfd_gpuvm_restore_process_bos()
2358 list_for_each_entry(mem, &process_info->kfd_bo_list, in amdgpu_amdkfd_gpuvm_restore_process_bos()
2361 struct amdgpu_bo *bo = mem->bo; in amdgpu_amdkfd_gpuvm_restore_process_bos()
2362 uint32_t domain = mem->domain; in amdgpu_amdkfd_gpuvm_restore_process_bos()
2383 list_for_each_entry(attachment, &mem->attachments, list) { in amdgpu_amdkfd_gpuvm_restore_process_bos()
2387 kfd_mem_dmaunmap_attachment(mem, attachment); in amdgpu_amdkfd_gpuvm_restore_process_bos()
2388 ret = update_gpuvm_pte(mem, attachment, &sync_obj, NULL); in amdgpu_amdkfd_gpuvm_restore_process_bos()
2427 list_for_each_entry(mem, &process_info->kfd_bo_list, in amdgpu_amdkfd_gpuvm_restore_process_bos()
2429 amdgpu_bo_fence(mem->bo, in amdgpu_amdkfd_gpuvm_restore_process_bos()
2449 int amdgpu_amdkfd_add_gws_to_process(void *info, void *gws, struct kgd_mem **mem) in amdgpu_amdkfd_add_gws_to_process() argument
2458 *mem = kzalloc(sizeof(struct kgd_mem), GFP_KERNEL); in amdgpu_amdkfd_add_gws_to_process()
2459 if (!*mem) in amdgpu_amdkfd_add_gws_to_process()
2462 mutex_init(&(*mem)->lock); in amdgpu_amdkfd_add_gws_to_process()
2463 INIT_LIST_HEAD(&(*mem)->attachments); in amdgpu_amdkfd_add_gws_to_process()
2464 (*mem)->bo = amdgpu_bo_ref(gws_bo); in amdgpu_amdkfd_add_gws_to_process()
2465 (*mem)->domain = AMDGPU_GEM_DOMAIN_GWS; in amdgpu_amdkfd_add_gws_to_process()
2466 (*mem)->process_info = process_info; in amdgpu_amdkfd_add_gws_to_process()
2467 add_kgd_mem_to_kfd_bo_list(*mem, process_info, false); in amdgpu_amdkfd_add_gws_to_process()
2468 amdgpu_sync_create(&(*mem)->sync); in amdgpu_amdkfd_add_gws_to_process()
2472 mutex_lock(&(*mem)->process_info->lock); in amdgpu_amdkfd_add_gws_to_process()
2493 mutex_unlock(&(*mem)->process_info->lock); in amdgpu_amdkfd_add_gws_to_process()
2501 mutex_unlock(&(*mem)->process_info->lock); in amdgpu_amdkfd_add_gws_to_process()
2502 amdgpu_sync_free(&(*mem)->sync); in amdgpu_amdkfd_add_gws_to_process()
2503 remove_kgd_mem_from_kfd_bo_list(*mem, process_info); in amdgpu_amdkfd_add_gws_to_process()
2505 mutex_destroy(&(*mem)->lock); in amdgpu_amdkfd_add_gws_to_process()
2506 kfree(*mem); in amdgpu_amdkfd_add_gws_to_process()
2507 *mem = NULL; in amdgpu_amdkfd_add_gws_to_process()
2511 int amdgpu_amdkfd_remove_gws_from_process(void *info, void *mem) in amdgpu_amdkfd_remove_gws_from_process() argument
2515 struct kgd_mem *kgd_mem = (struct kgd_mem *)mem; in amdgpu_amdkfd_remove_gws_from_process()
2535 kfree(mem); in amdgpu_amdkfd_remove_gws_from_process()