Lines matching refs:pdd (references to the per-process device data, struct kfd_process_device, in the amdkfd SVM range code)

217 struct kfd_process_device *pdd; in svm_range_dma_map() local
220 pdd = kfd_process_device_from_gpuidx(p, gpuidx); in svm_range_dma_map()
221 if (!pdd) { in svm_range_dma_map()
226 r = svm_range_dma_map_dev(pdd->dev->adev, prange, offset, npages, in svm_range_dma_map()
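Most hits in this listing share one lookup-and-bail shape: resolve the per-process device data (pdd) for a GPU index, give up if the process never opened that GPU, then reach the amdgpu device through pdd->dev->adev. A minimal sketch of that shape, assuming the driver's per-range GPU bitmap walk and a hypothetical per-device helper do_dma_map_one() in place of the truncated svm_range_dma_map_dev() call:

/* Sketch of the gpuidx -> pdd pattern behind lines 217-226.
 * do_dma_map_one() is a hypothetical stand-in; the real call is
 * svm_range_dma_map_dev(pdd->dev->adev, prange, offset, npages, ...),
 * whose trailing arguments are cut off in the hit above. */
static int svm_range_dma_map_sketch(struct kfd_process *p,
				    unsigned long *gpu_bitmap)
{
	struct kfd_process_device *pdd;
	unsigned int gpuidx;
	int r = 0;

	for_each_set_bit(gpuidx, gpu_bitmap, MAX_GPU_INSTANCE) {
		pdd = kfd_process_device_from_gpuidx(p, gpuidx);
		if (!pdd) {
			pr_debug("failed to find device idx %d\n", gpuidx);
			return -EINVAL;
		}
		r = do_dma_map_one(pdd->dev->adev);	/* hypothetical */
		if (r)
			break;
	}
	return r;
}
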
255 struct kfd_process_device *pdd; in svm_range_dma_unmap() local
268 pdd = kfd_process_device_from_gpuidx(p, gpuidx); in svm_range_dma_unmap()
269 if (!pdd) { in svm_range_dma_unmap()
273 dev = &pdd->dev->adev->pdev->dev; in svm_range_dma_unmap()
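Line 273 is the interesting hit here: DMA unmapping needs the same struct device that created the mapping, and for an SVM range that is the GPU's PCI device, reached through the pdd chain. A hedged fragment; the address, size and direction are placeholders, not driver code:

/* struct device for the unmap, from the chain at line 273 */
struct device *dev = &pdd->dev->adev->pdev->dev;

dma_unmap_page(dev, dma_addr, PAGE_SIZE, DMA_BIDIRECTIONAL);
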
410 struct kfd_process_device *pdd; in svm_range_bo_release() local
421 pdd = kfd_get_process_device_data(svm_bo->node, p); in svm_range_bo_release()
422 if (pdd) in svm_range_bo_release()
423 atomic64_sub(amdgpu_bo_size(svm_bo->bo), &pdd->vram_usage); in svm_range_bo_release()
556 struct kfd_process_device *pdd; in svm_range_vram_node_new() local
649 pdd = svm_range_get_pdd_by_node(prange, node); in svm_range_vram_node_new()
650 if (pdd) in svm_range_vram_node_new()
651 atomic64_add(amdgpu_bo_size(bo), &pdd->vram_usage); in svm_range_vram_node_new()
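The svm_range_vram_node_new() and svm_range_bo_release() hits pair up into a simple accounting protocol: when a VRAM BO is allocated for a range, its size is added to the owning process-device's vram_usage counter, and the release path subtracts it again. A sketch of the allocation side, assembled from lines 649-651:

/* VRAM usage accounting from lines 421-423 and 649-651. A missing pdd
 * is tolerated ("if (pdd)"): the counter is informational, so a failed
 * lookup must not fail the allocation or the release path. */
pdd = svm_range_get_pdd_by_node(prange, node);
if (pdd)
	atomic64_add(amdgpu_bo_size(bo), &pdd->vram_usage);

/* and in svm_range_bo_release(): atomic64_sub() of the same size */
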
682 struct kfd_process_device *pdd; in svm_range_get_node_by_id() local
685 pdd = kfd_process_device_data_by_id(p, gpu_id); in svm_range_get_node_by_id()
686 if (!pdd) { in svm_range_get_node_by_id()
691 return pdd->dev; in svm_range_get_node_by_id()
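Lines 682-691 reconstruct into nearly the whole of svm_range_get_node_by_id(): a thin translation from a user-visible gpu_id to the kfd_node the process has open. In this sketch the kfd_process is taken as a parameter, since how the real function obtains it is not visible in the hits, and the debug message is guessed:

/* Thin gpu_id -> kfd_node translation (lines 685-691). */
static struct kfd_node *
svm_range_get_node_by_id_sketch(struct kfd_process *p, uint32_t gpu_id)
{
	struct kfd_process_device *pdd;

	pdd = kfd_process_device_data_by_id(p, gpu_id);
	if (!pdd) {
		pr_debug("failed to get device by id 0x%x\n", gpu_id);
		return NULL;
	}
	return pdd->dev;
}
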
1331 struct kfd_process_device *pdd; in svm_range_unmap_from_gpus() local
1354 pdd = kfd_process_device_from_gpuidx(p, gpuidx); in svm_range_unmap_from_gpus()
1355 if (!pdd) { in svm_range_unmap_from_gpus()
1360 kfd_smi_event_unmap_from_gpu(pdd->dev, p->lead_thread->pid, in svm_range_unmap_from_gpus()
1363 r = svm_range_unmap_from_gpu(pdd->dev->adev, in svm_range_unmap_from_gpus()
1364 drm_priv_to_vm(pdd->drm_priv), in svm_range_unmap_from_gpus()
1376 kfd_flush_tlb(pdd, TLB_FLUSH_HEAVYWEIGHT); in svm_range_unmap_from_gpus()
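The unmap path (lines 1331-1376) does three things per GPU: emit an SMI trace event, remove the GPU page-table entries for the range, and finish with a heavyweight TLB flush so no stale translations survive. A sketch of one loop iteration; both calls have their trailing arguments truncated in the hits above, so they are elided into comments rather than guessed:

/* One iteration of the per-GPU unmap loop (lines 1354-1376). */
pdd = kfd_process_device_from_gpuidx(p, gpuidx);
if (!pdd) {
	pr_debug("failed to find device idx %d\n", gpuidx);
	continue;
}
/* 1. trace the unmap for tools listening on the SMI event fd:
 *    kfd_smi_event_unmap_from_gpu(pdd->dev, p->lead_thread->pid, ...);
 * 2. tear down the GPU page tables:
 *    svm_range_unmap_from_gpu(pdd->dev->adev,
 *                             drm_priv_to_vm(pdd->drm_priv), ...);
 * 3. once every GPU is done:
 *    kfd_flush_tlb(pdd, TLB_FLUSH_HEAVYWEIGHT);
 */
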
1383 svm_range_map_to_gpu(struct kfd_process_device *pdd, struct svm_range *prange, in svm_range_map_to_gpu() argument
1388 struct amdgpu_device *adev = pdd->dev->adev; in svm_range_map_to_gpu()
1389 struct amdgpu_vm *vm = drm_priv_to_vm(pdd->drm_priv); in svm_range_map_to_gpu()
1415 pte_flags = svm_range_get_pte_flags(pdd->dev, prange, last_domain); in svm_range_map_to_gpu()
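svm_range_map_to_gpu() receives the pdd as a parameter (note the "argument" marker at line 1383) and unpacks everything it needs from it: the amdgpu device, the per-process GPU VM, and, via pdd->dev, the PTE flags appropriate to where the pages currently live. The unpacking, exactly as lines 1388-1415 show it:

/* pdd unpacking inside svm_range_map_to_gpu() (lines 1388-1415) */
struct amdgpu_device *adev = pdd->dev->adev;
struct amdgpu_vm *vm = drm_priv_to_vm(pdd->drm_priv);
uint64_t pte_flags;

/* flags depend on which domain (VRAM or system) the pages landed in */
pte_flags = svm_range_get_pte_flags(pdd->dev, prange, last_domain);
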
1464 struct kfd_process_device *pdd; in svm_range_map_to_gpus() local
1477 pdd = kfd_process_device_from_gpuidx(p, gpuidx); in svm_range_map_to_gpus()
1478 if (!pdd) { in svm_range_map_to_gpus()
1483 pdd = kfd_bind_process_to_device(pdd->dev, p); in svm_range_map_to_gpus()
1484 if (IS_ERR(pdd)) in svm_range_map_to_gpus()
1487 if (bo_adev && pdd->dev->adev != bo_adev && in svm_range_map_to_gpus()
1488 !amdgpu_xgmi_same_hive(pdd->dev->adev, bo_adev)) { in svm_range_map_to_gpus()
1493 r = svm_range_map_to_gpu(pdd, prange, offset, npages, readonly, in svm_range_map_to_gpus()
1510 kfd_flush_tlb(pdd, TLB_FLUSH_LEGACY); in svm_range_map_to_gpus()
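The map loop adds two steps the unmap loop does not have: the process is bound to the device before mapping (kfd_bind_process_to_device() returns an ERR_PTR on failure, line 1484), and VRAM-backed ranges are only mapped on GPUs that can actually reach the BO, meaning the BO's own GPU or a peer in the same XGMI hive. A sketch of the guards from lines 1477-1510, with the truncated map call elided into a comment:

/* Per-GPU guards in svm_range_map_to_gpus() (lines 1477-1510). */
pdd = kfd_process_device_from_gpuidx(p, gpuidx);
if (!pdd)
	return -EINVAL;

pdd = kfd_bind_process_to_device(pdd->dev, p);
if (IS_ERR(pdd))
	return PTR_ERR(pdd);

/* VRAM is only reachable from the BO's own GPU or its XGMI hive */
if (bo_adev && pdd->dev->adev != bo_adev &&
    !amdgpu_xgmi_same_hive(pdd->dev->adev, bo_adev))
	continue;	/* skip this GPU; others may still map */

/* then: svm_range_map_to_gpu(pdd, prange, offset, npages, readonly, ...)
 * followed by kfd_flush_tlb(pdd, TLB_FLUSH_LEGACY), a lighter flush
 * than the heavyweight one the unmap path uses. */
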
1526 struct kfd_process_device *pdd; in svm_range_reserve_bos() local
1534 pdd = kfd_process_device_from_gpuidx(ctx->process, gpuidx); in svm_range_reserve_bos()
1535 if (!pdd) { in svm_range_reserve_bos()
1540 vm = drm_priv_to_vm(pdd->drm_priv); in svm_range_reserve_bos()
1552 pdd = kfd_process_device_from_gpuidx(ctx->process, gpuidx); in svm_range_reserve_bos()
1553 if (!pdd) { in svm_range_reserve_bos()
1559 r = amdgpu_vm_validate(pdd->dev->adev, in svm_range_reserve_bos()
1560 drm_priv_to_vm(pdd->drm_priv), NULL, in svm_range_reserve_bos()
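svm_range_reserve_bos() walks the GPU list twice: once (line 1534) to collect each per-process GPU VM into the reservation, and once (line 1552) to validate each VM's evicted BOs with amdgpu_vm_validate(). Only adev, vm and the NULL ticket are visible in the hit at lines 1559-1560; the per-BO validation callback and its argument are cut off, so this sketch leaves them as a comment:

/* Second pass of svm_range_reserve_bos() (lines 1552-1560). */
pdd = kfd_process_device_from_gpuidx(ctx->process, gpuidx);
if (!pdd)
	return -EINVAL;	/* the real code unwinds the reservation first */

/* r = amdgpu_vm_validate(pdd->dev->adev,
 *                        drm_priv_to_vm(pdd->drm_priv), NULL,
 *                        <per-BO validate callback>, <its argument>); */
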
1582 struct kfd_process_device *pdd; in kfd_svm_page_owner() local
1584 pdd = kfd_process_device_from_gpuidx(p, gpuidx); in kfd_svm_page_owner()
1585 if (!pdd) in kfd_svm_page_owner()
1588 return SVM_ADEV_PGMAP_OWNER(pdd->dev->adev); in kfd_svm_page_owner()
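Lines 1582-1588 reconstruct almost verbatim into the whole helper: HMM needs an owner cookie to tell this driver's device-private pages from another driver's, and KFD derives it per GPU through the pdd. Only the function's exact signature is assumed here:

/* Near-complete reconstruction of kfd_svm_page_owner() from the hits. */
static void *kfd_svm_page_owner(struct kfd_process *p, int32_t gpuidx)
{
	struct kfd_process_device *pdd;

	pdd = kfd_process_device_from_gpuidx(p, gpuidx);
	if (!pdd)
		return NULL;

	return SVM_ADEV_PGMAP_OWNER(pdd->dev->adev);
}
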
2293 struct kfd_process_device *pdd; in svm_range_drain_retry_fault() local
2300 pdd = p->pdds[i]; in svm_range_drain_retry_fault()
2301 if (!pdd) in svm_range_drain_retry_fault()
2306 amdgpu_ih_wait_on_checkpoint_process_ts(pdd->dev->adev, in svm_range_drain_retry_fault()
2307 pdd->dev->adev->irq.retry_cam_enabled ? in svm_range_drain_retry_fault()
2308 &pdd->dev->adev->irq.ih : in svm_range_drain_retry_fault()
2309 &pdd->dev->adev->irq.ih1); in svm_range_drain_retry_fault()
2311 if (pdd->dev->adev->irq.retry_cam_enabled) in svm_range_drain_retry_fault()
2312 amdgpu_ih_wait_on_checkpoint_process_ts(pdd->dev->adev, in svm_range_drain_retry_fault()
2313 &pdd->dev->adev->irq.ih_soft); in svm_range_drain_retry_fault()
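svm_range_drain_retry_fault() switches from gpuidx lookups to a direct walk of the process's pdd array, since it must drain every GPU the process has open, not a per-range subset. The ternary at lines 2307-2309 picks which interrupt ring to drain: with the retry CAM enabled, retry faults arrive on the main ring (ih) and are re-routed to the soft ring, so both are drained; without it they arrive on ih1. The loop, reassembled from lines 2300-2313 (the loop bound over p->n_pdds is the usual pdd-array idiom):

/* Reassembled from lines 2300-2313. */
int i;

for (i = 0; i < p->n_pdds; i++) {
	struct kfd_process_device *pdd = p->pdds[i];

	if (!pdd)
		continue;

	/* wait until the IH ring has processed all faults seen so far */
	amdgpu_ih_wait_on_checkpoint_process_ts(pdd->dev->adev,
			pdd->dev->adev->irq.retry_cam_enabled ?
			&pdd->dev->adev->irq.ih :
			&pdd->dev->adev->irq.ih1);

	/* with the retry CAM, faults are re-routed to the soft ring too */
	if (pdd->dev->adev->irq.retry_cam_enabled)
		amdgpu_ih_wait_on_checkpoint_process_ts(pdd->dev->adev,
				&pdd->dev->adev->irq.ih_soft);
}
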
2486 struct kfd_process_device *pdd; in svm_range_unmap_from_cpu() local
2491 pdd = p->pdds[i]; in svm_range_unmap_from_cpu()
2492 if (!pdd) in svm_range_unmap_from_cpu()
2495 adev = pdd->dev->adev; in svm_range_unmap_from_cpu()
2913 struct kfd_process_device *pdd; in svm_range_count_fault() local
2931 pdd = kfd_process_device_from_gpuidx(p, gpuidx); in svm_range_count_fault()
2932 if (pdd) in svm_range_count_fault()
2933 WRITE_ONCE(pdd->faults, pdd->faults + 1); in svm_range_count_fault()
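The fault counter at line 2933 uses WRITE_ONCE() rather than an atomic: the counter is a best-effort statistic, presumably read locklessly elsewhere, so the store must be marked for the compiler but a lost increment under contention is acceptable. Note also that a failed lookup is silently ignored here, unlike the -EINVAL paths above:

/* Best-effort per-device fault statistic (lines 2931-2933). */
pdd = kfd_process_device_from_gpuidx(p, gpuidx);
if (pdd)	/* missing pdd: don't count, don't fail the fault */
	WRITE_ONCE(pdd->faults, pdd->faults + 1);
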
3420 struct kfd_process_device *pdd; in svm_range_best_prefetch_location() local
3449 pdd = kfd_process_device_from_gpuidx(p, gpuidx); in svm_range_best_prefetch_location()
3450 if (!pdd) { in svm_range_best_prefetch_location()
3455 if (pdd->dev->adev == bo_node->adev) in svm_range_best_prefetch_location()
3458 if (!svm_nodes_in_same_hive(pdd->dev, bo_node)) { in svm_range_best_prefetch_location()
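Prefetch placement reuses the XGMI reachability rule from the map path: if any GPU that needs access can reach the proposed VRAM node neither as the same device (line 3455) nor through the same hive (line 3458), the prefetch location cannot stand. A sketch of the per-GPU check; the fallback action is an assumption:

/* Reachability check in svm_range_best_prefetch_location()
 * (lines 3449-3458). */
pdd = kfd_process_device_from_gpuidx(p, gpuidx);
if (!pdd) {
	pr_debug("failed to get device by idx 0x%x\n", gpuidx);
	continue;
}
if (pdd->dev->adev == bo_node->adev)
	continue;	/* same GPU: its VRAM is trivially reachable */
if (!svm_nodes_in_same_hive(pdd->dev, bo_node)) {
	best_loc = 0;	/* assumed: fall back to system memory */
	break;
}
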
4167 struct kfd_process_device *pdd = p->pdds[index]; in kfd_criu_checkpoint_svm() local
4171 query_attr[index + nattr_common].value = pdd->user_gpu_id; in kfd_criu_checkpoint_svm()
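Finally, CRIU checkpointing records per-GPU attributes using the user-visible GPU id (pdd->user_gpu_id) rather than the kernel-internal gpuidx, presumably so the ids stay meaningful when the process is restored on a re-enumerated system. A sketch of the attribute fill around line 4171; the attribute type is an assumption, while the index layout (nattr_common fixed entries first, then one slot per pdd) follows from the hit itself:

/* Filling per-GPU query attributes (line 4171). */
for (index = 0; index < p->n_pdds; index++) {
	struct kfd_process_device *pdd = p->pdds[index];

	query_attr[index + nattr_common].type = KFD_IOCTL_SVM_ATTR_ACCESS;	/* assumed */
	query_attr[index + nattr_common].value = pdd->user_gpu_id;
}
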