Lines Matching refs:adev
70 void amdgpu_amdkfd_device_probe(struct amdgpu_device *adev) in amdgpu_amdkfd_device_probe() argument
72 bool vf = amdgpu_sriov_vf(adev); in amdgpu_amdkfd_device_probe()
77 adev->kfd.dev = kgd2kfd_probe(adev, vf); in amdgpu_amdkfd_device_probe()
93 static void amdgpu_doorbell_get_kfd_info(struct amdgpu_device *adev, in amdgpu_doorbell_get_kfd_info() argument
102 if (adev->enable_mes) { in amdgpu_doorbell_get_kfd_info()
109 *aperture_base = adev->doorbell.base; in amdgpu_doorbell_get_kfd_info()
112 } else if (adev->doorbell.size > adev->doorbell.num_doorbells * in amdgpu_doorbell_get_kfd_info()
114 *aperture_base = adev->doorbell.base; in amdgpu_doorbell_get_kfd_info()
115 *aperture_size = adev->doorbell.size; in amdgpu_doorbell_get_kfd_info()
116 *start_offset = adev->doorbell.num_doorbells * sizeof(u32); in amdgpu_doorbell_get_kfd_info()
127 struct amdgpu_device *adev = container_of(work, struct amdgpu_device, in amdgpu_amdkfd_reset_work() local
135 reset_context.reset_req_dev = adev; in amdgpu_amdkfd_reset_work()
138 amdgpu_device_gpu_recover(adev, NULL, &reset_context); in amdgpu_amdkfd_reset_work()
141 void amdgpu_amdkfd_device_init(struct amdgpu_device *adev) in amdgpu_amdkfd_device_init() argument
146 if (adev->kfd.dev) { in amdgpu_amdkfd_device_init()
150 ((1 << adev->vm_manager.first_kfd_vmid) - 1), in amdgpu_amdkfd_device_init()
151 .num_pipe_per_mec = adev->gfx.mec.num_pipe_per_mec, in amdgpu_amdkfd_device_init()
152 .num_queue_per_pipe = adev->gfx.mec.num_queue_per_pipe, in amdgpu_amdkfd_device_init()
153 .gpuvm_size = min(adev->vm_manager.max_pfn in amdgpu_amdkfd_device_init()
156 .drm_render_minor = adev_to_drm(adev)->render->index, in amdgpu_amdkfd_device_init()
157 .sdma_doorbell_idx = adev->doorbell_index.sdma_engine, in amdgpu_amdkfd_device_init()
158 .enable_mes = adev->enable_mes, in amdgpu_amdkfd_device_init()
165 adev->gfx.mec.queue_bitmap, in amdgpu_amdkfd_device_init()
172 * adev->gfx.mec.num_pipe_per_mec in amdgpu_amdkfd_device_init()
173 * adev->gfx.mec.num_queue_per_pipe; in amdgpu_amdkfd_device_init()
177 amdgpu_doorbell_get_kfd_info(adev, in amdgpu_amdkfd_device_init()
190 if (adev->asic_type >= CHIP_VEGA10) { in amdgpu_amdkfd_device_init()
192 adev->doorbell_index.first_non_cp; in amdgpu_amdkfd_device_init()
194 adev->doorbell_index.last_non_cp; in amdgpu_amdkfd_device_init()
197 adev->kfd.init_complete = kgd2kfd_device_init(adev->kfd.dev, in amdgpu_amdkfd_device_init()
200 amdgpu_amdkfd_total_mem_size += adev->gmc.real_vram_size; in amdgpu_amdkfd_device_init()
202 INIT_WORK(&adev->kfd.reset_work, amdgpu_amdkfd_reset_work); in amdgpu_amdkfd_device_init()
206 void amdgpu_amdkfd_device_fini_sw(struct amdgpu_device *adev) in amdgpu_amdkfd_device_fini_sw() argument
208 if (adev->kfd.dev) { in amdgpu_amdkfd_device_fini_sw()
209 kgd2kfd_device_exit(adev->kfd.dev); in amdgpu_amdkfd_device_fini_sw()
210 adev->kfd.dev = NULL; in amdgpu_amdkfd_device_fini_sw()
211 amdgpu_amdkfd_total_mem_size -= adev->gmc.real_vram_size; in amdgpu_amdkfd_device_fini_sw()
215 void amdgpu_amdkfd_interrupt(struct amdgpu_device *adev, in amdgpu_amdkfd_interrupt() argument
218 if (adev->kfd.dev) in amdgpu_amdkfd_interrupt()
219 kgd2kfd_interrupt(adev->kfd.dev, ih_ring_entry); in amdgpu_amdkfd_interrupt()
222 void amdgpu_amdkfd_suspend(struct amdgpu_device *adev, bool run_pm) in amdgpu_amdkfd_suspend() argument
224 if (adev->kfd.dev) in amdgpu_amdkfd_suspend()
225 kgd2kfd_suspend(adev->kfd.dev, run_pm); in amdgpu_amdkfd_suspend()
228 int amdgpu_amdkfd_resume_iommu(struct amdgpu_device *adev) in amdgpu_amdkfd_resume_iommu() argument
232 if (adev->kfd.dev) in amdgpu_amdkfd_resume_iommu()
233 r = kgd2kfd_resume_iommu(adev->kfd.dev); in amdgpu_amdkfd_resume_iommu()
238 int amdgpu_amdkfd_resume(struct amdgpu_device *adev, bool run_pm) in amdgpu_amdkfd_resume() argument
242 if (adev->kfd.dev) in amdgpu_amdkfd_resume()
243 r = kgd2kfd_resume(adev->kfd.dev, run_pm); in amdgpu_amdkfd_resume()
248 int amdgpu_amdkfd_pre_reset(struct amdgpu_device *adev) in amdgpu_amdkfd_pre_reset() argument
252 if (adev->kfd.dev) in amdgpu_amdkfd_pre_reset()
253 r = kgd2kfd_pre_reset(adev->kfd.dev); in amdgpu_amdkfd_pre_reset()
258 int amdgpu_amdkfd_post_reset(struct amdgpu_device *adev) in amdgpu_amdkfd_post_reset() argument
262 if (adev->kfd.dev) in amdgpu_amdkfd_post_reset()
263 r = kgd2kfd_post_reset(adev->kfd.dev); in amdgpu_amdkfd_post_reset()
268 void amdgpu_amdkfd_gpu_reset(struct amdgpu_device *adev) in amdgpu_amdkfd_gpu_reset() argument
270 if (amdgpu_device_should_recover_gpu(adev)) in amdgpu_amdkfd_gpu_reset()
271 amdgpu_reset_domain_schedule(adev->reset_domain, in amdgpu_amdkfd_gpu_reset()
272 &adev->kfd.reset_work); in amdgpu_amdkfd_gpu_reset()
275 int amdgpu_amdkfd_alloc_gtt_mem(struct amdgpu_device *adev, size_t size, in amdgpu_amdkfd_alloc_gtt_mem() argument
296 r = amdgpu_bo_create(adev, &bp, &bo); in amdgpu_amdkfd_alloc_gtt_mem()
298 dev_err(adev->dev, in amdgpu_amdkfd_alloc_gtt_mem()
306 dev_err(adev->dev, "(%d) failed to reserve bo for amdkfd\n", r); in amdgpu_amdkfd_alloc_gtt_mem()
312 dev_err(adev->dev, "(%d) failed to pin bo for amdkfd\n", r); in amdgpu_amdkfd_alloc_gtt_mem()
318 dev_err(adev->dev, "%p bind failed\n", bo); in amdgpu_amdkfd_alloc_gtt_mem()
324 dev_err(adev->dev, in amdgpu_amdkfd_alloc_gtt_mem()
347 void amdgpu_amdkfd_free_gtt_mem(struct amdgpu_device *adev, void *mem_obj) in amdgpu_amdkfd_free_gtt_mem() argument
358 int amdgpu_amdkfd_alloc_gws(struct amdgpu_device *adev, size_t size, in amdgpu_amdkfd_alloc_gws() argument
375 r = amdgpu_bo_create_user(adev, &bp, &ubo); in amdgpu_amdkfd_alloc_gws()
377 dev_err(adev->dev, in amdgpu_amdkfd_alloc_gws()
387 void amdgpu_amdkfd_free_gws(struct amdgpu_device *adev, void *mem_obj) in amdgpu_amdkfd_free_gws() argument
394 uint32_t amdgpu_amdkfd_get_fw_version(struct amdgpu_device *adev, in amdgpu_amdkfd_get_fw_version() argument
399 return adev->gfx.pfp_fw_version; in amdgpu_amdkfd_get_fw_version()
402 return adev->gfx.me_fw_version; in amdgpu_amdkfd_get_fw_version()
405 return adev->gfx.ce_fw_version; in amdgpu_amdkfd_get_fw_version()
408 return adev->gfx.mec_fw_version; in amdgpu_amdkfd_get_fw_version()
411 return adev->gfx.mec2_fw_version; in amdgpu_amdkfd_get_fw_version()
414 return adev->gfx.rlc_fw_version; in amdgpu_amdkfd_get_fw_version()
417 return adev->sdma.instance[0].fw_version; in amdgpu_amdkfd_get_fw_version()
420 return adev->sdma.instance[1].fw_version; in amdgpu_amdkfd_get_fw_version()
429 void amdgpu_amdkfd_get_local_mem_info(struct amdgpu_device *adev, in amdgpu_amdkfd_get_local_mem_info() argument
434 mem_info->local_mem_size_public = adev->gmc.visible_vram_size; in amdgpu_amdkfd_get_local_mem_info()
435 mem_info->local_mem_size_private = adev->gmc.real_vram_size - in amdgpu_amdkfd_get_local_mem_info()
436 adev->gmc.visible_vram_size; in amdgpu_amdkfd_get_local_mem_info()
438 mem_info->vram_width = adev->gmc.vram_width; in amdgpu_amdkfd_get_local_mem_info()
441 &adev->gmc.aper_base, in amdgpu_amdkfd_get_local_mem_info()
445 if (amdgpu_sriov_vf(adev)) in amdgpu_amdkfd_get_local_mem_info()
446 mem_info->mem_clk_max = adev->clock.default_mclk / 100; in amdgpu_amdkfd_get_local_mem_info()
447 else if (adev->pm.dpm_enabled) { in amdgpu_amdkfd_get_local_mem_info()
451 mem_info->mem_clk_max = amdgpu_dpm_get_mclk(adev, false) / 100; in amdgpu_amdkfd_get_local_mem_info()
456 uint64_t amdgpu_amdkfd_get_gpu_clock_counter(struct amdgpu_device *adev) in amdgpu_amdkfd_get_gpu_clock_counter() argument
458 if (adev->gfx.funcs->get_gpu_clock_counter) in amdgpu_amdkfd_get_gpu_clock_counter()
459 return adev->gfx.funcs->get_gpu_clock_counter(adev); in amdgpu_amdkfd_get_gpu_clock_counter()
463 uint32_t amdgpu_amdkfd_get_max_engine_clock_in_mhz(struct amdgpu_device *adev) in amdgpu_amdkfd_get_max_engine_clock_in_mhz() argument
466 if (amdgpu_sriov_vf(adev)) in amdgpu_amdkfd_get_max_engine_clock_in_mhz()
467 return adev->clock.default_sclk / 100; in amdgpu_amdkfd_get_max_engine_clock_in_mhz()
468 else if (adev->pm.dpm_enabled) in amdgpu_amdkfd_get_max_engine_clock_in_mhz()
469 return amdgpu_dpm_get_sclk(adev, false) / 100; in amdgpu_amdkfd_get_max_engine_clock_in_mhz()
474 void amdgpu_amdkfd_get_cu_info(struct amdgpu_device *adev, struct kfd_cu_info *cu_info) in amdgpu_amdkfd_get_cu_info() argument
476 struct amdgpu_cu_info acu_info = adev->gfx.cu_info; in amdgpu_amdkfd_get_cu_info()
486 cu_info->num_shader_engines = adev->gfx.config.max_shader_engines; in amdgpu_amdkfd_get_cu_info()
487 cu_info->num_shader_arrays_per_engine = adev->gfx.config.max_sh_per_se; in amdgpu_amdkfd_get_cu_info()
488 cu_info->num_cu_per_sh = adev->gfx.config.max_cu_per_sh; in amdgpu_amdkfd_get_cu_info()
496 int amdgpu_amdkfd_get_dmabuf_info(struct amdgpu_device *adev, int dma_buf_fd, in amdgpu_amdkfd_get_dmabuf_info() argument
517 if (obj->dev->driver != adev_to_drm(adev)->driver) in amdgpu_amdkfd_get_dmabuf_info()
521 adev = drm_to_adev(obj->dev); in amdgpu_amdkfd_get_dmabuf_info()
530 *dmabuf_adev = adev; in amdgpu_amdkfd_get_dmabuf_info()
554 struct amdgpu_device *adev = dst; in amdgpu_amdkfd_get_xgmi_hops_count() local
555 int ret = amdgpu_xgmi_get_hops_count(adev, peer_adev); in amdgpu_amdkfd_get_xgmi_hops_count()
559 adev->gmc.xgmi.physical_node_id, in amdgpu_amdkfd_get_xgmi_hops_count()
570 struct amdgpu_device *adev = dst, *peer_adev; in amdgpu_amdkfd_get_xgmi_bandwidth_mbytes() local
573 if (adev->asic_type != CHIP_ALDEBARAN) in amdgpu_amdkfd_get_xgmi_bandwidth_mbytes()
580 num_links = is_min ? 1 : amdgpu_xgmi_get_num_links(adev, peer_adev); in amdgpu_amdkfd_get_xgmi_bandwidth_mbytes()
583 adev->gmc.xgmi.physical_node_id, in amdgpu_amdkfd_get_xgmi_bandwidth_mbytes()
592 int amdgpu_amdkfd_get_pcie_bandwidth_mbytes(struct amdgpu_device *adev, bool is_min) in amdgpu_amdkfd_get_pcie_bandwidth_mbytes() argument
594 int num_lanes_shift = (is_min ? ffs(adev->pm.pcie_mlw_mask) : in amdgpu_amdkfd_get_pcie_bandwidth_mbytes()
595 fls(adev->pm.pcie_mlw_mask)) - 1; in amdgpu_amdkfd_get_pcie_bandwidth_mbytes()
596 int gen_speed_shift = (is_min ? ffs(adev->pm.pcie_gen_mask & in amdgpu_amdkfd_get_pcie_bandwidth_mbytes()
598 fls(adev->pm.pcie_gen_mask & in amdgpu_amdkfd_get_pcie_bandwidth_mbytes()
649 int amdgpu_amdkfd_submit_ib(struct amdgpu_device *adev, in amdgpu_amdkfd_submit_ib() argument
662 ring = &adev->gfx.compute_ring[0]; in amdgpu_amdkfd_submit_ib()
665 ring = &adev->sdma.instance[0].ring; in amdgpu_amdkfd_submit_ib()
668 ring = &adev->sdma.instance[1].ring; in amdgpu_amdkfd_submit_ib()
676 ret = amdgpu_job_alloc(adev, NULL, NULL, NULL, 1, &job); in amdgpu_amdkfd_submit_ib()
707 void amdgpu_amdkfd_set_compute_idle(struct amdgpu_device *adev, bool idle) in amdgpu_amdkfd_set_compute_idle() argument
712 if (IP_VERSION_MAJ(adev->ip_versions[GC_HWIP][0]) == 11) { in amdgpu_amdkfd_set_compute_idle()
714 amdgpu_gfx_off_ctrl(adev, idle); in amdgpu_amdkfd_set_compute_idle()
716 amdgpu_dpm_switch_power_profile(adev, in amdgpu_amdkfd_set_compute_idle()
721 bool amdgpu_amdkfd_is_kfd_vmid(struct amdgpu_device *adev, u32 vmid) in amdgpu_amdkfd_is_kfd_vmid() argument
723 if (adev->kfd.dev) in amdgpu_amdkfd_is_kfd_vmid()
724 return vmid >= adev->vm_manager.first_kfd_vmid; in amdgpu_amdkfd_is_kfd_vmid()
729 int amdgpu_amdkfd_flush_gpu_tlb_vmid(struct amdgpu_device *adev, in amdgpu_amdkfd_flush_gpu_tlb_vmid() argument
732 if (adev->family == AMDGPU_FAMILY_AI) { in amdgpu_amdkfd_flush_gpu_tlb_vmid()
735 for (i = 0; i < adev->num_vmhubs; i++) in amdgpu_amdkfd_flush_gpu_tlb_vmid()
736 amdgpu_gmc_flush_gpu_tlb(adev, vmid, i, 0); in amdgpu_amdkfd_flush_gpu_tlb_vmid()
738 amdgpu_gmc_flush_gpu_tlb(adev, vmid, AMDGPU_GFXHUB_0, 0); in amdgpu_amdkfd_flush_gpu_tlb_vmid()
744 int amdgpu_amdkfd_flush_gpu_tlb_pasid(struct amdgpu_device *adev, in amdgpu_amdkfd_flush_gpu_tlb_pasid() argument
749 if (adev->family == AMDGPU_FAMILY_AI || in amdgpu_amdkfd_flush_gpu_tlb_pasid()
750 adev->family == AMDGPU_FAMILY_RV) in amdgpu_amdkfd_flush_gpu_tlb_pasid()
753 return amdgpu_gmc_flush_gpu_tlb_pasid(adev, pasid, flush_type, all_hub); in amdgpu_amdkfd_flush_gpu_tlb_pasid()
756 bool amdgpu_amdkfd_have_atomics_support(struct amdgpu_device *adev) in amdgpu_amdkfd_have_atomics_support() argument
758 return adev->have_atomics_support; in amdgpu_amdkfd_have_atomics_support()
761 void amdgpu_amdkfd_ras_poison_consumption_handler(struct amdgpu_device *adev, bool reset) in amdgpu_amdkfd_ras_poison_consumption_handler() argument
763 amdgpu_umc_poison_handler(adev, reset); in amdgpu_amdkfd_ras_poison_consumption_handler()
766 bool amdgpu_amdkfd_ras_query_utcl2_poison_status(struct amdgpu_device *adev) in amdgpu_amdkfd_ras_query_utcl2_poison_status() argument
768 if (adev->gfx.ras && adev->gfx.ras->query_utcl2_poison_status) in amdgpu_amdkfd_ras_query_utcl2_poison_status()
769 return adev->gfx.ras->query_utcl2_poison_status(adev); in amdgpu_amdkfd_ras_query_utcl2_poison_status()
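
Note: the hits above share one recurring shape: each amdgpu_amdkfd_* entry point receives the struct amdgpu_device pointer, checks whether adev->kfd.dev was populated by kgd2kfd_probe(), and only then forwards into the kgd2kfd_* interface (see the amdgpu_amdkfd_interrupt, amdgpu_amdkfd_suspend and amdgpu_amdkfd_resume hits). The standalone C sketch below is illustrative only, not part of the listed file; the structures and the kgd2kfd stub are hypothetical stand-ins used solely to model that NULL-guard pattern.

/* Minimal sketch of the adev->kfd.dev guard pattern visible in the listing.
 * All types below are hypothetical stand-ins, not the real amdgpu/KFD
 * structures; the point is only the "probe fills kfd.dev, wrappers check it
 * before forwarding" control flow.
 */
#include <stdio.h>
#include <stdbool.h>
#include <stddef.h>

struct kfd_dev;                        /* opaque, as in the kgd2kfd interface */

struct amdgpu_kfd_dev {
	struct kfd_dev *dev;           /* NULL until probe succeeds */
	bool init_complete;
};

struct amdgpu_device {
	struct amdgpu_kfd_dev kfd;
};

/* Stand-in for kgd2kfd_suspend(); the real call lives on the KFD side. */
static void kgd2kfd_suspend_stub(struct kfd_dev *kfd, bool run_pm)
{
	printf("kgd2kfd suspend, run_pm=%d\n", run_pm);
}

/* Mirrors the shape of amdgpu_amdkfd_suspend() in the listing:
 * forward to the KFD layer only when a KFD device was actually probed.
 */
static void amdgpu_amdkfd_suspend_sketch(struct amdgpu_device *adev, bool run_pm)
{
	if (adev->kfd.dev)
		kgd2kfd_suspend_stub(adev->kfd.dev, run_pm);
}

int main(void)
{
	struct amdgpu_device adev = { .kfd = { .dev = NULL } };

	/* No KFD device probed: the wrapper is a harmless no-op. */
	amdgpu_amdkfd_suspend_sketch(&adev, true);

	/* Once "probe" has filled in kfd.dev, the call is forwarded. */
	struct kfd_dev *fake = (struct kfd_dev *)&adev;  /* placeholder pointer */
	adev.kfd.dev = fake;
	amdgpu_amdkfd_suspend_sketch(&adev, true);

	return 0;
}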