
Searched refs:adev (Results 1 – 25 of 913) sorted by relevance

/drivers/gpu/drm/amd/amdgpu/
amdgpu_device.c
734 ret = adev->pcie_rreg(adev, reg * 4); in amdgpu_device_rreg()
802 ret = adev->pcie_rreg(adev, reg * 4); in amdgpu_device_xcc_rreg()
861 adev->pcie_wreg(adev, reg * 4, v); in amdgpu_device_wreg()
890 adev->pcie_wreg(adev, reg * 4, v); in amdgpu_mm_wreg_mmio_rlc()
933 adev->pcie_wreg(adev, reg * 4, v); in amdgpu_device_xcc_wreg()
2479 adev->ip_blocks[adev->num_ip_blocks].adev = adev; in amdgpu_device_ip_block_add()
3391 adev = gpu_ins->adev; in amdgpu_device_enable_mgpu_fan_boost()
4226 adev->sdma_timeout = adev->video_timeout = adev->gfx_timeout; in amdgpu_device_get_job_timeout_settings()
4268 adev->sdma_timeout = adev->video_timeout = adev->gfx_timeout; in amdgpu_device_get_job_timeout_settings()
4473 adev->gfx.enforce_isolation[i].adev = adev; in amdgpu_device_init()
[all …]
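
The amdgpu_device.c hits above route every register access through per-device callbacks (adev->pcie_rreg / adev->pcie_wreg) and turn the dword register index into a byte offset with reg * 4. Below is a minimal, self-contained C sketch of that indirection; the mock_* types and the array-backed fake aperture are invented for illustration and are not the real driver structures.

    /* Hypothetical mock of the register-access indirection seen in the
     * amdgpu_device.c hits: the device carries pcie_rreg/pcie_wreg callbacks
     * and the wrappers convert a dword register index to a byte offset. */
    #include <stdint.h>
    #include <stdio.h>

    struct mock_amdgpu_device {
        uint32_t (*pcie_rreg)(struct mock_amdgpu_device *adev, uint32_t byte_off);
        void (*pcie_wreg)(struct mock_amdgpu_device *adev, uint32_t byte_off, uint32_t v);
        uint32_t regs[256];   /* stand-in for a real MMIO aperture */
    };

    static uint32_t fake_pcie_rreg(struct mock_amdgpu_device *adev, uint32_t byte_off)
    {
        return adev->regs[byte_off / 4];
    }

    static void fake_pcie_wreg(struct mock_amdgpu_device *adev, uint32_t byte_off, uint32_t v)
    {
        adev->regs[byte_off / 4] = v;
    }

    /* Mirrors the call shape of amdgpu_device_rreg()/amdgpu_device_wreg(). */
    static uint32_t device_rreg(struct mock_amdgpu_device *adev, uint32_t reg)
    {
        return adev->pcie_rreg(adev, reg * 4);
    }

    static void device_wreg(struct mock_amdgpu_device *adev, uint32_t reg, uint32_t v)
    {
        adev->pcie_wreg(adev, reg * 4, v);
    }

    int main(void)
    {
        struct mock_amdgpu_device dev = {
            .pcie_rreg = fake_pcie_rreg,
            .pcie_wreg = fake_pcie_wreg,
        };

        device_wreg(&dev, 7, 0xdeadbeef);
        printf("reg 7 = 0x%08x\n", (unsigned int)device_rreg(&dev, 7));
        return 0;
    }
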
gmc_v9_0.c
981 struct amdgpu_device *adev = ring->adev; in gmc_v9_0_emit_flush_gpu_tlb() local
1029 struct amdgpu_device *adev = ring->adev; in gmc_v9_0_emit_pasid_mapping() local
1599 adev->smuio.funcs->get_pkg_type(adev); in gmc_v9_0_early_init()
1684 adev->vm_manager.vram_base_offset = adev->gfxhub.funcs->get_mc_fb_offset(adev); in gmc_v9_0_vram_gtt_location()
1853 adev->gfxhub.funcs->init(adev); in gmc_v9_0_sw_init()
1855 adev->mmhub.funcs->init(adev); in gmc_v9_0_sw_init()
2150 r = adev->mmhub.funcs->gart_enable(adev); in gmc_v9_0_gart_enable()
2195 adev->hdp.funcs->init_registers(adev); in gmc_v9_0_hw_init()
2217 adev->umc.funcs->init_registers(adev); in gmc_v9_0_hw_init()
2239 adev->gfxhub.funcs->gart_disable(adev); in gmc_v9_0_gart_disable()
[all …]
gmc_v10_0.c
425 struct amdgpu_device *adev = ring->adev; in gmc_v10_0_emit_pasid_mapping() local
683 adev->vm_manager.vram_base_offset = adev->gfxhub.funcs->get_mc_fb_offset(adev); in gmc_v10_0_vram_gtt_location()
718 adev->gmc.aper_base = adev->gfxhub.funcs->get_mc_fb_offset(adev); in gmc_v10_0_mc_init()
773 adev->gfxhub.funcs->init(adev); in gmc_v10_0_sw_init()
775 adev->mmhub.funcs->init(adev); in gmc_v10_0_sw_init()
958 r = adev->mmhub.funcs->gart_enable(adev); in gmc_v10_0_gart_enable()
962 adev->hdp.funcs->init_registers(adev); in gmc_v10_0_gart_enable()
999 adev->gfxhub.funcs->utcl2_harvest(adev); in gmc_v10_0_hw_init()
1012 adev->umc.funcs->init_registers(adev); in gmc_v10_0_hw_init()
1027 adev->gfxhub.funcs->gart_disable(adev); in gmc_v10_0_gart_disable()
[all …]
gmc_v11_0.c
76 if (!adev->in_s0ix && (adev->in_runpm || adev->in_suspend || in gmc_v11_0_vm_fault_interrupt_state()
390 struct amdgpu_device *adev = ring->adev; in gmc_v11_0_emit_pasid_mapping() local
604 struct amdgpu_device *adev = ip_block->adev; in gmc_v11_0_early_init() local
625 struct amdgpu_device *adev = ip_block->adev; in gmc_v11_0_late_init() local
658 adev->vm_manager.vram_base_offset = adev->mmhub.funcs->get_mc_fb_offset(adev); in gmc_v11_0_vram_gtt_location()
689 adev->gmc.aper_base = adev->mmhub.funcs->get_mc_fb_offset(adev); in gmc_v11_0_mc_init()
735 adev->mmhub.funcs->init(adev); in gmc_v11_0_sw_init()
737 adev->gfxhub.funcs->init(adev); in gmc_v11_0_sw_init()
902 r = adev->mmhub.funcs->gart_enable(adev); in gmc_v11_0_gart_enable()
937 adev->umc.funcs->init_registers(adev); in gmc_v11_0_hw_init()
[all …]
gmc_v12_0.c
410 struct amdgpu_device *adev = ring->adev; in gmc_v12_0_emit_pasid_mapping() local
600 struct amdgpu_device *adev = ip_block->adev; in gmc_v12_0_early_init() local
620 struct amdgpu_device *adev = ip_block->adev; in gmc_v12_0_late_init() local
651 adev->vm_manager.vram_base_offset = adev->mmhub.funcs->get_mc_fb_offset(adev); in gmc_v12_0_vram_gtt_location()
683 adev->gmc.aper_base = adev->mmhub.funcs->get_mc_fb_offset(adev); in gmc_v12_0_mc_init()
728 struct amdgpu_device *adev = ip_block->adev; in gmc_v12_0_sw_init() local
730 adev->mmhub.funcs->init(adev); in gmc_v12_0_sw_init()
732 adev->gfxhub.funcs->init(adev); in gmc_v12_0_sw_init()
872 r = adev->mmhub.funcs->gart_enable(adev); in gmc_v12_0_gart_enable()
905 adev->umc.funcs->init_registers(adev); in gmc_v12_0_hw_init()
[all …]
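
The gmc_v9_0/v10_0/v11_0/v12_0 hits above share one shape: each memory hub hangs a funcs table off adev (gfxhub.funcs, mmhub.funcs), the common code calls through it, and int-returning callbacks such as gart_enable() have their errors propagated. A simplified sketch of that shape, using invented mock_* types rather than the real driver structures:

    #include <stdio.h>

    struct mock_device;   /* forward declaration for the callback signatures */

    struct mock_hub_funcs {
        void (*init)(struct mock_device *adev);
        int (*gart_enable)(struct mock_device *adev);
        unsigned long long (*get_mc_fb_offset)(struct mock_device *adev);
    };

    struct mock_hub {
        const struct mock_hub_funcs *funcs;
    };

    struct mock_device {
        struct mock_hub gfxhub;
        struct mock_hub mmhub;
        unsigned long long vram_base_offset;
    };

    static void hub_init(struct mock_device *adev) { (void)adev; }
    static int hub_gart_enable(struct mock_device *adev) { (void)adev; return 0; }
    static unsigned long long hub_fb_offset(struct mock_device *adev) { (void)adev; return 0x8000000ULL; }

    static const struct mock_hub_funcs mock_funcs = {
        .init = hub_init,
        .gart_enable = hub_gart_enable,
        .get_mc_fb_offset = hub_fb_offset,
    };

    /* Mirrors the sw_init() / gart_enable() call shape in the hits above. */
    static void mock_gmc_sw_init(struct mock_device *adev)
    {
        adev->gfxhub.funcs->init(adev);
        adev->mmhub.funcs->init(adev);
        adev->vram_base_offset = adev->gfxhub.funcs->get_mc_fb_offset(adev);
    }

    static int mock_gmc_gart_enable(struct mock_device *adev)
    {
        int r = adev->mmhub.funcs->gart_enable(adev);

        if (r)
            return r;   /* propagate the callback's error */
        return 0;
    }

    int main(void)
    {
        struct mock_device dev = {
            .gfxhub.funcs = &mock_funcs,
            .mmhub.funcs = &mock_funcs,
        };

        mock_gmc_sw_init(&dev);
        printf("gart_enable -> %d, fb offset 0x%llx\n",
               mock_gmc_gart_enable(&dev), dev.vram_base_offset);
        return 0;
    }
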
soc24.c
93 return adev->nbio.funcs->get_memsize(adev); in soc24_get_config_memsize()
249 adev->nbio.funcs->program_aspm(adev); in soc24_program_aspm()
367 struct amdgpu_device *adev = ip_block->adev; in soc24_common_early_init() local
369 adev->nbio.funcs->set_reg_remap(adev); in soc24_common_early_init()
444 struct amdgpu_device *adev = ip_block->adev; in soc24_common_late_init() local
469 struct amdgpu_device *adev = ip_block->adev; in soc24_common_sw_init() local
479 struct amdgpu_device *adev = ip_block->adev; in soc24_common_hw_init() local
484 adev->nbio.funcs->init_registers(adev); in soc24_common_hw_init()
493 adev->df.funcs->hw_init(adev); in soc24_common_hw_init()
503 struct amdgpu_device *adev = ip_block->adev; in soc24_common_hw_fini() local
[all …]
soc15.c
342 return adev->nbio.funcs->get_memsize(adev); in soc15_get_config_memsize()
704 adev->nbio.funcs->program_aspm(adev); in soc15_program_aspm()
967 struct amdgpu_device *adev = ip_block->adev; in soc15_common_early_init() local
969 adev->nbio.funcs->set_reg_remap(adev); in soc15_common_early_init()
1239 struct amdgpu_device *adev = ip_block->adev; in soc15_common_late_init() local
1261 adev->df.funcs->sw_init(adev); in soc15_common_sw_init()
1272 adev->df.funcs->sw_fini(adev); in soc15_common_sw_fini()
1297 adev->nbio.funcs->init_registers(adev); in soc15_common_hw_init()
1434 adev->hdp.funcs->update_clock_gating(adev, in soc15_common_set_clockgating_state()
1452 adev->hdp.funcs->update_clock_gating(adev, in soc15_common_set_clockgating_state()
[all …]
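
The soc15.c and soc24.c handlers above (and soc21.c, vi.c and nv.c later in this listing) all begin by recovering the device from the IP block's back-pointer, struct amdgpu_device *adev = ip_block->adev, before programming hardware through adev's callback tables; the amdgpu_device.c hit at line 2479 (amdgpu_device_ip_block_add) is where that back-pointer gets filled in. A hedged, self-contained sketch with made-up struct names:

    #include <stdio.h>

    struct mock_device;

    struct mock_ip_block {
        struct mock_device *adev;   /* back-pointer set when the block is added */
        int (*early_init)(struct mock_ip_block *ip_block);
    };

    struct mock_nbio_funcs {
        void (*set_reg_remap)(struct mock_device *adev);
    };

    struct mock_device {
        struct { const struct mock_nbio_funcs *funcs; } nbio;
        unsigned int rev_id;
        unsigned int external_rev_id;
        struct mock_ip_block common;
    };

    static void nbio_set_reg_remap(struct mock_device *adev) { (void)adev; }

    static const struct mock_nbio_funcs nbio_funcs = {
        .set_reg_remap = nbio_set_reg_remap,
    };

    /* Mirrors the soc*_common_early_init() shape: recover adev, then
     * program through the device's callback tables. */
    static int mock_common_early_init(struct mock_ip_block *ip_block)
    {
        struct mock_device *adev = ip_block->adev;

        adev->nbio.funcs->set_reg_remap(adev);
        adev->external_rev_id = adev->rev_id + 0x1;
        return 0;
    }

    int main(void)
    {
        struct mock_device dev = { .nbio.funcs = &nbio_funcs, .rev_id = 2 };

        dev.common.adev = &dev;   /* the back-pointer */
        dev.common.early_init = mock_common_early_init;
        dev.common.early_init(&dev.common);
        printf("external_rev_id = %u\n", dev.external_rev_id);
        return 0;
    }
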
amdgpu_mes.c
95 adev->mes.adev = adev; in amdgpu_mes_init()
168 adev->wb.gpu_addr + (adev->mes.sch_ctx_offs[i] * 4); in amdgpu_mes_init()
170 (uint64_t *)&adev->wb.wb[adev->mes.sch_ctx_offs[i]]; in amdgpu_mes_init()
256 r = adev->mes.funcs->suspend_gang(&adev->mes, &input); in amdgpu_mes_suspend()
280 r = adev->mes.funcs->resume_gang(&adev->mes, &input); in amdgpu_mes_resume()
393 r = adev->mes.funcs->misc_op(&adev->mes, &op_input); in amdgpu_mes_rreg()
423 r = adev->mes.funcs->misc_op(&adev->mes, &op_input); in amdgpu_mes_wreg()
452 r = adev->mes.funcs->misc_op(&adev->mes, &op_input); in amdgpu_mes_reg_write_reg_wait()
495 r = adev->mes.funcs->misc_op(&adev->mes, &op_input); in amdgpu_mes_set_shader_debugger()
522 r = adev->mes.funcs->misc_op(&adev->mes, &op_input); in amdgpu_mes_flush_shader_debugger()
[all …]
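
The amdgpu_mes.c hits above show one callback, adev->mes.funcs->misc_op(&adev->mes, &op_input), serving several helpers (amdgpu_mes_rreg, amdgpu_mes_wreg, amdgpu_mes_reg_write_reg_wait, the shader-debugger helpers): each one fills an op-input structure and funnels it through the same entry point. The sketch below imitates that command-style dispatch; the op codes and types are invented, not the driver's.

    #include <stdint.h>
    #include <stdio.h>

    enum mock_mes_op { MOCK_MES_READ_REG, MOCK_MES_WRITE_REG };

    struct mock_mes_op_input {
        enum mock_mes_op op;
        union {
            struct { uint32_t reg; uint32_t *out; } read_reg;
            struct { uint32_t reg; uint32_t val; } write_reg;
        };
    };

    struct mock_mes;

    struct mock_mes_funcs {
        int (*misc_op)(struct mock_mes *mes, struct mock_mes_op_input *input);
    };

    struct mock_mes {
        const struct mock_mes_funcs *funcs;
        uint32_t regs[16];   /* fake register file for the demo backend */
    };

    static int mock_misc_op(struct mock_mes *mes, struct mock_mes_op_input *input)
    {
        switch (input->op) {
        case MOCK_MES_READ_REG:
            *input->read_reg.out = mes->regs[input->read_reg.reg];
            return 0;
        case MOCK_MES_WRITE_REG:
            mes->regs[input->write_reg.reg] = input->write_reg.val;
            return 0;
        }
        return -1;
    }

    static const struct mock_mes_funcs mock_funcs = { .misc_op = mock_misc_op };

    /* Mirrors the call shape of amdgpu_mes_wreg(): build op_input, dispatch. */
    static int mock_mes_wreg(struct mock_mes *mes, uint32_t reg, uint32_t val)
    {
        struct mock_mes_op_input op_input = {
            .op = MOCK_MES_WRITE_REG,
            .write_reg = { .reg = reg, .val = val },
        };

        return mes->funcs->misc_op(mes, &op_input);
    }

    int main(void)
    {
        struct mock_mes mes = { .funcs = &mock_funcs };
        uint32_t v = 0;
        struct mock_mes_op_input rd = {
            .op = MOCK_MES_READ_REG,
            .read_reg = { .reg = 3, .out = &v },
        };

        mock_mes_wreg(&mes, 3, 0x1234);
        mes.funcs->misc_op(&mes, &rd);
        printf("reg 3 = 0x%x\n", (unsigned int)v);
        return 0;
    }
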
soc21.c
223 return adev->nbio.funcs->get_memsize(adev); in soc21_get_config_memsize()
441 adev->nbio.funcs->program_aspm(adev); in soc21_program_aspm()
556 struct amdgpu_device *adev = ip_block->adev; in soc21_common_early_init() local
558 adev->nbio.funcs->set_reg_remap(adev); in soc21_common_early_init()
649 adev->external_rev_id = adev->rev_id + 0x1; in soc21_common_early_init()
822 struct amdgpu_device *adev = ip_block->adev; in soc21_common_late_init() local
860 struct amdgpu_device *adev = ip_block->adev; in soc21_common_sw_init() local
870 struct amdgpu_device *adev = ip_block->adev; in soc21_common_hw_init() local
875 adev->nbio.funcs->init_registers(adev); in soc21_common_hw_init()
890 struct amdgpu_device *adev = ip_block->adev; in soc21_common_hw_fini() local
[all …]
amdgpu_gfx.c
1451 struct amdgpu_device *adev = ring->adev; in amdgpu_gfx_run_cleaner_shader_job() local
1567 if (adev->in_suspend && !adev->in_runpm) in amdgpu_gfx_set_run_cleaner_shader()
1735 if (!adev) in amdgpu_gfx_get_gfx_reset_mask()
1748 if (!adev) in amdgpu_gfx_get_compute_reset_mask()
2022 struct amdgpu_device *adev = isolation_work->adev; in amdgpu_gfx_enforce_isolation_handler() local
2126 struct amdgpu_device *adev = ring->adev; in amdgpu_gfx_enforce_isolation_ring_begin_use() local
2166 struct amdgpu_device *adev = ring->adev; in amdgpu_gfx_enforce_isolation_ring_end_use() local
2227 struct amdgpu_device *adev = ring->adev; in amdgpu_gfx_profile_ring_begin_use() local
2264 struct amdgpu_device *adev = ring->adev; in amdgpu_gfx_profile_ring_end_use() local
2353 if (!adev) in amdgpu_debugfs_gfx_sched_mask_set()
[all …]
amdgpu_virt.c
68 adev->cg_flags = 0; in amdgpu_virt_init_setting()
69 adev->pg_flags = 0; in amdgpu_virt_init_setting()
198 if (!amdgpu_sriov_vf(adev) || adev->virt.mm_table.gpu_addr) in amdgpu_virt_alloc_mm_table()
226 if (!amdgpu_sriov_vf(adev) || !adev->virt.mm_table.gpu_addr) in amdgpu_virt_free_mm_table()
474 dev_err(adev->dev, in amdgpu_virt_read_pf2vf_data()
490 dev_err(adev->dev, in amdgpu_virt_read_pf2vf_data()
524 adev->unique_id = in amdgpu_virt_read_pf2vf_data()
677 if (adev->mman.fw_vram_usage_va && adev->mman.drv_vram_usage_va) { in amdgpu_virt_init_data_exchange()
702 if (adev->mman.fw_vram_usage_va || adev->mman.drv_vram_usage_va) { in amdgpu_virt_exchange_data()
1062 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
[all …]
amdgpu_rlc.c
44 if (!adev->gfx.rlc.funcs->is_rlc_enabled(adev)) in amdgpu_gfx_rlc_enter_safe_mode()
47 if (adev->cg_flags & in amdgpu_gfx_rlc_enter_safe_mode()
50 adev->gfx.rlc.funcs->set_safe_mode(adev, xcc_id); in amdgpu_gfx_rlc_enter_safe_mode()
69 if (!adev->gfx.rlc.funcs->is_rlc_enabled(adev)) in amdgpu_gfx_rlc_exit_safe_mode()
72 if (adev->cg_flags & in amdgpu_gfx_rlc_exit_safe_mode()
75 adev->gfx.rlc.funcs->unset_safe_mode(adev, xcc_id); in amdgpu_gfx_rlc_exit_safe_mode()
105 amdgpu_gfx_rlc_fini(adev); in amdgpu_gfx_rlc_init_sr()
134 adev->gfx.rlc.clear_state_size = dws = adev->gfx.rlc.funcs->get_csb_size(adev); in amdgpu_gfx_rlc_init_csb()
162 r = amdgpu_bo_create_reserved(adev, adev->gfx.rlc.cp_table_size, in amdgpu_gfx_rlc_init_cpt()
197 max_me = adev->gfx.rlc.funcs->get_cp_table_num(adev); in amdgpu_gfx_rlc_setup_cp_table()
[all …]
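
The amdgpu_rlc.c hits above guard safe-mode transitions twice: nothing happens unless is_rlc_enabled() reports the RLC up and the relevant clock-gating bits are set in adev->cg_flags, and only then is set_safe_mode()/unset_safe_mode() called. A small sketch of that guarded enter/exit pattern, with placeholder flag values and struct names:

    #include <stdbool.h>
    #include <stdio.h>

    #define MOCK_CG_FLAG_GFX (1u << 0)   /* placeholder clock-gating flag */

    struct mock_device;

    struct mock_rlc_funcs {
        bool (*is_rlc_enabled)(struct mock_device *adev);
        void (*set_safe_mode)(struct mock_device *adev, int xcc_id);
        void (*unset_safe_mode)(struct mock_device *adev, int xcc_id);
    };

    struct mock_device {
        unsigned int cg_flags;
        struct { const struct mock_rlc_funcs *funcs; } rlc;
        bool in_safe_mode;
    };

    static bool rlc_enabled(struct mock_device *adev) { (void)adev; return true; }
    static void rlc_set(struct mock_device *adev, int xcc_id) { (void)xcc_id; adev->in_safe_mode = true; }
    static void rlc_unset(struct mock_device *adev, int xcc_id) { (void)xcc_id; adev->in_safe_mode = false; }

    static const struct mock_rlc_funcs rlc_funcs = {
        .is_rlc_enabled = rlc_enabled,
        .set_safe_mode = rlc_set,
        .unset_safe_mode = rlc_unset,
    };

    /* Mirrors amdgpu_gfx_rlc_enter_safe_mode()/exit_safe_mode(): bail out if
     * the RLC is not running, toggle safe mode only when gating is active. */
    static void mock_rlc_enter_safe_mode(struct mock_device *adev, int xcc_id)
    {
        if (!adev->rlc.funcs->is_rlc_enabled(adev))
            return;
        if (adev->cg_flags & MOCK_CG_FLAG_GFX)
            adev->rlc.funcs->set_safe_mode(adev, xcc_id);
    }

    static void mock_rlc_exit_safe_mode(struct mock_device *adev, int xcc_id)
    {
        if (!adev->rlc.funcs->is_rlc_enabled(adev))
            return;
        if (adev->cg_flags & MOCK_CG_FLAG_GFX)
            adev->rlc.funcs->unset_safe_mode(adev, xcc_id);
    }

    int main(void)
    {
        struct mock_device dev = { .cg_flags = MOCK_CG_FLAG_GFX, .rlc.funcs = &rlc_funcs };

        mock_rlc_enter_safe_mode(&dev, 0);
        printf("in_safe_mode = %d\n", dev.in_safe_mode);
        mock_rlc_exit_safe_mode(&dev, 0);
        printf("in_safe_mode = %d\n", dev.in_safe_mode);
        return 0;
    }
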
nv.c
308 return adev->nbio.funcs->get_memsize(adev); in nv_get_config_memsize()
520 adev->nbio.funcs->program_aspm(adev); in nv_program_aspm()
613 adev->nbio.funcs->enable_aspm(adev, !enter); in nv_update_umd_stable_pstate()
640 struct amdgpu_device *adev = ip_block->adev; in nv_common_early_init() local
642 adev->nbio.funcs->set_reg_remap(adev); in nv_common_early_init()
687 adev->external_rev_id = adev->rev_id + 0x1; in nv_common_early_init()
708 adev->external_rev_id = adev->rev_id + 20; in nv_common_early_init()
737 adev->external_rev_id = adev->rev_id + 0xa; in nv_common_early_init()
950 struct amdgpu_device *adev = ip_block->adev; in nv_common_late_init() local
1000 adev->nbio.funcs->init_registers(adev); in nv_common_hw_init()
[all …]
amdgpu_bios.c
89 kfree(adev->bios); in amdgpu_bios_release()
90 adev->bios = NULL; in amdgpu_bios_release()
115 adev->bios = NULL; in amdgpu_read_bios_from_vram()
122 if (!adev->bios) { in amdgpu_read_bios_from_vram()
171 if (!adev->asic_funcs || !adev->asic_funcs->read_bios_from_rom) in amdgpu_read_bios_from_rom()
196 amdgpu_asic_read_bios_from_rom(adev, adev->bios, len); in amdgpu_read_bios_from_rom()
218 if (!adev->bios) in amdgpu_read_platform_bios()
525 adev->is_atom_fw = adev->asic_type >= CHIP_VEGA10; in amdgpu_get_bios()
556 adev->smuio.funcs->get_rom_index_offset(adev); in amdgpu_soc15_read_bios_from_rom()
558 adev->smuio.funcs->get_rom_data_offset(adev); in amdgpu_soc15_read_bios_from_rom()
[all …]
amdgpu_discovery.c
310 dev_err(adev->dev, in amdgpu_discovery_read_binary_from_mem()
471 adev, adev->mman.discovery_bin); in amdgpu_discovery_init()
478 dev_err(adev->dev, in amdgpu_discovery_init()
774 adev->umc.active_mask = ((1 << adev->umc.node_inst_num) - 1) & in amdgpu_discovery_read_from_harvest_table()
1022 struct amdgpu_device *adev = ip_top->adev; in ip_disc_release() local
1226 adev->ip_top = kzalloc(sizeof(*adev->ip_top), GFP_KERNEL); in amdgpu_discovery_sysfs_init()
1227 if (!adev->ip_top) in amdgpu_discovery_sysfs_init()
1230 adev->ip_top->adev = adev; in amdgpu_discovery_sysfs_init()
1395 adev->vcn.inst[adev->vcn.num_vcn_inst].vcn_config = in amdgpu_discovery_reg_base_init()
2369 if (!(adev->asic_type == CHIP_VEGA20 && amdgpu_sriov_vf(adev))) in amdgpu_discovery_set_mm_ip_blocks()
[all …]
amdgpu_acp.c
103 struct amdgpu_device *adev = ip_block->adev; in acp_sw_init() local
105 adev->acp.parent = adev->dev; in acp_sw_init()
117 struct amdgpu_device *adev = ip_block->adev; in acp_sw_fini() local
136 adev = apd->adev; in acp_poweroff()
153 adev = apd->adev; in acp_poweron()
233 struct amdgpu_device *adev = ip_block->adev; in acp_hw_init() local
256 adev->acp.acp_genpd->adev = adev; in acp_hw_init()
507 struct amdgpu_device *adev = ip_block->adev; in acp_hw_fini() local
564 struct amdgpu_device *adev = ip_block->adev; in acp_suspend() local
574 struct amdgpu_device *adev = ip_block->adev; in acp_resume() local
[all …]
amdgpu_virt.h
326 (amdgpu_sriov_vf((adev)) && !amdgpu_sriov_runtime((adev)))
329 (amdgpu_sriov_vf((adev)) && \
333 (amdgpu_sriov_vf((adev)) && \
337 (amdgpu_sriov_vf((adev)) && \
341 (amdgpu_sriov_vf((adev)) && \
345 (amdgpu_sriov_vf((adev)) && \
349 (amdgpu_sriov_reg_indirect_mmhub(adev) || amdgpu_sriov_reg_indirect_gc(adev))
387 (amdgpu_sriov_vf(adev) && !amdgpu_sriov_is_pp_one_vf(adev))
389 ((!amdgpu_in_reset(adev)) && adev->virt.tdr_debug)
391 ((!amdgpu_in_reset(adev)) && (!adev->virt.tdr_debug))
[all …]
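
The amdgpu_virt.h hits above are macro compositions: higher-level SR-IOV conditions are built by AND-ing the base amdgpu_sriov_vf(adev) test with more specific predicates (runtime mode, pp-one-vf, TDR debug). A compact sketch of that composition style, using invented mock_* predicates and flag fields:

    #include <stdbool.h>
    #include <stdio.h>

    struct mock_device {
        bool is_vf;               /* running as an SR-IOV virtual function */
        bool runtime_services;    /* host runtime services available */
        struct { bool tdr_debug; } virt;
        bool in_reset;
    };

    #define mock_sriov_vf(adev)      ((adev)->is_vf)
    #define mock_sriov_runtime(adev) ((adev)->runtime_services)
    #define mock_in_reset(adev)      ((adev)->in_reset)

    /* Composed conditions, mirroring the shape of the real macros above. */
    #define mock_sriov_fullaccess(adev) \
        (mock_sriov_vf((adev)) && !mock_sriov_runtime((adev)))
    #define mock_tdr_debug_enabled(adev) \
        ((!mock_in_reset(adev)) && (adev)->virt.tdr_debug)

    int main(void)
    {
        struct mock_device dev = { .is_vf = true, .virt.tdr_debug = true };

        printf("full access: %d, tdr debug: %d\n",
               mock_sriov_fullaccess(&dev), mock_tdr_debug_enabled(&dev));
        return 0;
    }
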
amdgpu_jpeg.c
133 struct amdgpu_device *adev = ring->adev; in amdgpu_jpeg_ring_begin_use() local
152 struct amdgpu_device *adev = ring->adev; in amdgpu_jpeg_dec_ring_test_ring() local
189 struct amdgpu_device *adev = ring->adev; in amdgpu_jpeg_dec_set_reg() local
228 struct amdgpu_device *adev = ring->adev; in amdgpu_jpeg_dec_ring_test_ib() local
299 r = amdgpu_irq_get(adev, &adev->jpeg.inst[i].ras_poison_irq, 0); in amdgpu_jpeg_ras_late_init()
361 if (!adev) in amdgpu_debugfs_jpeg_sched_mask_set()
389 if (!adev) in amdgpu_debugfs_jpeg_sched_mask_get()
430 if (!adev) in amdgpu_get_jpeg_reset_mask()
463 adev->jpeg.ip_dump = kcalloc(adev->jpeg.num_jpeg_inst * count, in amdgpu_jpeg_reg_dump_init()
485 struct amdgpu_device *adev = ip_block->adev; in amdgpu_jpeg_dump_ip_state() local
[all …]
mxgpu_nv.c
166 r = xgpu_nv_poll_ack(adev); in xgpu_nv_mailbox_trans_msg()
181 if (amdgpu_ras_is_rma(adev)) in xgpu_nv_send_access_requests_with_param()
414 &adev->virt.flr_work), in xgpu_nv_mailbox_rcv_irq()
461 r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 135, &adev->virt.rcv_irq); in xgpu_nv_mailbox_add_irq_id()
465 r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 138, &adev->virt.ack_irq); in xgpu_nv_mailbox_add_irq_id()
467 amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0); in xgpu_nv_mailbox_add_irq_id()
478 r = amdgpu_irq_get(adev, &adev->virt.rcv_irq, 0); in xgpu_nv_mailbox_get_irq()
481 r = amdgpu_irq_get(adev, &adev->virt.ack_irq, 0); in xgpu_nv_mailbox_get_irq()
483 amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0); in xgpu_nv_mailbox_get_irq()
495 amdgpu_irq_put(adev, &adev->virt.ack_irq, 0); in xgpu_nv_mailbox_put_irq()
[all …]
amdgpu_irq.c
171 ret = amdgpu_ih_process(adev, &adev->irq.ih); in amdgpu_irq_handler()
192 amdgpu_ih_process(adev, &adev->irq.ih1); in amdgpu_irq_handle_ih1()
207 amdgpu_ih_process(adev, &adev->irq.ih2); in amdgpu_irq_handle_ih2()
222 amdgpu_ih_process(adev, &adev->irq.ih_soft); in amdgpu_irq_handle_ih_soft()
333 free_irq(adev->irq.irq, adev_to_drm(adev)); in amdgpu_irq_fini_hw()
339 amdgpu_ih_ring_fini(adev, &adev->irq.ih_soft); in amdgpu_irq_fini_hw()
340 amdgpu_ih_ring_fini(adev, &adev->irq.ih); in amdgpu_irq_fini_hw()
341 amdgpu_ih_ring_fini(adev, &adev->irq.ih1); in amdgpu_irq_fini_hw()
342 amdgpu_ih_ring_fini(adev, &adev->irq.ih2); in amdgpu_irq_fini_hw()
563 if (amdgpu_sriov_vf(adev) || amdgpu_passthrough(adev)) in amdgpu_irq_gpu_reset_resume_helper()
[all …]
vi.c
1460 struct amdgpu_device *adev = ip_block->adev; in vi_common_early_init() local
1480 adev->rev_id = vi_get_rev_id(adev); in vi_common_early_init()
1507 adev->external_rev_id = adev->rev_id + 0x3c; in vi_common_early_init()
1524 adev->external_rev_id = adev->rev_id + 0x14; in vi_common_early_init()
1644 adev->external_rev_id = adev->rev_id + 0x1; in vi_common_early_init()
1684 struct amdgpu_device *adev = ip_block->adev; in vi_common_late_init() local
1694 struct amdgpu_device *adev = ip_block->adev; in vi_common_sw_init() local
1704 struct amdgpu_device *adev = ip_block->adev; in vi_common_hw_init() local
1718 struct amdgpu_device *adev = ip_block->adev; in vi_common_hw_fini() local
1951 struct amdgpu_device *adev = ip_block->adev; in vi_common_set_clockgating_state() local
[all …]
amdgpu_gmc.c
74 r = amdgpu_bo_create(adev, &bp, &adev->gmc.pdb0_bo); in amdgpu_gmc_pdb0_alloc()
85 r = amdgpu_bo_kmap(adev->gmc.pdb0_bo, &adev->gmc.ptr_pdb0); in amdgpu_gmc_pdb0_alloc()
671 adev->gmc.gmc_funcs->flush_gpu_tlb(adev, vmid, in amdgpu_gmc_flush_gpu_tlb()
675 adev->gmc.gmc_funcs->flush_gpu_tlb(adev, vmid, in amdgpu_gmc_flush_gpu_tlb()
678 adev->gmc.gmc_funcs->flush_gpu_tlb(adev, vmid, vmhub, in amdgpu_gmc_flush_gpu_tlb()
733 adev->gmc.gmc_funcs->flush_gpu_tlb_pasid(adev, pasid, in amdgpu_gmc_flush_gpu_tlb_pasid()
738 adev->gmc.gmc_funcs->flush_gpu_tlb_pasid(adev, pasid, in amdgpu_gmc_flush_gpu_tlb_pasid()
742 adev->gmc.gmc_funcs->flush_gpu_tlb_pasid(adev, pasid, in amdgpu_gmc_flush_gpu_tlb_pasid()
1222 adev->dev, in current_memory_partition_store()
1239 adev->dev, in current_memory_partition_store()
[all …]
amdgpu_amdkfd.c
77 adev->kfd.dev = kgd2kfd_probe(adev, vf); in amdgpu_amdkfd_device_probe()
112 } else if (adev->doorbell.size > adev->doorbell.num_kernel_doorbells * in amdgpu_doorbell_get_kfd_info()
152 if (!adev->kfd.init_complete || adev->kfd.client.dev) in amdgpu_amdkfd_drm_client_create()
155 ret = drm_client_init(&adev->ddev, &adev->kfd.client, "kfd", in amdgpu_amdkfd_drm_client_create()
226 adev->kfd.init_complete = kgd2kfd_device_init(adev->kfd.dev, in amdgpu_amdkfd_device_init()
247 if (adev->kfd.dev) in amdgpu_amdkfd_interrupt()
253 if (adev->kfd.dev) in amdgpu_amdkfd_suspend()
261 if (adev->kfd.dev) in amdgpu_amdkfd_resume()
475 if (adev->gmc.real_vram_size == adev->gmc.visible_vram_size) in amdgpu_amdkfd_get_local_mem_info()
508 return adev->gfx.funcs->get_gpu_clock_counter(adev); in amdgpu_amdkfd_get_gpu_clock_counter()
[all …]
mxgpu_ai.c
156 r = xgpu_ai_poll_ack(adev); in xgpu_ai_mailbox_trans_msg()
341 &adev->virt.flr_work), in xgpu_ai_mailbox_rcv_irq()
348 &adev->virt.flr_work), in xgpu_ai_mailbox_rcv_irq()
391 r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 135, &adev->virt.rcv_irq); in xgpu_ai_mailbox_add_irq_id()
395 r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 138, &adev->virt.ack_irq); in xgpu_ai_mailbox_add_irq_id()
397 amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0); in xgpu_ai_mailbox_add_irq_id()
408 r = amdgpu_irq_get(adev, &adev->virt.rcv_irq, 0); in xgpu_ai_mailbox_get_irq()
411 r = amdgpu_irq_get(adev, &adev->virt.ack_irq, 0); in xgpu_ai_mailbox_get_irq()
413 amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0); in xgpu_ai_mailbox_get_irq()
425 amdgpu_irq_put(adev, &adev->virt.ack_irq, 0); in xgpu_ai_mailbox_put_irq()
[all …]
/drivers/gpu/drm/amd/pm/
amdgpu_dpm.c
37 ((adev)->powerplay.pp_funcs->enable_bapm((adev)->powerplay.pp_handle, (e)))
39 #define amdgpu_dpm_is_legacy_dpm(adev) ((adev)->powerplay.pp_handle == (adev)) argument
49 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_sclk()
65 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_mclk()
90 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_powergating_by_smu()
233 if (adev->in_s3) in amdgpu_dpm_is_baco_supported()
525 amdgpu_dpm_enable_bapm(adev, adev->pm.ac_power); in amdgpu_pm_acpi_event_handler()
693 (is_support_sw_smu(adev) && (adev->flags & AMD_IS_APU))) in amdgpu_pm_load_smu_firmware()
767 if (adev->cper.enabled) in amdgpu_dpm_send_rma_reason()
1372 adev->pm.dpm.current_ps = adev->pm.dpm.boot_ps; in amdgpu_dpm_set_sclk_od()
[all …]
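
The amdgpu_dpm.c hits above take adev->pm.mutex before dispatching into the powerplay pp_funcs table (amdgpu_dpm_get_sclk, amdgpu_dpm_get_mclk, amdgpu_dpm_set_powergating_by_smu). The sketch below mirrors that lock-then-dispatch shape with a pthread mutex standing in for the kernel mutex; the struct names and clock values are made up.

    #include <pthread.h>
    #include <stdbool.h>
    #include <stdio.h>

    struct mock_pp_funcs {
        int (*get_sclk)(void *pp_handle, bool low);
    };

    struct mock_device {
        struct { pthread_mutex_t mutex; } pm;
        struct {
            const struct mock_pp_funcs *pp_funcs;
            void *pp_handle;
        } powerplay;
    };

    static int mock_get_sclk_backend(void *pp_handle, bool low)
    {
        (void)pp_handle;
        return low ? 500 : 1800;   /* MHz, made-up values */
    }

    static const struct mock_pp_funcs pp_funcs = { .get_sclk = mock_get_sclk_backend };

    /* Mirrors the amdgpu_dpm_get_sclk() shape: lock, dispatch, unlock. */
    static int mock_dpm_get_sclk(struct mock_device *adev, bool low)
    {
        int ret;

        pthread_mutex_lock(&adev->pm.mutex);
        ret = adev->powerplay.pp_funcs->get_sclk(adev->powerplay.pp_handle, low);
        pthread_mutex_unlock(&adev->pm.mutex);
        return ret;
    }

    int main(void)
    {
        struct mock_device dev = { .powerplay.pp_funcs = &pp_funcs };

        pthread_mutex_init(&dev.pm.mutex, NULL);
        printf("sclk (low) = %d MHz\n", mock_dpm_get_sclk(&dev, true));
        pthread_mutex_destroy(&dev.pm.mutex);
        return 0;
    }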

Completed in 894 milliseconds
