/linux-6.3-rc2/drivers/gpu/drm/amd/amdgpu/

amdgpu_device.c
    468  ret = adev->pcie_rreg(adev, reg * 4);  in amdgpu_device_rreg()
    553  adev->pcie_wreg(adev, reg * 4, v);  in amdgpu_device_wreg()
    580  adev->pcie_wreg(adev, reg * 4, v);  in amdgpu_mm_wreg_mmio_rlc()
    1141  &adev->wb.wb_obj, &adev->wb.gpu_addr,  in amdgpu_device_wb_init()
    2542  if (memcmp(adev->gart.ptr, adev->reset_magic,  in amdgpu_device_check_vram_lost()
    2666  adev = gpu_ins->adev;  in amdgpu_device_enable_mgpu_fan_boost()
    3449  adev->sdma_timeout = adev->video_timeout = adev->gfx_timeout;  in amdgpu_device_get_job_timeout_settings()
    3496  adev->sdma_timeout = adev->video_timeout = adev->gfx_timeout;  in amdgpu_device_get_job_timeout_settings()
    3498  adev->compute_timeout = adev->gfx_timeout;  in amdgpu_device_get_job_timeout_settings()
    3669  adev->rmmio = ioremap(adev->rmmio_base, adev->rmmio_size);  in amdgpu_device_init()
    [all …]

gmc_v10_0.c
    341  adev->hdp.funcs->flush_hdp(adev, NULL);  in gmc_v10_0_flush_gpu_tlb()
    530  struct amdgpu_device *adev = ring->adev;  in gmc_v10_0_emit_pasid_mapping() local
    846  adev->gmc.aper_base = adev->gfxhub.funcs->get_mc_fb_offset(adev);  in gmc_v10_0_mc_init()
    901  adev->gfxhub.funcs->init(adev);  in gmc_v10_0_sw_init()
    903  adev->mmhub.funcs->init(adev);  in gmc_v10_0_sw_init()
    1081  r = adev->mmhub.funcs->gart_enable(adev);  in gmc_v10_0_gart_enable()
    1085  adev->hdp.funcs->init_registers(adev);  in gmc_v10_0_gart_enable()
    1088  adev->hdp.funcs->flush_hdp(adev, NULL);  in gmc_v10_0_gart_enable()
    1133  adev->umc.funcs->init_registers(adev);  in gmc_v10_0_hw_init()
    1148  adev->gfxhub.funcs->gart_disable(adev);  in gmc_v10_0_gart_disable()
    [all …]

gmc_v9_0.c
    990  struct amdgpu_device *adev = ring->adev;  in gmc_v9_0_emit_flush_gpu_tlb() local
    1038  struct amdgpu_device *adev = ring->adev;  in gmc_v9_0_emit_pasid_mapping() local
    1644  adev->gfxhub.funcs->init(adev);  in gmc_v9_0_sw_init()
    1646  adev->mmhub.funcs->init(adev);  in gmc_v9_0_sw_init()
    1648  adev->mca.funcs->init(adev);  in gmc_v9_0_sw_init()
    1888  r = adev->mmhub.funcs->gart_enable(adev);  in gmc_v9_0_gart_enable()
    1922  adev->hdp.funcs->init_registers(adev);  in gmc_v9_0_hw_init()
    1925  adev->hdp.funcs->flush_hdp(adev, NULL);  in gmc_v9_0_hw_init()
    1944  adev->umc.funcs->init_registers(adev);  in gmc_v9_0_hw_init()
    1966  adev->gfxhub.funcs->gart_disable(adev);  in gmc_v9_0_gart_disable()
    [all …]

soc21.c
    202  return adev->nbio.funcs->get_memsize(adev);  in soc21_get_config_memsize()
    437  adev->nbio.funcs->program_aspm(adev);  in soc21_program_aspm()
    458  return adev->nbio.funcs->get_rev_id(adev);  in soc21_get_rev_id()
    601  adev->rev_id = soc21_get_rev_id(adev);  in soc21_common_early_init()
    647  adev->external_rev_id = adev->rev_id + 0x10;  in soc21_common_early_init()
    673  adev->external_rev_id = adev->rev_id + 0x1;  in soc21_common_early_init()
    688  adev->external_rev_id = adev->rev_id + 0x20;  in soc21_common_early_init()
    713  adev->external_rev_id = adev->rev_id + 0x1;  in soc21_common_early_init()
    763  adev->nbio.funcs->init_registers(adev);  in soc21_common_hw_init()
    769  adev->nbio.funcs->remap_hdp_registers(adev);  in soc21_common_hw_init()
    [all …]

gmc_v11_0.c
    287  adev->hdp.funcs->flush_hdp(adev, NULL);  in gmc_v11_0_flush_gpu_tlb()
    425  struct amdgpu_device *adev = ring->adev;  in gmc_v11_0_emit_pasid_mapping() local
    682  adev->vm_manager.vram_base_offset = adev->mmhub.funcs->get_mc_fb_offset(adev);  in gmc_v11_0_vram_gtt_location()
    713  adev->gmc.aper_base = adev->mmhub.funcs->get_mc_fb_offset(adev);  in gmc_v11_0_mc_init()
    759  adev->mmhub.funcs->init(adev);  in gmc_v11_0_sw_init()
    897  r = adev->mmhub.funcs->gart_enable(adev);  in gmc_v11_0_gart_enable()
    902  adev->hdp.funcs->flush_hdp(adev, NULL);  in gmc_v11_0_gart_enable()
    930  adev->umc.funcs->init_registers(adev);  in gmc_v11_0_hw_init()
    944  adev->mmhub.funcs->gart_disable(adev);  in gmc_v11_0_gart_disable()
    957  amdgpu_irq_put(adev, &adev->gmc.ecc_irq, 0);  in gmc_v11_0_hw_fini()
    [all …]

nv.c
    354  return adev->nbio.funcs->get_memsize(adev);  in nv_get_config_memsize()
    586  adev->nbio.funcs->program_aspm(adev);  in nv_program_aspm()
    613  return adev->nbio.funcs->get_rev_id(adev);  in nv_get_rev_id()
    703  adev->nbio.funcs->enable_aspm(adev, !enter);  in nv_update_umd_stable_pstate()
    757  adev->rev_id = nv_get_rev_id(adev);  in nv_common_early_init()
    783  adev->external_rev_id = adev->rev_id + 0x1;  in nv_common_early_init()
    804  adev->external_rev_id = adev->rev_id + 20;  in nv_common_early_init()
    833  adev->external_rev_id = adev->rev_id + 0xa;  in nv_common_early_init()
    857  adev->external_rev_id = adev->rev_id + 0x28;  in nv_common_early_init()
    1096  adev->nbio.funcs->init_registers(adev);  in nv_common_hw_init()
    [all …]

soc15.c
    337  return adev->nbio.funcs->get_memsize(adev);  in soc15_get_config_memsize()
    679  adev->nbio.funcs->program_aspm(adev);  in soc15_program_aspm()
    700  return adev->nbio.funcs->get_rev_id(adev);  in soc15_get_rev_id()
    952  adev->rev_id = soc15_get_rev_id(adev);  in soc15_common_early_init()
    1003  adev->external_rev_id = adev->rev_id + 0x14;  in soc15_common_early_init()
    1200  adev->df.funcs->sw_init(adev);  in soc15_common_sw_init()
    1211  adev->df.funcs->sw_fini(adev);  in soc15_common_sw_fini()
    1238  adev->nbio.funcs->init_registers(adev);  in soc15_common_hw_init()
    1370  adev->hdp.funcs->update_clock_gating(adev,  in soc15_common_set_clockgating_state()
    1388  adev->hdp.funcs->update_clock_gating(adev,  in soc15_common_set_clockgating_state()
    [all …]

amdgpu_rlc.c
    43  if (!adev->gfx.rlc.funcs->is_rlc_enabled(adev))  in amdgpu_gfx_rlc_enter_safe_mode()
    46  if (adev->cg_flags &  in amdgpu_gfx_rlc_enter_safe_mode()
    49  adev->gfx.rlc.funcs->set_safe_mode(adev);  in amdgpu_gfx_rlc_enter_safe_mode()
    67  if (!adev->gfx.rlc.funcs->is_rlc_enabled(adev))  in amdgpu_gfx_rlc_exit_safe_mode()
    70  if (adev->cg_flags &  in amdgpu_gfx_rlc_exit_safe_mode()
    73  adev->gfx.rlc.funcs->unset_safe_mode(adev);  in amdgpu_gfx_rlc_exit_safe_mode()
    103  amdgpu_gfx_rlc_fini(adev);  in amdgpu_gfx_rlc_init_sr()
    132  adev->gfx.rlc.clear_state_size = dws = adev->gfx.rlc.funcs->get_csb_size(adev);  in amdgpu_gfx_rlc_init_csb()
    160  r = amdgpu_bo_create_reserved(adev, adev->gfx.rlc.cp_table_size,  in amdgpu_gfx_rlc_init_cpt()
    195  max_me = adev->gfx.rlc.funcs->get_cp_table_num(adev);  in amdgpu_gfx_rlc_setup_cp_table()
    [all …]

amdgpu_gfx.c
    304  ring->adev = NULL;  in amdgpu_gfx_kiq_init_ring()
    487  if (adev->gfx.kiq.ring.sched.ready && !adev->job_hang)  in amdgpu_gfx_disable_kcq()
    544  if (adev->enable_mes)  in amdgpu_gfx_enable_kcq()
    592  if (adev->in_s0ix) {  in amdgpu_gfx_off_ctrl()
    689  r = amdgpu_irq_get(adev, &adev->gfx.cp_ecc_error_irq, 0);  in amdgpu_gfx_ras_late_init()
    710  if (!adev->gfx.ras)  in amdgpu_gfx_ras_sw_init()
    713  ras = adev->gfx.ras;  in amdgpu_gfx_ras_sw_init()
    740  if (adev->gfx.ras && adev->gfx.ras->poison_consumption_handler)  in amdgpu_gfx_poison_consumption_handler()
    741  return adev->gfx.ras->poison_consumption_handler(adev, entry);  in amdgpu_gfx_poison_consumption_handler()
    758  if (adev->gfx.ras && adev->gfx.ras->ras_block.hw_ops &&  in amdgpu_gfx_process_ras_data_cb()
    [all …]

amdgpu_virt.c
    65  adev->cg_flags = 0;  in amdgpu_virt_init_setting()
    66  adev->pg_flags = 0;  in amdgpu_virt_init_setting()
    231  if (!amdgpu_sriov_vf(adev) || adev->virt.mm_table.gpu_addr)  in amdgpu_virt_alloc_mm_table()
    259  if (!amdgpu_sriov_vf(adev) || !adev->virt.mm_table.gpu_addr)  in amdgpu_virt_free_mm_table()
    525  adev->unique_id =  in amdgpu_virt_read_pf2vf_data()
    652  if (adev->mman.fw_vram_usage_va && adev->mman.drv_vram_usage_va) {  in amdgpu_virt_init_data_exchange()
    654  } else if (adev->mman.fw_vram_usage_va || adev->mman.drv_vram_usage_va) {  in amdgpu_virt_init_data_exchange()
    677  if (adev->mman.fw_vram_usage_va || adev->mman.drv_vram_usage_va) {  in amdgpu_virt_exchange_data()
    750  if (amdgpu_sriov_vf(adev) && adev->asic_type == CHIP_SIENNA_CICHLID)  in amdgpu_detect_virtualization()
    970  dev_err(adev->dev,  in amdgpu_virt_rlcg_reg_rw()
    [all …]

amdgpu_acp.c
    105  adev->acp.parent = adev->dev;  in acp_sw_init()
    126  void *adev;  member
    136  adev = apd->adev;  in acp_poweroff()
    153  adev = apd->adev;  in acp_poweron()
    251  if (adev->rmmio_size == 0 || adev->rmmio_size < 0x5289)  in acp_hw_init()
    262  adev->acp.acp_genpd->adev = adev;  in acp_hw_init()
    308  adev->acp.acp_res[2].end = adev->acp.acp_res[2].start;  in acp_hw_init()
    312  adev->acp.acp_cell[0].resources = &adev->acp.acp_res[0];  in acp_hw_init()
    313  adev->acp.acp_cell[0].platform_data = &adev->asic_type;  in acp_hw_init()
    416  adev->acp.acp_res[4].end = adev->acp.acp_res[4].start;  in acp_hw_init()
    [all …]

amdgpu_discovery.c
    583  adev->umc.active_mask = ((1 << adev->umc.node_inst_num) - 1) &  in amdgpu_discovery_read_from_harvest_table()
    831  struct amdgpu_device *adev = ip_top->adev;  in ip_disc_release() local
    984  adev->ip_top = kzalloc(sizeof(*adev->ip_top), GFP_KERNEL);  in amdgpu_discovery_sysfs_init()
    985  if (!adev->ip_top)  in amdgpu_discovery_sysfs_init()
    988  adev->ip_top->adev = adev;  in amdgpu_discovery_sysfs_init()
    1143  adev->vcn.vcn_config[adev->vcn.num_vcn_inst] =  in amdgpu_discovery_reg_base_init()
    1377  dev_err(adev->dev,  in amdgpu_discovery_get_gfx_info()
    1428  dev_err(adev->dev,  in amdgpu_discovery_get_mall_info()
    1482  dev_err(adev->dev,  in amdgpu_discovery_get_vcn_info()
    1883  if (!(adev->asic_type == CHIP_VEGA20 && amdgpu_sriov_vf(adev)))  in amdgpu_discovery_set_mm_ip_blocks()
    [all …]

amdgpu_amdkfd.c
    77  adev->kfd.dev = kgd2kfd_probe(adev, vf);  in amdgpu_amdkfd_device_probe()
    112  } else if (adev->doorbell.size > adev->doorbell.num_doorbells *  in amdgpu_doorbell_get_kfd_info()
    197  adev->kfd.init_complete = kgd2kfd_device_init(adev->kfd.dev,  in amdgpu_amdkfd_device_init()
    218  if (adev->kfd.dev)  in amdgpu_amdkfd_interrupt()
    224  if (adev->kfd.dev)  in amdgpu_amdkfd_suspend()
    232  if (adev->kfd.dev)  in amdgpu_amdkfd_resume_iommu()
    242  if (adev->kfd.dev)  in amdgpu_amdkfd_resume()
    252  if (adev->kfd.dev)  in amdgpu_amdkfd_pre_reset()
    459  return adev->gfx.funcs->get_gpu_clock_counter(adev);  in amdgpu_amdkfd_get_gpu_clock_counter()
    768  if (adev->gfx.ras && adev->gfx.ras->query_utcl2_poison_status)  in amdgpu_amdkfd_ras_query_utcl2_poison_status()
    [all …]

vi.c
    1302  !(ASICID_IS_P23(adev->pdev->device, adev->pdev->revision))) ||  in vi_program_aspm()
    1303  ASIC_IS_P22(adev->asic_type, adev->external_rev_id)) {  in vi_program_aspm()
    1513  adev->rev_id = vi_get_rev_id(adev);  in vi_common_early_init()
    1540  adev->external_rev_id = adev->rev_id + 0x3c;  in vi_common_early_init()
    1557  adev->external_rev_id = adev->rev_id + 0x14;  in vi_common_early_init()
    1580  adev->external_rev_id = adev->rev_id + 0x5A;  in vi_common_early_init()
    1603  adev->external_rev_id = adev->rev_id + 0x50;  in vi_common_early_init()
    1626  adev->external_rev_id = adev->rev_id + 0x64;  in vi_common_early_init()
    1650  adev->external_rev_id = adev->rev_id + 0x6E;  in vi_common_early_init()
    1677  adev->external_rev_id = adev->rev_id + 0x1;  in vi_common_early_init()
    [all …]

amdgpu_irq.c
    156  ret = amdgpu_ih_process(adev, &adev->irq.ih);  in amdgpu_irq_handler()
    177  amdgpu_ih_process(adev, &adev->irq.ih1);  in amdgpu_irq_handle_ih1()
    192  amdgpu_ih_process(adev, &adev->irq.ih2);  in amdgpu_irq_handle_ih2()
    207  amdgpu_ih_process(adev, &adev->irq.ih_soft);  in amdgpu_irq_handle_ih_soft()
    300  adev->irq.irq = irq;  in amdgpu_irq_init()
    311  free_irq(adev->irq.irq, adev_to_drm(adev));  in amdgpu_irq_fini_hw()
    317  amdgpu_ih_ring_fini(adev, &adev->irq.ih_soft);  in amdgpu_irq_fini_hw()
    318  amdgpu_ih_ring_fini(adev, &adev->irq.ih);  in amdgpu_irq_fini_hw()
    319  amdgpu_ih_ring_fini(adev, &adev->irq.ih1);  in amdgpu_irq_fini_hw()
    320  amdgpu_ih_ring_fini(adev, &adev->irq.ih2);  in amdgpu_irq_fini_hw()
    [all …]

amdgpu_bios.c
    104  adev->bios = NULL;  in igp_read_bios_from_vram()
    112  if (!adev->bios) {  in igp_read_bios_from_vram()
    133  adev->bios = NULL;  in amdgpu_read_bios()
    162  if (!adev->asic_funcs || !adev->asic_funcs->read_bios_from_rom)  in amdgpu_read_bios_from_rom()
    180  if (!adev->bios) {  in amdgpu_read_bios_from_rom()
    187  amdgpu_asic_read_bios_from_rom(adev, adev->bios, len);  in amdgpu_read_bios_from_rom()
    203  adev->bios = NULL;  in amdgpu_read_platform_bios()
    209  if (!adev->bios)  in amdgpu_read_platform_bios()
    495  adev->smuio.funcs->get_rom_index_offset(adev);  in amdgpu_soc15_read_bios_from_rom()
    497  adev->smuio.funcs->get_rom_data_offset(adev);  in amdgpu_soc15_read_bios_from_rom()
    [all …]

amdgpu_ras.c
    139  if (adev && amdgpu_ras_get_context(adev))  in amdgpu_ras_set_error_query_ready()
    145  if (adev && amdgpu_ras_get_context(adev))  in amdgpu_ras_get_error_query_ready()
    611  obj->adev = adev;  in amdgpu_ras_create_obj()
    1021  adev->smuio.funcs->get_die_id(adev),  in amdgpu_ras_query_error_status()
    1040  adev->smuio.funcs->get_die_id(adev),  in amdgpu_ras_query_error_status()
    1291  struct amdgpu_device *adev = con->adev;  in amdgpu_ras_sysfs_badpages_read() local
    1616  struct amdgpu_device *adev = obj->adev;  in amdgpu_ras_interrupt_poison_consumption_handler() local
    1976  struct amdgpu_device *adev = ras->adev;  in amdgpu_ras_do_recovery() local
    2244  con->adev = adev;  in amdgpu_ras_recovery_init()
    2455  struct amdgpu_device *adev = con->adev;  in amdgpu_ras_counte_dw() local
    [all …]

mxgpu_nv.c
    148  r = xgpu_nv_poll_ack(adev);  in xgpu_nv_mailbox_trans_msg()
    350  if (amdgpu_sriov_runtime(adev) && !amdgpu_in_reset(adev))  in xgpu_nv_mailbox_rcv_irq()
    352  &adev->virt.flr_work),  in xgpu_nv_mailbox_rcv_irq()
    392  r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 135, &adev->virt.rcv_irq);  in xgpu_nv_mailbox_add_irq_id()
    396  r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 138, &adev->virt.ack_irq);  in xgpu_nv_mailbox_add_irq_id()
    398  amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0);  in xgpu_nv_mailbox_add_irq_id()
    409  r = amdgpu_irq_get(adev, &adev->virt.rcv_irq, 0);  in xgpu_nv_mailbox_get_irq()
    412  r = amdgpu_irq_get(adev, &adev->virt.ack_irq, 0);  in xgpu_nv_mailbox_get_irq()
    414  amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0);  in xgpu_nv_mailbox_get_irq()
    425  amdgpu_irq_put(adev, &adev->virt.ack_irq, 0);  in xgpu_nv_mailbox_put_irq()
    [all …]

amdgpu_umc.c
    37  dev_warn(adev->dev,  in amdgpu_umc_convert_error_address()
    55  dev_warn(adev->dev,  in amdgpu_umc_page_retirement_mca()
    91  if (adev->umc.ras && adev->umc.ras->ras_block.hw_ops &&  in amdgpu_umc_do_page_retirement()
    95  if (adev->umc.ras && adev->umc.ras->ras_block.hw_ops &&  in amdgpu_umc_do_page_retirement()
    115  if (adev->umc.ras &&  in amdgpu_umc_do_page_retirement()
    119  if (adev->umc.ras &&  in amdgpu_umc_do_page_retirement()
    194  if (adev->virt.ops && adev->virt.ops->ras_poison_handler)  in amdgpu_umc_poison_handler()
    195  adev->virt.ops->ras_poison_handler(adev);  in amdgpu_umc_poison_handler()
    220  r = amdgpu_irq_get(adev, &adev->gmc.ecc_irq, 0);  in amdgpu_umc_ras_late_init()
    226  if (adev->umc.ras &&  in amdgpu_umc_ras_late_init()
    [all …]

vega20_reg_init.c
    29  int vega20_reg_base_init(struct amdgpu_device *adev)  in vega20_reg_base_init() argument
    58  void vega20_doorbell_index_init(struct amdgpu_device *adev)  in vega20_doorbell_index_init() argument
    60  adev->doorbell_index.kiq = AMDGPU_VEGA20_DOORBELL_KIQ;  in vega20_doorbell_index_init()
    61  adev->doorbell_index.mec_ring0 = AMDGPU_VEGA20_DOORBELL_MEC_RING0;  in vega20_doorbell_index_init()
    62  adev->doorbell_index.mec_ring1 = AMDGPU_VEGA20_DOORBELL_MEC_RING1;  in vega20_doorbell_index_init()
    63  adev->doorbell_index.mec_ring2 = AMDGPU_VEGA20_DOORBELL_MEC_RING2;  in vega20_doorbell_index_init()
    64  adev->doorbell_index.mec_ring3 = AMDGPU_VEGA20_DOORBELL_MEC_RING3;  in vega20_doorbell_index_init()
    65  adev->doorbell_index.mec_ring4 = AMDGPU_VEGA20_DOORBELL_MEC_RING4;  in vega20_doorbell_index_init()
    66  adev->doorbell_index.mec_ring5 = AMDGPU_VEGA20_DOORBELL_MEC_RING5;  in vega20_doorbell_index_init()
    80  adev->doorbell_index.ih = AMDGPU_VEGA20_DOORBELL_IH;  in vega20_doorbell_index_init()
    [all …]

sienna_cichlid.c
    40  adev->pm.fw_version >= 0x3a5500 && !amdgpu_sriov_vf(adev))  in sienna_cichlid_is_mode2_default()
    85  r = adev->ip_blocks[i].version->funcs->suspend(adev);  in sienna_cichlid_mode2_suspend_ip()
    108  adev->gfxhub.funcs->mode2_save_regs(adev);  in sienna_cichlid_mode2_prepare_hwcontext()
    110  adev->gfxhub.funcs->halt(adev);  in sienna_cichlid_mode2_prepare_hwcontext()
    150  dev_err(adev->dev,  in sienna_cichlid_mode2_perform_reset()
    169  adev->gfxhub.funcs->mode2_restore_regs(adev);  in sienna_cichlid_mode2_restore_ip()
    170  adev->gfxhub.funcs->init(adev);  in sienna_cichlid_mode2_restore_ip()
    171  r = adev->gfxhub.funcs->gart_enable(adev);  in sienna_cichlid_mode2_restore_ip()
    179  r = adev->ip_blocks[i].version->funcs->resume(adev);  in sienna_cichlid_mode2_restore_ip()
    197  r = adev->ip_blocks[i].version->funcs->resume(adev);  in sienna_cichlid_mode2_restore_ip()
    [all …]

amdgpu_gart.c
    79  adev->dummy_page_addr = dma_map_page(&adev->pdev->dev, dummy_page, 0,  in amdgpu_gart_dummy_page_init()
    81  if (dma_mapping_error(&adev->pdev->dev, adev->dummy_page_addr)) {  in amdgpu_gart_dummy_page_init()
    100  dma_unmap_page(&adev->pdev->dev, adev->dummy_page_addr, PAGE_SIZE,  in amdgpu_gart_dummy_page_fini()
    120  return amdgpu_bo_create_kernel(adev, adev->gart.table_size, PAGE_SIZE,  in amdgpu_gart_table_vram_alloc()
    136  amdgpu_bo_free_kernel(&adev->gart.bo, NULL, (void *)&adev->gart.ptr);  in amdgpu_gart_table_vram_free()
    164  if (!adev->gart.ptr)  in amdgpu_gart_unbind()
    178  amdgpu_gmc_set_pte_pde(adev, adev->gart.ptr,  in amdgpu_gart_unbind()
    247  amdgpu_gart_map(adev, offset, pages, dma_addr, flags, adev->gart.ptr);  in amdgpu_gart_bind()
    295  adev->gart.num_cpu_pages = adev->gmc.gart_size / PAGE_SIZE;  in amdgpu_gart_init()
    296  adev->gart.num_gpu_pages = adev->gmc.gart_size / AMDGPU_GPU_PAGE_SIZE;  in amdgpu_gart_init()
    [all …]

mxgpu_ai.c
    133  trn = xgpu_ai_peek_ack(adev);  in xgpu_ai_mailbox_trans_msg()
    156  r = xgpu_ai_poll_ack(adev);  in xgpu_ai_mailbox_trans_msg()
    320  if (amdgpu_sriov_runtime(adev) && !amdgpu_in_reset(adev))  in xgpu_ai_mailbox_rcv_irq()
    365  r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 135, &adev->virt.rcv_irq);  in xgpu_ai_mailbox_add_irq_id()
    369  r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 138, &adev->virt.ack_irq);  in xgpu_ai_mailbox_add_irq_id()
    371  amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0);  in xgpu_ai_mailbox_add_irq_id()
    382  r = amdgpu_irq_get(adev, &adev->virt.rcv_irq, 0);  in xgpu_ai_mailbox_get_irq()
    385  r = amdgpu_irq_get(adev, &adev->virt.ack_irq, 0);  in xgpu_ai_mailbox_get_irq()
    387  amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0);  in xgpu_ai_mailbox_get_irq()
    398  amdgpu_irq_put(adev, &adev->virt.ack_irq, 0);  in xgpu_ai_mailbox_put_irq()
    [all …]

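Nearly all of the amdgpu hits above share one shape: the driver-wide device context (adev, a struct amdgpu_device) is handed to per-IP callback tables such as adev->nbio.funcs, adev->hdp.funcs, adev->mmhub.funcs and adev->gfx.rlc.funcs, so each ASIC generation can install its own implementations behind a common call site. The following is only a minimal, self-contained C sketch of that dispatch shape; the names (toy_device, toy_nbio_funcs, toy_get_memsize) are invented for illustration and are not the real amdgpu definitions.

#include <stdio.h>

struct toy_device; /* forward declaration so the funcs table can refer to it */

/* Per-IP callback table, analogous to the adev->nbio.funcs calls in the hits above. */
struct toy_nbio_funcs {
	unsigned int (*get_memsize)(struct toy_device *adev);
	void (*init_registers)(struct toy_device *adev);
};

/* Cut-down device context standing in for struct amdgpu_device. */
struct toy_device {
	struct {
		const struct toy_nbio_funcs *funcs;
	} nbio;
	unsigned int rev_id;
};

/* One concrete backend; a different ASIC would install a different table. */
static unsigned int toy_get_memsize(struct toy_device *adev)
{
	(void)adev;
	return 4096; /* pretend MiB of framebuffer */
}

static void toy_init_registers(struct toy_device *adev)
{
	(void)adev; /* program golden settings, remap HDP registers, ... */
}

static const struct toy_nbio_funcs toy_nbio = {
	.get_memsize    = toy_get_memsize,
	.init_registers = toy_init_registers,
};

int main(void)
{
	struct toy_device dev = { .nbio = { .funcs = &toy_nbio } };

	/* Same call shape as soc21_get_config_memsize() / soc21_common_hw_init(). */
	dev.nbio.funcs->init_registers(&dev);
	printf("memsize: %u MiB\n", dev.nbio.funcs->get_memsize(&dev));
	return 0;
}

The point of the indirection is that generic code (soc15.c, nv.c, soc21.c) never needs to know which NBIO, HDP or MMHUB block is present; it only calls through whatever table was installed at init time.
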
/linux-6.3-rc2/drivers/gpu/drm/amd/pm/

amdgpu_dpm.c
    37  ((adev)->powerplay.pp_funcs->enable_bapm((adev)->powerplay.pp_handle, (e)))
    47  mutex_lock(&adev->pm.mutex);  in amdgpu_dpm_get_sclk()
    63  mutex_lock(&adev->pm.mutex);  in amdgpu_dpm_get_mclk()
    83  mutex_lock(&adev->pm.mutex);  in amdgpu_dpm_set_powergating_by_smu()
    115  mutex_lock(&adev->pm.mutex);  in amdgpu_dpm_set_gfx_power_up_by_imu()
    199  if (adev->in_s3)  in amdgpu_dpm_is_baco_supported()
    307  if (amdgpu_sriov_vf(adev))  in amdgpu_dpm_switch_power_profile()
    429  amdgpu_dpm_enable_bapm(adev, adev->pm.ac_power);  in amdgpu_pm_acpi_event_handler()
    464  if (!adev->pm.dpm_enabled)  in amdgpu_dpm_compute_clocks()
    1149  adev->pm.dpm.current_ps = adev->pm.dpm.boot_ps;  in amdgpu_dpm_set_sclk_od()
    [all …]

/linux-6.3-rc2/sound/soc/intel/avs/

loader.c
    164  ret = snd_hdac_adsp_readl_poll(adev, AVS_FW_REG_STATUS(adev), reg,  in avs_cldma_load_basefw()
    176  ret = snd_hdac_adsp_readl_poll(adev, AVS_FW_REG_STATUS(adev), reg,  in avs_cldma_load_basefw()
    400  ret = snd_hdac_adsp_readl_poll(adev, AVS_FW_REG_STATUS(adev), reg,  in avs_hda_load_basefw()
    671  ret = avs_ipc_get_hw_config(adev, &adev->hw_cfg);  in avs_dsp_first_boot_firmware()
    677  ret = avs_ipc_get_fw_config(adev, &adev->fw_cfg);  in avs_dsp_first_boot_firmware()
    683  adev->core_refs = devm_kcalloc(adev->dev, adev->hw_cfg.dsp_cores,  in avs_dsp_first_boot_firmware()
    685  adev->lib_names = devm_kcalloc(adev->dev, adev->fw_cfg.max_libs_count,  in avs_dsp_first_boot_firmware()
    687  if (!adev->core_refs || !adev->lib_names)  in avs_dsp_first_boot_firmware()
    691  adev->lib_names[i] = devm_kzalloc(adev->dev, AVS_LIB_NAME_SIZE, GFP_KERNEL);  in avs_dsp_first_boot_firmware()
    692  if (!adev->lib_names[i])  in avs_dsp_first_boot_firmware()
    [all …]

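In the loader.c hits, adev is the avs_dev audio-DSP context rather than an amdgpu device, and the firmware-load paths repeatedly poll the DSP status register (AVS_FW_REG_STATUS) through snd_hdac_adsp_readl_poll() until the firmware reports readiness or a timeout expires. The sketch below shows only that generic poll-with-timeout shape in self-contained userspace C; the fake register, bit mask and helper names are invented for illustration and are not the avs driver's actual API.

#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <time.h>
#include <unistd.h>

/* Fake "status register"; in the driver this would be an MMIO read. */
static uint32_t fake_status_reg;

static uint32_t read_status(void)
{
	static int reads;

	/* Pretend the firmware flips the ready bit after a few reads. */
	if (++reads > 3)
		fake_status_reg |= 0x1;
	return fake_status_reg;
}

/* Poll until (reg & mask) == want or timeout_us expires.
 * Returns 0 on success, -ETIMEDOUT otherwise; *last holds the final value. */
static int poll_reg(uint32_t mask, uint32_t want, unsigned int sleep_us,
		    unsigned int timeout_us, uint32_t *last)
{
	struct timespec start, now;

	clock_gettime(CLOCK_MONOTONIC, &start);
	for (;;) {
		*last = read_status();
		if ((*last & mask) == want)
			return 0;

		clock_gettime(CLOCK_MONOTONIC, &now);
		int64_t elapsed_us =
			(int64_t)(now.tv_sec - start.tv_sec) * 1000000 +
			(now.tv_nsec - start.tv_nsec) / 1000;
		if (elapsed_us >= (int64_t)timeout_us)
			return -ETIMEDOUT;
		usleep(sleep_us);
	}
}

int main(void)
{
	uint32_t reg;
	int ret = poll_reg(0x1, 0x1, 1000, 100000, &reg);

	printf("poll result %d, status 0x%x\n", ret, reg);
	return 0;
}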