| /linux/drivers/gpu/drm/amd/amdgpu/ |
| amdgpu_device.c |
|   624  ret = adev->pcie_rreg(adev, reg * 4);   in amdgpu_device_rreg()
|   692  ret = adev->pcie_rreg(adev, reg * 4);   in amdgpu_device_xcc_rreg()
|   751  adev->pcie_wreg(adev, reg * 4, v);   in amdgpu_device_wreg()
|   780  adev->pcie_wreg(adev, reg * 4, v);   in amdgpu_mm_wreg_mmio_rlc()
|   823  adev->pcie_wreg(adev, reg * 4, v);   in amdgpu_device_xcc_wreg()
|  3100  adev = gpu_ins->adev;   in amdgpu_device_enable_mgpu_fan_boost()
|  3888  adev->sdma_timeout = adev->video_timeout = adev->gfx_timeout;   in amdgpu_device_get_job_timeout_settings()
|  3935  adev->sdma_timeout = adev->video_timeout = adev->gfx_timeout;   in amdgpu_device_get_job_timeout_settings()
|  4132  adev->gfx.enforce_isolation[i].adev = adev;   in amdgpu_device_init()
|  4167  adev->rmmio = ioremap(adev->rmmio_base, adev->rmmio_size);   in amdgpu_device_init()
|  [all …]
|
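The amdgpu_device.c hits above show the core register-access helpers handing a dword register index to an indirect read/write callback as a byte offset (reg * 4). Below is a minimal, self-contained sketch of that dispatch shape; the `fake_dev`/`fake_pcie_rreg` names are invented for illustration and are not the kernel implementation.

```c
/*
 * Simplified illustration of the callback-based register access pattern
 * visible above (names assumed, not kernel code): the core helper takes a
 * dword register index and passes the byte offset (reg * 4) to whichever
 * read callback the device registered.
 */
#include <stdint.h>
#include <stdio.h>

struct fake_dev {
	uint32_t (*pcie_rreg)(struct fake_dev *dev, uint32_t byte_off);
};

static uint32_t fake_pcie_rreg(struct fake_dev *dev, uint32_t byte_off)
{
	(void)dev;
	return 0xdead0000u | byte_off;	/* stand-in for an indirect PCIe read */
}

static uint32_t device_rreg(struct fake_dev *dev, uint32_t reg)
{
	return dev->pcie_rreg(dev, reg * 4);	/* dword index -> byte offset */
}

int main(void)
{
	struct fake_dev dev = { .pcie_rreg = fake_pcie_rreg };

	printf("reg 0x10 -> 0x%08x\n", (unsigned int)device_rreg(&dev, 0x10));
	return 0;
}
```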
| gmc_v10_0.c |
|   268  adev->hdp.funcs->flush_hdp(adev, NULL);   in gmc_v10_0_flush_gpu_tlb()
|   425  struct amdgpu_device *adev = ring->adev;   in gmc_v10_0_emit_pasid_mapping() local
|   722  adev->gmc.aper_base = adev->gfxhub.funcs->get_mc_fb_offset(adev);   in gmc_v10_0_mc_init()
|   777  adev->gfxhub.funcs->init(adev);   in gmc_v10_0_sw_init()
|   779  adev->mmhub.funcs->init(adev);   in gmc_v10_0_sw_init()
|   962  r = adev->mmhub.funcs->gart_enable(adev);   in gmc_v10_0_gart_enable()
|   966  adev->hdp.funcs->init_registers(adev);   in gmc_v10_0_gart_enable()
|   969  adev->hdp.funcs->flush_hdp(adev, NULL);   in gmc_v10_0_gart_enable()
|  1016  adev->umc.funcs->init_registers(adev);   in gmc_v10_0_hw_init()
|  1031  adev->gfxhub.funcs->gart_disable(adev);   in gmc_v10_0_gart_disable()
|  [all …]
|
| gmc_v11_0.c |
|    76  if (!adev->in_s0ix && (adev->in_runpm || adev->in_suspend ||   in gmc_v11_0_vm_fault_interrupt_state()
|   229  adev->hdp.funcs->flush_hdp(adev, NULL);   in gmc_v11_0_flush_gpu_tlb()
|   390  struct amdgpu_device *adev = ring->adev;   in gmc_v11_0_emit_pasid_mapping() local
|   660  adev->vm_manager.vram_base_offset = adev->mmhub.funcs->get_mc_fb_offset(adev);   in gmc_v11_0_vram_gtt_location()
|   691  adev->gmc.aper_base = adev->mmhub.funcs->get_mc_fb_offset(adev);   in gmc_v11_0_mc_init()
|   737  adev->mmhub.funcs->init(adev);   in gmc_v11_0_sw_init()
|   739  adev->gfxhub.funcs->init(adev);   in gmc_v11_0_sw_init()
|   891  r = adev->mmhub.funcs->gart_enable(adev);   in gmc_v11_0_gart_enable()
|   896  adev->hdp.funcs->flush_hdp(adev, NULL);   in gmc_v11_0_gart_enable()
|   926  adev->umc.funcs->init_registers(adev);   in gmc_v11_0_hw_init()
|  [all …]
|
| gmc_v9_0.c |
|   979  struct amdgpu_device *adev = ring->adev;   in gmc_v9_0_emit_flush_gpu_tlb() local
|  1027  struct amdgpu_device *adev = ring->adev;   in gmc_v9_0_emit_pasid_mapping() local
|  1464  adev->umc.active_mask = adev->aid_mask;   in gmc_v9_0_set_umc_funcs()
|  1574  adev->smuio.funcs->get_pkg_type(adev);   in gmc_v9_0_early_init()
|  1999  adev->gfxhub.funcs->init(adev);   in gmc_v9_0_sw_init()
|  2001  adev->mmhub.funcs->init(adev);   in gmc_v9_0_sw_init()
|  2351  adev->hdp.funcs->init_registers(adev);   in gmc_v9_0_hw_init()
|  2354  adev->hdp.funcs->flush_hdp(adev, NULL);   in gmc_v9_0_hw_init()
|  2373  adev->umc.funcs->init_registers(adev);   in gmc_v9_0_hw_init()
|  2395  adev->gfxhub.funcs->gart_disable(adev);   in gmc_v9_0_gart_disable()
|  [all …]
|
| gmc_v12_0.c |
|   297  adev->hdp.funcs->flush_hdp(adev, NULL);   in gmc_v12_0_flush_gpu_tlb()
|   410  struct amdgpu_device *adev = ring->adev;   in gmc_v12_0_emit_pasid_mapping() local
|   660  adev->vm_manager.vram_base_offset = adev->mmhub.funcs->get_mc_fb_offset(adev);   in gmc_v12_0_vram_gtt_location()
|   692  adev->gmc.aper_base = adev->mmhub.funcs->get_mc_fb_offset(adev);   in gmc_v12_0_mc_init()
|   739  adev->mmhub.funcs->init(adev);   in gmc_v12_0_sw_init()
|   741  adev->gfxhub.funcs->init(adev);   in gmc_v12_0_sw_init()
|   877  r = adev->mmhub.funcs->gart_enable(adev);   in gmc_v12_0_gart_enable()
|   882  adev->hdp.funcs->flush_hdp(adev, NULL);   in gmc_v12_0_gart_enable()
|   910  adev->umc.funcs->init_registers(adev);   in gmc_v12_0_hw_init()
|   924  adev->mmhub.funcs->gart_disable(adev);   in gmc_v12_0_gart_disable()
|  [all …]
|
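The gmc_v9_0.c through gmc_v12_0.c hits above all dispatch through per-IP-block "funcs" tables (hdp, mmhub, gfxhub, umc) hanging off the device structure, so the same GART/TLB flow works across hardware generations. The sketch below illustrates that ops-table pattern with invented `demo_*` names; it is a simplified stand-in, not the kernel's actual structures.

```c
/*
 * Minimal sketch (assumed names, not kernel code) of a per-IP-block
 * callback table: each block exposes a table of function pointers and the
 * generation-independent caller dispatches through it.
 */
#include <stdio.h>

struct demo_dev;

struct demo_hdp_funcs {
	void (*flush_hdp)(struct demo_dev *dev, void *ring);
	void (*init_registers)(struct demo_dev *dev);
};

struct demo_dev {
	const struct demo_hdp_funcs *hdp_funcs;
};

static void gen1_flush_hdp(struct demo_dev *dev, void *ring)
{
	(void)dev; (void)ring;
	puts("gen1: HDP cache flushed");
}

static void gen1_init_registers(struct demo_dev *dev)
{
	(void)dev;
	puts("gen1: HDP registers programmed");
}

static const struct demo_hdp_funcs gen1_hdp_funcs = {
	.flush_hdp = gen1_flush_hdp,
	.init_registers = gen1_init_registers,
};

/* Generation-independent caller, mirroring the shape of the gart_enable hits above. */
static void demo_gart_enable(struct demo_dev *dev)
{
	dev->hdp_funcs->init_registers(dev);
	dev->hdp_funcs->flush_hdp(dev, NULL);
}

int main(void)
{
	struct demo_dev dev = { .hdp_funcs = &gen1_hdp_funcs };

	demo_gart_enable(&dev);
	return 0;
}
```

Binding a different generation is then just a matter of pointing `hdp_funcs` at another table; the caller does not change.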
| soc24.c |
|    94  return adev->nbio.funcs->get_memsize(adev);   in soc24_get_config_memsize()
|   250  adev->nbio.funcs->program_aspm(adev);   in soc24_program_aspm()
|   344  adev->gfx.funcs->update_perfmon_mgcg(adev, !enter);   in soc24_update_umd_stable_pstate()
|   370  adev->nbio.funcs->set_reg_remap(adev);   in soc24_common_early_init()
|   386  adev->rev_id = amdgpu_device_get_rev_id(adev);   in soc24_common_early_init()
|   407  adev->external_rev_id = adev->rev_id + 0x40;   in soc24_common_early_init()
|   428  adev->external_rev_id = adev->rev_id + 0x50;   in soc24_common_early_init()
|   480  adev->nbio.funcs->init_registers(adev);   in soc24_common_hw_init()
|   486  adev->nbio.funcs->remap_hdp_registers(adev);   in soc24_common_hw_init()
|   489  adev->df.funcs->hw_init(adev);   in soc24_common_hw_init()
|  [all …]
|
| soc15.c |
|   320  return adev->nbio.funcs->get_memsize(adev);   in soc15_get_config_memsize()
|   589  if (adev->flags & AMD_IS_APU && adev->in_s3 &&   in soc15_need_reset_on_resume()
|   675  adev->nbio.funcs->program_aspm(adev);   in soc15_program_aspm()
|   936  adev->nbio.funcs->set_reg_remap(adev);   in soc15_common_early_init()
|  1007  adev->external_rev_id = adev->rev_id + 0x14;   in soc15_common_early_init()
|  1225  adev->df.funcs->sw_init(adev);   in soc15_common_sw_init()
|  1236  adev->df.funcs->sw_fini(adev);   in soc15_common_sw_fini()
|  1261  adev->nbio.funcs->init_registers(adev);   in soc15_common_hw_init()
|  1405  adev->hdp.funcs->update_clock_gating(adev,   in soc15_common_set_clockgating_state()
|  1423  adev->hdp.funcs->update_clock_gating(adev,   in soc15_common_set_clockgating_state()
|  [all …]
|
| amdgpu_rlc.c |
|    44  if (!adev->gfx.rlc.funcs->is_rlc_enabled(adev))   in amdgpu_gfx_rlc_enter_safe_mode()
|    47  if (adev->cg_flags &   in amdgpu_gfx_rlc_enter_safe_mode()
|    50  adev->gfx.rlc.funcs->set_safe_mode(adev, xcc_id);   in amdgpu_gfx_rlc_enter_safe_mode()
|    69  if (!adev->gfx.rlc.funcs->is_rlc_enabled(adev))   in amdgpu_gfx_rlc_exit_safe_mode()
|    72  if (adev->cg_flags &   in amdgpu_gfx_rlc_exit_safe_mode()
|    75  adev->gfx.rlc.funcs->unset_safe_mode(adev, xcc_id);   in amdgpu_gfx_rlc_exit_safe_mode()
|   105  amdgpu_gfx_rlc_fini(adev);   in amdgpu_gfx_rlc_init_sr()
|   134  adev->gfx.rlc.clear_state_size = dws = adev->gfx.rlc.funcs->get_csb_size(adev);   in amdgpu_gfx_rlc_init_csb()
|   162  r = amdgpu_bo_create_reserved(adev, adev->gfx.rlc.cp_table_size,   in amdgpu_gfx_rlc_init_cpt()
|   197  max_me = adev->gfx.rlc.funcs->get_cp_table_num(adev);   in amdgpu_gfx_rlc_setup_cp_table()
|  [all …]
|
| soc21.c |
|   229  return adev->nbio.funcs->get_memsize(adev);   in soc21_get_config_memsize()
|   446  adev->nbio.funcs->program_aspm(adev);   in soc21_program_aspm()
|   563  adev->nbio.funcs->set_reg_remap(adev);   in soc21_common_early_init()
|   628  adev->external_rev_id = adev->rev_id + 0x10;   in soc21_common_early_init()
|   654  adev->external_rev_id = adev->rev_id + 0x1;   in soc21_common_early_init()
|   669  adev->external_rev_id = adev->rev_id + 0x20;   in soc21_common_early_init()
|   694  adev->external_rev_id = adev->rev_id + 0x80;   in soc21_common_early_init()
|   754  adev->external_rev_id = adev->rev_id + 0xc1;   in soc21_common_early_init()
|   782  adev->external_rev_id = adev->rev_id + 0x40;   in soc21_common_early_init()
|   857  adev->nbio.funcs->init_registers(adev);   in soc21_common_hw_init()
|  [all …]
|
| amdgpu_virt.c |
|    68  adev->cg_flags = 0;   in amdgpu_virt_init_setting()
|    69  adev->pg_flags = 0;   in amdgpu_virt_init_setting()
|   198  if (!amdgpu_sriov_vf(adev) || adev->virt.mm_table.gpu_addr)   in amdgpu_virt_alloc_mm_table()
|   226  if (!amdgpu_sriov_vf(adev) || !adev->virt.mm_table.gpu_addr)   in amdgpu_virt_free_mm_table()
|   474  dev_err(adev->dev,   in amdgpu_virt_read_pf2vf_data()
|   490  dev_err(adev->dev,   in amdgpu_virt_read_pf2vf_data()
|   524  adev->unique_id =   in amdgpu_virt_read_pf2vf_data()
|   673  if (adev->mman.fw_vram_usage_va && adev->mman.drv_vram_usage_va) {   in amdgpu_virt_init_data_exchange()
|   675  } else if (adev->mman.fw_vram_usage_va || adev->mman.drv_vram_usage_va) {   in amdgpu_virt_init_data_exchange()
|   698  if (adev->mman.fw_vram_usage_va || adev->mman.drv_vram_usage_va) {   in amdgpu_virt_exchange_data()
|  [all …]
|
| nv.c |
|   308  return adev->nbio.funcs->get_memsize(adev);   in nv_get_config_memsize()
|   519  adev->nbio.funcs->program_aspm(adev);   in nv_program_aspm()
|   612  adev->nbio.funcs->enable_aspm(adev, !enter);   in nv_update_umd_stable_pstate()
|   641  adev->nbio.funcs->set_reg_remap(adev);   in nv_common_early_init()
|   686  adev->external_rev_id = adev->rev_id + 0x1;   in nv_common_early_init()
|   707  adev->external_rev_id = adev->rev_id + 20;   in nv_common_early_init()
|   736  adev->external_rev_id = adev->rev_id + 0xa;   in nv_common_early_init()
|   760  adev->external_rev_id = adev->rev_id + 0x28;   in nv_common_early_init()
|   779  adev->external_rev_id = adev->rev_id + 0x32;   in nv_common_early_init()
|  1004  adev->nbio.funcs->init_registers(adev);   in nv_common_hw_init()
|  [all …]
|
| amdgpu_discovery.c |
|   432  adev, adev->mman.discovery_bin);   in amdgpu_discovery_init()
|   439  dev_err(adev->dev,   in amdgpu_discovery_init()
|   730  adev->umc.active_mask = ((1 << adev->umc.node_inst_num) - 1) &   in amdgpu_discovery_read_from_harvest_table()
|   978  struct amdgpu_device *adev = ip_top->adev;   in ip_disc_release() local
|  1178  adev->ip_top = kzalloc(sizeof(*adev->ip_top), GFP_KERNEL);   in amdgpu_discovery_sysfs_init()
|  1179  if (!adev->ip_top)   in amdgpu_discovery_sysfs_init()
|  1182  adev->ip_top->adev = adev;   in amdgpu_discovery_sysfs_init()
|  1343  adev->vcn.vcn_config[adev->vcn.num_vcn_inst] =   in amdgpu_discovery_reg_base_init()
|  1602  dev_err(adev->dev,   in amdgpu_discovery_get_gfx_info()
|  2248  if (!(adev->asic_type == CHIP_VEGA20 && amdgpu_sriov_vf(adev)))   in amdgpu_discovery_set_mm_ip_blocks()
|  [all …]
|
| amdgpu_gfx.c |
|   218  int num_xcc = adev->gfx.xcc_mask ? NUM_XCC(adev->gfx.xcc_mask) : 1;   in amdgpu_gfx_compute_queue_acquire()
|   598  if (adev->gfx.kiq[0].ring.sched.ready && !adev->job_hang)   in amdgpu_gfx_disable_kgq()
|   899  r = amdgpu_irq_get(adev, &adev->gfx.cp_ecc_error_irq, 0);   in amdgpu_gfx_ras_late_init()
|   951  if (adev->gfx.ras && adev->gfx.ras->poison_consumption_handler)   in amdgpu_gfx_poison_consumption_handler()
|   952  return adev->gfx.ras->poison_consumption_handler(adev, entry);   in amdgpu_gfx_poison_consumption_handler()
|   969  if (adev->gfx.ras && adev->gfx.ras->ras_block.hw_ops &&   in amdgpu_gfx_process_ras_data_cb()
|  1399  struct amdgpu_device *adev = ring->adev;   in amdgpu_gfx_run_cleaner_shader_job() local
|  1487  if (adev->in_suspend && !adev->in_runpm)   in amdgpu_gfx_set_run_cleaner_shader()
|  1774  struct amdgpu_device *adev = isolation_work->adev;   in amdgpu_gfx_enforce_isolation_handler() local
|  1811  struct amdgpu_device *adev = ring->adev;   in amdgpu_gfx_enforce_isolation_ring_begin_use() local
|  [all …]
|
| amdgpu_bios.c |
|   106  adev->bios = NULL;   in amdgpu_read_bios_from_vram()
|   113  if (!adev->bios) {   in amdgpu_read_bios_from_vram()
|   134  adev->bios = NULL;   in amdgpu_read_bios()
|   162  if (!adev->asic_funcs || !adev->asic_funcs->read_bios_from_rom)   in amdgpu_read_bios_from_rom()
|   180  if (!adev->bios) {   in amdgpu_read_bios_from_rom()
|   187  amdgpu_asic_read_bios_from_rom(adev, adev->bios, len);   in amdgpu_read_bios_from_rom()
|   209  if (!adev->bios)   in amdgpu_read_platform_bios()
|   495  adev->is_atom_fw = adev->asic_type >= CHIP_VEGA10;   in amdgpu_get_bios()
|   526  adev->smuio.funcs->get_rom_index_offset(adev);   in amdgpu_soc15_read_bios_from_rom()
|   528  adev->smuio.funcs->get_rom_data_offset(adev);   in amdgpu_soc15_read_bios_from_rom()
|  [all …]
|
| amdgpu_acp.c |
|   105  adev->acp.parent = adev->dev;   in acp_sw_init()
|   126  void *adev;   member
|   136  adev = apd->adev;   in acp_poweroff()
|   153  adev = apd->adev;   in acp_poweron()
|   251  if (adev->rmmio_size == 0 || adev->rmmio_size < 0x5289)   in acp_hw_init()
|   262  adev->acp.acp_genpd->adev = adev;   in acp_hw_init()
|   308  adev->acp.acp_res[2].end = adev->acp.acp_res[2].start;   in acp_hw_init()
|   312  adev->acp.acp_cell[0].resources = &adev->acp.acp_res[0];   in acp_hw_init()
|   313  adev->acp.acp_cell[0].platform_data = &adev->asic_type;   in acp_hw_init()
|   416  adev->acp.acp_res[4].end = adev->acp.acp_res[4].start;   in acp_hw_init()
|  [all …]
|
| mxgpu_nv.c |
|    70  xgpu_nv_mailbox_send_ack(adev);   in xgpu_nv_mailbox_rcv_msg()
|   152  r = xgpu_nv_poll_ack(adev);   in xgpu_nv_mailbox_trans_msg()
|   363  &adev->virt.flr_work),   in xgpu_nv_mailbox_rcv_irq()
|   403  r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 135, &adev->virt.rcv_irq);   in xgpu_nv_mailbox_add_irq_id()
|   407  r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 138, &adev->virt.ack_irq);   in xgpu_nv_mailbox_add_irq_id()
|   409  amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0);   in xgpu_nv_mailbox_add_irq_id()
|   420  r = amdgpu_irq_get(adev, &adev->virt.rcv_irq, 0);   in xgpu_nv_mailbox_get_irq()
|   423  r = amdgpu_irq_get(adev, &adev->virt.ack_irq, 0);   in xgpu_nv_mailbox_get_irq()
|   425  amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0);   in xgpu_nv_mailbox_get_irq()
|   436  amdgpu_irq_put(adev, &adev->virt.ack_irq, 0);   in xgpu_nv_mailbox_put_irq()
|  [all …]
|
| amdgpu_amdkfd.c |
|    77  adev->kfd.dev = kgd2kfd_probe(adev, vf);   in amdgpu_amdkfd_device_probe()
|   152  if (!adev->kfd.init_complete || adev->kfd.client.dev)   in amdgpu_amdkfd_drm_client_create()
|   155  ret = drm_client_init(&adev->ddev, &adev->kfd.client, "kfd",   in amdgpu_amdkfd_drm_client_create()
|   226  adev->kfd.init_complete = kgd2kfd_device_init(adev->kfd.dev,   in amdgpu_amdkfd_device_init()
|   247  if (adev->kfd.dev)   in amdgpu_amdkfd_interrupt()
|   253  if (adev->kfd.dev)   in amdgpu_amdkfd_suspend()
|   261  if (adev->kfd.dev)   in amdgpu_amdkfd_resume()
|   272  if (adev->kfd.dev)   in amdgpu_amdkfd_pre_reset()
|   282  if (adev->kfd.dev)   in amdgpu_amdkfd_post_reset()
|   456  if (adev->gmc.real_vram_size == adev->gmc.visible_vram_size)   in amdgpu_amdkfd_get_local_mem_info()
|  [all …]
|
| amdgpu_irq.c |
|   170  ret = amdgpu_ih_process(adev, &adev->irq.ih);   in amdgpu_irq_handler()
|   191  amdgpu_ih_process(adev, &adev->irq.ih1);   in amdgpu_irq_handle_ih1()
|   206  amdgpu_ih_process(adev, &adev->irq.ih2);   in amdgpu_irq_handle_ih2()
|   221  amdgpu_ih_process(adev, &adev->irq.ih_soft);   in amdgpu_irq_handle_ih_soft()
|   315  adev->irq.irq = irq;   in amdgpu_irq_init()
|   332  free_irq(adev->irq.irq, adev_to_drm(adev));   in amdgpu_irq_fini_hw()
|   338  amdgpu_ih_ring_fini(adev, &adev->irq.ih_soft);   in amdgpu_irq_fini_hw()
|   339  amdgpu_ih_ring_fini(adev, &adev->irq.ih);   in amdgpu_irq_fini_hw()
|   340  amdgpu_ih_ring_fini(adev, &adev->irq.ih1);   in amdgpu_irq_fini_hw()
|   341  amdgpu_ih_ring_fini(adev, &adev->irq.ih2);   in amdgpu_irq_fini_hw()
|  [all …]
|
| vi.c |
|  1270  !(ASICID_IS_P23(adev->pdev->device, adev->pdev->revision))) ||   in vi_program_aspm()
|  1271  ASIC_IS_P22(adev->asic_type, adev->external_rev_id)) {   in vi_program_aspm()
|  1480  adev->rev_id = vi_get_rev_id(adev);   in vi_common_early_init()
|  1507  adev->external_rev_id = adev->rev_id + 0x3c;   in vi_common_early_init()
|  1524  adev->external_rev_id = adev->rev_id + 0x14;   in vi_common_early_init()
|  1547  adev->external_rev_id = adev->rev_id + 0x5A;   in vi_common_early_init()
|  1570  adev->external_rev_id = adev->rev_id + 0x50;   in vi_common_early_init()
|  1593  adev->external_rev_id = adev->rev_id + 0x64;   in vi_common_early_init()
|  1617  adev->external_rev_id = adev->rev_id + 0x6E;   in vi_common_early_init()
|  1644  adev->external_rev_id = adev->rev_id + 0x1;   in vi_common_early_init()
|  [all …]
|
| amdgpu_virt.h |
|   286  #define amdgpu_sriov_vf(adev) \   argument
|   289  #define amdgpu_sriov_bios(adev) \   argument
|   296  (amdgpu_sriov_vf((adev)) && !amdgpu_sriov_runtime((adev)))
|   299  (amdgpu_sriov_vf((adev)) && \
|   303  (amdgpu_sriov_vf((adev)) && \
|   307  (amdgpu_sriov_vf((adev)) && \
|   311  (amdgpu_sriov_vf((adev)) && \
|   315  (amdgpu_sriov_reg_indirect_mmhub(adev) || amdgpu_sriov_reg_indirect_gc(adev))
|   317  #define amdgpu_passthrough(adev) \   argument
|   337  ((!amdgpu_in_reset(adev)) && adev->virt.tdr_debug)
|  [all …]
|
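The amdgpu_virt.h hits above, together with the call sites in amdgpu_virt.c, amdgpu_jpeg.c and amdgpu_dpm.c, show the common idiom of gating code paths on SR-IOV helper macros such as amdgpu_sriov_vf(adev). The sketch below only illustrates that general shape; the `demo_*` fields and capability bits are invented, not the definitions from amdgpu_virt.h.

```c
/*
 * Illustrative sketch only (flag names and fields assumed): the SR-IOV
 * helpers boil down to testing capability bits on the device, and the
 * composite macros AND several such tests together.
 */
#include <stdio.h>

#define DEMO_CAP_IS_VF   (1u << 0)
#define DEMO_CAP_RUNTIME (1u << 1)

struct demo_virt { unsigned int caps; };
struct demo_dev  { struct demo_virt virt; };

#define demo_sriov_vf(dev)      (((dev)->virt.caps & DEMO_CAP_IS_VF) != 0)
#define demo_sriov_runtime(dev) (((dev)->virt.caps & DEMO_CAP_RUNTIME) != 0)
/* Composite check, similar in shape to the two-condition macros listed above. */
#define demo_sriov_fullaccess(dev) (demo_sriov_vf(dev) && !demo_sriov_runtime(dev))

int main(void)
{
	struct demo_dev dev = { .virt = { .caps = DEMO_CAP_IS_VF } };

	if (demo_sriov_vf(&dev))
		puts("running as an SR-IOV virtual function");
	if (demo_sriov_fullaccess(&dev))
		puts("VF currently has full register access");
	return 0;
}
```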
| amdgpu_jpeg.c |
|    61  dev_err(adev->dev,   in amdgpu_jpeg_sw_init()
|   107  struct amdgpu_device *adev =   in amdgpu_jpeg_idle_work_handler() local
|   129  struct amdgpu_device *adev = ring->adev;   in amdgpu_jpeg_ring_begin_use() local
|   148  struct amdgpu_device *adev = ring->adev;   in amdgpu_jpeg_dec_ring_test_ring() local
|   154  if (amdgpu_sriov_vf(adev))   in amdgpu_jpeg_dec_ring_test_ring()
|   176  if (i >= adev->usec_timeout)   in amdgpu_jpeg_dec_ring_test_ring()
|   185  struct amdgpu_device *adev = ring->adev;   in amdgpu_jpeg_dec_set_reg() local
|   224  struct amdgpu_device *adev = ring->adev;   in amdgpu_jpeg_dec_ring_test_ib() local
|   295  r = amdgpu_irq_get(adev, &adev->jpeg.inst[i].ras_poison_irq, 0);   in amdgpu_jpeg_ras_late_init()
|   312  if (!adev->jpeg.ras)   in amdgpu_jpeg_ras_sw_init()
|  [all …]
|
| mxgpu_ai.c |
|    73  xgpu_ai_mailbox_send_ack(adev);   in xgpu_ai_mailbox_rcv_msg()
|   133  trn = xgpu_ai_peek_ack(adev);   in xgpu_ai_mailbox_trans_msg()
|   156  r = xgpu_ai_poll_ack(adev);   in xgpu_ai_mailbox_trans_msg()
|   363  r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 135, &adev->virt.rcv_irq);   in xgpu_ai_mailbox_add_irq_id()
|   367  r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 138, &adev->virt.ack_irq);   in xgpu_ai_mailbox_add_irq_id()
|   369  amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0);   in xgpu_ai_mailbox_add_irq_id()
|   380  r = amdgpu_irq_get(adev, &adev->virt.rcv_irq, 0);   in xgpu_ai_mailbox_get_irq()
|   383  r = amdgpu_irq_get(adev, &adev->virt.ack_irq, 0);   in xgpu_ai_mailbox_get_irq()
|   385  amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0);   in xgpu_ai_mailbox_get_irq()
|   396  amdgpu_irq_put(adev, &adev->virt.ack_irq, 0);   in xgpu_ai_mailbox_put_irq()
|  [all …]
|
| amdgpu_ras.c |
|   152  if (adev && amdgpu_ras_get_context(adev))   in amdgpu_ras_set_error_query_ready()
|   158  if (adev && amdgpu_ras_get_context(adev))   in amdgpu_ras_get_error_query_ready()
|   694  obj->adev = adev;   in amdgpu_ras_create_obj()
|  1685  struct amdgpu_device *adev = con->adev;   in amdgpu_ras_sysfs_badpages_read() local
|  2118  struct amdgpu_device *adev = obj->adev;   in amdgpu_ras_interrupt_poison_consumption_handler() local
|  2171  struct amdgpu_device *adev = obj->adev;   in amdgpu_ras_interrupt_poison_creation_handler() local
|  2561  struct amdgpu_device *adev = ras->adev;   in amdgpu_ras_do_recovery() local
|  2930  struct amdgpu_device *adev = con->adev;   in amdgpu_ras_do_page_retirement() local
|  3164  con->adev = adev;   in amdgpu_ras_recovery_init()
|  3482  struct amdgpu_device *adev = con->adev;   in amdgpu_ras_counte_dw() local
|  [all …]
|
| vega20_reg_init.c |
|    29  int vega20_reg_base_init(struct amdgpu_device *adev)   in vega20_reg_base_init() argument
|    58  void vega20_doorbell_index_init(struct amdgpu_device *adev)   in vega20_doorbell_index_init() argument
|    60  adev->doorbell_index.kiq = AMDGPU_VEGA20_DOORBELL_KIQ;   in vega20_doorbell_index_init()
|    61  adev->doorbell_index.mec_ring0 = AMDGPU_VEGA20_DOORBELL_MEC_RING0;   in vega20_doorbell_index_init()
|    62  adev->doorbell_index.mec_ring1 = AMDGPU_VEGA20_DOORBELL_MEC_RING1;   in vega20_doorbell_index_init()
|    63  adev->doorbell_index.mec_ring2 = AMDGPU_VEGA20_DOORBELL_MEC_RING2;   in vega20_doorbell_index_init()
|    64  adev->doorbell_index.mec_ring3 = AMDGPU_VEGA20_DOORBELL_MEC_RING3;   in vega20_doorbell_index_init()
|    65  adev->doorbell_index.mec_ring4 = AMDGPU_VEGA20_DOORBELL_MEC_RING4;   in vega20_doorbell_index_init()
|    66  adev->doorbell_index.mec_ring5 = AMDGPU_VEGA20_DOORBELL_MEC_RING5;   in vega20_doorbell_index_init()
|    80  adev->doorbell_index.ih = AMDGPU_VEGA20_DOORBELL_IH;   in vega20_doorbell_index_init()
|  [all …]
|
| /linux/drivers/gpu/drm/amd/pm/ |
| amdgpu_dpm.c |
|    37  ((adev)->powerplay.pp_funcs->enable_bapm((adev)->powerplay.pp_handle, (e)))
|    39  #define amdgpu_dpm_is_legacy_dpm(adev) ((adev)->powerplay.pp_handle == (adev))   argument
|    49  mutex_lock(&adev->pm.mutex);   in amdgpu_dpm_get_sclk()
|    65  mutex_lock(&adev->pm.mutex);   in amdgpu_dpm_get_mclk()
|    85  mutex_lock(&adev->pm.mutex);   in amdgpu_dpm_set_powergating_by_smu()
|   173  if (amdgpu_sriov_vf(adev))   in amdgpu_dpm_set_mp1_state()
|   223  if (adev->in_s3)   in amdgpu_dpm_is_baco_supported()
|   468  amdgpu_dpm_enable_bapm(adev, adev->pm.ac_power);   in amdgpu_pm_acpi_event_handler()
|   626  (is_support_sw_smu(adev) && (adev->flags & AMD_IS_APU)))   in amdgpu_pm_load_smu_firmware()
|  1242  adev->pm.dpm.current_ps = adev->pm.dpm.boot_ps;   in amdgpu_dpm_set_sclk_od()
|  [all …]
|
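The amdgpu_dpm.c hits above repeat one pattern: take the power-management mutex, call through the powerplay function table, release the lock. Below is a hedged userspace sketch of that lock-then-dispatch shape (the `demo_*` types and the pthread mutex are stand-ins for illustration, not the kernel's locking primitives).

```c
/*
 * Sketch (invented names) of the serialize-then-dispatch pattern seen in
 * the amdgpu_dpm.c hits: every query takes a mutex, calls through a
 * function table, then drops the lock.
 */
#include <pthread.h>
#include <stdio.h>

struct demo_pm_dev;

struct demo_pp_funcs {
	int (*get_sclk)(struct demo_pm_dev *dev, unsigned int *clk_khz);
};

struct demo_pm_dev {
	pthread_mutex_t pm_mutex;
	const struct demo_pp_funcs *pp_funcs;
};

static int demo_get_sclk_impl(struct demo_pm_dev *dev, unsigned int *clk_khz)
{
	(void)dev;
	*clk_khz = 1800000;	/* pretend the firmware reported 1.8 GHz */
	return 0;
}

static const struct demo_pp_funcs demo_funcs = { .get_sclk = demo_get_sclk_impl };

/* Serialized wrapper, mirroring the mutex_lock(&adev->pm.mutex) call sites above. */
static int demo_dpm_get_sclk(struct demo_pm_dev *dev, unsigned int *clk_khz)
{
	int ret;

	pthread_mutex_lock(&dev->pm_mutex);
	ret = dev->pp_funcs->get_sclk(dev, clk_khz);
	pthread_mutex_unlock(&dev->pm_mutex);
	return ret;
}

int main(void)
{
	struct demo_pm_dev dev = { .pp_funcs = &demo_funcs };
	unsigned int clk;

	pthread_mutex_init(&dev.pm_mutex, NULL);
	if (!demo_dpm_get_sclk(&dev, &clk))
		printf("sclk: %u kHz\n", clk);
	pthread_mutex_destroy(&dev.pm_mutex);
	return 0;
}
```

Build with -lpthread; the point is only the shape of the wrapper, not the clock value it returns.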