Lines Matching refs:adev (each hit shows the source line number and the enclosing function; 'argument' marks the macro and function definitions in which adev appears as a parameter)

36 #define amdgpu_dpm_enable_bapm(adev, e) \  argument
37 ((adev)->powerplay.pp_funcs->enable_bapm((adev)->powerplay.pp_handle, (e)))
39 #define amdgpu_dpm_is_legacy_dpm(adev) ((adev)->powerplay.pp_handle == (adev)) argument
41 int amdgpu_dpm_get_sclk(struct amdgpu_device *adev, bool low) in amdgpu_dpm_get_sclk() argument
43 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_sclk()
49 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_sclk()
50 ret = pp_funcs->get_sclk((adev)->powerplay.pp_handle, in amdgpu_dpm_get_sclk()
52 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_sclk()
57 int amdgpu_dpm_get_mclk(struct amdgpu_device *adev, bool low) in amdgpu_dpm_get_mclk() argument
59 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_mclk()
65 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_mclk()
66 ret = pp_funcs->get_mclk((adev)->powerplay.pp_handle, in amdgpu_dpm_get_mclk()
68 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_mclk()
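The two clock-query wrappers above follow the pattern used throughout this file: resolve adev->powerplay.pp_funcs, take adev->pm.mutex, call the backend through adev->powerplay.pp_handle, then drop the lock. Reassembled from the hits (the unlisted lines, such as the NULL-callback check, are assumptions), amdgpu_dpm_get_sclk looks roughly like:

int amdgpu_dpm_get_sclk(struct amdgpu_device *adev, bool low)
{
        const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs;
        int ret = 0;

        /* assumed: bail out when the backend does not provide the callback */
        if (!pp_funcs->get_sclk)
                return 0;

        mutex_lock(&adev->pm.mutex);
        ret = pp_funcs->get_sclk(adev->powerplay.pp_handle, low);
        mutex_unlock(&adev->pm.mutex);

        return ret;
}

amdgpu_dpm_get_mclk differs only in the callback it dispatches to.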
73 int amdgpu_dpm_set_powergating_by_smu(struct amdgpu_device *adev, in amdgpu_dpm_set_powergating_by_smu() argument
79 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_powergating_by_smu()
83 if (atomic_read(&adev->pm.pwr_state[block_type]) == pwr_state && in amdgpu_dpm_set_powergating_by_smu()
84 (!is_vcn || adev->vcn.num_vcn_inst == 1)) { in amdgpu_dpm_set_powergating_by_smu()
85 dev_dbg(adev->dev, "IP block%d already in the target %s state!", in amdgpu_dpm_set_powergating_by_smu()
90 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_powergating_by_smu()
104 (adev)->powerplay.pp_handle, block_type, gate, 0)); in amdgpu_dpm_set_powergating_by_smu()
109 (adev)->powerplay.pp_handle, block_type, gate, inst)); in amdgpu_dpm_set_powergating_by_smu()
116 atomic_set(&adev->pm.pwr_state[block_type], pwr_state); in amdgpu_dpm_set_powergating_by_smu()
118 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_powergating_by_smu()
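Hits 83-118 outline the gating wrapper: it skips requests that would not change anything by checking a cached per-block power state, dispatches set_powergating_by_smu under the mutex (passing the VCN instance only for the VCN block), and caches the new state on success. A condensed sketch of that flow; the enum values and the collapse of the per-block switch into a single call are inferred, not copied from the source:

        enum ip_power_state pwr_state = gate ? POWER_STATE_OFF : POWER_STATE_ON;
        bool is_vcn = (block_type == AMD_IP_BLOCK_TYPE_VCN);

        /* nothing to do if the block is already in the requested state */
        if (atomic_read(&adev->pm.pwr_state[block_type]) == pwr_state &&
            (!is_vcn || adev->vcn.num_vcn_inst == 1)) {
                dev_dbg(adev->dev, "IP block%d already in the target %s state!",
                        block_type, gate ? "gate" : "ungate");
                return 0;
        }

        mutex_lock(&adev->pm.mutex);
        if (pp_funcs && pp_funcs->set_powergating_by_smu)
                ret = pp_funcs->set_powergating_by_smu(adev->powerplay.pp_handle,
                                                       block_type, gate,
                                                       is_vcn ? inst : 0);
        if (!ret)
                atomic_set(&adev->pm.pwr_state[block_type], pwr_state);
        mutex_unlock(&adev->pm.mutex);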
123 int amdgpu_dpm_set_gfx_power_up_by_imu(struct amdgpu_device *adev) in amdgpu_dpm_set_gfx_power_up_by_imu() argument
125 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_set_gfx_power_up_by_imu()
128 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_gfx_power_up_by_imu()
130 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_gfx_power_up_by_imu()
137 int amdgpu_dpm_baco_enter(struct amdgpu_device *adev) in amdgpu_dpm_baco_enter() argument
139 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_baco_enter()
140 void *pp_handle = adev->powerplay.pp_handle; in amdgpu_dpm_baco_enter()
146 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_baco_enter()
151 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_baco_enter()
156 int amdgpu_dpm_baco_exit(struct amdgpu_device *adev) in amdgpu_dpm_baco_exit() argument
158 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_baco_exit()
159 void *pp_handle = adev->powerplay.pp_handle; in amdgpu_dpm_baco_exit()
165 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_baco_exit()
170 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_baco_exit()
175 int amdgpu_dpm_set_mp1_state(struct amdgpu_device *adev, in amdgpu_dpm_set_mp1_state() argument
179 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_mp1_state()
183 if (amdgpu_sriov_vf(adev)) in amdgpu_dpm_set_mp1_state()
184 adev->pm.dpm_enabled = false; in amdgpu_dpm_set_mp1_state()
186 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_mp1_state()
189 adev->powerplay.pp_handle, in amdgpu_dpm_set_mp1_state()
192 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_mp1_state()
198 int amdgpu_dpm_notify_rlc_state(struct amdgpu_device *adev, bool en) in amdgpu_dpm_notify_rlc_state() argument
201 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_notify_rlc_state()
204 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_notify_rlc_state()
207 adev->powerplay.pp_handle, in amdgpu_dpm_notify_rlc_state()
210 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_notify_rlc_state()
216 int amdgpu_dpm_is_baco_supported(struct amdgpu_device *adev) in amdgpu_dpm_is_baco_supported() argument
218 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_is_baco_supported()
219 void *pp_handle = adev->powerplay.pp_handle; in amdgpu_dpm_is_baco_supported()
233 if (adev->in_s3) in amdgpu_dpm_is_baco_supported()
236 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_is_baco_supported()
240 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_is_baco_supported()
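amdgpu_dpm_baco_enter()/amdgpu_dpm_baco_exit() and amdgpu_dpm_is_baco_supported() are meant to be used together: check support, enter the Bus-Active-Chip-Off state, and leave it again on resume. A hypothetical runtime-suspend style caller (the function and its error policy are made up for illustration; only the amdgpu_dpm_* calls come from this file):

static int example_runtime_suspend(struct amdgpu_device *adev)
{
        int ret;

        if (!amdgpu_dpm_is_baco_supported(adev))
                return -EOPNOTSUPP;

        /* enters BACO via the pp set_asic_baco_state callback (not shown in the hits) */
        ret = amdgpu_dpm_baco_enter(adev);
        if (ret)
                return ret;

        /* ... device sits in BACO until resume ... */

        return amdgpu_dpm_baco_exit(adev);
}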
245 int amdgpu_dpm_mode2_reset(struct amdgpu_device *adev) in amdgpu_dpm_mode2_reset() argument
247 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_mode2_reset()
248 void *pp_handle = adev->powerplay.pp_handle; in amdgpu_dpm_mode2_reset()
254 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_mode2_reset()
258 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_mode2_reset()
263 int amdgpu_dpm_enable_gfx_features(struct amdgpu_device *adev) in amdgpu_dpm_enable_gfx_features() argument
265 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_enable_gfx_features()
266 void *pp_handle = adev->powerplay.pp_handle; in amdgpu_dpm_enable_gfx_features()
272 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_enable_gfx_features()
276 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_enable_gfx_features()
281 int amdgpu_dpm_baco_reset(struct amdgpu_device *adev) in amdgpu_dpm_baco_reset() argument
283 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_baco_reset()
284 void *pp_handle = adev->powerplay.pp_handle; in amdgpu_dpm_baco_reset()
290 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_baco_reset()
301 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_baco_reset()
305 bool amdgpu_dpm_is_mode1_reset_supported(struct amdgpu_device *adev) in amdgpu_dpm_is_mode1_reset_supported() argument
307 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_is_mode1_reset_supported()
310 if (is_support_sw_smu(adev)) { in amdgpu_dpm_is_mode1_reset_supported()
311 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_is_mode1_reset_supported()
313 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_is_mode1_reset_supported()
319 int amdgpu_dpm_mode1_reset(struct amdgpu_device *adev) in amdgpu_dpm_mode1_reset() argument
321 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_mode1_reset()
324 if (is_support_sw_smu(adev)) { in amdgpu_dpm_mode1_reset()
325 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_mode1_reset()
327 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_mode1_reset()
333 bool amdgpu_dpm_is_link_reset_supported(struct amdgpu_device *adev) in amdgpu_dpm_is_link_reset_supported() argument
335 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_is_link_reset_supported()
338 if (is_support_sw_smu(adev)) { in amdgpu_dpm_is_link_reset_supported()
339 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_is_link_reset_supported()
341 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_is_link_reset_supported()
347 int amdgpu_dpm_link_reset(struct amdgpu_device *adev) in amdgpu_dpm_link_reset() argument
349 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_link_reset()
352 if (is_support_sw_smu(adev)) { in amdgpu_dpm_link_reset()
353 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_link_reset()
355 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_link_reset()
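The mode1/link reset helpers and their *_is_supported() queries all share one shape: act only when the software SMU path is in use, and forward to the smu_* implementation while holding the PM mutex. Reassembled for mode1 (the smu_mode1_reset() call and the -EOPNOTSUPP default sit on unlisted lines and are inferred):

int amdgpu_dpm_mode1_reset(struct amdgpu_device *adev)
{
        struct smu_context *smu = adev->powerplay.pp_handle;
        int ret = -EOPNOTSUPP;

        if (is_support_sw_smu(adev)) {
                mutex_lock(&adev->pm.mutex);
                ret = smu_mode1_reset(smu);
                mutex_unlock(&adev->pm.mutex);
        }

        return ret;
}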
361 int amdgpu_dpm_switch_power_profile(struct amdgpu_device *adev, in amdgpu_dpm_switch_power_profile() argument
365 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_switch_power_profile()
368 if (amdgpu_sriov_vf(adev)) in amdgpu_dpm_switch_power_profile()
372 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_switch_power_profile()
374 adev->powerplay.pp_handle, type, en); in amdgpu_dpm_switch_power_profile()
375 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_switch_power_profile()
381 int amdgpu_dpm_pause_power_profile(struct amdgpu_device *adev, in amdgpu_dpm_pause_power_profile() argument
384 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_pause_power_profile()
387 if (amdgpu_sriov_vf(adev)) in amdgpu_dpm_pause_power_profile()
391 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_pause_power_profile()
393 adev->powerplay.pp_handle, pause); in amdgpu_dpm_pause_power_profile()
394 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_pause_power_profile()
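Both profile helpers return early on SR-IOV virtual functions (the amdgpu_sriov_vf() hits), so guests never touch the workload profile. A hypothetical caller that votes for the compute profile around a burst of work (the example_* wrappers are made up; the profile enum is the one in-tree callers pass):

static void example_begin_compute(struct amdgpu_device *adev)
{
        /* add a vote for the COMPUTE workload profile */
        amdgpu_dpm_switch_power_profile(adev, PP_SMC_POWER_PROFILE_COMPUTE, true);
}

static void example_end_compute(struct amdgpu_device *adev)
{
        /* drop the vote once the queues go idle again */
        amdgpu_dpm_switch_power_profile(adev, PP_SMC_POWER_PROFILE_COMPUTE, false);
}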
400 int amdgpu_dpm_set_xgmi_pstate(struct amdgpu_device *adev, in amdgpu_dpm_set_xgmi_pstate() argument
403 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_xgmi_pstate()
407 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_xgmi_pstate()
408 ret = pp_funcs->set_xgmi_pstate(adev->powerplay.pp_handle, in amdgpu_dpm_set_xgmi_pstate()
410 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_xgmi_pstate()
416 int amdgpu_dpm_set_df_cstate(struct amdgpu_device *adev, in amdgpu_dpm_set_df_cstate() argument
420 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_df_cstate()
421 void *pp_handle = adev->powerplay.pp_handle; in amdgpu_dpm_set_df_cstate()
424 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_df_cstate()
426 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_df_cstate()
432 ssize_t amdgpu_dpm_get_pm_policy_info(struct amdgpu_device *adev, in amdgpu_dpm_get_pm_policy_info() argument
435 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_get_pm_policy_info()
438 if (is_support_sw_smu(adev)) { in amdgpu_dpm_get_pm_policy_info()
439 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_pm_policy_info()
441 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_pm_policy_info()
447 int amdgpu_dpm_set_pm_policy(struct amdgpu_device *adev, int policy_type, in amdgpu_dpm_set_pm_policy() argument
450 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_set_pm_policy()
453 if (is_support_sw_smu(adev)) { in amdgpu_dpm_set_pm_policy()
454 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_pm_policy()
456 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_pm_policy()
462 int amdgpu_dpm_enable_mgpu_fan_boost(struct amdgpu_device *adev) in amdgpu_dpm_enable_mgpu_fan_boost() argument
464 void *pp_handle = adev->powerplay.pp_handle; in amdgpu_dpm_enable_mgpu_fan_boost()
466 adev->powerplay.pp_funcs; in amdgpu_dpm_enable_mgpu_fan_boost()
470 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_enable_mgpu_fan_boost()
472 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_enable_mgpu_fan_boost()
478 int amdgpu_dpm_set_clockgating_by_smu(struct amdgpu_device *adev, in amdgpu_dpm_set_clockgating_by_smu() argument
481 void *pp_handle = adev->powerplay.pp_handle; in amdgpu_dpm_set_clockgating_by_smu()
483 adev->powerplay.pp_funcs; in amdgpu_dpm_set_clockgating_by_smu()
487 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_clockgating_by_smu()
490 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_clockgating_by_smu()
496 int amdgpu_dpm_smu_i2c_bus_access(struct amdgpu_device *adev, in amdgpu_dpm_smu_i2c_bus_access() argument
499 void *pp_handle = adev->powerplay.pp_handle; in amdgpu_dpm_smu_i2c_bus_access()
501 adev->powerplay.pp_funcs; in amdgpu_dpm_smu_i2c_bus_access()
505 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_smu_i2c_bus_access()
508 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_smu_i2c_bus_access()
514 void amdgpu_pm_acpi_event_handler(struct amdgpu_device *adev) in amdgpu_pm_acpi_event_handler() argument
516 if (adev->pm.dpm_enabled) { in amdgpu_pm_acpi_event_handler()
517 mutex_lock(&adev->pm.mutex); in amdgpu_pm_acpi_event_handler()
519 adev->pm.ac_power = true; in amdgpu_pm_acpi_event_handler()
521 adev->pm.ac_power = false; in amdgpu_pm_acpi_event_handler()
523 if (adev->powerplay.pp_funcs && in amdgpu_pm_acpi_event_handler()
524 adev->powerplay.pp_funcs->enable_bapm) in amdgpu_pm_acpi_event_handler()
525 amdgpu_dpm_enable_bapm(adev, adev->pm.ac_power); in amdgpu_pm_acpi_event_handler()
527 if (is_support_sw_smu(adev)) in amdgpu_pm_acpi_event_handler()
528 smu_set_ac_dc(adev->powerplay.pp_handle); in amdgpu_pm_acpi_event_handler()
530 mutex_unlock(&adev->pm.mutex); in amdgpu_pm_acpi_event_handler()
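The ACPI AC/DC handler is the one hit group that touches both backends: it updates adev->pm.ac_power, pokes the legacy enable_bapm callback if present, and tells the software SMU about the new power source. Reassembled (the power_supply_is_system_supplied() test on the unlisted line is an assumption based on how ac_power is normally derived):

void amdgpu_pm_acpi_event_handler(struct amdgpu_device *adev)
{
        if (adev->pm.dpm_enabled) {
                mutex_lock(&adev->pm.mutex);
                if (power_supply_is_system_supplied() > 0)
                        adev->pm.ac_power = true;
                else
                        adev->pm.ac_power = false;

                if (adev->powerplay.pp_funcs &&
                    adev->powerplay.pp_funcs->enable_bapm)
                        amdgpu_dpm_enable_bapm(adev, adev->pm.ac_power);

                if (is_support_sw_smu(adev))
                        smu_set_ac_dc(adev->powerplay.pp_handle);

                mutex_unlock(&adev->pm.mutex);
        }
}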
534 int amdgpu_dpm_read_sensor(struct amdgpu_device *adev, enum amd_pp_sensors sensor, in amdgpu_dpm_read_sensor() argument
537 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_read_sensor()
544 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_read_sensor()
545 ret = pp_funcs->read_sensor(adev->powerplay.pp_handle, in amdgpu_dpm_read_sensor()
549 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_read_sensor()
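amdgpu_dpm_read_sensor() is the generic query point behind the hwmon and debugfs code. A minimal hypothetical caller reading the current graphics clock (the wrapper itself is made up; the sensor id and the divide-by-100 convention follow the usual in-tree usage):

static int example_query_sclk_mhz(struct amdgpu_device *adev, uint32_t *sclk_mhz)
{
        uint32_t value;
        int size = sizeof(value);
        int ret;

        ret = amdgpu_dpm_read_sensor(adev, AMDGPU_PP_SENSOR_GFX_SCLK,
                                     (void *)&value, &size);
        if (ret)
                return ret;

        *sclk_mhz = value / 100;   /* the sensor reports the clock in 10 kHz units */
        return 0;
}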
555 int amdgpu_dpm_get_apu_thermal_limit(struct amdgpu_device *adev, uint32_t *limit) in amdgpu_dpm_get_apu_thermal_limit() argument
557 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_apu_thermal_limit()
561 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_apu_thermal_limit()
562 ret = pp_funcs->get_apu_thermal_limit(adev->powerplay.pp_handle, limit); in amdgpu_dpm_get_apu_thermal_limit()
563 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_apu_thermal_limit()
569 int amdgpu_dpm_set_apu_thermal_limit(struct amdgpu_device *adev, uint32_t limit) in amdgpu_dpm_set_apu_thermal_limit() argument
571 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_apu_thermal_limit()
575 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_apu_thermal_limit()
576 ret = pp_funcs->set_apu_thermal_limit(adev->powerplay.pp_handle, limit); in amdgpu_dpm_set_apu_thermal_limit()
577 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_apu_thermal_limit()
583 void amdgpu_dpm_compute_clocks(struct amdgpu_device *adev) in amdgpu_dpm_compute_clocks() argument
585 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_compute_clocks()
588 if (!adev->pm.dpm_enabled) in amdgpu_dpm_compute_clocks()
594 if (adev->mode_info.num_crtc) in amdgpu_dpm_compute_clocks()
595 amdgpu_display_bandwidth_update(adev); in amdgpu_dpm_compute_clocks()
598 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_dpm_compute_clocks()
603 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_compute_clocks()
604 pp_funcs->pm_compute_clocks(adev->powerplay.pp_handle); in amdgpu_dpm_compute_clocks()
605 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_compute_clocks()
608 void amdgpu_dpm_enable_uvd(struct amdgpu_device *adev, bool enable) in amdgpu_dpm_enable_uvd() argument
612 if (adev->family == AMDGPU_FAMILY_SI) { in amdgpu_dpm_enable_uvd()
613 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_enable_uvd()
615 adev->pm.dpm.uvd_active = true; in amdgpu_dpm_enable_uvd()
616 adev->pm.dpm.state = POWER_STATE_TYPE_INTERNAL_UVD; in amdgpu_dpm_enable_uvd()
618 adev->pm.dpm.uvd_active = false; in amdgpu_dpm_enable_uvd()
620 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_enable_uvd()
622 amdgpu_dpm_compute_clocks(adev); in amdgpu_dpm_enable_uvd()
626 ret = amdgpu_dpm_set_powergating_by_smu(adev, AMD_IP_BLOCK_TYPE_UVD, !enable, 0); in amdgpu_dpm_enable_uvd()
632 void amdgpu_dpm_enable_vcn(struct amdgpu_device *adev, bool enable, int inst) in amdgpu_dpm_enable_vcn() argument
636 ret = amdgpu_dpm_set_powergating_by_smu(adev, AMD_IP_BLOCK_TYPE_VCN, !enable, inst); in amdgpu_dpm_enable_vcn()
642 void amdgpu_dpm_enable_vce(struct amdgpu_device *adev, bool enable) in amdgpu_dpm_enable_vce() argument
646 if (adev->family == AMDGPU_FAMILY_SI) { in amdgpu_dpm_enable_vce()
647 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_enable_vce()
649 adev->pm.dpm.vce_active = true; in amdgpu_dpm_enable_vce()
651 adev->pm.dpm.vce_level = AMD_VCE_LEVEL_AC_ALL; in amdgpu_dpm_enable_vce()
653 adev->pm.dpm.vce_active = false; in amdgpu_dpm_enable_vce()
655 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_enable_vce()
657 amdgpu_dpm_compute_clocks(adev); in amdgpu_dpm_enable_vce()
661 ret = amdgpu_dpm_set_powergating_by_smu(adev, AMD_IP_BLOCK_TYPE_VCE, !enable, 0); in amdgpu_dpm_enable_vce()
667 void amdgpu_dpm_enable_jpeg(struct amdgpu_device *adev, bool enable) in amdgpu_dpm_enable_jpeg() argument
671 ret = amdgpu_dpm_set_powergating_by_smu(adev, AMD_IP_BLOCK_TYPE_JPEG, !enable, 0); in amdgpu_dpm_enable_jpeg()
677 void amdgpu_dpm_enable_vpe(struct amdgpu_device *adev, bool enable) in amdgpu_dpm_enable_vpe() argument
681 ret = amdgpu_dpm_set_powergating_by_smu(adev, AMD_IP_BLOCK_TYPE_VPE, !enable, 0); in amdgpu_dpm_enable_vpe()
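Away from the SI-specific branches, the enable_{uvd,vce,vcn,jpeg,vpe}() helpers all reduce to the same call with the gate sense inverted (the block is gated when it is being disabled). For JPEG the whole function is essentially the single hit above plus error reporting (the DRM_ERROR text on the unlisted lines is an assumption):

void amdgpu_dpm_enable_jpeg(struct amdgpu_device *adev, bool enable)
{
        int ret = 0;

        ret = amdgpu_dpm_set_powergating_by_smu(adev, AMD_IP_BLOCK_TYPE_JPEG,
                                                !enable, 0);
        if (ret)
                DRM_ERROR("Dpm %s jpeg failed, ret = %d.\n",
                          enable ? "enable" : "disable", ret);
}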
687 int amdgpu_pm_load_smu_firmware(struct amdgpu_device *adev, uint32_t *smu_version) in amdgpu_pm_load_smu_firmware() argument
689 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_pm_load_smu_firmware()
693 (is_support_sw_smu(adev) && (adev->flags & AMD_IS_APU))) in amdgpu_pm_load_smu_firmware()
696 mutex_lock(&adev->pm.mutex); in amdgpu_pm_load_smu_firmware()
697 r = pp_funcs->load_firmware(adev->powerplay.pp_handle); in amdgpu_pm_load_smu_firmware()
704 *smu_version = adev->pm.fw_version; in amdgpu_pm_load_smu_firmware()
707 mutex_unlock(&adev->pm.mutex); in amdgpu_pm_load_smu_firmware()
711 int amdgpu_dpm_handle_passthrough_sbr(struct amdgpu_device *adev, bool enable) in amdgpu_dpm_handle_passthrough_sbr() argument
715 if (is_support_sw_smu(adev)) { in amdgpu_dpm_handle_passthrough_sbr()
716 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_handle_passthrough_sbr()
717 ret = smu_handle_passthrough_sbr(adev->powerplay.pp_handle, in amdgpu_dpm_handle_passthrough_sbr()
719 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_handle_passthrough_sbr()
725 int amdgpu_dpm_send_hbm_bad_pages_num(struct amdgpu_device *adev, uint32_t size) in amdgpu_dpm_send_hbm_bad_pages_num() argument
727 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_send_hbm_bad_pages_num()
730 if (!is_support_sw_smu(adev)) in amdgpu_dpm_send_hbm_bad_pages_num()
733 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_send_hbm_bad_pages_num()
735 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_send_hbm_bad_pages_num()
740 int amdgpu_dpm_send_hbm_bad_channel_flag(struct amdgpu_device *adev, uint32_t size) in amdgpu_dpm_send_hbm_bad_channel_flag() argument
742 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_send_hbm_bad_channel_flag()
745 if (!is_support_sw_smu(adev)) in amdgpu_dpm_send_hbm_bad_channel_flag()
748 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_send_hbm_bad_channel_flag()
750 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_send_hbm_bad_channel_flag()
755 int amdgpu_dpm_send_rma_reason(struct amdgpu_device *adev) in amdgpu_dpm_send_rma_reason() argument
757 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_send_rma_reason()
760 if (!is_support_sw_smu(adev)) in amdgpu_dpm_send_rma_reason()
763 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_send_rma_reason()
765 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_send_rma_reason()
767 if (adev->cper.enabled) in amdgpu_dpm_send_rma_reason()
768 if (amdgpu_cper_generate_bp_threshold_record(adev)) in amdgpu_dpm_send_rma_reason()
769 dev_warn(adev->dev, "fail to generate bad page threshold cper records\n"); in amdgpu_dpm_send_rma_reason()
782 bool amdgpu_dpm_reset_sdma_is_supported(struct amdgpu_device *adev) in amdgpu_dpm_reset_sdma_is_supported() argument
784 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_reset_sdma_is_supported()
787 if (!is_support_sw_smu(adev)) in amdgpu_dpm_reset_sdma_is_supported()
790 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_reset_sdma_is_supported()
792 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_reset_sdma_is_supported()
797 int amdgpu_dpm_reset_sdma(struct amdgpu_device *adev, uint32_t inst_mask) in amdgpu_dpm_reset_sdma() argument
799 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_reset_sdma()
802 if (!is_support_sw_smu(adev)) in amdgpu_dpm_reset_sdma()
805 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_reset_sdma()
807 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_reset_sdma()
812 int amdgpu_dpm_reset_vcn(struct amdgpu_device *adev, uint32_t inst_mask) in amdgpu_dpm_reset_vcn() argument
814 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_reset_vcn()
817 if (!is_support_sw_smu(adev)) in amdgpu_dpm_reset_vcn()
820 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_reset_vcn()
822 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_reset_vcn()
827 int amdgpu_dpm_get_dpm_freq_range(struct amdgpu_device *adev, in amdgpu_dpm_get_dpm_freq_range() argument
837 if (!is_support_sw_smu(adev)) in amdgpu_dpm_get_dpm_freq_range()
840 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_dpm_freq_range()
841 ret = smu_get_dpm_freq_range(adev->powerplay.pp_handle, in amdgpu_dpm_get_dpm_freq_range()
845 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_dpm_freq_range()
850 int amdgpu_dpm_set_soft_freq_range(struct amdgpu_device *adev, in amdgpu_dpm_set_soft_freq_range() argument
855 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_set_soft_freq_range()
857 if (!is_support_sw_smu(adev)) in amdgpu_dpm_set_soft_freq_range()
860 guard(mutex)(&adev->pm.mutex); in amdgpu_dpm_set_soft_freq_range()
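set_soft_freq_range() is the one hit in this file that uses the scope-based guard(mutex)() helper instead of an explicit lock/unlock pair, so adev->pm.mutex is released automatically when the function returns. Reassembled (the early-return value and the smu_set_soft_freq_range() call are on unlisted lines and are inferred):

int amdgpu_dpm_set_soft_freq_range(struct amdgpu_device *adev,
                                   enum pp_clock_type type,
                                   uint32_t min, uint32_t max)
{
        struct smu_context *smu = adev->powerplay.pp_handle;

        if (!is_support_sw_smu(adev))
                return -EOPNOTSUPP;        /* assumed early-return value */

        guard(mutex)(&adev->pm.mutex);     /* dropped when the scope ends */

        return smu_set_soft_freq_range(smu, type, min, max);
}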
868 int amdgpu_dpm_write_watermarks_table(struct amdgpu_device *adev) in amdgpu_dpm_write_watermarks_table() argument
870 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_write_watermarks_table()
873 if (!is_support_sw_smu(adev)) in amdgpu_dpm_write_watermarks_table()
876 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_write_watermarks_table()
878 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_write_watermarks_table()
883 int amdgpu_dpm_wait_for_event(struct amdgpu_device *adev, in amdgpu_dpm_wait_for_event() argument
887 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_wait_for_event()
890 if (!is_support_sw_smu(adev)) in amdgpu_dpm_wait_for_event()
893 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_wait_for_event()
895 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_wait_for_event()
900 int amdgpu_dpm_set_residency_gfxoff(struct amdgpu_device *adev, bool value) in amdgpu_dpm_set_residency_gfxoff() argument
902 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_set_residency_gfxoff()
905 if (!is_support_sw_smu(adev)) in amdgpu_dpm_set_residency_gfxoff()
908 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_residency_gfxoff()
910 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_residency_gfxoff()
915 int amdgpu_dpm_get_residency_gfxoff(struct amdgpu_device *adev, u32 *value) in amdgpu_dpm_get_residency_gfxoff() argument
917 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_get_residency_gfxoff()
920 if (!is_support_sw_smu(adev)) in amdgpu_dpm_get_residency_gfxoff()
923 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_residency_gfxoff()
925 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_residency_gfxoff()
930 int amdgpu_dpm_get_entrycount_gfxoff(struct amdgpu_device *adev, u64 *value) in amdgpu_dpm_get_entrycount_gfxoff() argument
932 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_get_entrycount_gfxoff()
935 if (!is_support_sw_smu(adev)) in amdgpu_dpm_get_entrycount_gfxoff()
938 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_entrycount_gfxoff()
940 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_entrycount_gfxoff()
945 int amdgpu_dpm_get_status_gfxoff(struct amdgpu_device *adev, uint32_t *value) in amdgpu_dpm_get_status_gfxoff() argument
947 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_get_status_gfxoff()
950 if (!is_support_sw_smu(adev)) in amdgpu_dpm_get_status_gfxoff()
953 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_status_gfxoff()
955 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_status_gfxoff()
960 uint64_t amdgpu_dpm_get_thermal_throttling_counter(struct amdgpu_device *adev) in amdgpu_dpm_get_thermal_throttling_counter() argument
962 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_get_thermal_throttling_counter()
964 if (!is_support_sw_smu(adev)) in amdgpu_dpm_get_thermal_throttling_counter()
975 void amdgpu_dpm_gfx_state_change(struct amdgpu_device *adev, in amdgpu_dpm_gfx_state_change() argument
978 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_gfx_state_change()
979 if (adev->powerplay.pp_funcs && in amdgpu_dpm_gfx_state_change()
980 adev->powerplay.pp_funcs->gfx_state_change_set) in amdgpu_dpm_gfx_state_change()
981 ((adev)->powerplay.pp_funcs->gfx_state_change_set( in amdgpu_dpm_gfx_state_change()
982 (adev)->powerplay.pp_handle, state)); in amdgpu_dpm_gfx_state_change()
983 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_gfx_state_change()
986 int amdgpu_dpm_get_ecc_info(struct amdgpu_device *adev, in amdgpu_dpm_get_ecc_info() argument
989 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_get_ecc_info()
992 if (!is_support_sw_smu(adev)) in amdgpu_dpm_get_ecc_info()
995 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_ecc_info()
997 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_ecc_info()
1002 struct amd_vce_state *amdgpu_dpm_get_vce_clock_state(struct amdgpu_device *adev, in amdgpu_dpm_get_vce_clock_state() argument
1005 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_vce_clock_state()
1011 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_vce_clock_state()
1012 vstate = pp_funcs->get_vce_clock_state(adev->powerplay.pp_handle, in amdgpu_dpm_get_vce_clock_state()
1014 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_vce_clock_state()
1019 void amdgpu_dpm_get_current_power_state(struct amdgpu_device *adev, in amdgpu_dpm_get_current_power_state() argument
1022 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_current_power_state()
1024 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_current_power_state()
1027 *state = adev->pm.dpm.user_state; in amdgpu_dpm_get_current_power_state()
1031 *state = pp_funcs->get_current_power_state(adev->powerplay.pp_handle); in amdgpu_dpm_get_current_power_state()
1034 *state = adev->pm.dpm.user_state; in amdgpu_dpm_get_current_power_state()
1037 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_current_power_state()
1040 void amdgpu_dpm_set_power_state(struct amdgpu_device *adev, in amdgpu_dpm_set_power_state() argument
1043 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_power_state()
1044 adev->pm.dpm.user_state = state; in amdgpu_dpm_set_power_state()
1045 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_power_state()
1047 if (is_support_sw_smu(adev)) in amdgpu_dpm_set_power_state()
1050 if (amdgpu_dpm_dispatch_task(adev, in amdgpu_dpm_set_power_state()
1053 amdgpu_dpm_compute_clocks(adev); in amdgpu_dpm_set_power_state()
1056 enum amd_dpm_forced_level amdgpu_dpm_get_performance_level(struct amdgpu_device *adev) in amdgpu_dpm_get_performance_level() argument
1058 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_performance_level()
1064 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_performance_level()
1066 level = pp_funcs->get_performance_level(adev->powerplay.pp_handle); in amdgpu_dpm_get_performance_level()
1068 level = adev->pm.dpm.forced_level; in amdgpu_dpm_get_performance_level()
1069 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_performance_level()
1074 static void amdgpu_dpm_enter_umd_state(struct amdgpu_device *adev) in amdgpu_dpm_enter_umd_state() argument
1077 amdgpu_device_ip_set_powergating_state(adev, AMD_IP_BLOCK_TYPE_GFX, in amdgpu_dpm_enter_umd_state()
1079 amdgpu_device_ip_set_clockgating_state(adev, AMD_IP_BLOCK_TYPE_GFX, in amdgpu_dpm_enter_umd_state()
1083 static void amdgpu_dpm_exit_umd_state(struct amdgpu_device *adev) in amdgpu_dpm_exit_umd_state() argument
1086 amdgpu_device_ip_set_clockgating_state(adev, AMD_IP_BLOCK_TYPE_GFX, in amdgpu_dpm_exit_umd_state()
1088 amdgpu_device_ip_set_powergating_state(adev, AMD_IP_BLOCK_TYPE_GFX, in amdgpu_dpm_exit_umd_state()
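The UMD-state helpers bracket forced performance levels: entering the state ungates GFX power gating and then clock gating, and leaving it re-gates in the reverse order. With the gate/ungate arguments filled in (the hits only show the two ip_set calls):

static void amdgpu_dpm_enter_umd_state(struct amdgpu_device *adev)
{
        /* ungate PG before CG on the way in */
        amdgpu_device_ip_set_powergating_state(adev, AMD_IP_BLOCK_TYPE_GFX,
                                               AMD_PG_STATE_UNGATE);
        amdgpu_device_ip_set_clockgating_state(adev, AMD_IP_BLOCK_TYPE_GFX,
                                               AMD_CG_STATE_UNGATE);
}

static void amdgpu_dpm_exit_umd_state(struct amdgpu_device *adev)
{
        /* re-gate in the reverse order on the way out */
        amdgpu_device_ip_set_clockgating_state(adev, AMD_IP_BLOCK_TYPE_GFX,
                                               AMD_CG_STATE_GATE);
        amdgpu_device_ip_set_powergating_state(adev, AMD_IP_BLOCK_TYPE_GFX,
                                               AMD_PG_STATE_GATE);
}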
1092 int amdgpu_dpm_force_performance_level(struct amdgpu_device *adev, in amdgpu_dpm_force_performance_level() argument
1095 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_force_performance_level()
1105 if (adev->pm.dpm.thermal_active) in amdgpu_dpm_force_performance_level()
1108 current_level = amdgpu_dpm_get_performance_level(adev); in amdgpu_dpm_force_performance_level()
1116 if (adev->asic_type == CHIP_RAVEN) { in amdgpu_dpm_force_performance_level()
1117 if (!(adev->apu_flags & AMD_APU_IS_RAVEN2)) { in amdgpu_dpm_force_performance_level()
1120 amdgpu_gfx_off_ctrl(adev, false); in amdgpu_dpm_force_performance_level()
1123 amdgpu_gfx_off_ctrl(adev, true); in amdgpu_dpm_force_performance_level()
1128 amdgpu_dpm_enter_umd_state(adev); in amdgpu_dpm_force_performance_level()
1131 amdgpu_dpm_exit_umd_state(adev); in amdgpu_dpm_force_performance_level()
1133 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_force_performance_level()
1135 if (pp_funcs->force_performance_level(adev->powerplay.pp_handle, in amdgpu_dpm_force_performance_level()
1137 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_force_performance_level()
1141 amdgpu_dpm_exit_umd_state(adev); in amdgpu_dpm_force_performance_level()
1144 amdgpu_dpm_enter_umd_state(adev); in amdgpu_dpm_force_performance_level()
1149 adev->pm.dpm.forced_level = level; in amdgpu_dpm_force_performance_level()
1151 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_force_performance_level()
1156 int amdgpu_dpm_get_pp_num_states(struct amdgpu_device *adev, in amdgpu_dpm_get_pp_num_states() argument
1159 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_pp_num_states()
1165 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_pp_num_states()
1166 ret = pp_funcs->get_pp_num_states(adev->powerplay.pp_handle, in amdgpu_dpm_get_pp_num_states()
1168 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_pp_num_states()
1173 int amdgpu_dpm_dispatch_task(struct amdgpu_device *adev, in amdgpu_dpm_dispatch_task() argument
1177 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_dispatch_task()
1183 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_dispatch_task()
1184 ret = pp_funcs->dispatch_tasks(adev->powerplay.pp_handle, in amdgpu_dpm_dispatch_task()
1187 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_dispatch_task()
1192 int amdgpu_dpm_get_pp_table(struct amdgpu_device *adev, char **table) in amdgpu_dpm_get_pp_table() argument
1194 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_pp_table()
1200 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_pp_table()
1201 ret = pp_funcs->get_pp_table(adev->powerplay.pp_handle, in amdgpu_dpm_get_pp_table()
1203 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_pp_table()
1208 int amdgpu_dpm_set_fine_grain_clk_vol(struct amdgpu_device *adev, in amdgpu_dpm_set_fine_grain_clk_vol() argument
1213 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_fine_grain_clk_vol()
1219 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_fine_grain_clk_vol()
1220 ret = pp_funcs->set_fine_grain_clk_vol(adev->powerplay.pp_handle, in amdgpu_dpm_set_fine_grain_clk_vol()
1224 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_fine_grain_clk_vol()
1229 int amdgpu_dpm_odn_edit_dpm_table(struct amdgpu_device *adev, in amdgpu_dpm_odn_edit_dpm_table() argument
1234 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_odn_edit_dpm_table()
1240 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_odn_edit_dpm_table()
1241 ret = pp_funcs->odn_edit_dpm_table(adev->powerplay.pp_handle, in amdgpu_dpm_odn_edit_dpm_table()
1245 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_odn_edit_dpm_table()
1250 int amdgpu_dpm_print_clock_levels(struct amdgpu_device *adev, in amdgpu_dpm_print_clock_levels() argument
1254 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_print_clock_levels()
1260 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_print_clock_levels()
1261 ret = pp_funcs->print_clock_levels(adev->powerplay.pp_handle, in amdgpu_dpm_print_clock_levels()
1264 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_print_clock_levels()
1269 int amdgpu_dpm_emit_clock_levels(struct amdgpu_device *adev, in amdgpu_dpm_emit_clock_levels() argument
1274 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_emit_clock_levels()
1280 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_emit_clock_levels()
1281 ret = pp_funcs->emit_clock_levels(adev->powerplay.pp_handle, in amdgpu_dpm_emit_clock_levels()
1285 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_emit_clock_levels()
1290 int amdgpu_dpm_set_ppfeature_status(struct amdgpu_device *adev, in amdgpu_dpm_set_ppfeature_status() argument
1293 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_ppfeature_status()
1299 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_ppfeature_status()
1300 ret = pp_funcs->set_ppfeature_status(adev->powerplay.pp_handle, in amdgpu_dpm_set_ppfeature_status()
1302 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_ppfeature_status()
1307 int amdgpu_dpm_get_ppfeature_status(struct amdgpu_device *adev, char *buf) in amdgpu_dpm_get_ppfeature_status() argument
1309 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_ppfeature_status()
1315 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_ppfeature_status()
1316 ret = pp_funcs->get_ppfeature_status(adev->powerplay.pp_handle, in amdgpu_dpm_get_ppfeature_status()
1318 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_ppfeature_status()
1323 int amdgpu_dpm_force_clock_level(struct amdgpu_device *adev, in amdgpu_dpm_force_clock_level() argument
1327 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_force_clock_level()
1333 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_force_clock_level()
1334 ret = pp_funcs->force_clock_level(adev->powerplay.pp_handle, in amdgpu_dpm_force_clock_level()
1337 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_force_clock_level()
1342 int amdgpu_dpm_get_sclk_od(struct amdgpu_device *adev) in amdgpu_dpm_get_sclk_od() argument
1344 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_sclk_od()
1350 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_sclk_od()
1351 ret = pp_funcs->get_sclk_od(adev->powerplay.pp_handle); in amdgpu_dpm_get_sclk_od()
1352 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_sclk_od()
1357 int amdgpu_dpm_set_sclk_od(struct amdgpu_device *adev, uint32_t value) in amdgpu_dpm_set_sclk_od() argument
1359 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_sclk_od()
1361 if (is_support_sw_smu(adev)) in amdgpu_dpm_set_sclk_od()
1364 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_sclk_od()
1366 pp_funcs->set_sclk_od(adev->powerplay.pp_handle, value); in amdgpu_dpm_set_sclk_od()
1367 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_sclk_od()
1369 if (amdgpu_dpm_dispatch_task(adev, in amdgpu_dpm_set_sclk_od()
1372 adev->pm.dpm.current_ps = adev->pm.dpm.boot_ps; in amdgpu_dpm_set_sclk_od()
1373 amdgpu_dpm_compute_clocks(adev); in amdgpu_dpm_set_sclk_od()
1379 int amdgpu_dpm_get_mclk_od(struct amdgpu_device *adev) in amdgpu_dpm_get_mclk_od() argument
1381 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_mclk_od()
1387 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_mclk_od()
1388 ret = pp_funcs->get_mclk_od(adev->powerplay.pp_handle); in amdgpu_dpm_get_mclk_od()
1389 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_mclk_od()
1394 int amdgpu_dpm_set_mclk_od(struct amdgpu_device *adev, uint32_t value) in amdgpu_dpm_set_mclk_od() argument
1396 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_mclk_od()
1398 if (is_support_sw_smu(adev)) in amdgpu_dpm_set_mclk_od()
1401 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_mclk_od()
1403 pp_funcs->set_mclk_od(adev->powerplay.pp_handle, value); in amdgpu_dpm_set_mclk_od()
1404 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_mclk_od()
1406 if (amdgpu_dpm_dispatch_task(adev, in amdgpu_dpm_set_mclk_od()
1409 adev->pm.dpm.current_ps = adev->pm.dpm.boot_ps; in amdgpu_dpm_set_mclk_od()
1410 amdgpu_dpm_compute_clocks(adev); in amdgpu_dpm_set_mclk_od()
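set_sclk_od() and set_mclk_od() share the fallback visible in hits 1369-1373 and 1406-1410: if dispatching the READJUST_POWER_STATE task is not supported (legacy dpm without a dispatch_tasks hook), the current power state is reset to the boot state and the clocks are recomputed by hand. A condensed sketch of that tail (the task enum and the -EOPNOTSUPP comparison are inferred from how dispatch_task is normally used):

        mutex_lock(&adev->pm.mutex);
        if (pp_funcs->set_sclk_od)
                pp_funcs->set_sclk_od(adev->powerplay.pp_handle, value);
        mutex_unlock(&adev->pm.mutex);

        if (amdgpu_dpm_dispatch_task(adev,
                                     AMD_PP_TASK_READJUST_POWER_STATE,
                                     NULL) == -EOPNOTSUPP) {
                adev->pm.dpm.current_ps = adev->pm.dpm.boot_ps;
                amdgpu_dpm_compute_clocks(adev);
        }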
1416 int amdgpu_dpm_get_power_profile_mode(struct amdgpu_device *adev, in amdgpu_dpm_get_power_profile_mode() argument
1419 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_power_profile_mode()
1425 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_power_profile_mode()
1426 ret = pp_funcs->get_power_profile_mode(adev->powerplay.pp_handle, in amdgpu_dpm_get_power_profile_mode()
1428 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_power_profile_mode()
1433 int amdgpu_dpm_set_power_profile_mode(struct amdgpu_device *adev, in amdgpu_dpm_set_power_profile_mode() argument
1436 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_power_profile_mode()
1442 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_power_profile_mode()
1443 ret = pp_funcs->set_power_profile_mode(adev->powerplay.pp_handle, in amdgpu_dpm_set_power_profile_mode()
1446 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_power_profile_mode()
1451 int amdgpu_dpm_get_gpu_metrics(struct amdgpu_device *adev, void **table) in amdgpu_dpm_get_gpu_metrics() argument
1453 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_gpu_metrics()
1459 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_gpu_metrics()
1460 ret = pp_funcs->get_gpu_metrics(adev->powerplay.pp_handle, in amdgpu_dpm_get_gpu_metrics()
1462 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_gpu_metrics()
1467 ssize_t amdgpu_dpm_get_pm_metrics(struct amdgpu_device *adev, void *pm_metrics, in amdgpu_dpm_get_pm_metrics() argument
1470 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_pm_metrics()
1476 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_pm_metrics()
1477 ret = pp_funcs->get_pm_metrics(adev->powerplay.pp_handle, pm_metrics, in amdgpu_dpm_get_pm_metrics()
1479 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_pm_metrics()
1484 int amdgpu_dpm_get_fan_control_mode(struct amdgpu_device *adev, in amdgpu_dpm_get_fan_control_mode() argument
1487 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_fan_control_mode()
1493 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_fan_control_mode()
1494 ret = pp_funcs->get_fan_control_mode(adev->powerplay.pp_handle, in amdgpu_dpm_get_fan_control_mode()
1496 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_fan_control_mode()
1501 int amdgpu_dpm_set_fan_speed_pwm(struct amdgpu_device *adev, in amdgpu_dpm_set_fan_speed_pwm() argument
1504 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_fan_speed_pwm()
1510 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_fan_speed_pwm()
1511 ret = pp_funcs->set_fan_speed_pwm(adev->powerplay.pp_handle, in amdgpu_dpm_set_fan_speed_pwm()
1513 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_fan_speed_pwm()
1518 int amdgpu_dpm_get_fan_speed_pwm(struct amdgpu_device *adev, in amdgpu_dpm_get_fan_speed_pwm() argument
1521 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_fan_speed_pwm()
1527 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_fan_speed_pwm()
1528 ret = pp_funcs->get_fan_speed_pwm(adev->powerplay.pp_handle, in amdgpu_dpm_get_fan_speed_pwm()
1530 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_fan_speed_pwm()
1535 int amdgpu_dpm_get_fan_speed_rpm(struct amdgpu_device *adev, in amdgpu_dpm_get_fan_speed_rpm() argument
1538 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_fan_speed_rpm()
1544 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_fan_speed_rpm()
1545 ret = pp_funcs->get_fan_speed_rpm(adev->powerplay.pp_handle, in amdgpu_dpm_get_fan_speed_rpm()
1547 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_fan_speed_rpm()
1552 int amdgpu_dpm_set_fan_speed_rpm(struct amdgpu_device *adev, in amdgpu_dpm_set_fan_speed_rpm() argument
1555 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_fan_speed_rpm()
1561 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_fan_speed_rpm()
1562 ret = pp_funcs->set_fan_speed_rpm(adev->powerplay.pp_handle, in amdgpu_dpm_set_fan_speed_rpm()
1564 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_fan_speed_rpm()
1569 int amdgpu_dpm_set_fan_control_mode(struct amdgpu_device *adev, in amdgpu_dpm_set_fan_control_mode() argument
1572 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_fan_control_mode()
1578 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_fan_control_mode()
1579 ret = pp_funcs->set_fan_control_mode(adev->powerplay.pp_handle, in amdgpu_dpm_set_fan_control_mode()
1581 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_fan_control_mode()
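The fan helpers mirror the hwmon interface: pick a control mode first (automatic vs. manual), then program a pwm duty cycle or an rpm target. A hypothetical manual-fan sequence (the wrapper is made up; AMD_FAN_CTRL_MANUAL and the 0-255 pwm scale are the conventions the hwmon code uses):

static int example_set_fan_half_speed(struct amdgpu_device *adev)
{
        int ret;

        /* switch the controller to manual mode before programming a duty cycle */
        ret = amdgpu_dpm_set_fan_control_mode(adev, AMD_FAN_CTRL_MANUAL);
        if (ret)
                return ret;

        /* pwm is on a 0-255 scale, so 128 is roughly 50% duty */
        return amdgpu_dpm_set_fan_speed_pwm(adev, 128);
}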
1586 int amdgpu_dpm_get_power_limit(struct amdgpu_device *adev, in amdgpu_dpm_get_power_limit() argument
1591 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_power_limit()
1597 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_power_limit()
1598 ret = pp_funcs->get_power_limit(adev->powerplay.pp_handle, in amdgpu_dpm_get_power_limit()
1602 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_power_limit()
1607 int amdgpu_dpm_set_power_limit(struct amdgpu_device *adev, in amdgpu_dpm_set_power_limit() argument
1610 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_power_limit()
1616 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_power_limit()
1617 ret = pp_funcs->set_power_limit(adev->powerplay.pp_handle, in amdgpu_dpm_set_power_limit()
1619 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_power_limit()
1624 int amdgpu_dpm_is_cclk_dpm_supported(struct amdgpu_device *adev) in amdgpu_dpm_is_cclk_dpm_supported() argument
1628 if (!is_support_sw_smu(adev)) in amdgpu_dpm_is_cclk_dpm_supported()
1631 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_is_cclk_dpm_supported()
1632 cclk_dpm_supported = is_support_cclk_dpm(adev); in amdgpu_dpm_is_cclk_dpm_supported()
1633 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_is_cclk_dpm_supported()
1638 int amdgpu_dpm_debugfs_print_current_performance_level(struct amdgpu_device *adev, in amdgpu_dpm_debugfs_print_current_performance_level() argument
1641 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_debugfs_print_current_performance_level()
1646 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_debugfs_print_current_performance_level()
1647 pp_funcs->debugfs_print_current_performance_level(adev->powerplay.pp_handle, in amdgpu_dpm_debugfs_print_current_performance_level()
1649 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_debugfs_print_current_performance_level()
1654 int amdgpu_dpm_get_smu_prv_buf_details(struct amdgpu_device *adev, in amdgpu_dpm_get_smu_prv_buf_details() argument
1658 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_smu_prv_buf_details()
1664 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_smu_prv_buf_details()
1665 ret = pp_funcs->get_smu_prv_buf_details(adev->powerplay.pp_handle, in amdgpu_dpm_get_smu_prv_buf_details()
1668 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_smu_prv_buf_details()
1673 int amdgpu_dpm_is_overdrive_supported(struct amdgpu_device *adev) in amdgpu_dpm_is_overdrive_supported() argument
1675 if (is_support_sw_smu(adev)) { in amdgpu_dpm_is_overdrive_supported()
1676 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_is_overdrive_supported()
1686 if (amdgpu_dpm_is_legacy_dpm(adev)) in amdgpu_dpm_is_overdrive_supported()
1689 hwmgr = (struct pp_hwmgr *)adev->powerplay.pp_handle; in amdgpu_dpm_is_overdrive_supported()
1695 int amdgpu_dpm_is_overdrive_enabled(struct amdgpu_device *adev) in amdgpu_dpm_is_overdrive_enabled() argument
1697 if (is_support_sw_smu(adev)) { in amdgpu_dpm_is_overdrive_enabled()
1698 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_is_overdrive_enabled()
1708 if (amdgpu_dpm_is_legacy_dpm(adev)) in amdgpu_dpm_is_overdrive_enabled()
1711 hwmgr = (struct pp_hwmgr *)adev->powerplay.pp_handle; in amdgpu_dpm_is_overdrive_enabled()
1717 int amdgpu_dpm_set_pp_table(struct amdgpu_device *adev, in amdgpu_dpm_set_pp_table() argument
1721 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_pp_table()
1727 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_pp_table()
1728 ret = pp_funcs->set_pp_table(adev->powerplay.pp_handle, in amdgpu_dpm_set_pp_table()
1731 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_pp_table()
1736 int amdgpu_dpm_get_num_cpu_cores(struct amdgpu_device *adev) in amdgpu_dpm_get_num_cpu_cores() argument
1738 struct smu_context *smu = adev->powerplay.pp_handle; in amdgpu_dpm_get_num_cpu_cores()
1740 if (!is_support_sw_smu(adev)) in amdgpu_dpm_get_num_cpu_cores()
1746 void amdgpu_dpm_stb_debug_fs_init(struct amdgpu_device *adev) in amdgpu_dpm_stb_debug_fs_init() argument
1748 if (!is_support_sw_smu(adev)) in amdgpu_dpm_stb_debug_fs_init()
1751 amdgpu_smu_stb_debug_fs_init(adev); in amdgpu_dpm_stb_debug_fs_init()
1754 int amdgpu_dpm_display_configuration_change(struct amdgpu_device *adev, in amdgpu_dpm_display_configuration_change() argument
1757 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_display_configuration_change()
1763 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_display_configuration_change()
1764 ret = pp_funcs->display_configuration_change(adev->powerplay.pp_handle, in amdgpu_dpm_display_configuration_change()
1766 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_display_configuration_change()
1771 int amdgpu_dpm_get_clock_by_type(struct amdgpu_device *adev, in amdgpu_dpm_get_clock_by_type() argument
1775 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_clock_by_type()
1781 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_clock_by_type()
1782 ret = pp_funcs->get_clock_by_type(adev->powerplay.pp_handle, in amdgpu_dpm_get_clock_by_type()
1785 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_clock_by_type()
1790 int amdgpu_dpm_get_display_mode_validation_clks(struct amdgpu_device *adev, in amdgpu_dpm_get_display_mode_validation_clks() argument
1793 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_display_mode_validation_clks()
1799 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_display_mode_validation_clks()
1800 ret = pp_funcs->get_display_mode_validation_clocks(adev->powerplay.pp_handle, in amdgpu_dpm_get_display_mode_validation_clks()
1802 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_display_mode_validation_clks()
1807 int amdgpu_dpm_get_clock_by_type_with_latency(struct amdgpu_device *adev, in amdgpu_dpm_get_clock_by_type_with_latency() argument
1811 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_clock_by_type_with_latency()
1817 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_clock_by_type_with_latency()
1818 ret = pp_funcs->get_clock_by_type_with_latency(adev->powerplay.pp_handle, in amdgpu_dpm_get_clock_by_type_with_latency()
1821 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_clock_by_type_with_latency()
1826 int amdgpu_dpm_get_clock_by_type_with_voltage(struct amdgpu_device *adev, in amdgpu_dpm_get_clock_by_type_with_voltage() argument
1830 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_clock_by_type_with_voltage()
1836 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_clock_by_type_with_voltage()
1837 ret = pp_funcs->get_clock_by_type_with_voltage(adev->powerplay.pp_handle, in amdgpu_dpm_get_clock_by_type_with_voltage()
1840 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_clock_by_type_with_voltage()
1845 int amdgpu_dpm_set_watermarks_for_clocks_ranges(struct amdgpu_device *adev, in amdgpu_dpm_set_watermarks_for_clocks_ranges() argument
1848 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_watermarks_for_clocks_ranges()
1854 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_watermarks_for_clocks_ranges()
1855 ret = pp_funcs->set_watermarks_for_clocks_ranges(adev->powerplay.pp_handle, in amdgpu_dpm_set_watermarks_for_clocks_ranges()
1857 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_watermarks_for_clocks_ranges()
1862 int amdgpu_dpm_display_clock_voltage_request(struct amdgpu_device *adev, in amdgpu_dpm_display_clock_voltage_request() argument
1865 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_display_clock_voltage_request()
1871 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_display_clock_voltage_request()
1872 ret = pp_funcs->display_clock_voltage_request(adev->powerplay.pp_handle, in amdgpu_dpm_display_clock_voltage_request()
1874 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_display_clock_voltage_request()
1879 int amdgpu_dpm_get_current_clocks(struct amdgpu_device *adev, in amdgpu_dpm_get_current_clocks() argument
1882 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_current_clocks()
1888 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_current_clocks()
1889 ret = pp_funcs->get_current_clocks(adev->powerplay.pp_handle, in amdgpu_dpm_get_current_clocks()
1891 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_current_clocks()
1896 void amdgpu_dpm_notify_smu_enable_pwe(struct amdgpu_device *adev) in amdgpu_dpm_notify_smu_enable_pwe() argument
1898 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_notify_smu_enable_pwe()
1903 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_notify_smu_enable_pwe()
1904 pp_funcs->notify_smu_enable_pwe(adev->powerplay.pp_handle); in amdgpu_dpm_notify_smu_enable_pwe()
1905 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_notify_smu_enable_pwe()
1908 int amdgpu_dpm_set_active_display_count(struct amdgpu_device *adev, in amdgpu_dpm_set_active_display_count() argument
1911 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_active_display_count()
1917 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_active_display_count()
1918 ret = pp_funcs->set_active_display_count(adev->powerplay.pp_handle, in amdgpu_dpm_set_active_display_count()
1920 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_active_display_count()
1925 int amdgpu_dpm_set_min_deep_sleep_dcefclk(struct amdgpu_device *adev, in amdgpu_dpm_set_min_deep_sleep_dcefclk() argument
1928 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_min_deep_sleep_dcefclk()
1934 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_min_deep_sleep_dcefclk()
1935 ret = pp_funcs->set_min_deep_sleep_dcefclk(adev->powerplay.pp_handle, in amdgpu_dpm_set_min_deep_sleep_dcefclk()
1937 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_min_deep_sleep_dcefclk()
1942 void amdgpu_dpm_set_hard_min_dcefclk_by_freq(struct amdgpu_device *adev, in amdgpu_dpm_set_hard_min_dcefclk_by_freq() argument
1945 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_hard_min_dcefclk_by_freq()
1950 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_hard_min_dcefclk_by_freq()
1951 pp_funcs->set_hard_min_dcefclk_by_freq(adev->powerplay.pp_handle, in amdgpu_dpm_set_hard_min_dcefclk_by_freq()
1953 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_hard_min_dcefclk_by_freq()
1956 void amdgpu_dpm_set_hard_min_fclk_by_freq(struct amdgpu_device *adev, in amdgpu_dpm_set_hard_min_fclk_by_freq() argument
1959 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_set_hard_min_fclk_by_freq()
1964 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_set_hard_min_fclk_by_freq()
1965 pp_funcs->set_hard_min_fclk_by_freq(adev->powerplay.pp_handle, in amdgpu_dpm_set_hard_min_fclk_by_freq()
1967 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_set_hard_min_fclk_by_freq()
1970 int amdgpu_dpm_display_disable_memory_clock_switch(struct amdgpu_device *adev, in amdgpu_dpm_display_disable_memory_clock_switch() argument
1973 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_display_disable_memory_clock_switch()
1979 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_display_disable_memory_clock_switch()
1980 ret = pp_funcs->display_disable_memory_clock_switch(adev->powerplay.pp_handle, in amdgpu_dpm_display_disable_memory_clock_switch()
1982 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_display_disable_memory_clock_switch()
1987 int amdgpu_dpm_get_max_sustainable_clocks_by_dc(struct amdgpu_device *adev, in amdgpu_dpm_get_max_sustainable_clocks_by_dc() argument
1990 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_max_sustainable_clocks_by_dc()
1996 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_max_sustainable_clocks_by_dc()
1997 ret = pp_funcs->get_max_sustainable_clocks_by_dc(adev->powerplay.pp_handle, in amdgpu_dpm_get_max_sustainable_clocks_by_dc()
1999 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_max_sustainable_clocks_by_dc()
2004 enum pp_smu_status amdgpu_dpm_get_uclk_dpm_states(struct amdgpu_device *adev, in amdgpu_dpm_get_uclk_dpm_states() argument
2008 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_uclk_dpm_states()
2014 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_uclk_dpm_states()
2015 ret = pp_funcs->get_uclk_dpm_states(adev->powerplay.pp_handle, in amdgpu_dpm_get_uclk_dpm_states()
2018 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_uclk_dpm_states()
2023 int amdgpu_dpm_get_dpm_clock_table(struct amdgpu_device *adev, in amdgpu_dpm_get_dpm_clock_table() argument
2026 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_dpm_clock_table()
2032 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_dpm_clock_table()
2033 ret = pp_funcs->get_dpm_clock_table(adev->powerplay.pp_handle, in amdgpu_dpm_get_dpm_clock_table()
2035 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_dpm_clock_table()
2055 ssize_t amdgpu_dpm_get_xcp_metrics(struct amdgpu_device *adev, int xcp_id, in amdgpu_dpm_get_xcp_metrics() argument
2058 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_xcp_metrics()
2064 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_xcp_metrics()
2065 ret = pp_funcs->get_xcp_metrics(adev->powerplay.pp_handle, xcp_id, in amdgpu_dpm_get_xcp_metrics()
2067 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_xcp_metrics()