Lines Matching refs:gfx

956 adev->gfx.kiq.pmf = &gfx_v9_0_kiq_pm4_funcs; in gfx_v9_0_set_kiq_pm4_funcs()
1025 adev->gfx.scratch.num_reg = 8; in gfx_v9_0_scratch_init()
1026 adev->gfx.scratch.reg_base = SOC15_REG_OFFSET(GC, 0, mmSCRATCH_REG0); in gfx_v9_0_scratch_init()
1027 adev->gfx.scratch.free_mask = (1u << adev->gfx.scratch.num_reg) - 1; in gfx_v9_0_scratch_init()
1160 release_firmware(adev->gfx.pfp_fw); in gfx_v9_0_free_microcode()
1161 adev->gfx.pfp_fw = NULL; in gfx_v9_0_free_microcode()
1162 release_firmware(adev->gfx.me_fw); in gfx_v9_0_free_microcode()
1163 adev->gfx.me_fw = NULL; in gfx_v9_0_free_microcode()
1164 release_firmware(adev->gfx.ce_fw); in gfx_v9_0_free_microcode()
1165 adev->gfx.ce_fw = NULL; in gfx_v9_0_free_microcode()
1166 release_firmware(adev->gfx.rlc_fw); in gfx_v9_0_free_microcode()
1167 adev->gfx.rlc_fw = NULL; in gfx_v9_0_free_microcode()
1168 release_firmware(adev->gfx.mec_fw); in gfx_v9_0_free_microcode()
1169 adev->gfx.mec_fw = NULL; in gfx_v9_0_free_microcode()
1170 release_firmware(adev->gfx.mec2_fw); in gfx_v9_0_free_microcode()
1171 adev->gfx.mec2_fw = NULL; in gfx_v9_0_free_microcode()
1173 kfree(adev->gfx.rlc.register_list_format); in gfx_v9_0_free_microcode()
1180 rlc_hdr = (const struct rlc_firmware_header_v2_1 *)adev->gfx.rlc_fw->data; in gfx_v9_0_init_rlc_ext_microcode()
1181 adev->gfx.rlc_srlc_fw_version = le32_to_cpu(rlc_hdr->save_restore_list_cntl_ucode_ver); in gfx_v9_0_init_rlc_ext_microcode()
1182 adev->gfx.rlc_srlc_feature_version = le32_to_cpu(rlc_hdr->save_restore_list_cntl_feature_ver); in gfx_v9_0_init_rlc_ext_microcode()
1183 …adev->gfx.rlc.save_restore_list_cntl_size_bytes = le32_to_cpu(rlc_hdr->save_restore_list_cntl_size… in gfx_v9_0_init_rlc_ext_microcode()
1184 …adev->gfx.rlc.save_restore_list_cntl = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->save_restore_list_cntl… in gfx_v9_0_init_rlc_ext_microcode()
1185 adev->gfx.rlc_srlg_fw_version = le32_to_cpu(rlc_hdr->save_restore_list_gpm_ucode_ver); in gfx_v9_0_init_rlc_ext_microcode()
1186 adev->gfx.rlc_srlg_feature_version = le32_to_cpu(rlc_hdr->save_restore_list_gpm_feature_ver); in gfx_v9_0_init_rlc_ext_microcode()
1187 …adev->gfx.rlc.save_restore_list_gpm_size_bytes = le32_to_cpu(rlc_hdr->save_restore_list_gpm_size_b… in gfx_v9_0_init_rlc_ext_microcode()
1188 …adev->gfx.rlc.save_restore_list_gpm = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->save_restore_list_gpm_o… in gfx_v9_0_init_rlc_ext_microcode()
1189 adev->gfx.rlc_srls_fw_version = le32_to_cpu(rlc_hdr->save_restore_list_srm_ucode_ver); in gfx_v9_0_init_rlc_ext_microcode()
1190 adev->gfx.rlc_srls_feature_version = le32_to_cpu(rlc_hdr->save_restore_list_srm_feature_ver); in gfx_v9_0_init_rlc_ext_microcode()
1191 …adev->gfx.rlc.save_restore_list_srm_size_bytes = le32_to_cpu(rlc_hdr->save_restore_list_srm_size_b… in gfx_v9_0_init_rlc_ext_microcode()
1192 …adev->gfx.rlc.save_restore_list_srm = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->save_restore_list_srm_o… in gfx_v9_0_init_rlc_ext_microcode()
1193 adev->gfx.rlc.reg_list_format_direct_reg_list_length = in gfx_v9_0_init_rlc_ext_microcode()
1199 adev->gfx.me_fw_write_wait = false; in gfx_v9_0_check_fw_write_wait()
1200 adev->gfx.mec_fw_write_wait = false; in gfx_v9_0_check_fw_write_wait()
1203 ((adev->gfx.mec_fw_version < 0x000001a5) || in gfx_v9_0_check_fw_write_wait()
1204 (adev->gfx.mec_feature_version < 46) || in gfx_v9_0_check_fw_write_wait()
1205 (adev->gfx.pfp_fw_version < 0x000000b7) || in gfx_v9_0_check_fw_write_wait()
1206 (adev->gfx.pfp_feature_version < 46))) in gfx_v9_0_check_fw_write_wait()
1211 if ((adev->gfx.me_fw_version >= 0x0000009c) && in gfx_v9_0_check_fw_write_wait()
1212 (adev->gfx.me_feature_version >= 42) && in gfx_v9_0_check_fw_write_wait()
1213 (adev->gfx.pfp_fw_version >= 0x000000b1) && in gfx_v9_0_check_fw_write_wait()
1214 (adev->gfx.pfp_feature_version >= 42)) in gfx_v9_0_check_fw_write_wait()
1215 adev->gfx.me_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1217 if ((adev->gfx.mec_fw_version >= 0x00000193) && in gfx_v9_0_check_fw_write_wait()
1218 (adev->gfx.mec_feature_version >= 42)) in gfx_v9_0_check_fw_write_wait()
1219 adev->gfx.mec_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1222 if ((adev->gfx.me_fw_version >= 0x0000009c) && in gfx_v9_0_check_fw_write_wait()
1223 (adev->gfx.me_feature_version >= 44) && in gfx_v9_0_check_fw_write_wait()
1224 (adev->gfx.pfp_fw_version >= 0x000000b2) && in gfx_v9_0_check_fw_write_wait()
1225 (adev->gfx.pfp_feature_version >= 44)) in gfx_v9_0_check_fw_write_wait()
1226 adev->gfx.me_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1228 if ((adev->gfx.mec_fw_version >= 0x00000196) && in gfx_v9_0_check_fw_write_wait()
1229 (adev->gfx.mec_feature_version >= 44)) in gfx_v9_0_check_fw_write_wait()
1230 adev->gfx.mec_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1233 if ((adev->gfx.me_fw_version >= 0x0000009c) && in gfx_v9_0_check_fw_write_wait()
1234 (adev->gfx.me_feature_version >= 44) && in gfx_v9_0_check_fw_write_wait()
1235 (adev->gfx.pfp_fw_version >= 0x000000b2) && in gfx_v9_0_check_fw_write_wait()
1236 (adev->gfx.pfp_feature_version >= 44)) in gfx_v9_0_check_fw_write_wait()
1237 adev->gfx.me_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1239 if ((adev->gfx.mec_fw_version >= 0x00000197) && in gfx_v9_0_check_fw_write_wait()
1240 (adev->gfx.mec_feature_version >= 44)) in gfx_v9_0_check_fw_write_wait()
1241 adev->gfx.mec_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1245 if ((adev->gfx.me_fw_version >= 0x0000009c) && in gfx_v9_0_check_fw_write_wait()
1246 (adev->gfx.me_feature_version >= 42) && in gfx_v9_0_check_fw_write_wait()
1247 (adev->gfx.pfp_fw_version >= 0x000000b1) && in gfx_v9_0_check_fw_write_wait()
1248 (adev->gfx.pfp_feature_version >= 42)) in gfx_v9_0_check_fw_write_wait()
1249 adev->gfx.me_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1251 if ((adev->gfx.mec_fw_version >= 0x00000192) && in gfx_v9_0_check_fw_write_wait()
1252 (adev->gfx.mec_feature_version >= 42)) in gfx_v9_0_check_fw_write_wait()
1253 adev->gfx.mec_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1256 adev->gfx.me_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1257 adev->gfx.mec_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
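Note: the gfx_v9_0_check_fw_write_wait() matches above all follow one gating pattern: the driver compares the loaded CP firmware's ucode and feature versions against per-ASIC thresholds before allowing the ME/MEC write-wait packet path. A condensed sketch of that pattern, reusing the threshold values from one of the branches above; the per-ASIC switch and the fallthrough case that force-enables both flags are omitted:

static void gfx_v9_0_check_fw_write_wait_sketch(struct amdgpu_device *adev)
{
        /* default to the safe, slower path */
        adev->gfx.me_fw_write_wait = false;
        adev->gfx.mec_fw_write_wait = false;

        if ((adev->gfx.me_fw_version >= 0x0000009c) &&
            (adev->gfx.me_feature_version >= 42) &&
            (adev->gfx.pfp_fw_version >= 0x000000b1) &&
            (adev->gfx.pfp_feature_version >= 42))
                adev->gfx.me_fw_write_wait = true;

        if ((adev->gfx.mec_fw_version >= 0x00000193) &&
            (adev->gfx.mec_feature_version >= 42))
                adev->gfx.mec_fw_write_wait = true;
}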
1308 (adev->gfx.me_fw_version >= 0x000000a5) && in check_if_enlarge_doorbell_range()
1309 (adev->gfx.me_feature_version >= 52)) in check_if_enlarge_doorbell_range()
1330 adev->gfx.rlc_fw_version < 531) || in gfx_v9_0_check_if_need_gfxoff()
1331 (adev->gfx.rlc_feature_version < 1) || in gfx_v9_0_check_if_need_gfxoff()
1332 !adev->gfx.rlc.is_rlc_v2_1)) in gfx_v9_0_check_if_need_gfxoff()
1361 err = request_firmware(&adev->gfx.pfp_fw, fw_name, adev->dev); in gfx_v9_0_init_cp_gfx_microcode()
1364 err = amdgpu_ucode_validate(adev->gfx.pfp_fw); in gfx_v9_0_init_cp_gfx_microcode()
1367 cp_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.pfp_fw->data; in gfx_v9_0_init_cp_gfx_microcode()
1368 adev->gfx.pfp_fw_version = le32_to_cpu(cp_hdr->header.ucode_version); in gfx_v9_0_init_cp_gfx_microcode()
1369 adev->gfx.pfp_feature_version = le32_to_cpu(cp_hdr->ucode_feature_version); in gfx_v9_0_init_cp_gfx_microcode()
1372 err = request_firmware(&adev->gfx.me_fw, fw_name, adev->dev); in gfx_v9_0_init_cp_gfx_microcode()
1375 err = amdgpu_ucode_validate(adev->gfx.me_fw); in gfx_v9_0_init_cp_gfx_microcode()
1378 cp_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.me_fw->data; in gfx_v9_0_init_cp_gfx_microcode()
1379 adev->gfx.me_fw_version = le32_to_cpu(cp_hdr->header.ucode_version); in gfx_v9_0_init_cp_gfx_microcode()
1380 adev->gfx.me_feature_version = le32_to_cpu(cp_hdr->ucode_feature_version); in gfx_v9_0_init_cp_gfx_microcode()
1383 err = request_firmware(&adev->gfx.ce_fw, fw_name, adev->dev); in gfx_v9_0_init_cp_gfx_microcode()
1386 err = amdgpu_ucode_validate(adev->gfx.ce_fw); in gfx_v9_0_init_cp_gfx_microcode()
1389 cp_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.ce_fw->data; in gfx_v9_0_init_cp_gfx_microcode()
1390 adev->gfx.ce_fw_version = le32_to_cpu(cp_hdr->header.ucode_version); in gfx_v9_0_init_cp_gfx_microcode()
1391 adev->gfx.ce_feature_version = le32_to_cpu(cp_hdr->ucode_feature_version); in gfx_v9_0_init_cp_gfx_microcode()
1396 info->fw = adev->gfx.pfp_fw; in gfx_v9_0_init_cp_gfx_microcode()
1403 info->fw = adev->gfx.me_fw; in gfx_v9_0_init_cp_gfx_microcode()
1410 info->fw = adev->gfx.ce_fw; in gfx_v9_0_init_cp_gfx_microcode()
1421 release_firmware(adev->gfx.pfp_fw); in gfx_v9_0_init_cp_gfx_microcode()
1422 adev->gfx.pfp_fw = NULL; in gfx_v9_0_init_cp_gfx_microcode()
1423 release_firmware(adev->gfx.me_fw); in gfx_v9_0_init_cp_gfx_microcode()
1424 adev->gfx.me_fw = NULL; in gfx_v9_0_init_cp_gfx_microcode()
1425 release_firmware(adev->gfx.ce_fw); in gfx_v9_0_init_cp_gfx_microcode()
1426 adev->gfx.ce_fw = NULL; in gfx_v9_0_init_cp_gfx_microcode()
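Note: the gfx_v9_0_init_cp_gfx_microcode() matches show the same request/validate/parse sequence applied in turn to the PFP, ME and CE images, plus a shared error path that releases all three. A minimal sketch of that sequence for the PFP image alone; the fw_name construction and the ucode-list bookkeeping visible above (info->fw = ...) are left out:

static int gfx_v9_0_init_pfp_fw_sketch(struct amdgpu_device *adev,
                                       const char *fw_name)
{
        const struct gfx_firmware_header_v1_0 *cp_hdr;
        int err;

        /* fetch the image via the firmware loader and sanity-check it */
        err = request_firmware(&adev->gfx.pfp_fw, fw_name, adev->dev);
        if (err)
                return err;
        err = amdgpu_ucode_validate(adev->gfx.pfp_fw);
        if (err) {
                release_firmware(adev->gfx.pfp_fw);
                adev->gfx.pfp_fw = NULL;
                return err;
        }

        /* cache ucode/feature versions for the later capability checks */
        cp_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.pfp_fw->data;
        adev->gfx.pfp_fw_version = le32_to_cpu(cp_hdr->header.ucode_version);
        adev->gfx.pfp_feature_version = le32_to_cpu(cp_hdr->ucode_feature_version);
        return 0;
}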
1465 err = request_firmware(&adev->gfx.rlc_fw, fw_name, adev->dev); in gfx_v9_0_init_rlc_microcode()
1468 err = amdgpu_ucode_validate(adev->gfx.rlc_fw); in gfx_v9_0_init_rlc_microcode()
1469 rlc_hdr = (const struct rlc_firmware_header_v2_0 *)adev->gfx.rlc_fw->data; in gfx_v9_0_init_rlc_microcode()
1474 adev->gfx.rlc.is_rlc_v2_1 = true; in gfx_v9_0_init_rlc_microcode()
1476 adev->gfx.rlc_fw_version = le32_to_cpu(rlc_hdr->header.ucode_version); in gfx_v9_0_init_rlc_microcode()
1477 adev->gfx.rlc_feature_version = le32_to_cpu(rlc_hdr->ucode_feature_version); in gfx_v9_0_init_rlc_microcode()
1478 adev->gfx.rlc.save_and_restore_offset = in gfx_v9_0_init_rlc_microcode()
1480 adev->gfx.rlc.clear_state_descriptor_offset = in gfx_v9_0_init_rlc_microcode()
1482 adev->gfx.rlc.avail_scratch_ram_locations = in gfx_v9_0_init_rlc_microcode()
1484 adev->gfx.rlc.reg_restore_list_size = in gfx_v9_0_init_rlc_microcode()
1486 adev->gfx.rlc.reg_list_format_start = in gfx_v9_0_init_rlc_microcode()
1488 adev->gfx.rlc.reg_list_format_separate_start = in gfx_v9_0_init_rlc_microcode()
1490 adev->gfx.rlc.starting_offsets_start = in gfx_v9_0_init_rlc_microcode()
1492 adev->gfx.rlc.reg_list_format_size_bytes = in gfx_v9_0_init_rlc_microcode()
1494 adev->gfx.rlc.reg_list_size_bytes = in gfx_v9_0_init_rlc_microcode()
1496 adev->gfx.rlc.register_list_format = in gfx_v9_0_init_rlc_microcode()
1497 kmalloc(adev->gfx.rlc.reg_list_format_size_bytes + in gfx_v9_0_init_rlc_microcode()
1498 adev->gfx.rlc.reg_list_size_bytes, GFP_KERNEL); in gfx_v9_0_init_rlc_microcode()
1499 if (!adev->gfx.rlc.register_list_format) { in gfx_v9_0_init_rlc_microcode()
1506 for (i = 0 ; i < (adev->gfx.rlc.reg_list_format_size_bytes >> 2); i++) in gfx_v9_0_init_rlc_microcode()
1507 adev->gfx.rlc.register_list_format[i] = le32_to_cpu(tmp[i]); in gfx_v9_0_init_rlc_microcode()
1509 adev->gfx.rlc.register_restore = adev->gfx.rlc.register_list_format + i; in gfx_v9_0_init_rlc_microcode()
1513 for (i = 0 ; i < (adev->gfx.rlc.reg_list_size_bytes >> 2); i++) in gfx_v9_0_init_rlc_microcode()
1514 adev->gfx.rlc.register_restore[i] = le32_to_cpu(tmp[i]); in gfx_v9_0_init_rlc_microcode()
1516 if (adev->gfx.rlc.is_rlc_v2_1) in gfx_v9_0_init_rlc_microcode()
1522 info->fw = adev->gfx.rlc_fw; in gfx_v9_0_init_rlc_microcode()
1527 if (adev->gfx.rlc.is_rlc_v2_1 && in gfx_v9_0_init_rlc_microcode()
1528 adev->gfx.rlc.save_restore_list_cntl_size_bytes && in gfx_v9_0_init_rlc_microcode()
1529 adev->gfx.rlc.save_restore_list_gpm_size_bytes && in gfx_v9_0_init_rlc_microcode()
1530 adev->gfx.rlc.save_restore_list_srm_size_bytes) { in gfx_v9_0_init_rlc_microcode()
1533 info->fw = adev->gfx.rlc_fw; in gfx_v9_0_init_rlc_microcode()
1535 ALIGN(adev->gfx.rlc.save_restore_list_cntl_size_bytes, PAGE_SIZE); in gfx_v9_0_init_rlc_microcode()
1539 info->fw = adev->gfx.rlc_fw; in gfx_v9_0_init_rlc_microcode()
1541 ALIGN(adev->gfx.rlc.save_restore_list_gpm_size_bytes, PAGE_SIZE); in gfx_v9_0_init_rlc_microcode()
1545 info->fw = adev->gfx.rlc_fw; in gfx_v9_0_init_rlc_microcode()
1547 ALIGN(adev->gfx.rlc.save_restore_list_srm_size_bytes, PAGE_SIZE); in gfx_v9_0_init_rlc_microcode()
1556 release_firmware(adev->gfx.rlc_fw); in gfx_v9_0_init_rlc_microcode()
1557 adev->gfx.rlc_fw = NULL; in gfx_v9_0_init_rlc_microcode()
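Note: much of gfx_v9_0_init_rlc_microcode() above copies the RLC register-list data out of the little-endian firmware image into one CPU-order buffer, with register_restore aliasing the tail of register_list_format. A trimmed sketch of just that copy; the two source pointers are passed in here as a simplification, whereas the real function derives them from offsets in the RLC header:

static int gfx_v9_0_copy_rlc_lists_sketch(struct amdgpu_device *adev,
                                          const __le32 *format_src,
                                          const __le32 *restore_src)
{
        u32 *dst;
        unsigned int i, count;

        /* one allocation backs both the list format and the restore list */
        dst = kmalloc(adev->gfx.rlc.reg_list_format_size_bytes +
                      adev->gfx.rlc.reg_list_size_bytes, GFP_KERNEL);
        if (!dst)
                return -ENOMEM;
        adev->gfx.rlc.register_list_format = dst;

        /* firmware data is little endian; convert while copying */
        count = adev->gfx.rlc.reg_list_format_size_bytes >> 2;
        for (i = 0; i < count; i++)
                dst[i] = le32_to_cpu(format_src[i]);

        adev->gfx.rlc.register_restore = dst + count;
        count = adev->gfx.rlc.reg_list_size_bytes >> 2;
        for (i = 0; i < count; i++)
                adev->gfx.rlc.register_restore[i] = le32_to_cpu(restore_src[i]);

        return 0;
}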
1582 err = request_firmware(&adev->gfx.mec_fw, fw_name, adev->dev); in gfx_v9_0_init_cp_compute_microcode()
1585 err = amdgpu_ucode_validate(adev->gfx.mec_fw); in gfx_v9_0_init_cp_compute_microcode()
1588 cp_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.mec_fw->data; in gfx_v9_0_init_cp_compute_microcode()
1589 adev->gfx.mec_fw_version = le32_to_cpu(cp_hdr->header.ucode_version); in gfx_v9_0_init_cp_compute_microcode()
1590 adev->gfx.mec_feature_version = le32_to_cpu(cp_hdr->ucode_feature_version); in gfx_v9_0_init_cp_compute_microcode()
1595 err = request_firmware(&adev->gfx.mec2_fw, fw_name, adev->dev); in gfx_v9_0_init_cp_compute_microcode()
1597 err = amdgpu_ucode_validate(adev->gfx.mec2_fw); in gfx_v9_0_init_cp_compute_microcode()
1601 adev->gfx.mec2_fw->data; in gfx_v9_0_init_cp_compute_microcode()
1602 adev->gfx.mec2_fw_version = in gfx_v9_0_init_cp_compute_microcode()
1604 adev->gfx.mec2_feature_version = in gfx_v9_0_init_cp_compute_microcode()
1608 adev->gfx.mec2_fw = NULL; in gfx_v9_0_init_cp_compute_microcode()
1611 adev->gfx.mec2_fw_version = adev->gfx.mec_fw_version; in gfx_v9_0_init_cp_compute_microcode()
1612 adev->gfx.mec2_feature_version = adev->gfx.mec_feature_version; in gfx_v9_0_init_cp_compute_microcode()
1618 info->fw = adev->gfx.mec_fw; in gfx_v9_0_init_cp_compute_microcode()
1626 info->fw = adev->gfx.mec_fw; in gfx_v9_0_init_cp_compute_microcode()
1630 if (adev->gfx.mec2_fw) { in gfx_v9_0_init_cp_compute_microcode()
1633 info->fw = adev->gfx.mec2_fw; in gfx_v9_0_init_cp_compute_microcode()
1644 info->fw = adev->gfx.mec2_fw; in gfx_v9_0_init_cp_compute_microcode()
1659 release_firmware(adev->gfx.mec_fw); in gfx_v9_0_init_cp_compute_microcode()
1660 adev->gfx.mec_fw = NULL; in gfx_v9_0_init_cp_compute_microcode()
1661 release_firmware(adev->gfx.mec2_fw); in gfx_v9_0_init_cp_compute_microcode()
1662 adev->gfx.mec2_fw = NULL; in gfx_v9_0_init_cp_compute_microcode()
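Note: gfx_v9_0_init_cp_compute_microcode() treats the MEC2 image as optional: if request_firmware() fails, mec2_fw is cleared and MEC1's version numbers are reused, as the lines above show. A compact sketch of that fallback; the mandatory MEC1 handling and the ucode-list entries are omitted:

static int gfx_v9_0_mec2_fallback_sketch(struct amdgpu_device *adev,
                                         const char *fw_name)
{
        const struct gfx_firmware_header_v1_0 *cp_hdr;
        int err;

        err = request_firmware(&adev->gfx.mec2_fw, fw_name, adev->dev);
        if (!err) {
                /* a present MEC2 image must validate and carries its own versions */
                err = amdgpu_ucode_validate(adev->gfx.mec2_fw);
                if (err)
                        return err;
                cp_hdr = (const struct gfx_firmware_header_v1_0 *)
                                adev->gfx.mec2_fw->data;
                adev->gfx.mec2_fw_version =
                        le32_to_cpu(cp_hdr->header.ucode_version);
                adev->gfx.mec2_feature_version =
                        le32_to_cpu(cp_hdr->ucode_feature_version);
        } else {
                /* a missing MEC2 image is tolerated: reuse MEC1's version info */
                adev->gfx.mec2_fw = NULL;
                adev->gfx.mec2_fw_version = adev->gfx.mec_fw_version;
                adev->gfx.mec2_feature_version = adev->gfx.mec_feature_version;
        }
        return 0;
}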
1710 if (adev->gfx.num_gfx_rings) { in gfx_v9_0_init_microcode()
1762 if (adev->gfx.rlc.cs_data == NULL) in gfx_v9_0_get_csb_buffer()
1774 for (sect = adev->gfx.rlc.cs_data; sect->section != NULL; ++sect) { in gfx_v9_0_get_csb_buffer()
1798 struct amdgpu_cu_info *cu_info = &adev->gfx.cu_info; in gfx_v9_0_init_always_on_cu_mask()
1812 for (i = 0; i < adev->gfx.config.max_shader_engines; i++) { in gfx_v9_0_init_always_on_cu_mask()
1813 for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) { in gfx_v9_0_init_always_on_cu_mask()
1819 for (k = 0; k < adev->gfx.config.max_cu_per_sh; k ++) { in gfx_v9_0_init_always_on_cu_mask()
1956 adev->gfx.rlc.cs_data = gfx9_cs_data; in gfx_v9_0_rlc_init()
1958 cs_data = adev->gfx.rlc.cs_data; in gfx_v9_0_rlc_init()
1969 adev->gfx.rlc.cp_table_size = ALIGN(96 * 5 * 4, 2048) + (64 * 1024); /* JT + GDS */ in gfx_v9_0_rlc_init()
1988 if (adev->gfx.rlc.funcs->update_spm_vmid) in gfx_v9_0_rlc_init()
1989 adev->gfx.rlc.funcs->update_spm_vmid(adev, 0xf); in gfx_v9_0_rlc_init()
1996 amdgpu_bo_free_kernel(&adev->gfx.mec.hpd_eop_obj, NULL, NULL); in gfx_v9_0_mec_fini()
1997 amdgpu_bo_free_kernel(&adev->gfx.mec.mec_fw_obj, NULL, NULL); in gfx_v9_0_mec_fini()
2011 bitmap_zero(adev->gfx.mec.queue_bitmap, AMDGPU_MAX_COMPUTE_QUEUES); in gfx_v9_0_mec_init()
2015 mec_hpd_size = adev->gfx.num_compute_rings * GFX9_MEC_HPD_SIZE; in gfx_v9_0_mec_init()
2019 &adev->gfx.mec.hpd_eop_obj, in gfx_v9_0_mec_init()
2020 &adev->gfx.mec.hpd_eop_gpu_addr, in gfx_v9_0_mec_init()
2030 amdgpu_bo_kunmap(adev->gfx.mec.hpd_eop_obj); in gfx_v9_0_mec_init()
2031 amdgpu_bo_unreserve(adev->gfx.mec.hpd_eop_obj); in gfx_v9_0_mec_init()
2034 mec_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.mec_fw->data; in gfx_v9_0_mec_init()
2037 (adev->gfx.mec_fw->data + in gfx_v9_0_mec_init()
2043 &adev->gfx.mec.mec_fw_obj, in gfx_v9_0_mec_init()
2044 &adev->gfx.mec.mec_fw_gpu_addr, in gfx_v9_0_mec_init()
2054 amdgpu_bo_kunmap(adev->gfx.mec.mec_fw_obj); in gfx_v9_0_mec_init()
2055 amdgpu_bo_unreserve(adev->gfx.mec.mec_fw_obj); in gfx_v9_0_mec_init()
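Note: gfx_v9_0_mec_init() allocates a kernel BO for the MEC HPD/EOP area, clears it through a temporary CPU mapping, then drops the mapping and the reservation while keeping the GPU address. A minimal sketch of that allocate/clear/unmap pattern; amdgpu_bo_create_reserved() and the GTT placement are assumptions here, since the matches above only show the destination pointers and the unmap calls:

static int gfx_v9_0_mec_hpd_alloc_sketch(struct amdgpu_device *adev)
{
        u32 mec_hpd_size = adev->gfx.num_compute_rings * GFX9_MEC_HPD_SIZE;
        void *hpd;
        int r;

        /* create, pin and CPU-map the EOP buffer in one call (placement assumed) */
        r = amdgpu_bo_create_reserved(adev, mec_hpd_size, PAGE_SIZE,
                                      AMDGPU_GEM_DOMAIN_GTT,
                                      &adev->gfx.mec.hpd_eop_obj,
                                      &adev->gfx.mec.hpd_eop_gpu_addr,
                                      &hpd);
        if (r)
                return r;

        memset(hpd, 0, mec_hpd_size);

        /* the GPU address stays valid; the CPU mapping is no longer needed */
        amdgpu_bo_kunmap(adev->gfx.mec.hpd_eop_obj);
        amdgpu_bo_unreserve(adev->gfx.mec.hpd_eop_obj);
        return 0;
}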
2153 adev->gfx.funcs = &gfx_v9_0_gfx_funcs; in gfx_v9_0_gpu_early_init()
2157 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
2158 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
2159 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
2160 adev->gfx.config.sc_hiz_tile_fifo_size = 0x30; in gfx_v9_0_gpu_early_init()
2161 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
2165 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
2166 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
2167 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
2168 adev->gfx.config.sc_hiz_tile_fifo_size = 0x30; in gfx_v9_0_gpu_early_init()
2169 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
2174 adev->gfx.ras_funcs = &gfx_v9_0_ras_funcs; in gfx_v9_0_gpu_early_init()
2175 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
2176 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
2177 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
2178 adev->gfx.config.sc_hiz_tile_fifo_size = 0x30; in gfx_v9_0_gpu_early_init()
2179 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
2190 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
2191 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
2192 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
2193 adev->gfx.config.sc_hiz_tile_fifo_size = 0x30; in gfx_v9_0_gpu_early_init()
2194 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
2201 adev->gfx.ras_funcs = &gfx_v9_4_ras_funcs; in gfx_v9_0_gpu_early_init()
2202 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
2203 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
2204 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
2205 adev->gfx.config.sc_hiz_tile_fifo_size = 0x30; in gfx_v9_0_gpu_early_init()
2206 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
2212 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
2213 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
2214 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
2215 adev->gfx.config.sc_hiz_tile_fifo_size = 0x80; in gfx_v9_0_gpu_early_init()
2216 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
2222 adev->gfx.ras_funcs = &gfx_v9_4_2_ras_funcs; in gfx_v9_0_gpu_early_init()
2223 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
2224 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
2225 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
2226 adev->gfx.config.sc_hiz_tile_fifo_size = 0x30; in gfx_v9_0_gpu_early_init()
2227 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
2241 adev->gfx.config.gb_addr_config = gb_addr_config; in gfx_v9_0_gpu_early_init()
2243 adev->gfx.config.gb_addr_config_fields.num_pipes = 1 << in gfx_v9_0_gpu_early_init()
2245 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
2249 adev->gfx.config.max_tile_pipes = in gfx_v9_0_gpu_early_init()
2250 adev->gfx.config.gb_addr_config_fields.num_pipes; in gfx_v9_0_gpu_early_init()
2252 adev->gfx.config.gb_addr_config_fields.num_banks = 1 << in gfx_v9_0_gpu_early_init()
2254 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
2257 adev->gfx.config.gb_addr_config_fields.max_compress_frags = 1 << in gfx_v9_0_gpu_early_init()
2259 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
2262 adev->gfx.config.gb_addr_config_fields.num_rb_per_se = 1 << in gfx_v9_0_gpu_early_init()
2264 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
2267 adev->gfx.config.gb_addr_config_fields.num_se = 1 << in gfx_v9_0_gpu_early_init()
2269 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
2272 adev->gfx.config.gb_addr_config_fields.pipe_interleave_size = 1 << (8 + in gfx_v9_0_gpu_early_init()
2274 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
2285 struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id]; in gfx_v9_0_compute_ring_init()
2288 ring = &adev->gfx.compute_ring[ring_id]; in gfx_v9_0_compute_ring_init()
2298 ring->eop_gpu_addr = adev->gfx.mec.hpd_eop_gpu_addr in gfx_v9_0_compute_ring_init()
2303 + ((ring->me - 1) * adev->gfx.mec.num_pipe_per_mec) in gfx_v9_0_compute_ring_init()
2308 return amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq, irq_type, in gfx_v9_0_compute_ring_init()
2328 adev->gfx.mec.num_mec = 2; in gfx_v9_0_sw_init()
2331 adev->gfx.mec.num_mec = 1; in gfx_v9_0_sw_init()
2335 adev->gfx.mec.num_pipe_per_mec = 4; in gfx_v9_0_sw_init()
2336 adev->gfx.mec.num_queue_per_pipe = 8; in gfx_v9_0_sw_init()
2339 …_irq_add_id(adev, SOC15_IH_CLIENTID_GRBM_CP, GFX_9_0__SRCID__CP_EOP_INTERRUPT, &adev->gfx.eop_irq); in gfx_v9_0_sw_init()
2345 &adev->gfx.priv_reg_irq); in gfx_v9_0_sw_init()
2351 &adev->gfx.priv_inst_irq); in gfx_v9_0_sw_init()
2357 &adev->gfx.cp_ecc_error_irq); in gfx_v9_0_sw_init()
2363 &adev->gfx.cp_ecc_error_irq); in gfx_v9_0_sw_init()
2367 adev->gfx.gfx_current_status = AMDGPU_GFX_NORMAL_MODE; in gfx_v9_0_sw_init()
2377 r = adev->gfx.rlc.funcs->init(adev); in gfx_v9_0_sw_init()
2390 for (i = 0; i < adev->gfx.num_gfx_rings; i++) { in gfx_v9_0_sw_init()
2391 ring = &adev->gfx.gfx_ring[i]; in gfx_v9_0_sw_init()
2399 r = amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq, in gfx_v9_0_sw_init()
2408 for (i = 0; i < adev->gfx.mec.num_mec; ++i) { in gfx_v9_0_sw_init()
2409 for (j = 0; j < adev->gfx.mec.num_queue_per_pipe; j++) { in gfx_v9_0_sw_init()
2410 for (k = 0; k < adev->gfx.mec.num_pipe_per_mec; k++) { in gfx_v9_0_sw_init()
2431 kiq = &adev->gfx.kiq; in gfx_v9_0_sw_init()
2441 adev->gfx.ce_ram_size = 0x8000; in gfx_v9_0_sw_init()
2456 if (adev->gfx.ras_funcs && in gfx_v9_0_sw_fini()
2457 adev->gfx.ras_funcs->ras_fini) in gfx_v9_0_sw_fini()
2458 adev->gfx.ras_funcs->ras_fini(adev); in gfx_v9_0_sw_fini()
2460 for (i = 0; i < adev->gfx.num_gfx_rings; i++) in gfx_v9_0_sw_fini()
2461 amdgpu_ring_fini(&adev->gfx.gfx_ring[i]); in gfx_v9_0_sw_fini()
2462 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v9_0_sw_fini()
2463 amdgpu_ring_fini(&adev->gfx.compute_ring[i]); in gfx_v9_0_sw_fini()
2466 amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq.ring); in gfx_v9_0_sw_fini()
2470 amdgpu_bo_free_kernel(&adev->gfx.rlc.clear_state_obj, in gfx_v9_0_sw_fini()
2471 &adev->gfx.rlc.clear_state_gpu_addr, in gfx_v9_0_sw_fini()
2472 (void **)&adev->gfx.rlc.cs_ptr); in gfx_v9_0_sw_fini()
2474 amdgpu_bo_free_kernel(&adev->gfx.rlc.cp_table_obj, in gfx_v9_0_sw_fini()
2475 &adev->gfx.rlc.cp_table_gpu_addr, in gfx_v9_0_sw_fini()
2476 (void **)&adev->gfx.rlc.cp_table_ptr); in gfx_v9_0_sw_fini()
2522 mask = amdgpu_gfx_create_bitmask(adev->gfx.config.max_backends_per_se / in gfx_v9_0_get_rb_active_bitmap()
2523 adev->gfx.config.max_sh_per_se); in gfx_v9_0_get_rb_active_bitmap()
2533 u32 rb_bitmap_width_per_sh = adev->gfx.config.max_backends_per_se / in gfx_v9_0_setup_rb()
2534 adev->gfx.config.max_sh_per_se; in gfx_v9_0_setup_rb()
2537 for (i = 0; i < adev->gfx.config.max_shader_engines; i++) { in gfx_v9_0_setup_rb()
2538 for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) { in gfx_v9_0_setup_rb()
2541 active_rbs |= data << ((i * adev->gfx.config.max_sh_per_se + j) * in gfx_v9_0_setup_rb()
2548 adev->gfx.config.backend_enable_mask = active_rbs; in gfx_v9_0_setup_rb()
2549 adev->gfx.config.num_rbs = hweight32(active_rbs); in gfx_v9_0_setup_rb()
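Note: gfx_v9_0_setup_rb() walks every shader engine and shader array, reads each SH's render-backend bitmap, and packs the results into one device-wide mask. A sketch of that accumulation, with the GRBM SE/SH selection and register read hidden behind a hypothetical read_rb_bitmap() callback:

static void gfx_v9_0_setup_rb_sketch(struct amdgpu_device *adev,
                                     u32 (*read_rb_bitmap)(struct amdgpu_device *,
                                                           u32 se, u32 sh))
{
        u32 rb_bitmap_width_per_sh = adev->gfx.config.max_backends_per_se /
                                     adev->gfx.config.max_sh_per_se;
        u32 active_rbs = 0;
        int i, j;

        for (i = 0; i < adev->gfx.config.max_shader_engines; i++)
                for (j = 0; j < adev->gfx.config.max_sh_per_se; j++)
                        /* shift each SH's bitmap into its slot of the global mask */
                        active_rbs |= read_rb_bitmap(adev, i, j) <<
                                ((i * adev->gfx.config.max_sh_per_se + j) *
                                 rb_bitmap_width_per_sh);

        adev->gfx.config.backend_enable_mask = active_rbs;
        adev->gfx.config.num_rbs = hweight32(active_rbs);
}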
2635 gfx_v9_0_get_cu_info(adev, &adev->gfx.cu_info); in gfx_v9_0_constants_init()
2636 adev->gfx.config.db_debug2 = RREG32_SOC15(GC, 0, mmDB_DEBUG2); in gfx_v9_0_constants_init()
2679 for (i = 0; i < adev->gfx.config.max_shader_engines; i++) { in gfx_v9_0_wait_for_rlc_serdes()
2680 for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) { in gfx_v9_0_wait_for_rlc_serdes()
2723 if(adev->gfx.num_gfx_rings) in gfx_v9_0_enable_gui_idle_interrupt()
2731 adev->gfx.rlc.funcs->get_csb_buffer(adev, adev->gfx.rlc.cs_ptr); in gfx_v9_0_init_csb()
2734 adev->gfx.rlc.clear_state_gpu_addr >> 32); in gfx_v9_0_init_csb()
2736 adev->gfx.rlc.clear_state_gpu_addr & 0xfffffffc); in gfx_v9_0_init_csb()
2738 adev->gfx.rlc.clear_state_size); in gfx_v9_0_init_csb()
2791 kmemdup(adev->gfx.rlc.register_list_format, in gfx_v9_1_init_rlc_save_restore_list()
2792 adev->gfx.rlc.reg_list_format_size_bytes, GFP_KERNEL); in gfx_v9_1_init_rlc_save_restore_list()
2799 adev->gfx.rlc.reg_list_format_direct_reg_list_length, in gfx_v9_1_init_rlc_save_restore_list()
2800 adev->gfx.rlc.reg_list_format_size_bytes >> 2, in gfx_v9_1_init_rlc_save_restore_list()
2815 for (i = 0; i < adev->gfx.rlc.reg_list_size_bytes >> 2; i++) in gfx_v9_1_init_rlc_save_restore_list()
2817 adev->gfx.rlc.register_restore[i]); in gfx_v9_1_init_rlc_save_restore_list()
2821 adev->gfx.rlc.reg_list_format_start); in gfx_v9_1_init_rlc_save_restore_list()
2824 for (i = 0; i < adev->gfx.rlc.reg_list_format_direct_reg_list_length; i++) in gfx_v9_1_init_rlc_save_restore_list()
2829 while (i < (adev->gfx.rlc.reg_list_format_size_bytes >> 2)) { in gfx_v9_1_init_rlc_save_restore_list()
2851 list_size = adev->gfx.rlc.reg_list_size_bytes >> 2; in gfx_v9_1_init_rlc_save_restore_list()
2854 adev->gfx.rlc.reg_restore_list_size); in gfx_v9_1_init_rlc_save_restore_list()
2859 adev->gfx.rlc.starting_offsets_start); in gfx_v9_1_init_rlc_save_restore_list()
3060 if (adev->gfx.rlc.is_rlc_v2_1) { in gfx_v9_0_init_pg()
3074 adev->gfx.rlc.cp_table_gpu_addr >> 8); in gfx_v9_0_init_pg()
3114 rlc_ucode_ver, adev->gfx.rlc_fw_version); in gfx_v9_0_rlc_start()
3132 if (!adev->gfx.rlc_fw) in gfx_v9_0_rlc_load_microcode()
3135 hdr = (const struct rlc_firmware_header_v2_0 *)adev->gfx.rlc_fw->data; in gfx_v9_0_rlc_load_microcode()
3138 fw_data = (const __le32 *)(adev->gfx.rlc_fw->data + in gfx_v9_0_rlc_load_microcode()
3146 WREG32_SOC15(GC, 0, mmRLC_GPM_UCODE_ADDR, adev->gfx.rlc_fw_version); in gfx_v9_0_rlc_load_microcode()
3160 adev->gfx.rlc.funcs->stop(adev); in gfx_v9_0_rlc_resume()
3192 adev->gfx.rlc.funcs->start(adev); in gfx_v9_0_rlc_resume()
3216 if (!adev->gfx.me_fw || !adev->gfx.pfp_fw || !adev->gfx.ce_fw) in gfx_v9_0_cp_gfx_load_microcode()
3220 adev->gfx.pfp_fw->data; in gfx_v9_0_cp_gfx_load_microcode()
3222 adev->gfx.ce_fw->data; in gfx_v9_0_cp_gfx_load_microcode()
3224 adev->gfx.me_fw->data; in gfx_v9_0_cp_gfx_load_microcode()
3234 (adev->gfx.pfp_fw->data + in gfx_v9_0_cp_gfx_load_microcode()
3240 WREG32_SOC15(GC, 0, mmCP_PFP_UCODE_ADDR, adev->gfx.pfp_fw_version); in gfx_v9_0_cp_gfx_load_microcode()
3244 (adev->gfx.ce_fw->data + in gfx_v9_0_cp_gfx_load_microcode()
3250 WREG32_SOC15(GC, 0, mmCP_CE_UCODE_ADDR, adev->gfx.ce_fw_version); in gfx_v9_0_cp_gfx_load_microcode()
3254 (adev->gfx.me_fw->data + in gfx_v9_0_cp_gfx_load_microcode()
3260 WREG32_SOC15(GC, 0, mmCP_ME_RAM_WADDR, adev->gfx.me_fw_version); in gfx_v9_0_cp_gfx_load_microcode()
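Note: gfx_v9_0_cp_gfx_load_microcode() streams each CP image into its ucode RAM through an ADDR/DATA register pair and finishes by writing the firmware version, as the mmCP_*_UCODE_ADDR writes above suggest. A sketch for the PFP image; mmCP_PFP_UCODE_DATA and the header offset fields are assumptions taken from the common firmware header layout, not from the matches themselves:

static void gfx_v9_0_load_pfp_ucode_sketch(struct amdgpu_device *adev)
{
        const struct gfx_firmware_header_v1_0 *pfp_hdr;
        const __le32 *fw_data;
        unsigned int i, fw_size;

        pfp_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.pfp_fw->data;
        fw_data = (const __le32 *)(adev->gfx.pfp_fw->data +
                   le32_to_cpu(pfp_hdr->header.ucode_array_offset_bytes));
        fw_size = le32_to_cpu(pfp_hdr->header.ucode_size_bytes) / 4;

        /* reset the write pointer, stream the dwords, then record the version */
        WREG32_SOC15(GC, 0, mmCP_PFP_UCODE_ADDR, 0);
        for (i = 0; i < fw_size; i++)
                WREG32_SOC15(GC, 0, mmCP_PFP_UCODE_DATA, le32_to_cpup(fw_data++));
        WREG32_SOC15(GC, 0, mmCP_PFP_UCODE_ADDR, adev->gfx.pfp_fw_version);
}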
3267 struct amdgpu_ring *ring = &adev->gfx.gfx_ring[0]; in gfx_v9_0_cp_gfx_start()
3273 WREG32_SOC15(GC, 0, mmCP_MAX_CONTEXT, adev->gfx.config.max_hw_contexts - 1); in gfx_v9_0_cp_gfx_start()
3341 ring = &adev->gfx.gfx_ring[0]; in gfx_v9_0_cp_gfx_resume()
3404 adev->gfx.kiq.ring.sched.ready = false; in gfx_v9_0_cp_compute_enable()
3416 if (!adev->gfx.mec_fw) in gfx_v9_0_cp_compute_load_microcode()
3421 mec_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.mec_fw->data; in gfx_v9_0_cp_compute_load_microcode()
3425 (adev->gfx.mec_fw->data + in gfx_v9_0_cp_compute_load_microcode()
3433 adev->gfx.mec.mec_fw_gpu_addr & 0xFFFFF000); in gfx_v9_0_cp_compute_load_microcode()
3435 upper_32_bits(adev->gfx.mec.mec_fw_gpu_addr)); in gfx_v9_0_cp_compute_load_microcode()
3445 adev->gfx.mec_fw_version); in gfx_v9_0_cp_compute_load_microcode()
3795 tmp_mqd = (struct v9_mqd *)adev->gfx.mec.mqd_backup[mqd_idx]; in gfx_v9_0_kiq_init_queue()
3798 if (adev->gfx.mec.mqd_backup[mqd_idx]) in gfx_v9_0_kiq_init_queue()
3799 memcpy(mqd, adev->gfx.mec.mqd_backup[mqd_idx], sizeof(struct v9_mqd_allocation)); in gfx_v9_0_kiq_init_queue()
3821 if (adev->gfx.mec.mqd_backup[mqd_idx]) in gfx_v9_0_kiq_init_queue()
3822 memcpy(adev->gfx.mec.mqd_backup[mqd_idx], mqd, sizeof(struct v9_mqd_allocation)); in gfx_v9_0_kiq_init_queue()
3832 int mqd_idx = ring - &adev->gfx.compute_ring[0]; in gfx_v9_0_kcq_init_queue()
3838 tmp_mqd = (struct v9_mqd *)adev->gfx.mec.mqd_backup[mqd_idx]; in gfx_v9_0_kcq_init_queue()
3851 if (adev->gfx.mec.mqd_backup[mqd_idx]) in gfx_v9_0_kcq_init_queue()
3852 memcpy(adev->gfx.mec.mqd_backup[mqd_idx], mqd, sizeof(struct v9_mqd_allocation)); in gfx_v9_0_kcq_init_queue()
3855 if (adev->gfx.mec.mqd_backup[mqd_idx]) in gfx_v9_0_kcq_init_queue()
3856 memcpy(mqd, adev->gfx.mec.mqd_backup[mqd_idx], sizeof(struct v9_mqd_allocation)); in gfx_v9_0_kcq_init_queue()
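Note: the gfx_v9_0_kiq_init_queue()/gfx_v9_0_kcq_init_queue() matches show both directions of the MQD backup: on first init the freshly programmed MQD is copied into adev->gfx.mec.mqd_backup[], and after a GPU reset the saved copy is copied back over the live MQD. A sketch of that decision for a compute ring, with the actual MQD programming behind a hypothetical program_mqd() callback and the reset test reduced to a boolean parameter:

static void gfx_v9_0_kcq_mqd_sketch(struct amdgpu_device *adev,
                                    struct amdgpu_ring *ring,
                                    bool in_gpu_reset,
                                    void (*program_mqd)(struct amdgpu_ring *))
{
        struct v9_mqd *mqd = ring->mqd_ptr;
        int mqd_idx = ring - &adev->gfx.compute_ring[0];

        if (!in_gpu_reset) {
                /* first bring-up: program the MQD, then stash a pristine copy */
                program_mqd(ring);
                if (adev->gfx.mec.mqd_backup[mqd_idx])
                        memcpy(adev->gfx.mec.mqd_backup[mqd_idx], mqd,
                               sizeof(struct v9_mqd_allocation));
        } else {
                /* after reset: restore the stashed copy instead of reprogramming */
                if (adev->gfx.mec.mqd_backup[mqd_idx])
                        memcpy(mqd, adev->gfx.mec.mqd_backup[mqd_idx],
                               sizeof(struct v9_mqd_allocation));
        }
}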
3874 ring = &adev->gfx.kiq.ring; in gfx_v9_0_kiq_resume()
3899 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_kcq_resume()
3900 ring = &adev->gfx.compute_ring[i]; in gfx_v9_0_kcq_resume()
3930 if (adev->gfx.num_gfx_rings) { in gfx_v9_0_cp_resume()
3946 if (adev->gfx.num_gfx_rings) { in gfx_v9_0_cp_resume()
3956 if (adev->gfx.num_gfx_rings) { in gfx_v9_0_cp_resume()
3957 ring = &adev->gfx.gfx_ring[0]; in gfx_v9_0_cp_resume()
3963 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_cp_resume()
3964 ring = &adev->gfx.compute_ring[i]; in gfx_v9_0_cp_resume()
3993 if (adev->gfx.num_gfx_rings) in gfx_v9_0_cp_enable()
4010 r = adev->gfx.rlc.funcs->resume(adev); in gfx_v9_0_hw_init()
4028 amdgpu_irq_put(adev, &adev->gfx.cp_ecc_error_irq, 0); in gfx_v9_0_hw_fini()
4029 amdgpu_irq_put(adev, &adev->gfx.priv_reg_irq, 0); in gfx_v9_0_hw_fini()
4030 amdgpu_irq_put(adev, &adev->gfx.priv_inst_irq, 0); in gfx_v9_0_hw_fini()
4053 soc15_grbm_select(adev, adev->gfx.kiq.ring.me, in gfx_v9_0_hw_fini()
4054 adev->gfx.kiq.ring.pipe, in gfx_v9_0_hw_fini()
4055 adev->gfx.kiq.ring.queue, 0); in gfx_v9_0_hw_fini()
4056 gfx_v9_0_kiq_fini_register(&adev->gfx.kiq.ring); in gfx_v9_0_hw_fini()
4070 adev->gfx.rlc.funcs->stop(adev); in gfx_v9_0_hw_fini()
4142 adev->gfx.rlc.funcs->stop(adev); in gfx_v9_0_soft_reset()
4144 if (adev->gfx.num_gfx_rings) in gfx_v9_0_soft_reset()
4177 struct amdgpu_kiq *kiq = &adev->gfx.kiq; in gfx_v9_0_kiq_read_clock()
4267 mutex_lock(&adev->gfx.gpu_clock_mutex); in gfx_v9_0_get_gpu_clock_counter()
4275 mutex_unlock(&adev->gfx.gpu_clock_mutex); in gfx_v9_0_get_gpu_clock_counter()
4549 struct amdgpu_ring *ring = &adev->gfx.compute_ring[0]; in gfx_v9_0_do_edc_gds_workarounds()
4596 struct amdgpu_ring *ring = &adev->gfx.compute_ring[0]; in gfx_v9_0_do_edc_gpr_workarounds()
4603 int compute_dim_x = adev->gfx.config.max_shader_engines * in gfx_v9_0_do_edc_gpr_workarounds()
4604 adev->gfx.config.max_cu_per_sh * in gfx_v9_0_do_edc_gpr_workarounds()
4605 adev->gfx.config.max_sh_per_se; in gfx_v9_0_do_edc_gpr_workarounds()
4607 int gpr_reg_size = adev->gfx.config.max_shader_engines + 6; in gfx_v9_0_do_edc_gpr_workarounds()
4772 adev->gfx.num_gfx_rings = 0; in gfx_v9_0_early_init()
4774 adev->gfx.num_gfx_rings = GFX9_NUM_GFX_RINGS; in gfx_v9_0_early_init()
4775 adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev), in gfx_v9_0_early_init()
4813 if (adev->gfx.ras_funcs && in gfx_v9_0_ecc_late_init()
4814 adev->gfx.ras_funcs->ras_late_init) { in gfx_v9_0_ecc_late_init()
4815 r = adev->gfx.ras_funcs->ras_late_init(adev); in gfx_v9_0_ecc_late_init()
4820 if (adev->gfx.ras_funcs && in gfx_v9_0_ecc_late_init()
4821 adev->gfx.ras_funcs->enable_watchdog_timer) in gfx_v9_0_ecc_late_init()
4822 adev->gfx.ras_funcs->enable_watchdog_timer(adev); in gfx_v9_0_ecc_late_init()
4832 r = amdgpu_irq_get(adev, &adev->gfx.priv_reg_irq, 0); in gfx_v9_0_late_init()
4836 r = amdgpu_irq_get(adev, &adev->gfx.priv_inst_irq, 0); in gfx_v9_0_late_init()
5001 if (!adev->gfx.num_gfx_rings) in gfx_v9_0_update_3d_clock_gating()
5725 adev->gfx.me_fw_write_wait : adev->gfx.mec_fw_write_wait; in gfx_v9_0_ring_emit_reg_write_reg_wait()
5946 amdgpu_fence_process(&adev->gfx.gfx_ring[0]); in gfx_v9_0_eop_irq()
5950 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_eop_irq()
5951 ring = &adev->gfx.compute_ring[i]; in gfx_v9_0_eop_irq()
5976 drm_sched_fault(&adev->gfx.gfx_ring[0].sched); in gfx_v9_0_fault()
5980 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_fault()
5981 ring = &adev->gfx.compute_ring[i]; in gfx_v9_0_fault()
6874 for (i = 0; i < adev->gfx.mec.num_pipe_per_mec; i++) { in gfx_v9_0_emit_wave_limit()
7024 adev->gfx.kiq.ring.funcs = &gfx_v9_0_ring_funcs_kiq; in gfx_v9_0_set_ring_funcs()
7026 for (i = 0; i < adev->gfx.num_gfx_rings; i++) in gfx_v9_0_set_ring_funcs()
7027 adev->gfx.gfx_ring[i].funcs = &gfx_v9_0_ring_funcs_gfx; in gfx_v9_0_set_ring_funcs()
7029 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v9_0_set_ring_funcs()
7030 adev->gfx.compute_ring[i].funcs = &gfx_v9_0_ring_funcs_compute; in gfx_v9_0_set_ring_funcs()
7056 adev->gfx.eop_irq.num_types = AMDGPU_CP_IRQ_LAST; in gfx_v9_0_set_irq_funcs()
7057 adev->gfx.eop_irq.funcs = &gfx_v9_0_eop_irq_funcs; in gfx_v9_0_set_irq_funcs()
7059 adev->gfx.priv_reg_irq.num_types = 1; in gfx_v9_0_set_irq_funcs()
7060 adev->gfx.priv_reg_irq.funcs = &gfx_v9_0_priv_reg_irq_funcs; in gfx_v9_0_set_irq_funcs()
7062 adev->gfx.priv_inst_irq.num_types = 1; in gfx_v9_0_set_irq_funcs()
7063 adev->gfx.priv_inst_irq.funcs = &gfx_v9_0_priv_inst_irq_funcs; in gfx_v9_0_set_irq_funcs()
7065 adev->gfx.cp_ecc_error_irq.num_types = 2; /*C5 ECC error and C9 FUE error*/ in gfx_v9_0_set_irq_funcs()
7066 adev->gfx.cp_ecc_error_irq.funcs = &gfx_v9_0_cp_ecc_error_irq_funcs; in gfx_v9_0_set_irq_funcs()
7080 adev->gfx.rlc.funcs = &gfx_v9_0_rlc_funcs; in gfx_v9_0_set_rlc_funcs()
7168 mask = amdgpu_gfx_create_bitmask(adev->gfx.config.max_cu_per_sh); in gfx_v9_0_get_cu_active_bitmap()
7186 if (adev->gfx.config.max_shader_engines * in gfx_v9_0_get_cu_info()
7187 adev->gfx.config.max_sh_per_se > 16) in gfx_v9_0_get_cu_info()
7191 adev->gfx.config.max_shader_engines, in gfx_v9_0_get_cu_info()
7192 adev->gfx.config.max_sh_per_se); in gfx_v9_0_get_cu_info()
7195 for (i = 0; i < adev->gfx.config.max_shader_engines; i++) { in gfx_v9_0_get_cu_info()
7196 for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) { in gfx_v9_0_get_cu_info()
7202 adev, disable_masks[i * adev->gfx.config.max_sh_per_se + j]); in gfx_v9_0_get_cu_info()
7219 for (k = 0; k < adev->gfx.config.max_cu_per_sh; k ++) { in gfx_v9_0_get_cu_info()
7221 if (counter < adev->gfx.config.max_cu_per_sh) in gfx_v9_0_get_cu_info()
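Note: the final matches, from gfx_v9_0_get_cu_info(), bound both the bit walk and the credited count by max_cu_per_sh. A reduced sketch of that inner count over one SH's active-CU bitmap; the per-SH bitmap is passed in rather than read from the harvest registers, and any additional mask bookkeeping the real loop performs is not reproduced:

static u32 gfx_v9_0_count_active_cu_sketch(struct amdgpu_device *adev,
                                           u32 cu_bitmap)
{
        u32 mask = 1, counter = 0;
        int k;

        for (k = 0; k < adev->gfx.config.max_cu_per_sh; k++) {
                /* count a CU only while under the per-SH cap */
                if ((cu_bitmap & mask) &&
                    counter < adev->gfx.config.max_cu_per_sh)
                        counter++;
                mask <<= 1;
        }
        return counter;
}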