/drivers/gpu/drm/amd/amdgpu/
soc24.c
    136  u32 se_num,  in soc24_read_indexed_register() argument
    143  if (se_num != 0xffffffff || sh_num != 0xffffffff)  in soc24_read_indexed_register()
    144  amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff, 0);  in soc24_read_indexed_register()
    148  if (se_num != 0xffffffff || sh_num != 0xffffffff)  in soc24_read_indexed_register()
    155  bool indexed, u32 se_num,  in soc24_get_register_value() argument
    159  return soc24_read_indexed_register(adev, se_num, sh_num, reg_offset);  in soc24_get_register_value()
    168  static int soc24_read_register(struct amdgpu_device *adev, u32 se_num,  in soc24_read_register() argument
    185  se_num, sh_num, reg_offset);  in soc24_read_register()
|
soc21.c
    272  static uint32_t soc21_read_indexed_register(struct amdgpu_device *adev, u32 se_num,  in soc21_read_indexed_register() argument
    278  if (se_num != 0xffffffff || sh_num != 0xffffffff)  in soc21_read_indexed_register()
    279  amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff, 0);  in soc21_read_indexed_register()
    283  if (se_num != 0xffffffff || sh_num != 0xffffffff)  in soc21_read_indexed_register()
    290  bool indexed, u32 se_num,  in soc21_get_register_value() argument
    294  return soc21_read_indexed_register(adev, se_num, sh_num, reg_offset);  in soc21_get_register_value()
    302  static int soc21_read_register(struct amdgpu_device *adev, u32 se_num,  in soc21_read_register() argument
    319  se_num, sh_num, reg_offset);  in soc21_read_register()
|
nv.c
    357  static uint32_t nv_read_indexed_register(struct amdgpu_device *adev, u32 se_num,  in nv_read_indexed_register() argument
    363  if (se_num != 0xffffffff || sh_num != 0xffffffff)  in nv_read_indexed_register()
    364  amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff, 0);  in nv_read_indexed_register()
    368  if (se_num != 0xffffffff || sh_num != 0xffffffff)  in nv_read_indexed_register()
    375  bool indexed, u32 se_num,  in nv_get_register_value() argument
    379  return nv_read_indexed_register(adev, se_num, sh_num, reg_offset);  in nv_get_register_value()
    387  static int nv_read_register(struct amdgpu_device *adev, u32 se_num,  in nv_read_register() argument
    404  se_num, sh_num, reg_offset);  in nv_read_register()
|
soc15.c
    404  static uint32_t soc15_read_indexed_register(struct amdgpu_device *adev, u32 se_num,  in soc15_read_indexed_register() argument
    410  if (se_num != 0xffffffff || sh_num != 0xffffffff)  in soc15_read_indexed_register()
    411  amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff, 0);  in soc15_read_indexed_register()
    415  if (se_num != 0xffffffff || sh_num != 0xffffffff)  in soc15_read_indexed_register()
    422  bool indexed, u32 se_num,  in soc15_get_register_value() argument
    426  return soc15_read_indexed_register(adev, se_num, sh_num, reg_offset);  in soc15_get_register_value()
    436  static int soc15_read_register(struct amdgpu_device *adev, u32 se_num,  in soc15_read_register() argument
    453  se_num, sh_num, reg_offset);  in soc15_read_register()
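
The soc15.c, soc21.c, soc24.c, and nv.c hits above all follow the same indexed-read pattern: if the caller asked for a specific shader engine or shader array (anything other than the 0xffffffff broadcast value), GRBM_GFX_INDEX is steered to that SE/SH before the register read and restored to broadcast afterwards. A minimal sketch of that pattern follows; the mutex and RREG32 lines are not quoted in the hits above and are reconstructed from the surrounding driver code, so treat them as assumptions.

    static uint32_t soc15_read_indexed_register(struct amdgpu_device *adev, u32 se_num,
                                                u32 sh_num, u32 reg_offset)
    {
            uint32_t val;

            mutex_lock(&adev->grbm_idx_mutex);
            /* Steer GRBM_GFX_INDEX only when a specific SE/SH was requested. */
            if (se_num != 0xffffffff || sh_num != 0xffffffff)
                    amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff, 0);

            val = RREG32(reg_offset);

            /* Restore broadcast mode so later accesses hit every SE/SH again. */
            if (se_num != 0xffffffff || sh_num != 0xffffffff)
                    amdgpu_gfx_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff, 0);
            mutex_unlock(&adev->grbm_idx_mutex);

            return val;
    }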
|
gfx_v9_0.h
    29   void gfx_v9_0_select_se_sh(struct amdgpu_device *adev, u32 se_num, u32 sh_num,
|
cik.c
    1123  bool indexed, u32 se_num,  in cik_get_register_value() argument
    1128  unsigned se_idx = (se_num == 0xffffffff) ? 0 : se_num;  in cik_get_register_value()
    1143  if (se_num != 0xffffffff || sh_num != 0xffffffff)  in cik_get_register_value()
    1144  amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff, 0);  in cik_get_register_value()
    1148  if (se_num != 0xffffffff || sh_num != 0xffffffff)  in cik_get_register_value()
    1218  static int cik_read_register(struct amdgpu_device *adev, u32 se_num,  in cik_read_register() argument
    1230  *value = cik_get_register_value(adev, indexed, se_num, sh_num,  in cik_read_register()
|
gfx_v9_4.c
    92   static void gfx_v9_4_select_se_sh(struct amdgpu_device *adev, u32 se_num,  in gfx_v9_4_select_se_sh() argument
    104  if (se_num == 0xffffffff)  in gfx_v9_4_select_se_sh()
    108  data = REG_SET_FIELD(data, GRBM_GFX_INDEX, SE_INDEX, se_num);  in gfx_v9_4_select_se_sh()
    882  for (j = 0; j < gfx_v9_4_edc_counter_regs[i].se_num; j++) {  in gfx_v9_4_query_ras_error_count()
    916  for (j = 0; j < gfx_v9_4_edc_counter_regs[i].se_num; j++) {  in gfx_v9_4_reset_ras_error_count()
    986  for (i = 0; i < gfx_v9_4_ea_err_status_regs.se_num; i++) {  in gfx_v9_4_query_ras_error_status()
|
vi.c
    746  bool indexed, u32 se_num,  in vi_get_register_value() argument
    751  unsigned se_idx = (se_num == 0xffffffff) ? 0 : se_num;  in vi_get_register_value()
    766  if (se_num != 0xffffffff || sh_num != 0xffffffff)  in vi_get_register_value()
    767  amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff, 0);  in vi_get_register_value()
    771  if (se_num != 0xffffffff || sh_num != 0xffffffff)  in vi_get_register_value()
    841  static int vi_read_register(struct amdgpu_device *adev, u32 se_num,  in vi_read_register() argument
    853  *value = vi_get_register_value(adev, indexed, se_num, sh_num,  in vi_read_register()
|
si.c
    1179  bool indexed, u32 se_num,  in si_get_register_value() argument
    1184  unsigned se_idx = (se_num == 0xffffffff) ? 0 : se_num;  in si_get_register_value()
    1197  if (se_num != 0xffffffff || sh_num != 0xffffffff)  in si_get_register_value()
    1198  amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff, 0);  in si_get_register_value()
    1202  if (se_num != 0xffffffff || sh_num != 0xffffffff)  in si_get_register_value()
    1253  static int si_read_register(struct amdgpu_device *adev, u32 se_num,  in si_read_register() argument
    1265  *value = si_get_register_value(adev, indexed, se_num, sh_num,  in si_read_register()
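
The cik.c, vi.c, and si.c hits add one wrinkle to the same pattern: before touching the hardware, `se_num`/`sh_num` are clamped to usable array indices (broadcast 0xffffffff maps to 0) so that a handful of well-known indexed registers can be answered from the cached gfx configuration instead of a live MMIO read. The sketch below illustrates that clamping; the helper name is hypothetical, and `mmCC_RB_BACKEND_DISABLE` / `rb_config` are used as one representative example of the cached registers those drivers consult.

    /* Hypothetical helper mirroring the cache-lookup branch of *_get_register_value(). */
    static uint32_t get_cached_indexed_value(struct amdgpu_device *adev,
                                             u32 se_num, u32 sh_num, u32 reg_offset,
                                             bool *served_from_cache)
    {
            /* Broadcast selectors become index 0 so they can address the cache. */
            unsigned se_idx = (se_num == 0xffffffff) ? 0 : se_num;
            unsigned sh_idx = (sh_num == 0xffffffff) ? 0 : sh_num;

            switch (reg_offset) {
            case mmCC_RB_BACKEND_DISABLE:
                    /* Served from the per-SE/SH config cache, no MMIO needed. */
                    *served_from_cache = true;
                    return adev->gfx.config.rb_config[se_idx][sh_idx].rb_backend_disable;
            default:
                    /* Caller falls back to the select/read/restore path. */
                    *served_from_cache = false;
                    return 0;
            }
    }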
|
soc15.h
    66   uint32_t se_num;  member
|
amdgpu_gfx.h
    336  void (*select_se_sh)(struct amdgpu_device *adev, u32 se_num,
    539  uint32_t se_num;  member
|
gfx_v9_4_2.c
    847   static void gfx_v9_4_2_select_se_sh(struct amdgpu_device *adev, u32 se_num,  in gfx_v9_4_2_select_se_sh() argument
    859   if (se_num == 0xffffffff)  in gfx_v9_4_2_select_se_sh()
    863   data = REG_SET_FIELD(data, GRBM_GFX_INDEX, SE_INDEX, se_num);  in gfx_v9_4_2_select_se_sh()
    1503  for (j = 0; j < gfx_v9_4_2_edc_counter_regs[i].se_num; j++) {  in gfx_v9_4_2_query_sram_edc_count()
    1680  for (i = 0; i < gfx_v9_4_2_ea_err_status_regs.se_num; i++) {  in gfx_v9_4_2_reset_ea_err_status()
    1710  for (i = 0; i < gfx_v9_4_2_ea_err_status_regs.se_num; i++) {  in gfx_v9_4_2_query_ea_err_status()
|
amdgpu_kms.c
    836  unsigned int se_num = (info->read_mmr_reg.instance >>  in amdgpu_info_ioctl() local
    849  if (se_num == AMDGPU_INFO_MMR_SE_INDEX_MASK) {  in amdgpu_info_ioctl()
    850  se_num = 0xffffffff;  in amdgpu_info_ioctl()
    851  } else if (se_num >= AMDGPU_GFX_MAX_SE) {  in amdgpu_info_ioctl()
    878  if (amdgpu_asic_read_register(adev, se_num, sh_num,  in amdgpu_info_ioctl()
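
The amdgpu_kms.c hits show where `se_num` enters from userspace: the AMDGPU_INFO_READ_MMR_REG query packs the SE and SH selectors into the `instance` field, an all-ones selector means broadcast, and anything at or above AMDGPU_GFX_MAX_SE is rejected. A sketch of that decode, factored into a hypothetical helper and assuming the AMDGPU_INFO_MMR_* shift/mask definitions from include/uapi/drm/amdgpu_drm.h:

    /* Hypothetical helper mirroring the decode done inline in amdgpu_info_ioctl(). */
    static int decode_mmr_instance(uint32_t instance, u32 *se_num, u32 *sh_num)
    {
            u32 se = (instance >> AMDGPU_INFO_MMR_SE_INDEX_SHIFT) &
                     AMDGPU_INFO_MMR_SE_INDEX_MASK;
            u32 sh = (instance >> AMDGPU_INFO_MMR_SH_INDEX_SHIFT) &
                     AMDGPU_INFO_MMR_SH_INDEX_MASK;

            /* An all-ones field means "broadcast to every SE / SH". */
            *se_num = (se == AMDGPU_INFO_MMR_SE_INDEX_MASK) ? 0xffffffff : se;
            *sh_num = (sh == AMDGPU_INFO_MMR_SH_INDEX_MASK) ? 0xffffffff : sh;

            /* Concrete selectors must fall inside the hardware limits. */
            if (*se_num != 0xffffffff && *se_num >= AMDGPU_GFX_MAX_SE)
                    return -EINVAL;
            if (*sh_num != 0xffffffff && *sh_num >= AMDGPU_GFX_MAX_SH_PER_SE)
                    return -EINVAL;

            return 0;
    }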
|
gfx_v9_4_3.c
    689   static void gfx_v9_4_3_xcc_select_se_sh(struct amdgpu_device *adev, u32 se_num,  in gfx_v9_4_3_xcc_select_se_sh() argument
    701   if (se_num == 0xffffffff)  in gfx_v9_4_3_xcc_select_se_sh()
    705   data = REG_SET_FIELD(data, GRBM_GFX_INDEX, SE_INDEX, se_num);  in gfx_v9_4_3_xcc_select_se_sh()
    4387  for (j = 0; j < gfx_v9_4_3_ce_reg_list[i].se_num; j++) {  in gfx_v9_4_3_inst_query_ras_err_count()
    4390  if (gfx_v9_4_3_ce_reg_list[i].se_num > 1 ||  in gfx_v9_4_3_inst_query_ras_err_count()
    4417  for (j = 0; j < gfx_v9_4_3_ue_reg_list[i].se_num; j++) {  in gfx_v9_4_3_inst_query_ras_err_count()
    4420  if (gfx_v9_4_3_ue_reg_list[i].se_num > 1 ||  in gfx_v9_4_3_inst_query_ras_err_count()
    4455  for (j = 0; j < gfx_v9_4_3_ce_reg_list[i].se_num; j++) {  in gfx_v9_4_3_inst_reset_ras_err_count()
    4458  if (gfx_v9_4_3_ce_reg_list[i].se_num > 1 ||  in gfx_v9_4_3_inst_reset_ras_err_count()
    4477  for (j = 0; j < gfx_v9_4_3_ue_reg_list[i].se_num; j++) {  in gfx_v9_4_3_inst_reset_ras_err_count()
    [all …]
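
In the gfx_v9_4* RAS paths (gfx_v9_4.c, gfx_v9_4_2.c, gfx_v9_4_3.c above), `se_num` also appears as a field of the error-counter register list entries (the `member` hit in soc15.h), recording how many shader-engine instances of a given EDC counter exist; the query/reset loops walk every (register, SE instance) pair, steering the index before each access. A simplified sketch of that loop shape; the entry struct and helper names below are stand-ins rather than the driver's real types, and the counter-field extraction is omitted.

    /* Stand-in for the register list entry (the real driver uses a soc15 entry type). */
    struct edc_counter_entry {
            u32 reg_offset;
            u32 se_num;     /* number of per-SE instances of this counter */
    };

    static void query_edc_counters(struct amdgpu_device *adev,
                                   const struct edc_counter_entry *list, int count)
    {
            int i, j;

            mutex_lock(&adev->grbm_idx_mutex);
            for (i = 0; i < count; i++) {
                    for (j = 0; j < list[i].se_num; j++) {
                            /* Steer GRBM to SE j (broadcast SH/instance), then read. */
                            amdgpu_gfx_select_se_sh(adev, j, 0xffffffff, 0xffffffff, 0);
                            u32 val = RREG32(list[i].reg_offset);
                            /* SEC/DED bit-field extraction omitted in this sketch. */
                            (void)val;
                    }
            }
            /* Back to broadcast when done. */
            amdgpu_gfx_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff, 0);
            mutex_unlock(&adev->grbm_idx_mutex);
    }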
|
gfx_v6_0.c
    1304  static void gfx_v6_0_select_se_sh(struct amdgpu_device *adev, u32 se_num,  in gfx_v6_0_select_se_sh() argument
    1314  if ((se_num == 0xffffffff) && (sh_num == 0xffffffff))  in gfx_v6_0_select_se_sh()
    1317  else if (se_num == 0xffffffff)  in gfx_v6_0_select_se_sh()
    1322  (se_num << GRBM_GFX_INDEX__SE_INDEX__SHIFT);  in gfx_v6_0_select_se_sh()
    1325  (se_num << GRBM_GFX_INDEX__SE_INDEX__SHIFT);  in gfx_v6_0_select_se_sh()
|
gfx_v7_0.c
    1559  u32 se_num, u32 sh_num, u32 instance,  in gfx_v7_0_select_se_sh() argument
    1569  if ((se_num == 0xffffffff) && (sh_num == 0xffffffff))  in gfx_v7_0_select_se_sh()
    1572  else if (se_num == 0xffffffff)  in gfx_v7_0_select_se_sh()
    1577  (se_num << GRBM_GFX_INDEX__SE_INDEX__SHIFT);  in gfx_v7_0_select_se_sh()
    1580  (se_num << GRBM_GFX_INDEX__SE_INDEX__SHIFT);  in gfx_v7_0_select_se_sh()
|
gfx_v12_0.c
    277   static void gfx_v12_0_select_se_sh(struct amdgpu_device *adev, u32 se_num,
    1662  static void gfx_v12_0_select_se_sh(struct amdgpu_device *adev, u32 se_num,  in gfx_v12_0_select_se_sh() argument
    1674  if (se_num == 0xffffffff)  in gfx_v12_0_select_se_sh()
    1678  data = REG_SET_FIELD(data, GRBM_GFX_INDEX, SE_INDEX, se_num);  in gfx_v12_0_select_se_sh()
|
gfx_v9_0.c
    2499  void gfx_v9_0_select_se_sh(struct amdgpu_device *adev, u32 se_num, u32 sh_num,  in gfx_v9_0_select_se_sh() argument
    2509  if (se_num == 0xffffffff)  in gfx_v9_0_select_se_sh()
    2512  data = REG_SET_FIELD(data, GRBM_GFX_INDEX, SE_INDEX, se_num);  in gfx_v9_0_select_se_sh()
    6998  for (j = 0; j < gfx_v9_0_edc_counter_regs[i].se_num; j++) {  in gfx_v9_0_reset_ras_error_count()
    7060  for (j = 0; j < gfx_v9_0_edc_counter_regs[i].se_num; j++) {  in gfx_v9_0_query_ras_error_count()
|
amdgpu.h
    660  int (*read_register)(struct amdgpu_device *adev, u32 se_num,
|
gfx_v11_0.c
    330   static void gfx_v11_0_select_se_sh(struct amdgpu_device *adev, u32 se_num,
    1931  static void gfx_v11_0_select_se_sh(struct amdgpu_device *adev, u32 se_num,  in gfx_v11_0_select_se_sh() argument
    1943  if (se_num == 0xffffffff)  in gfx_v11_0_select_se_sh()
    1947  data = REG_SET_FIELD(data, GRBM_GFX_INDEX, SE_INDEX, se_num);  in gfx_v11_0_select_se_sh()
|
gfx_v8_0.c
    3384  u32 se_num, u32 sh_num, u32 instance,  in gfx_v8_0_select_se_sh() argument
    3394  if (se_num == 0xffffffff)  in gfx_v8_0_select_se_sh()
    3397  data = REG_SET_FIELD(data, GRBM_GFX_INDEX, SE_INDEX, se_num);  in gfx_v8_0_select_se_sh()
|
gfx_v10_0.c
    3685  static void gfx_v10_0_select_se_sh(struct amdgpu_device *adev, u32 se_num,
    5056  static void gfx_v10_0_select_se_sh(struct amdgpu_device *adev, u32 se_num,  in gfx_v10_0_select_se_sh() argument
    5068  if (se_num == 0xffffffff)  in gfx_v10_0_select_se_sh()
    5072  data = REG_SET_FIELD(data, GRBM_GFX_INDEX, SE_INDEX, se_num);  in gfx_v10_0_select_se_sh()
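
From gfx_v6 through gfx_v12, the per-ASIC select_se_sh() callbacks listed above all do the same thing with different register helpers: each of the SE, SH, and instance fields of GRBM_GFX_INDEX is either set to a concrete index or switched to its *_BROADCAST_WRITES bit when the caller passed 0xffffffff. A sketch in the gfx_v9-style REG_SET_FIELD form; the function name is made up for illustration, only the SE_INDEX branch is quoted in the hits above, and the final write helper varies per ASIC.

    static void gfx_v9_style_select_se_sh(struct amdgpu_device *adev, u32 se_num,
                                          u32 sh_num, u32 instance)
    {
            u32 data;

            /* 0xffffffff selects broadcast; any other value selects one unit. */
            if (instance == 0xffffffff)
                    data = REG_SET_FIELD(0, GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES, 1);
            else
                    data = REG_SET_FIELD(0, GRBM_GFX_INDEX, INSTANCE_INDEX, instance);

            if (se_num == 0xffffffff)
                    data = REG_SET_FIELD(data, GRBM_GFX_INDEX, SE_BROADCAST_WRITES, 1);
            else
                    data = REG_SET_FIELD(data, GRBM_GFX_INDEX, SE_INDEX, se_num);

            if (sh_num == 0xffffffff)
                    data = REG_SET_FIELD(data, GRBM_GFX_INDEX, SH_BROADCAST_WRITES, 1);
            else
                    data = REG_SET_FIELD(data, GRBM_GFX_INDEX, SH_INDEX, sh_num);

            /* Program the index register; the exact write helper differs per ASIC. */
            WREG32_SOC15(GC, 0, mmGRBM_GFX_INDEX, data);
    }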
|
/drivers/gpu/drm/radeon/
si.c
    2928  u32 se_num, u32 sh_num)  in si_select_se_sh() argument
    2932  if ((se_num == 0xffffffff) && (sh_num == 0xffffffff))  in si_select_se_sh()
    2934  else if (se_num == 0xffffffff)  in si_select_se_sh()
    2937  data |= SH_BROADCAST_WRITES | SE_INDEX(se_num);  in si_select_se_sh()
    2939  data |= SH_INDEX(sh_num) | SE_INDEX(se_num);  in si_select_se_sh()
    2973  u32 se_num, u32 sh_per_se,  in si_setup_spi() argument
    2979  for (i = 0; i < se_num; i++) {  in si_setup_spi()
    3020  u32 se_num, u32 sh_per_se,  in si_setup_rb() argument
    3028  for (i = 0; i < se_num; i++) {  in si_setup_rb()
    3038  for (i = 0; i < max_rb_num_per_se * se_num; i++) {  in si_setup_rb()
    [all …]
|
cik.c
    3027  u32 se_num, u32 sh_num)  in cik_select_se_sh() argument
    3031  if ((se_num == 0xffffffff) && (sh_num == 0xffffffff))  in cik_select_se_sh()
    3033  else if (se_num == 0xffffffff)  in cik_select_se_sh()
    3036  data |= SH_BROADCAST_WRITES | SE_INDEX(se_num);  in cik_select_se_sh()
    3038  data |= SH_INDEX(sh_num) | SE_INDEX(se_num);  in cik_select_se_sh()
    3102  u32 se_num, u32 sh_per_se,  in cik_setup_rb() argument
    3110  for (i = 0; i < se_num; i++) {  in cik_setup_rb()
    3123  for (i = 0; i < max_rb_num_per_se * se_num; i++) {  in cik_setup_rb()
    3131  for (i = 0; i < se_num; i++) {  in cik_setup_rb()
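
The radeon hits use the same 0xffffffff broadcast convention, but here `se_num` is also a plain count: si_setup_spi(), si_setup_rb(), and cik_setup_rb() iterate over every shader engine and shader array, select each one in turn, program or sample its render-backend state, and finish by selecting broadcast again. A schematic of that walk; the function name is invented for illustration and the per-SH body is reduced to a placeholder comment.

    static void setup_rb_walk(struct radeon_device *rdev,
                              u32 se_num, u32 sh_per_se, u32 max_rb_num_per_se)
    {
            u32 i, j;

            for (i = 0; i < se_num; i++) {
                    for (j = 0; j < sh_per_se; j++) {
                            /* Point GRBM_GFX_INDEX at SE i / SH j. */
                            cik_select_se_sh(rdev, i, j);
                            /* ...read the disabled-RB mask / program raster config here... */
                    }
            }
            /* Leave the index register in broadcast mode. */
            cik_select_se_sh(rdev, 0xffffffff, 0xffffffff);
    }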
|