Lines Matching refs:rdev in drivers/gpu/drm/radeon/si.c
Cross-reference listing: each entry gives the source line number, the matching source fragment, and the enclosing function; an "argument" suffix marks lines where rdev is matched as a function parameter.
128 static u32 si_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh);
129 static void si_pcie_gen3_enable(struct radeon_device *rdev);
130 static void si_program_aspm(struct radeon_device *rdev);
131 extern void sumo_rlc_fini(struct radeon_device *rdev);
132 extern int sumo_rlc_init(struct radeon_device *rdev);
133 static void si_enable_gui_idle_interrupt(struct radeon_device *rdev,
135 static void si_init_pg(struct radeon_device *rdev);
136 static void si_init_cg(struct radeon_device *rdev);
137 static void si_fini_pg(struct radeon_device *rdev);
138 static void si_fini_cg(struct radeon_device *rdev);
139 static void si_rlc_stop(struct radeon_device *rdev);
1208 static void si_init_golden_registers(struct radeon_device *rdev) in si_init_golden_registers() argument
1210 switch (rdev->family) { in si_init_golden_registers()
1212 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1215 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1218 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1221 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1226 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1229 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1232 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1237 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1240 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1243 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1246 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1251 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1254 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1257 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1262 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1265 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1268 radeon_program_register_sequence(rdev, in si_init_golden_registers()
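All of the si_init_golden_registers() matches above funnel into radeon_program_register_sequence(), which in this tree (drivers/gpu/drm/radeon/radeon_device.c) walks a flat array of (register, AND mask, OR value) triples. A sketch of its core loop, with RREG32()/WREG32() being the driver's MMIO accessors:

	for (i = 0; i < array_size; i += 3) {
		u32 reg      = registers[i + 0];
		u32 and_mask = registers[i + 1];
		u32 or_mask  = registers[i + 2];
		u32 tmp;

		/* and_mask == 0xffffffff means "write or_mask verbatim" */
		if (and_mask == 0xffffffff) {
			tmp = or_mask;
		} else {
			tmp = RREG32(reg);
			tmp &= ~and_mask;
			tmp |= or_mask;
		}
		WREG32(reg, tmp);
	}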
1287 int si_get_allowed_info_register(struct radeon_device *rdev, in si_get_allowed_info_register() argument
1318 u32 si_get_xclk(struct radeon_device *rdev) in si_get_xclk() argument
1320 u32 reference_clock = rdev->clock.spll.reference_freq; in si_get_xclk()
1335 int si_get_temp(struct radeon_device *rdev) in si_get_temp() argument
1551 int si_mc_load_microcode(struct radeon_device *rdev) in si_mc_load_microcode() argument
1560 if (!rdev->mc_fw) in si_mc_load_microcode()
1563 if (rdev->new_fw) { in si_mc_load_microcode()
1565 (const struct mc_firmware_header_v1_0 *)rdev->mc_fw->data; in si_mc_load_microcode()
1570 (rdev->mc_fw->data + le32_to_cpu(hdr->io_debug_array_offset_bytes)); in si_mc_load_microcode()
1573 (rdev->mc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); in si_mc_load_microcode()
1575 ucode_size = rdev->mc_fw->size / 4; in si_mc_load_microcode()
1577 switch (rdev->family) { in si_mc_load_microcode()
1600 fw_data = (const __be32 *)rdev->mc_fw->data; in si_mc_load_microcode()
1612 if (rdev->new_fw) { in si_mc_load_microcode()
1622 if (rdev->new_fw) in si_mc_load_microcode()
1634 for (i = 0; i < rdev->usec_timeout; i++) { in si_mc_load_microcode()
1639 for (i = 0; i < rdev->usec_timeout; i++) { in si_mc_load_microcode()
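The two loops at 1634/1639 are the driver's standard bounded poll: spin for at most rdev->usec_timeout iterations, one microsecond apart, waiting for the MC memory-training "done" bits after the microcode upload. The shape, condensed from si_mc_load_microcode():

	/* wait for memory training to complete (one done bit per channel) */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D0)
			break;
		udelay(1);
	}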
1649 static int si_init_microcode(struct radeon_device *rdev) in si_init_microcode() argument
1664 switch (rdev->family) { in si_init_microcode()
1678 if ((rdev->pdev->revision == 0x81) && in si_init_microcode()
1679 ((rdev->pdev->device == 0x6810) || in si_init_microcode()
1680 (rdev->pdev->device == 0x6811))) in si_init_microcode()
1693 if (((rdev->pdev->device == 0x6820) && in si_init_microcode()
1694 ((rdev->pdev->revision == 0x81) || in si_init_microcode()
1695 (rdev->pdev->revision == 0x83))) || in si_init_microcode()
1696 ((rdev->pdev->device == 0x6821) && in si_init_microcode()
1697 ((rdev->pdev->revision == 0x83) || in si_init_microcode()
1698 (rdev->pdev->revision == 0x87))) || in si_init_microcode()
1699 ((rdev->pdev->revision == 0x87) && in si_init_microcode()
1700 ((rdev->pdev->device == 0x6823) || in si_init_microcode()
1701 (rdev->pdev->device == 0x682b)))) in si_init_microcode()
1714 if (((rdev->pdev->revision == 0x81) && in si_init_microcode()
1715 ((rdev->pdev->device == 0x6600) || in si_init_microcode()
1716 (rdev->pdev->device == 0x6604) || in si_init_microcode()
1717 (rdev->pdev->device == 0x6605) || in si_init_microcode()
1718 (rdev->pdev->device == 0x6610))) || in si_init_microcode()
1719 ((rdev->pdev->revision == 0x83) && in si_init_microcode()
1720 (rdev->pdev->device == 0x6610))) in si_init_microcode()
1732 if (((rdev->pdev->revision == 0x81) && in si_init_microcode()
1733 (rdev->pdev->device == 0x6660)) || in si_init_microcode()
1734 ((rdev->pdev->revision == 0x83) && in si_init_microcode()
1735 ((rdev->pdev->device == 0x6660) || in si_init_microcode()
1736 (rdev->pdev->device == 0x6663) || in si_init_microcode()
1737 (rdev->pdev->device == 0x6665) || in si_init_microcode()
1738 (rdev->pdev->device == 0x6667)))) in si_init_microcode()
1740 else if ((rdev->pdev->revision == 0xc3) && in si_init_microcode()
1741 (rdev->pdev->device == 0x6665)) in si_init_microcode()
1762 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev); in si_init_microcode()
1765 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev); in si_init_microcode()
1768 if (rdev->pfp_fw->size != pfp_req_size) { in si_init_microcode()
1770 rdev->pfp_fw->size, fw_name); in si_init_microcode()
1775 err = radeon_ucode_validate(rdev->pfp_fw); in si_init_microcode()
1786 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev); in si_init_microcode()
1789 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev); in si_init_microcode()
1792 if (rdev->me_fw->size != me_req_size) { in si_init_microcode()
1794 rdev->me_fw->size, fw_name); in si_init_microcode()
1798 err = radeon_ucode_validate(rdev->me_fw); in si_init_microcode()
1809 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev); in si_init_microcode()
1812 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev); in si_init_microcode()
1815 if (rdev->ce_fw->size != ce_req_size) { in si_init_microcode()
1817 rdev->ce_fw->size, fw_name); in si_init_microcode()
1821 err = radeon_ucode_validate(rdev->ce_fw); in si_init_microcode()
1832 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev); in si_init_microcode()
1835 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev); in si_init_microcode()
1838 if (rdev->rlc_fw->size != rlc_req_size) { in si_init_microcode()
1840 rdev->rlc_fw->size, fw_name); in si_init_microcode()
1844 err = radeon_ucode_validate(rdev->rlc_fw); in si_init_microcode()
1858 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); in si_init_microcode()
1861 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); in si_init_microcode()
1864 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); in si_init_microcode()
1868 if ((rdev->mc_fw->size != mc_req_size) && in si_init_microcode()
1869 (rdev->mc_fw->size != mc2_req_size)) { in si_init_microcode()
1871 rdev->mc_fw->size, fw_name); in si_init_microcode()
1874 DRM_INFO("%s: %zu bytes\n", fw_name, rdev->mc_fw->size); in si_init_microcode()
1876 err = radeon_ucode_validate(rdev->mc_fw); in si_init_microcode()
1892 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev); in si_init_microcode()
1895 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev); in si_init_microcode()
1898 release_firmware(rdev->smc_fw); in si_init_microcode()
1899 rdev->smc_fw = NULL; in si_init_microcode()
1901 } else if (rdev->smc_fw->size != smc_req_size) { in si_init_microcode()
1903 rdev->smc_fw->size, fw_name); in si_init_microcode()
1907 err = radeon_ucode_validate(rdev->smc_fw); in si_init_microcode()
1918 rdev->new_fw = false; in si_init_microcode()
1923 rdev->new_fw = true; in si_init_microcode()
1930 release_firmware(rdev->pfp_fw); in si_init_microcode()
1931 rdev->pfp_fw = NULL; in si_init_microcode()
1932 release_firmware(rdev->me_fw); in si_init_microcode()
1933 rdev->me_fw = NULL; in si_init_microcode()
1934 release_firmware(rdev->ce_fw); in si_init_microcode()
1935 rdev->ce_fw = NULL; in si_init_microcode()
1936 release_firmware(rdev->rlc_fw); in si_init_microcode()
1937 rdev->rlc_fw = NULL; in si_init_microcode()
1938 release_firmware(rdev->mc_fw); in si_init_microcode()
1939 rdev->mc_fw = NULL; in si_init_microcode()
1940 release_firmware(rdev->smc_fw); in si_init_microcode()
1941 rdev->smc_fw = NULL; in si_init_microcode()
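si_init_microcode() (1649-1941) repeats one request/validate leg per firmware block (pfp, me, ce, rlc, mc, smc): try the new unified firmware name, fall back to the legacy name, size-check legacy blobs, run radeon_ucode_validate() on new ones, and on any hard failure jump to a shared label that releases every blob and NULLs the pointers, which is what 1930-1941 show. Only the SMC blob is optional, hence the release-and-continue at 1898-1899. The pfp leg, lightly condensed (the exact chip-name strings are elided):

	snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", new_chip_name);
	err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
	if (err) {
		/* fall back to the legacy fixed-layout firmware */
		snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
		err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
		if (err)
			goto out;
		if (rdev->pfp_fw->size != pfp_req_size) {
			pr_err("si_cp: Bogus length %zu in firmware \"%s\"\n",
			       rdev->pfp_fw->size, fw_name);
			err = -EINVAL;
			goto out;
		}
	} else {
		err = radeon_ucode_validate(rdev->pfp_fw);
		if (err)
			goto out;
	}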
1947 static u32 dce6_line_buffer_adjust(struct radeon_device *rdev, in dce6_line_buffer_adjust() argument
1985 for (i = 0; i < rdev->usec_timeout; i++) { in dce6_line_buffer_adjust()
2006 static u32 si_get_number_of_dram_channels(struct radeon_device *rdev) in si_get_number_of_dram_channels() argument
2274 static void dce6_program_watermarks(struct radeon_device *rdev, in dce6_program_watermarks() argument
2299 if (rdev->family == CHIP_ARUBA) in dce6_program_watermarks()
2300 dram_channels = evergreen_get_number_of_dram_channels(rdev); in dce6_program_watermarks()
2302 dram_channels = si_get_number_of_dram_channels(rdev); in dce6_program_watermarks()
2305 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { in dce6_program_watermarks()
2307 radeon_dpm_get_mclk(rdev, false) * 10; in dce6_program_watermarks()
2309 radeon_dpm_get_sclk(rdev, false) * 10; in dce6_program_watermarks()
2311 wm_high.yclk = rdev->pm.current_mclk * 10; in dce6_program_watermarks()
2312 wm_high.sclk = rdev->pm.current_sclk * 10; in dce6_program_watermarks()
2332 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { in dce6_program_watermarks()
2334 radeon_dpm_get_mclk(rdev, true) * 10; in dce6_program_watermarks()
2336 radeon_dpm_get_sclk(rdev, true) * 10; in dce6_program_watermarks()
2338 wm_low.yclk = rdev->pm.current_mclk * 10; in dce6_program_watermarks()
2339 wm_low.sclk = rdev->pm.current_sclk * 10; in dce6_program_watermarks()
2368 (rdev->disp_priority == 2)) { in dce6_program_watermarks()
2376 (rdev->disp_priority == 2)) { in dce6_program_watermarks()
2440 void dce6_bandwidth_update(struct radeon_device *rdev) in dce6_bandwidth_update() argument
2447 if (!rdev->mode_info.mode_config_initialized) in dce6_bandwidth_update()
2450 radeon_update_display_priority(rdev); in dce6_bandwidth_update()
2452 for (i = 0; i < rdev->num_crtc; i++) { in dce6_bandwidth_update()
2453 if (rdev->mode_info.crtcs[i]->base.enabled) in dce6_bandwidth_update()
2456 for (i = 0; i < rdev->num_crtc; i += 2) { in dce6_bandwidth_update()
2457 mode0 = &rdev->mode_info.crtcs[i]->base.mode; in dce6_bandwidth_update()
2458 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode; in dce6_bandwidth_update()
2459 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1); in dce6_bandwidth_update()
2460 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads); in dce6_bandwidth_update()
2461 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0); in dce6_bandwidth_update()
2462 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads); in dce6_bandwidth_update()
2469 static void si_tiling_mode_table_init(struct radeon_device *rdev) in si_tiling_mode_table_init() argument
2471 u32 *tile = rdev->config.si.tile_mode_array; in si_tiling_mode_table_init()
2473 ARRAY_SIZE(rdev->config.si.tile_mode_array); in si_tiling_mode_table_init()
2476 switch (rdev->config.si.mem_row_size_in_kb) { in si_tiling_mode_table_init()
2492 switch(rdev->family) { in si_tiling_mode_table_init()
2923 DRM_ERROR("unknown asic: 0x%x\n", rdev->family); in si_tiling_mode_table_init()
2927 static void si_select_se_sh(struct radeon_device *rdev, in si_select_se_sh() argument
2954 static u32 si_get_cu_enabled(struct radeon_device *rdev, u32 cu_per_sh) in si_get_cu_enabled() argument
2972 static void si_setup_spi(struct radeon_device *rdev, in si_setup_spi() argument
2981 si_select_se_sh(rdev, i, j); in si_setup_spi()
2983 active_cu = si_get_cu_enabled(rdev, cu_per_sh); in si_setup_spi()
2996 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); in si_setup_spi()
2999 static u32 si_get_rb_disabled(struct radeon_device *rdev, in si_get_rb_disabled() argument
3019 static void si_setup_rb(struct radeon_device *rdev, in si_setup_rb() argument
3030 si_select_se_sh(rdev, i, j); in si_setup_rb()
3031 data = si_get_rb_disabled(rdev, max_rb_num_per_se, sh_per_se); in si_setup_rb()
3035 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); in si_setup_rb()
3044 rdev->config.si.backend_enable_mask = enabled_rbs; in si_setup_rb()
3047 si_select_se_sh(rdev, i, 0xffffffff); in si_setup_rb()
3066 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); in si_setup_rb()
3069 static void si_gpu_init(struct radeon_device *rdev) in si_gpu_init() argument
3078 switch (rdev->family) { in si_gpu_init()
3080 rdev->config.si.max_shader_engines = 2; in si_gpu_init()
3081 rdev->config.si.max_tile_pipes = 12; in si_gpu_init()
3082 rdev->config.si.max_cu_per_sh = 8; in si_gpu_init()
3083 rdev->config.si.max_sh_per_se = 2; in si_gpu_init()
3084 rdev->config.si.max_backends_per_se = 4; in si_gpu_init()
3085 rdev->config.si.max_texture_channel_caches = 12; in si_gpu_init()
3086 rdev->config.si.max_gprs = 256; in si_gpu_init()
3087 rdev->config.si.max_gs_threads = 32; in si_gpu_init()
3088 rdev->config.si.max_hw_contexts = 8; in si_gpu_init()
3090 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; in si_gpu_init()
3091 rdev->config.si.sc_prim_fifo_size_backend = 0x100; in si_gpu_init()
3092 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; in si_gpu_init()
3093 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; in si_gpu_init()
3097 rdev->config.si.max_shader_engines = 2; in si_gpu_init()
3098 rdev->config.si.max_tile_pipes = 8; in si_gpu_init()
3099 rdev->config.si.max_cu_per_sh = 5; in si_gpu_init()
3100 rdev->config.si.max_sh_per_se = 2; in si_gpu_init()
3101 rdev->config.si.max_backends_per_se = 4; in si_gpu_init()
3102 rdev->config.si.max_texture_channel_caches = 8; in si_gpu_init()
3103 rdev->config.si.max_gprs = 256; in si_gpu_init()
3104 rdev->config.si.max_gs_threads = 32; in si_gpu_init()
3105 rdev->config.si.max_hw_contexts = 8; in si_gpu_init()
3107 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; in si_gpu_init()
3108 rdev->config.si.sc_prim_fifo_size_backend = 0x100; in si_gpu_init()
3109 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; in si_gpu_init()
3110 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; in si_gpu_init()
3115 rdev->config.si.max_shader_engines = 1; in si_gpu_init()
3116 rdev->config.si.max_tile_pipes = 4; in si_gpu_init()
3117 rdev->config.si.max_cu_per_sh = 5; in si_gpu_init()
3118 rdev->config.si.max_sh_per_se = 2; in si_gpu_init()
3119 rdev->config.si.max_backends_per_se = 4; in si_gpu_init()
3120 rdev->config.si.max_texture_channel_caches = 4; in si_gpu_init()
3121 rdev->config.si.max_gprs = 256; in si_gpu_init()
3122 rdev->config.si.max_gs_threads = 32; in si_gpu_init()
3123 rdev->config.si.max_hw_contexts = 8; in si_gpu_init()
3125 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; in si_gpu_init()
3126 rdev->config.si.sc_prim_fifo_size_backend = 0x40; in si_gpu_init()
3127 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; in si_gpu_init()
3128 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; in si_gpu_init()
3132 rdev->config.si.max_shader_engines = 1; in si_gpu_init()
3133 rdev->config.si.max_tile_pipes = 4; in si_gpu_init()
3134 rdev->config.si.max_cu_per_sh = 6; in si_gpu_init()
3135 rdev->config.si.max_sh_per_se = 1; in si_gpu_init()
3136 rdev->config.si.max_backends_per_se = 2; in si_gpu_init()
3137 rdev->config.si.max_texture_channel_caches = 4; in si_gpu_init()
3138 rdev->config.si.max_gprs = 256; in si_gpu_init()
3139 rdev->config.si.max_gs_threads = 16; in si_gpu_init()
3140 rdev->config.si.max_hw_contexts = 8; in si_gpu_init()
3142 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; in si_gpu_init()
3143 rdev->config.si.sc_prim_fifo_size_backend = 0x40; in si_gpu_init()
3144 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; in si_gpu_init()
3145 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; in si_gpu_init()
3149 rdev->config.si.max_shader_engines = 1; in si_gpu_init()
3150 rdev->config.si.max_tile_pipes = 4; in si_gpu_init()
3151 rdev->config.si.max_cu_per_sh = 5; in si_gpu_init()
3152 rdev->config.si.max_sh_per_se = 1; in si_gpu_init()
3153 rdev->config.si.max_backends_per_se = 1; in si_gpu_init()
3154 rdev->config.si.max_texture_channel_caches = 2; in si_gpu_init()
3155 rdev->config.si.max_gprs = 256; in si_gpu_init()
3156 rdev->config.si.max_gs_threads = 16; in si_gpu_init()
3157 rdev->config.si.max_hw_contexts = 8; in si_gpu_init()
3159 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; in si_gpu_init()
3160 rdev->config.si.sc_prim_fifo_size_backend = 0x40; in si_gpu_init()
3161 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; in si_gpu_init()
3162 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; in si_gpu_init()
3180 evergreen_fix_pci_max_read_req_size(rdev); in si_gpu_init()
3187 rdev->config.si.num_tile_pipes = rdev->config.si.max_tile_pipes; in si_gpu_init()
3188 rdev->config.si.mem_max_burst_length_bytes = 256; in si_gpu_init()
3190 rdev->config.si.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024; in si_gpu_init()
3191 if (rdev->config.si.mem_row_size_in_kb > 4) in si_gpu_init()
3192 rdev->config.si.mem_row_size_in_kb = 4; in si_gpu_init()
3194 rdev->config.si.shader_engine_tile_size = 32; in si_gpu_init()
3195 rdev->config.si.num_gpus = 1; in si_gpu_init()
3196 rdev->config.si.multi_gpu_tile_size = 64; in si_gpu_init()
3200 switch (rdev->config.si.mem_row_size_in_kb) { in si_gpu_init()
3220 rdev->config.si.tile_config = 0; in si_gpu_init()
3221 switch (rdev->config.si.num_tile_pipes) { in si_gpu_init()
3223 rdev->config.si.tile_config |= (0 << 0); in si_gpu_init()
3226 rdev->config.si.tile_config |= (1 << 0); in si_gpu_init()
3229 rdev->config.si.tile_config |= (2 << 0); in si_gpu_init()
3234 rdev->config.si.tile_config |= (3 << 0); in si_gpu_init()
3239 rdev->config.si.tile_config |= 0 << 4; in si_gpu_init()
3242 rdev->config.si.tile_config |= 1 << 4; in si_gpu_init()
3246 rdev->config.si.tile_config |= 2 << 4; in si_gpu_init()
3249 rdev->config.si.tile_config |= in si_gpu_init()
3251 rdev->config.si.tile_config |= in si_gpu_init()
3260 if (rdev->has_uvd) { in si_gpu_init()
3266 si_tiling_mode_table_init(rdev); in si_gpu_init()
3268 si_setup_rb(rdev, rdev->config.si.max_shader_engines, in si_gpu_init()
3269 rdev->config.si.max_sh_per_se, in si_gpu_init()
3270 rdev->config.si.max_backends_per_se); in si_gpu_init()
3272 si_setup_spi(rdev, rdev->config.si.max_shader_engines, in si_gpu_init()
3273 rdev->config.si.max_sh_per_se, in si_gpu_init()
3274 rdev->config.si.max_cu_per_sh); in si_gpu_init()
3276 rdev->config.si.active_cus = 0; in si_gpu_init()
3277 for (i = 0; i < rdev->config.si.max_shader_engines; i++) { in si_gpu_init()
3278 for (j = 0; j < rdev->config.si.max_sh_per_se; j++) { in si_gpu_init()
3279 rdev->config.si.active_cus += in si_gpu_init()
3280 hweight32(si_get_cu_active_bitmap(rdev, i, j)); in si_gpu_init()
3294 WREG32(PA_SC_FIFO_SIZE, (SC_FRONTEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_frontend) | in si_gpu_init()
3295 SC_BACKEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_backend) | in si_gpu_init()
3296 SC_HIZ_TILE_FIFO_SIZE(rdev->config.si.sc_hiz_tile_fifo_size) | in si_gpu_init()
3297 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.si.sc_earlyz_tile_fifo_size))); in si_gpu_init()
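Lines 3220-3251 pack the tiling parameters into the single rdev->config.si.tile_config word that userspace later queries. From the shifts visible above, a decoding sketch (the field widths are assumptions inferred from the encodings, not taken from a header):

	u32 tc = rdev->config.si.tile_config;
	u32 num_pipes       = 1 << ((tc >> 0) & 0xf);	/* 0->1, 1->2, 2->4, 3->8 pipes */
	u32 num_banks       = 4 << ((tc >> 4) & 0xf);	/* 0->4, 1->8, 2->16 banks */
	u32 pipe_interleave = (tc >> 8) & 0xf;		/* raw PIPE_INTERLEAVE_SIZE bits */
	u32 row_size        = (tc >> 12) & 0xf;		/* raw ROW_SIZE bits */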
3338 static void si_scratch_init(struct radeon_device *rdev) in si_scratch_init() argument
3342 rdev->scratch.num_reg = 7; in si_scratch_init()
3343 rdev->scratch.reg_base = SCRATCH_REG0; in si_scratch_init()
3344 for (i = 0; i < rdev->scratch.num_reg; i++) { in si_scratch_init()
3345 rdev->scratch.free[i] = true; in si_scratch_init()
3346 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4); in si_scratch_init()
3350 void si_fence_ring_emit(struct radeon_device *rdev, in si_fence_ring_emit() argument
3353 struct radeon_ring *ring = &rdev->ring[fence->ring]; in si_fence_ring_emit()
3354 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in si_fence_ring_emit()
3380 void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) in si_ring_ib_execute() argument
3382 struct radeon_ring *ring = &rdev->ring[ib->ring]; in si_ring_ib_execute()
3400 } else if (rdev->wb.enabled) { in si_ring_ib_execute()
3440 static void si_cp_enable(struct radeon_device *rdev, bool enable) in si_cp_enable() argument
3445 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) in si_cp_enable()
3446 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size); in si_cp_enable()
3449 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; in si_cp_enable()
3450 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; in si_cp_enable()
3451 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; in si_cp_enable()
3456 static int si_cp_load_microcode(struct radeon_device *rdev) in si_cp_load_microcode() argument
3460 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw) in si_cp_load_microcode()
3463 si_cp_enable(rdev, false); in si_cp_load_microcode()
3465 if (rdev->new_fw) { in si_cp_load_microcode()
3467 (const struct gfx_firmware_header_v1_0 *)rdev->pfp_fw->data; in si_cp_load_microcode()
3469 (const struct gfx_firmware_header_v1_0 *)rdev->ce_fw->data; in si_cp_load_microcode()
3471 (const struct gfx_firmware_header_v1_0 *)rdev->me_fw->data; in si_cp_load_microcode()
3481 (rdev->pfp_fw->data + le32_to_cpu(pfp_hdr->header.ucode_array_offset_bytes)); in si_cp_load_microcode()
3490 (rdev->ce_fw->data + le32_to_cpu(ce_hdr->header.ucode_array_offset_bytes)); in si_cp_load_microcode()
3499 (rdev->me_fw->data + le32_to_cpu(me_hdr->header.ucode_array_offset_bytes)); in si_cp_load_microcode()
3509 fw_data = (const __be32 *)rdev->pfp_fw->data; in si_cp_load_microcode()
3516 fw_data = (const __be32 *)rdev->ce_fw->data; in si_cp_load_microcode()
3523 fw_data = (const __be32 *)rdev->me_fw->data; in si_cp_load_microcode()
3537 static int si_cp_start(struct radeon_device *rdev) in si_cp_start() argument
3539 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_cp_start()
3542 r = radeon_ring_lock(rdev, ring, 7 + 4); in si_cp_start()
3551 radeon_ring_write(ring, rdev->config.si.max_hw_contexts - 1); in si_cp_start()
3561 radeon_ring_unlock_commit(rdev, ring, false); in si_cp_start()
3563 si_cp_enable(rdev, true); in si_cp_start()
3565 r = radeon_ring_lock(rdev, ring, si_default_size + 10); in si_cp_start()
3590 radeon_ring_unlock_commit(rdev, ring, false); in si_cp_start()
3593 ring = &rdev->ring[i]; in si_cp_start()
3594 r = radeon_ring_lock(rdev, ring, 2); in si_cp_start()
3604 radeon_ring_unlock_commit(rdev, ring, false); in si_cp_start()
3610 static void si_cp_fini(struct radeon_device *rdev) in si_cp_fini() argument
3613 si_cp_enable(rdev, false); in si_cp_fini()
3615 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_cp_fini()
3616 radeon_ring_fini(rdev, ring); in si_cp_fini()
3617 radeon_scratch_free(rdev, ring->rptr_save_reg); in si_cp_fini()
3619 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in si_cp_fini()
3620 radeon_ring_fini(rdev, ring); in si_cp_fini()
3621 radeon_scratch_free(rdev, ring->rptr_save_reg); in si_cp_fini()
3623 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in si_cp_fini()
3624 radeon_ring_fini(rdev, ring); in si_cp_fini()
3625 radeon_scratch_free(rdev, ring->rptr_save_reg); in si_cp_fini()
3628 static int si_cp_resume(struct radeon_device *rdev) in si_cp_resume() argument
3635 si_enable_gui_idle_interrupt(rdev, false); in si_cp_resume()
3644 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF); in si_cp_resume()
3648 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_cp_resume()
3662 WREG32(CP_RB0_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC); in si_cp_resume()
3663 WREG32(CP_RB0_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF); in si_cp_resume()
3665 if (rdev->wb.enabled) in si_cp_resume()
3679 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in si_cp_resume()
3693 WREG32(CP_RB1_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFFFFFFFC); in si_cp_resume()
3694 WREG32(CP_RB1_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFF); in si_cp_resume()
3703 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in si_cp_resume()
3717 WREG32(CP_RB2_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFFFFFFFC); in si_cp_resume()
3718 WREG32(CP_RB2_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFF); in si_cp_resume()
3726 si_cp_start(rdev); in si_cp_resume()
3727 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true; in si_cp_resume()
3728 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = true; in si_cp_resume()
3729 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = true; in si_cp_resume()
3730 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]); in si_cp_resume()
3732 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; in si_cp_resume()
3733 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; in si_cp_resume()
3734 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; in si_cp_resume()
3737 r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP1_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]); in si_cp_resume()
3739 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; in si_cp_resume()
3741 r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP2_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]); in si_cp_resume()
3743 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; in si_cp_resume()
3746 si_enable_gui_idle_interrupt(rdev, true); in si_cp_resume()
3748 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) in si_cp_resume()
3749 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size); in si_cp_resume()
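si_cp_resume() (3628-3749) brings all three CP rings up optimistically, marks them ready, then demotes any ring whose radeon_ring_test() fails; a GFX-ring failure also takes both compute rings down, since they are fed by the same CP. Condensed from the matches above:

	si_cp_start(rdev);
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true;
	rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = true;
	rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = true;

	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX,
			     &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
	if (r) {
		/* no GFX CP: the compute rings cannot run either */
		rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
		rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
		rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
		return r;
	}

	r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP1_INDEX,
			     &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]);
	if (r)	/* a compute-ring failure only demotes that ring */
		rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;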
3754 u32 si_gpu_check_soft_reset(struct radeon_device *rdev) in si_gpu_check_soft_reset() argument
3818 if (evergreen_is_display_hung(rdev)) in si_gpu_check_soft_reset()
3835 static void si_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask) in si_gpu_soft_reset() argument
3844 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask); in si_gpu_soft_reset()
3846 evergreen_print_gpu_status_regs(rdev); in si_gpu_soft_reset()
3847 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n", in si_gpu_soft_reset()
3849 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n", in si_gpu_soft_reset()
3853 si_fini_pg(rdev); in si_gpu_soft_reset()
3854 si_fini_cg(rdev); in si_gpu_soft_reset()
3857 si_rlc_stop(rdev); in si_gpu_soft_reset()
3877 evergreen_mc_stop(rdev, &save); in si_gpu_soft_reset()
3878 if (evergreen_mc_wait_for_idle(rdev)) { in si_gpu_soft_reset()
3879 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); in si_gpu_soft_reset()
3933 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp); in si_gpu_soft_reset()
3947 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); in si_gpu_soft_reset()
3961 evergreen_mc_resume(rdev, &save); in si_gpu_soft_reset()
3964 evergreen_print_gpu_status_regs(rdev); in si_gpu_soft_reset()
3967 static void si_set_clk_bypass_mode(struct radeon_device *rdev) in si_set_clk_bypass_mode() argument
3979 for (i = 0; i < rdev->usec_timeout; i++) { in si_set_clk_bypass_mode()
3994 static void si_spll_powerdown(struct radeon_device *rdev) in si_spll_powerdown() argument
4015 static void si_gpu_pci_config_reset(struct radeon_device *rdev) in si_gpu_pci_config_reset() argument
4020 dev_info(rdev->dev, "GPU pci config reset\n"); in si_gpu_pci_config_reset()
4025 si_fini_pg(rdev); in si_gpu_pci_config_reset()
4026 si_fini_cg(rdev); in si_gpu_pci_config_reset()
4041 si_rlc_stop(rdev); in si_gpu_pci_config_reset()
4046 evergreen_mc_stop(rdev, &save); in si_gpu_pci_config_reset()
4047 if (evergreen_mc_wait_for_idle(rdev)) { in si_gpu_pci_config_reset()
4048 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); in si_gpu_pci_config_reset()
4052 si_set_clk_bypass_mode(rdev); in si_gpu_pci_config_reset()
4054 si_spll_powerdown(rdev); in si_gpu_pci_config_reset()
4056 pci_clear_master(rdev->pdev); in si_gpu_pci_config_reset()
4058 radeon_pci_config_reset(rdev); in si_gpu_pci_config_reset()
4060 for (i = 0; i < rdev->usec_timeout; i++) { in si_gpu_pci_config_reset()
4067 int si_asic_reset(struct radeon_device *rdev, bool hard) in si_asic_reset() argument
4072 si_gpu_pci_config_reset(rdev); in si_asic_reset()
4076 reset_mask = si_gpu_check_soft_reset(rdev); in si_asic_reset()
4079 r600_set_bios_scratch_engine_hung(rdev, true); in si_asic_reset()
4082 si_gpu_soft_reset(rdev, reset_mask); in si_asic_reset()
4084 reset_mask = si_gpu_check_soft_reset(rdev); in si_asic_reset()
4088 si_gpu_pci_config_reset(rdev); in si_asic_reset()
4090 reset_mask = si_gpu_check_soft_reset(rdev); in si_asic_reset()
4093 r600_set_bios_scratch_engine_hung(rdev, false); in si_asic_reset()
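si_asic_reset() (4067-4093) escalates in stages: compute the hang mask with si_gpu_check_soft_reset(), try a targeted soft reset, re-check, and only fall back to the heavyweight PCI config reset if blocks are still hung; the BIOS scratch "engine hung" flag brackets the whole sequence. Reconstructed from the matches above:

int si_asic_reset(struct radeon_device *rdev, bool hard)
{
	u32 reset_mask;

	if (hard) {
		si_gpu_pci_config_reset(rdev);	/* caller wants the big hammer */
		return 0;
	}

	reset_mask = si_gpu_check_soft_reset(rdev);
	if (reset_mask)
		r600_set_bios_scratch_engine_hung(rdev, true);

	/* targeted soft reset of the hung blocks first */
	si_gpu_soft_reset(rdev, reset_mask);
	reset_mask = si_gpu_check_soft_reset(rdev);

	/* still hung: escalate to a full PCI config reset */
	if (reset_mask)
		si_gpu_pci_config_reset(rdev);

	reset_mask = si_gpu_check_soft_reset(rdev);
	if (!reset_mask)
		r600_set_bios_scratch_engine_hung(rdev, false);

	return 0;
}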
4107 bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) in si_gfx_is_lockup() argument
4109 u32 reset_mask = si_gpu_check_soft_reset(rdev); in si_gfx_is_lockup()
4114 radeon_ring_lockup_update(rdev, ring); in si_gfx_is_lockup()
4117 return radeon_ring_test_lockup(rdev, ring); in si_gfx_is_lockup()
4121 static void si_mc_program(struct radeon_device *rdev) in si_mc_program() argument
4137 evergreen_mc_stop(rdev, &save); in si_mc_program()
4138 if (radeon_mc_wait_for_idle(rdev)) { in si_mc_program()
4139 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); in si_mc_program()
4141 if (!ASIC_IS_NODCE(rdev)) in si_mc_program()
4146 rdev->mc.vram_start >> 12); in si_mc_program()
4148 rdev->mc.vram_end >> 12); in si_mc_program()
4150 rdev->vram_scratch.gpu_addr >> 12); in si_mc_program()
4151 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16; in si_mc_program()
4152 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF); in si_mc_program()
4155 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8)); in si_mc_program()
4161 if (radeon_mc_wait_for_idle(rdev)) { in si_mc_program()
4162 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); in si_mc_program()
4164 evergreen_mc_resume(rdev, &save); in si_mc_program()
4165 if (!ASIC_IS_NODCE(rdev)) { in si_mc_program()
4168 rv515_vga_render_disable(rdev); in si_mc_program()
4172 void si_vram_gtt_location(struct radeon_device *rdev, in si_vram_gtt_location() argument
4177 dev_warn(rdev->dev, "limiting VRAM\n"); in si_vram_gtt_location()
4181 radeon_vram_location(rdev, &rdev->mc, 0); in si_vram_gtt_location()
4182 rdev->mc.gtt_base_align = 0; in si_vram_gtt_location()
4183 radeon_gtt_location(rdev, mc); in si_vram_gtt_location()
4186 static int si_mc_init(struct radeon_device *rdev) in si_mc_init() argument
4192 rdev->mc.vram_is_ddr = true; in si_mc_init()
4232 rdev->mc.vram_width = numchan * chansize; in si_mc_init()
4234 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0); in si_mc_init()
4235 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0); in si_mc_init()
4244 rdev->mc.mc_vram_size = tmp * 1024ULL * 1024ULL; in si_mc_init()
4245 rdev->mc.real_vram_size = rdev->mc.mc_vram_size; in si_mc_init()
4246 rdev->mc.visible_vram_size = rdev->mc.aper_size; in si_mc_init()
4247 si_vram_gtt_location(rdev, &rdev->mc); in si_mc_init()
4248 radeon_update_bandwidth_info(rdev); in si_mc_init()
4256 void si_pcie_gart_tlb_flush(struct radeon_device *rdev) in si_pcie_gart_tlb_flush() argument
4265 static int si_pcie_gart_enable(struct radeon_device *rdev) in si_pcie_gart_enable() argument
4269 if (rdev->gart.robj == NULL) { in si_pcie_gart_enable()
4270 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n"); in si_pcie_gart_enable()
4273 r = radeon_gart_table_vram_pin(rdev); in si_pcie_gart_enable()
4296 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12); in si_pcie_gart_enable()
4297 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12); in si_pcie_gart_enable()
4298 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); in si_pcie_gart_enable()
4300 (u32)(rdev->dummy_page.addr >> 12)); in si_pcie_gart_enable()
4312 WREG32(VM_CONTEXT1_PAGE_TABLE_END_ADDR, rdev->vm_manager.max_pfn - 1); in si_pcie_gart_enable()
4320 rdev->vm_manager.saved_table_addr[i]); in si_pcie_gart_enable()
4323 rdev->vm_manager.saved_table_addr[i]); in si_pcie_gart_enable()
4328 (u32)(rdev->dummy_page.addr >> 12)); in si_pcie_gart_enable()
4345 si_pcie_gart_tlb_flush(rdev); in si_pcie_gart_enable()
4347 (unsigned)(rdev->mc.gtt_size >> 20), in si_pcie_gart_enable()
4348 (unsigned long long)rdev->gart.table_addr); in si_pcie_gart_enable()
4349 rdev->gart.ready = true; in si_pcie_gart_enable()
4353 static void si_pcie_gart_disable(struct radeon_device *rdev) in si_pcie_gart_disable() argument
4363 rdev->vm_manager.saved_table_addr[i] = RREG32(reg); in si_pcie_gart_disable()
4380 radeon_gart_table_vram_unpin(rdev); in si_pcie_gart_disable()
4383 static void si_pcie_gart_fini(struct radeon_device *rdev) in si_pcie_gart_fini() argument
4385 si_pcie_gart_disable(rdev); in si_pcie_gart_fini()
4386 radeon_gart_table_vram_free(rdev); in si_pcie_gart_fini()
4387 radeon_gart_fini(rdev); in si_pcie_gart_fini()
4437 static int si_vm_packet3_ce_check(struct radeon_device *rdev, in si_vm_packet3_ce_check() argument
4510 static int si_vm_packet3_gfx_check(struct radeon_device *rdev, in si_vm_packet3_gfx_check() argument
4628 static int si_vm_packet3_compute_check(struct radeon_device *rdev, in si_vm_packet3_compute_check() argument
4716 int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib) in si_ib_parse() argument
4729 dev_err(rdev->dev, "Packet0 not allowed!\n"); in si_ib_parse()
4738 ret = si_vm_packet3_ce_check(rdev, ib->ptr, &pkt); in si_ib_parse()
4742 ret = si_vm_packet3_gfx_check(rdev, ib->ptr, &pkt); in si_ib_parse()
4746 ret = si_vm_packet3_compute_check(rdev, ib->ptr, &pkt); in si_ib_parse()
4749 dev_err(rdev->dev, "Non-PM4 ring %d !\n", ib->ring); in si_ib_parse()
4757 dev_err(rdev->dev, "Unknown packet type %d !\n", pkt.type); in si_ib_parse()
4778 int si_vm_init(struct radeon_device *rdev) in si_vm_init() argument
4781 rdev->vm_manager.nvm = 16; in si_vm_init()
4783 rdev->vm_manager.vram_base_offset = 0; in si_vm_init()
4788 void si_vm_fini(struct radeon_device *rdev) in si_vm_fini() argument
4801 static void si_vm_decode_fault(struct radeon_device *rdev, in si_vm_decode_fault() argument
4809 if (rdev->family == CHIP_TAHITI) { in si_vm_decode_fault()
5056 void si_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring, in si_vm_flush() argument
5108 static void si_wait_for_rlc_serdes(struct radeon_device *rdev) in si_wait_for_rlc_serdes() argument
5112 for (i = 0; i < rdev->usec_timeout; i++) { in si_wait_for_rlc_serdes()
5118 for (i = 0; i < rdev->usec_timeout; i++) { in si_wait_for_rlc_serdes()
5125 static void si_enable_gui_idle_interrupt(struct radeon_device *rdev, in si_enable_gui_idle_interrupt() argument
5143 for (i = 0; i < rdev->usec_timeout; i++) { in si_enable_gui_idle_interrupt()
5151 static void si_set_uvd_dcm(struct radeon_device *rdev, in si_set_uvd_dcm() argument
5172 void si_init_uvd_internal_cg(struct radeon_device *rdev) in si_init_uvd_internal_cg() argument
5177 si_set_uvd_dcm(rdev, false); in si_init_uvd_internal_cg()
5185 static u32 si_halt_rlc(struct radeon_device *rdev) in si_halt_rlc() argument
5195 si_wait_for_rlc_serdes(rdev); in si_halt_rlc()
5201 static void si_update_rlc(struct radeon_device *rdev, u32 rlc) in si_update_rlc() argument
5210 static void si_enable_dma_pg(struct radeon_device *rdev, bool enable) in si_enable_dma_pg() argument
5215 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA)) in si_enable_dma_pg()
5223 static void si_init_dma_pg(struct radeon_device *rdev) in si_init_dma_pg() argument
5234 static void si_enable_gfx_cgpg(struct radeon_device *rdev, in si_enable_gfx_cgpg() argument
5239 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG)) { in si_enable_gfx_cgpg()
5259 static void si_init_gfx_cgpg(struct radeon_device *rdev) in si_init_gfx_cgpg() argument
5263 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8); in si_init_gfx_cgpg()
5269 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8); in si_init_gfx_cgpg()
5279 static u32 si_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh) in si_get_cu_active_bitmap() argument
5284 si_select_se_sh(rdev, se, sh); in si_get_cu_active_bitmap()
5287 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); in si_get_cu_active_bitmap()
5294 for (i = 0; i < rdev->config.si.max_cu_per_sh; i ++) { in si_get_cu_active_bitmap()
5302 static void si_init_ao_cu_mask(struct radeon_device *rdev) in si_init_ao_cu_mask() argument
5308 for (i = 0; i < rdev->config.si.max_shader_engines; i++) { in si_init_ao_cu_mask()
5309 for (j = 0; j < rdev->config.si.max_sh_per_se; j++) { in si_init_ao_cu_mask()
5313 for (k = 0; k < rdev->config.si.max_cu_per_sh; k++) { in si_init_ao_cu_mask()
5314 if (si_get_cu_active_bitmap(rdev, i, j) & mask) { in si_init_ao_cu_mask()
5335 static void si_enable_cgcg(struct radeon_device *rdev, in si_enable_cgcg() argument
5342 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGCG)) { in si_enable_cgcg()
5343 si_enable_gui_idle_interrupt(rdev, true); in si_enable_cgcg()
5347 tmp = si_halt_rlc(rdev); in si_enable_cgcg()
5353 si_wait_for_rlc_serdes(rdev); in si_enable_cgcg()
5355 si_update_rlc(rdev, tmp); in si_enable_cgcg()
5361 si_enable_gui_idle_interrupt(rdev, false); in si_enable_cgcg()
5375 static void si_enable_mgcg(struct radeon_device *rdev, in si_enable_mgcg() argument
5380 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGCG)) { in si_enable_mgcg()
5386 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CP_LS) { in si_enable_mgcg()
5398 tmp = si_halt_rlc(rdev); in si_enable_mgcg()
5404 si_update_rlc(rdev, tmp); in si_enable_mgcg()
5421 tmp = si_halt_rlc(rdev); in si_enable_mgcg()
5427 si_update_rlc(rdev, tmp); in si_enable_mgcg()
5431 static void si_enable_uvd_mgcg(struct radeon_device *rdev, in si_enable_uvd_mgcg() argument
5436 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_UVD_MGCG)) { in si_enable_uvd_mgcg()
5476 static void si_enable_mc_ls(struct radeon_device *rdev, in si_enable_mc_ls() argument
5484 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_LS)) in si_enable_mc_ls()
5493 static void si_enable_mc_mgcg(struct radeon_device *rdev, in si_enable_mc_mgcg() argument
5501 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_MGCG)) in si_enable_mc_mgcg()
5510 static void si_enable_dma_mgcg(struct radeon_device *rdev, in si_enable_dma_mgcg() argument
5516 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_SDMA_MGCG)) { in si_enable_dma_mgcg()
5547 static void si_enable_bif_mgls(struct radeon_device *rdev, in si_enable_bif_mgls() argument
5554 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_BIF_LS)) in si_enable_bif_mgls()
5565 static void si_enable_hdp_mgcg(struct radeon_device *rdev, in si_enable_hdp_mgcg() argument
5572 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_MGCG)) in si_enable_hdp_mgcg()
5581 static void si_enable_hdp_ls(struct radeon_device *rdev, in si_enable_hdp_ls() argument
5588 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_LS)) in si_enable_hdp_ls()
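Every si_enable_*() clock-gating/light-sleep helper from 5335 onward follows the same read-modify-write template: snapshot the register, set or clear the feature bits depending on both the enable argument and the matching rdev->cg_flags capability bit, and write back only when the value actually changed. si_enable_hdp_ls() is the smallest instance; reconstructed from the pattern:

static void si_enable_hdp_ls(struct radeon_device *rdev, bool enable)
{
	u32 orig, data;

	orig = data = RREG32(HDP_MEM_POWER_LS);

	if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_LS))
		data |= HDP_LS_ENABLE;
	else
		data &= ~HDP_LS_ENABLE;

	/* skip the register write when nothing changed */
	if (orig != data)
		WREG32(HDP_MEM_POWER_LS, data);
}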
5597 static void si_update_cg(struct radeon_device *rdev, in si_update_cg() argument
5601 si_enable_gui_idle_interrupt(rdev, false); in si_update_cg()
5604 si_enable_mgcg(rdev, true); in si_update_cg()
5605 si_enable_cgcg(rdev, true); in si_update_cg()
5607 si_enable_cgcg(rdev, false); in si_update_cg()
5608 si_enable_mgcg(rdev, false); in si_update_cg()
5610 si_enable_gui_idle_interrupt(rdev, true); in si_update_cg()
5614 si_enable_mc_mgcg(rdev, enable); in si_update_cg()
5615 si_enable_mc_ls(rdev, enable); in si_update_cg()
5619 si_enable_dma_mgcg(rdev, enable); in si_update_cg()
5623 si_enable_bif_mgls(rdev, enable); in si_update_cg()
5627 if (rdev->has_uvd) { in si_update_cg()
5628 si_enable_uvd_mgcg(rdev, enable); in si_update_cg()
5633 si_enable_hdp_mgcg(rdev, enable); in si_update_cg()
5634 si_enable_hdp_ls(rdev, enable); in si_update_cg()
5638 static void si_init_cg(struct radeon_device *rdev) in si_init_cg() argument
5640 si_update_cg(rdev, (RADEON_CG_BLOCK_GFX | in si_init_cg()
5645 if (rdev->has_uvd) { in si_init_cg()
5646 si_update_cg(rdev, RADEON_CG_BLOCK_UVD, true); in si_init_cg()
5647 si_init_uvd_internal_cg(rdev); in si_init_cg()
5651 static void si_fini_cg(struct radeon_device *rdev) in si_fini_cg() argument
5653 if (rdev->has_uvd) { in si_fini_cg()
5654 si_update_cg(rdev, RADEON_CG_BLOCK_UVD, false); in si_fini_cg()
5656 si_update_cg(rdev, (RADEON_CG_BLOCK_GFX | in si_fini_cg()
5663 u32 si_get_csb_size(struct radeon_device *rdev) in si_get_csb_size() argument
5669 if (rdev->rlc.cs_data == NULL) in si_get_csb_size()
5677 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) { in si_get_csb_size()
5695 void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer) in si_get_csb_buffer() argument
5701 if (rdev->rlc.cs_data == NULL) in si_get_csb_buffer()
5713 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) { in si_get_csb_buffer()
5729 switch (rdev->family) { in si_get_csb_buffer()
5755 static void si_init_pg(struct radeon_device *rdev) in si_init_pg() argument
5757 if (rdev->pg_flags) { in si_init_pg()
5758 if (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA) { in si_init_pg()
5759 si_init_dma_pg(rdev); in si_init_pg()
5761 si_init_ao_cu_mask(rdev); in si_init_pg()
5762 if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG) { in si_init_pg()
5763 si_init_gfx_cgpg(rdev); in si_init_pg()
5765 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8); in si_init_pg()
5766 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8); in si_init_pg()
5768 si_enable_dma_pg(rdev, true); in si_init_pg()
5769 si_enable_gfx_cgpg(rdev, true); in si_init_pg()
5771 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8); in si_init_pg()
5772 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8); in si_init_pg()
5776 static void si_fini_pg(struct radeon_device *rdev) in si_fini_pg() argument
5778 if (rdev->pg_flags) { in si_fini_pg()
5779 si_enable_dma_pg(rdev, false); in si_fini_pg()
5780 si_enable_gfx_cgpg(rdev, false); in si_fini_pg()
5787 void si_rlc_reset(struct radeon_device *rdev) in si_rlc_reset() argument
5799 static void si_rlc_stop(struct radeon_device *rdev) in si_rlc_stop() argument
5803 si_enable_gui_idle_interrupt(rdev, false); in si_rlc_stop()
5805 si_wait_for_rlc_serdes(rdev); in si_rlc_stop()
5808 static void si_rlc_start(struct radeon_device *rdev) in si_rlc_start() argument
5812 si_enable_gui_idle_interrupt(rdev, true); in si_rlc_start()
5817 static bool si_lbpw_supported(struct radeon_device *rdev) in si_lbpw_supported() argument
5828 static void si_enable_lbpw(struct radeon_device *rdev, bool enable) in si_enable_lbpw() argument
5840 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); in si_enable_lbpw()
5845 static int si_rlc_resume(struct radeon_device *rdev) in si_rlc_resume() argument
5849 if (!rdev->rlc_fw) in si_rlc_resume()
5852 si_rlc_stop(rdev); in si_rlc_resume()
5854 si_rlc_reset(rdev); in si_rlc_resume()
5856 si_init_pg(rdev); in si_rlc_resume()
5858 si_init_cg(rdev); in si_rlc_resume()
5870 if (rdev->new_fw) { in si_rlc_resume()
5872 (const struct rlc_firmware_header_v1_0 *)rdev->rlc_fw->data; in si_rlc_resume()
5875 (rdev->rlc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); in si_rlc_resume()
5885 (const __be32 *)rdev->rlc_fw->data; in si_rlc_resume()
5893 si_enable_lbpw(rdev, si_lbpw_supported(rdev)); in si_rlc_resume()
5895 si_rlc_start(rdev); in si_rlc_resume()
5900 static void si_enable_interrupts(struct radeon_device *rdev) in si_enable_interrupts() argument
5909 rdev->ih.enabled = true; in si_enable_interrupts()
5912 static void si_disable_interrupts(struct radeon_device *rdev) in si_disable_interrupts() argument
5924 rdev->ih.enabled = false; in si_disable_interrupts()
5925 rdev->ih.rptr = 0; in si_disable_interrupts()
5928 static void si_disable_interrupt_state(struct radeon_device *rdev) in si_disable_interrupt_state() argument
5944 for (i = 0; i < rdev->num_crtc; i++) in si_disable_interrupt_state()
5946 for (i = 0; i < rdev->num_crtc; i++) in si_disable_interrupt_state()
5949 if (!ASIC_IS_NODCE(rdev)) { in si_disable_interrupt_state()
5958 static int si_irq_init(struct radeon_device *rdev) in si_irq_init() argument
5965 ret = r600_ih_ring_alloc(rdev); in si_irq_init()
5970 si_disable_interrupts(rdev); in si_irq_init()
5973 ret = si_rlc_resume(rdev); in si_irq_init()
5975 r600_ih_ring_fini(rdev); in si_irq_init()
5981 WREG32(INTERRUPT_CNTL2, rdev->dummy_page.addr >> 8); in si_irq_init()
5991 WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8); in si_irq_init()
5992 rb_bufsz = order_base_2(rdev->ih.ring_size / 4); in si_irq_init()
5998 if (rdev->wb.enabled) in si_irq_init()
6002 WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC); in si_irq_init()
6003 WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF); in si_irq_init()
6014 if (rdev->msi_enabled) in si_irq_init()
6019 si_disable_interrupt_state(rdev); in si_irq_init()
6021 pci_set_master(rdev->pdev); in si_irq_init()
6024 si_enable_interrupts(rdev); in si_irq_init()
6030 int si_irq_set(struct radeon_device *rdev) in si_irq_set() argument
6039 if (!rdev->irq.installed) { in si_irq_set()
6044 if (!rdev->ih.enabled) { in si_irq_set()
6045 si_disable_interrupts(rdev); in si_irq_set()
6047 si_disable_interrupt_state(rdev); in si_irq_set()
6061 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) { in si_irq_set()
6065 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) { in si_irq_set()
6069 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) { in si_irq_set()
6073 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) { in si_irq_set()
6078 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) { in si_irq_set()
6092 if (rdev->irq.dpm_thermal) { in si_irq_set()
6097 for (i = 0; i < rdev->num_crtc; i++) { in si_irq_set()
6099 rdev, INT_MASK + crtc_offsets[i], VBLANK_INT_MASK, in si_irq_set()
6100 rdev->irq.crtc_vblank_int[i] || in si_irq_set()
6101 atomic_read(&rdev->irq.pflip[i]), "vblank", i); in si_irq_set()
6104 for (i = 0; i < rdev->num_crtc; i++) in si_irq_set()
6107 if (!ASIC_IS_NODCE(rdev)) { in si_irq_set()
6110 rdev, DC_HPDx_INT_CONTROL(i), in si_irq_set()
6112 rdev->irq.hpd[i], "HPD", i); in si_irq_set()
6125 static inline void si_irq_ack(struct radeon_device *rdev) in si_irq_ack() argument
6128 u32 *disp_int = rdev->irq.stat_regs.evergreen.disp_int; in si_irq_ack()
6129 u32 *grph_int = rdev->irq.stat_regs.evergreen.grph_int; in si_irq_ack()
6131 if (ASIC_IS_NODCE(rdev)) in si_irq_ack()
6136 if (i < rdev->num_crtc) in si_irq_ack()
6141 for (i = 0; i < rdev->num_crtc; i += 2) { in si_irq_ack()
6169 static void si_irq_disable(struct radeon_device *rdev) in si_irq_disable() argument
6171 si_disable_interrupts(rdev); in si_irq_disable()
6174 si_irq_ack(rdev); in si_irq_disable()
6175 si_disable_interrupt_state(rdev); in si_irq_disable()
6178 static void si_irq_suspend(struct radeon_device *rdev) in si_irq_suspend() argument
6180 si_irq_disable(rdev); in si_irq_suspend()
6181 si_rlc_stop(rdev); in si_irq_suspend()
6184 static void si_irq_fini(struct radeon_device *rdev) in si_irq_fini() argument
6186 si_irq_suspend(rdev); in si_irq_fini()
6187 r600_ih_ring_fini(rdev); in si_irq_fini()
6190 static inline u32 si_get_ih_wptr(struct radeon_device *rdev) in si_get_ih_wptr() argument
6194 if (rdev->wb.enabled) in si_get_ih_wptr()
6195 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]); in si_get_ih_wptr()
6205 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n", in si_get_ih_wptr()
6206 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask); in si_get_ih_wptr()
6207 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask; in si_get_ih_wptr()
6212 return (wptr & rdev->ih.ptr_mask); in si_get_ih_wptr()
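si_get_ih_wptr() (6190-6212) fetches the interrupt-handler write pointer from the writeback page when writeback is enabled, falling back to an MMIO read, and recovers from ring overflow by advancing rptr one vector (16 bytes) past the newest entry. Condensed, with RB_OVERFLOW / IH_WPTR_OVERFLOW_CLEAR being the IH ring's overflow status and acknowledge bits:

	if (rdev->wb.enabled)
		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
	else
		wptr = RREG32(IH_RB_WPTR);

	if (wptr & RB_OVERFLOW) {
		/* hardware wrapped over unread entries: drop the oldest
		 * data and resume just past the newest vector */
		wptr &= ~RB_OVERFLOW;
		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
		tmp = RREG32(IH_RB_CNTL);
		tmp |= IH_WPTR_OVERFLOW_CLEAR;
		WREG32(IH_RB_CNTL, tmp);
	}
	return (wptr & rdev->ih.ptr_mask);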
6225 int si_irq_process(struct radeon_device *rdev) in si_irq_process() argument
6227 u32 *disp_int = rdev->irq.stat_regs.evergreen.disp_int; in si_irq_process()
6240 if (!rdev->ih.enabled || rdev->shutdown) in si_irq_process()
6243 wptr = si_get_ih_wptr(rdev); in si_irq_process()
6247 if (atomic_xchg(&rdev->ih.lock, 1)) in si_irq_process()
6250 rptr = rdev->ih.rptr; in si_irq_process()
6257 si_irq_ack(rdev); in si_irq_process()
6262 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff; in si_irq_process()
6263 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff; in si_irq_process()
6264 ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff; in si_irq_process()
6279 if (rdev->irq.crtc_vblank_int[crtc_idx]) { in si_irq_process()
6280 drm_handle_vblank(rdev_to_drm(rdev), crtc_idx); in si_irq_process()
6281 rdev->pm.vblank_sync = true; in si_irq_process()
6282 wake_up(&rdev->irq.vblank_queue); in si_irq_process()
6284 if (atomic_read(&rdev->irq.pflip[crtc_idx])) { in si_irq_process()
6285 radeon_crtc_handle_vblank(rdev, in si_irq_process()
6315 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1); in si_irq_process()
6348 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX); in si_irq_process()
6358 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data); in si_irq_process()
6359 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n", in si_irq_process()
6361 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n", in si_irq_process()
6363 si_vm_decode_fault(rdev, status, addr); in si_irq_process()
6366 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); in si_irq_process()
6369 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX); in si_irq_process()
6372 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX); in si_irq_process()
6378 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); in si_irq_process()
6381 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX); in si_irq_process()
6384 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX); in si_irq_process()
6390 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX); in si_irq_process()
6394 rdev->pm.dpm.thermal.high_to_low = false; in si_irq_process()
6399 rdev->pm.dpm.thermal.high_to_low = true; in si_irq_process()
6407 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX); in si_irq_process()
6416 rptr &= rdev->ih.ptr_mask; in si_irq_process()
6420 schedule_work(&rdev->dp_work); in si_irq_process()
6422 schedule_delayed_work(&rdev->hotplug_work, 0); in si_irq_process()
6423 if (queue_thermal && rdev->pm.dpm_enabled) in si_irq_process()
6424 schedule_work(&rdev->pm.dpm.thermal.work); in si_irq_process()
6425 rdev->ih.rptr = rptr; in si_irq_process()
6426 atomic_set(&rdev->ih.lock, 0); in si_irq_process()
6429 wptr = si_get_ih_wptr(rdev); in si_irq_process()
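The body of si_irq_process() (6225-6429) walks rptr toward wptr in 16-byte steps; the first three dwords of each vector carry the source id, source data, and ring id that drive the dispatch switch (vblank at 6279, page flips at 6315, VM faults at 6358, fence processing at 6366-6407). The decode loop, condensed:

	while (rptr != wptr) {
		u32 ring_index = rptr / 4;

		/* each IH vector is four dwords; the fourth is unused here */
		src_id   = le32_to_cpu(rdev->ih.ring[ring_index])     & 0xff;
		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
		ring_id  = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff;

		switch (src_id) {
		/* ... vblank / HPD / VM-fault / fence dispatch elided ... */
		}

		rptr += 16;
		rptr &= rdev->ih.ptr_mask;
	}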
6439 static void si_uvd_init(struct radeon_device *rdev) in si_uvd_init() argument
6443 if (!rdev->has_uvd) in si_uvd_init()
6446 r = radeon_uvd_init(rdev); in si_uvd_init()
6448 dev_err(rdev->dev, "failed UVD (%d) init.\n", r); in si_uvd_init()
6455 rdev->has_uvd = false; in si_uvd_init()
6458 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL; in si_uvd_init()
6459 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096); in si_uvd_init()
6462 static void si_uvd_start(struct radeon_device *rdev) in si_uvd_start() argument
6466 if (!rdev->has_uvd) in si_uvd_start()
6469 r = uvd_v2_2_resume(rdev); in si_uvd_start()
6471 dev_err(rdev->dev, "failed UVD resume (%d).\n", r); in si_uvd_start()
6474 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX); in si_uvd_start()
6476 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r); in si_uvd_start()
6482 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; in si_uvd_start()
6485 static void si_uvd_resume(struct radeon_device *rdev) in si_uvd_resume() argument
6490 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size) in si_uvd_resume()
6493 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; in si_uvd_resume()
6494 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0)); in si_uvd_resume()
6496 dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r); in si_uvd_resume()
6499 r = uvd_v1_0_init(rdev); in si_uvd_resume()
6501 dev_err(rdev->dev, "failed initializing UVD (%d).\n", r); in si_uvd_resume()
6506 static void si_vce_init(struct radeon_device *rdev) in si_vce_init() argument
6510 if (!rdev->has_vce) in si_vce_init()
6513 r = radeon_vce_init(rdev); in si_vce_init()
6515 dev_err(rdev->dev, "failed VCE (%d) init.\n", r); in si_vce_init()
6522 rdev->has_vce = false; in si_vce_init()
6525 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_obj = NULL; in si_vce_init()
6526 r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE1_INDEX], 4096); in si_vce_init()
6527 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_obj = NULL; in si_vce_init()
6528 r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE2_INDEX], 4096); in si_vce_init()
6531 static void si_vce_start(struct radeon_device *rdev) in si_vce_start() argument
6535 if (!rdev->has_vce) in si_vce_start()
6538 r = radeon_vce_resume(rdev); in si_vce_start()
6540 dev_err(rdev->dev, "failed VCE resume (%d).\n", r); in si_vce_start()
6543 r = vce_v1_0_resume(rdev); in si_vce_start()
6545 dev_err(rdev->dev, "failed VCE resume (%d).\n", r); in si_vce_start()
6548 r = radeon_fence_driver_start_ring(rdev, TN_RING_TYPE_VCE1_INDEX); in si_vce_start()
6550 dev_err(rdev->dev, "failed initializing VCE1 fences (%d).\n", r); in si_vce_start()
6553 r = radeon_fence_driver_start_ring(rdev, TN_RING_TYPE_VCE2_INDEX); in si_vce_start()
6555 dev_err(rdev->dev, "failed initializing VCE2 fences (%d).\n", r); in si_vce_start()
6561 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0; in si_vce_start()
6562 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0; in si_vce_start()
6565 static void si_vce_resume(struct radeon_device *rdev) in si_vce_resume() argument
6570 if (!rdev->has_vce || !rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size) in si_vce_resume()
6573 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX]; in si_vce_resume()
6574 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, VCE_CMD_NO_OP); in si_vce_resume()
6576 dev_err(rdev->dev, "failed initializing VCE1 ring (%d).\n", r); in si_vce_resume()
6579 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX]; in si_vce_resume()
6580 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, VCE_CMD_NO_OP); in si_vce_resume()
6582 dev_err(rdev->dev, "failed initializing VCE2 ring (%d).\n", r); in si_vce_resume()
6585 r = vce_v1_0_init(rdev); in si_vce_resume()
6587 dev_err(rdev->dev, "failed initializing VCE (%d).\n", r); in si_vce_resume()
6592 static int si_startup(struct radeon_device *rdev) in si_startup() argument
6598 si_pcie_gen3_enable(rdev); in si_startup()
6600 si_program_aspm(rdev); in si_startup()
6603 r = r600_vram_scratch_init(rdev); in si_startup()
6607 si_mc_program(rdev); in si_startup()
6609 if (!rdev->pm.dpm_enabled) { in si_startup()
6610 r = si_mc_load_microcode(rdev); in si_startup()
6617 r = si_pcie_gart_enable(rdev); in si_startup()
6620 si_gpu_init(rdev); in si_startup()
6623 if (rdev->family == CHIP_VERDE) { in si_startup()
6624 rdev->rlc.reg_list = verde_rlc_save_restore_register_list; in si_startup()
6625 rdev->rlc.reg_list_size = in si_startup()
6628 rdev->rlc.cs_data = si_cs_data; in si_startup()
6629 r = sumo_rlc_init(rdev); in si_startup()
6636 r = radeon_wb_init(rdev); in si_startup()
6640 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); in si_startup()
6642 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in si_startup()
6646 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP1_INDEX); in si_startup()
6648 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in si_startup()
6652 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP2_INDEX); in si_startup()
6654 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in si_startup()
6658 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX); in si_startup()
6660 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); in si_startup()
6664 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_DMA1_INDEX); in si_startup()
6666 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); in si_startup()
6670 si_uvd_start(rdev); in si_startup()
6671 si_vce_start(rdev); in si_startup()
6674 if (!rdev->irq.installed) { in si_startup()
6675 r = radeon_irq_kms_init(rdev); in si_startup()
6680 r = si_irq_init(rdev); in si_startup()
6683 radeon_irq_kms_fini(rdev); in si_startup()
6686 si_irq_set(rdev); in si_startup()
6688 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_startup()
6689 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET, in si_startup()
6694 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in si_startup()
6695 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP1_RPTR_OFFSET, in si_startup()
6700 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in si_startup()
6701 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP2_RPTR_OFFSET, in si_startup()
6706 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in si_startup()
6707 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET, in si_startup()
6712 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; in si_startup()
6713 r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET, in si_startup()
6718 r = si_cp_load_microcode(rdev); in si_startup()
6721 r = si_cp_resume(rdev); in si_startup()
6725 r = cayman_dma_resume(rdev); in si_startup()
6729 si_uvd_resume(rdev); in si_startup()
6730 si_vce_resume(rdev); in si_startup()
6732 r = radeon_ib_pool_init(rdev); in si_startup()
6734 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); in si_startup()
6738 r = radeon_vm_manager_init(rdev); in si_startup()
6740 dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r); in si_startup()
6744 r = radeon_audio_init(rdev); in si_startup()
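The si_startup() fragments span the whole bring-up sequence. A condensed skeleton follows, with the repetitive per-ring steps collapsed into comments and error handling reduced to early returns; the glue, comments, and the nop-packet argument are reconstruction, the call order is from the fragments:

static int si_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	si_pcie_gen3_enable(rdev);	/* link speed before anything else */
	si_program_aspm(rdev);

	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	si_mc_program(rdev);
	if (!rdev->pm.dpm_enabled) {
		/* with dpm enabled, the dpm code loads the MC ucode itself */
		r = si_mc_load_microcode(rdev);
		if (r)
			return r;
	}

	r = si_pcie_gart_enable(rdev);
	if (r)
		return r;
	si_gpu_init(rdev);

	/* RLC save/restore list is chip specific */
	if (rdev->family == CHIP_VERDE) {
		rdev->rlc.reg_list = verde_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(verde_rlc_save_restore_register_list);
	}
	rdev->rlc.cs_data = si_cs_data;
	r = sumo_rlc_init(rdev);
	if (r)
		return r;

	r = radeon_wb_init(rdev);
	if (r)
		return r;

	/* fence rings for GFX, CP1, CP2, DMA0 and DMA1, same pattern: */
	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}
	/* ... repeated for the other four ring indices ... */

	si_uvd_start(rdev);
	si_vce_start(rdev);

	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}
	r = si_irq_init(rdev);
	if (r) {
		radeon_irq_kms_fini(rdev);
		return r;
	}
	si_irq_set(rdev);

	/* ring buffers: one radeon_ring_init() per ring, each with its own
	 * write-back rptr offset; the other four rings follow the same shape */
	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size,
			     RADEON_WB_CP_RPTR_OFFSET, RADEON_CP_PACKET2);
	if (r)
		return r;

	r = si_cp_load_microcode(rdev);
	if (r)
		return r;
	r = si_cp_resume(rdev);
	if (r)
		return r;
	r = cayman_dma_resume(rdev);
	if (r)
		return r;

	si_uvd_resume(rdev);
	si_vce_resume(rdev);

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}
	r = radeon_vm_manager_init(rdev);
	if (r) {
		dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r);
		return r;
	}
	return radeon_audio_init(rdev);
}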
6751 int si_resume(struct radeon_device *rdev) in si_resume() argument
6760 atom_asic_init(rdev->mode_info.atom_context); in si_resume()
6763 si_init_golden_registers(rdev); in si_resume()
6765 if (rdev->pm.pm_method == PM_METHOD_DPM) in si_resume()
6766 radeon_pm_resume(rdev); in si_resume()
6768 rdev->accel_working = true; in si_resume()
6769 r = si_startup(rdev); in si_resume()
6772 rdev->accel_working = false; in si_resume()
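si_resume() re-posts the card through atombios and then reuses si_startup(); a sketch, with the failure message assumed (only the accel_working flag handling is visible in the fragments):

int si_resume(struct radeon_device *rdev)
{
	int r;

	/* post card: re-run the asic init tables from the video BIOS */
	atom_asic_init(rdev->mode_info.atom_context);

	si_init_golden_registers(rdev);

	if (rdev->pm.pm_method == PM_METHOD_DPM)
		radeon_pm_resume(rdev);

	rdev->accel_working = true;
	r = si_startup(rdev);
	if (r) {
		dev_err(rdev->dev, "si startup failed on resume\n");	/* assumed text */
		rdev->accel_working = false;
	}
	return r;
}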
6780 int si_suspend(struct radeon_device *rdev) in si_suspend() argument
6782 radeon_pm_suspend(rdev); in si_suspend()
6783 radeon_audio_fini(rdev); in si_suspend()
6784 radeon_vm_manager_fini(rdev); in si_suspend()
6785 si_cp_enable(rdev, false); in si_suspend()
6786 cayman_dma_stop(rdev); in si_suspend()
6787 if (rdev->has_uvd) { in si_suspend()
6788 radeon_uvd_suspend(rdev); in si_suspend()
6789 uvd_v1_0_fini(rdev); in si_suspend()
6791 if (rdev->has_vce) in si_suspend()
6792 radeon_vce_suspend(rdev); in si_suspend()
6793 si_fini_pg(rdev); in si_suspend()
6794 si_fini_cg(rdev); in si_suspend()
6795 si_irq_suspend(rdev); in si_suspend()
6796 radeon_wb_disable(rdev); in si_suspend()
6797 si_pcie_gart_disable(rdev); in si_suspend()
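si_suspend() is visible here almost in full; re-joining the fragments gives the teardown order, roughly the reverse of si_startup() (only the trailing return is filled in):

int si_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	radeon_audio_fini(rdev);
	radeon_vm_manager_fini(rdev);
	si_cp_enable(rdev, false);	/* stop the CP before the DMA engines */
	cayman_dma_stop(rdev);
	if (rdev->has_uvd) {
		radeon_uvd_suspend(rdev);
		uvd_v1_0_fini(rdev);
	}
	if (rdev->has_vce)
		radeon_vce_suspend(rdev);
	si_fini_pg(rdev);
	si_fini_cg(rdev);
	si_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	si_pcie_gart_disable(rdev);

	return 0;
}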
6807 int si_init(struct radeon_device *rdev) in si_init() argument
6809 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_init()
6813 if (!radeon_get_bios(rdev)) { in si_init()
6814 if (ASIC_IS_AVIVO(rdev)) in si_init()
6818 if (!rdev->is_atom_bios) { in si_init()
6819 dev_err(rdev->dev, "Expecting atombios for SI GPU\n"); in si_init()
6822 r = radeon_atombios_init(rdev); in si_init()
6827 if (!radeon_card_posted(rdev)) { in si_init()
6828 if (!rdev->bios) { in si_init()
6829 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n"); in si_init()
6833 atom_asic_init(rdev->mode_info.atom_context); in si_init()
6836 si_init_golden_registers(rdev); in si_init()
6838 si_scratch_init(rdev); in si_init()
6840 radeon_surface_init(rdev); in si_init()
6842 radeon_get_clock_info(rdev_to_drm(rdev)); in si_init()
6845 radeon_fence_driver_init(rdev); in si_init()
6848 r = si_mc_init(rdev); in si_init()
6852 r = radeon_bo_init(rdev); in si_init()
6856 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw || in si_init()
6857 !rdev->rlc_fw || !rdev->mc_fw) { in si_init()
6858 r = si_init_microcode(rdev); in si_init()
6866 radeon_pm_init(rdev); in si_init()
6868 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_init()
6870 r600_ring_init(rdev, ring, 1024 * 1024); in si_init()
6872 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in si_init()
6874 r600_ring_init(rdev, ring, 1024 * 1024); in si_init()
6876 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in si_init()
6878 r600_ring_init(rdev, ring, 1024 * 1024); in si_init()
6880 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in si_init()
6882 r600_ring_init(rdev, ring, 64 * 1024); in si_init()
6884 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; in si_init()
6886 r600_ring_init(rdev, ring, 64 * 1024); in si_init()
6888 si_uvd_init(rdev); in si_init()
6889 si_vce_init(rdev); in si_init()
6891 rdev->ih.ring_obj = NULL; in si_init()
6892 r600_ih_ring_init(rdev, 64 * 1024); in si_init()
6894 r = r600_pcie_gart_init(rdev); in si_init()
6898 rdev->accel_working = true; in si_init()
6899 r = si_startup(rdev); in si_init()
6901 dev_err(rdev->dev, "disabling GPU acceleration\n"); in si_init()
6902 si_cp_fini(rdev); in si_init()
6903 cayman_dma_fini(rdev); in si_init()
6904 si_irq_fini(rdev); in si_init()
6905 sumo_rlc_fini(rdev); in si_init()
6906 radeon_wb_fini(rdev); in si_init()
6907 radeon_ib_pool_fini(rdev); in si_init()
6908 radeon_vm_manager_fini(rdev); in si_init()
6909 radeon_irq_kms_fini(rdev); in si_init()
6910 si_pcie_gart_fini(rdev); in si_init()
6911 rdev->accel_working = false; in si_init()
6918 if (!rdev->mc_fw) { in si_init()
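A condensed sketch of si_init(), with the repetitive ring pre-init collapsed into one example plus a comment; the structure (including the full unwind when the first si_startup() fails, and the final MC-ucode gate) is from the fragments, the in-line comments are reconstruction:

int si_init(struct radeon_device *rdev)
{
	int r;

	/* a BIOS, and specifically an atombios, is mandatory on SI */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for SI GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;

	/* post the card if the vbios has not run yet */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		atom_asic_init(rdev->mode_info.atom_context);
	}
	si_init_golden_registers(rdev);
	si_scratch_init(rdev);
	radeon_surface_init(rdev);
	radeon_get_clock_info(rdev_to_drm(rdev));
	radeon_fence_driver_init(rdev);

	r = si_mc_init(rdev);
	if (r)
		return r;
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* fetch firmware only if some image is still missing */
	if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw ||
	    !rdev->rlc_fw || !rdev->mc_fw) {
		r = si_init_microcode(rdev);
		if (r)
			return r;
	}

	radeon_pm_init(rdev);

	/* pre-size the rings: 1 MiB for the three CPs, 64 KiB for the DMAs */
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
	/* ... likewise CP1, CP2, DMA, DMA1, then si_uvd_init()/si_vce_init() ... */

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = si_startup(rdev);
	if (r) {
		/* first start failed: unwind everything si_startup() set up */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		si_cp_fini(rdev);
		cayman_dma_fini(rdev);
		si_irq_fini(rdev);
		sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_vm_manager_fini(rdev);
		radeon_irq_kms_fini(rdev);
		si_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* default clocks and voltages are unsafe without the MC ucode */
	if (!rdev->mc_fw)
		return -EINVAL;

	return 0;
}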
6926 void si_fini(struct radeon_device *rdev) in si_fini() argument
6928 radeon_pm_fini(rdev); in si_fini()
6929 si_cp_fini(rdev); in si_fini()
6930 cayman_dma_fini(rdev); in si_fini()
6931 si_fini_pg(rdev); in si_fini()
6932 si_fini_cg(rdev); in si_fini()
6933 si_irq_fini(rdev); in si_fini()
6934 sumo_rlc_fini(rdev); in si_fini()
6935 radeon_wb_fini(rdev); in si_fini()
6936 radeon_vm_manager_fini(rdev); in si_fini()
6937 radeon_ib_pool_fini(rdev); in si_fini()
6938 radeon_irq_kms_fini(rdev); in si_fini()
6939 if (rdev->has_uvd) { in si_fini()
6940 uvd_v1_0_fini(rdev); in si_fini()
6941 radeon_uvd_fini(rdev); in si_fini()
6943 if (rdev->has_vce) in si_fini()
6944 radeon_vce_fini(rdev); in si_fini()
6945 si_pcie_gart_fini(rdev); in si_fini()
6946 r600_vram_scratch_fini(rdev); in si_fini()
6947 radeon_gem_fini(rdev); in si_fini()
6948 radeon_fence_driver_fini(rdev); in si_fini()
6949 radeon_bo_fini(rdev); in si_fini()
6950 radeon_atombios_fini(rdev); in si_fini()
6951 kfree(rdev->bios); in si_fini()
6952 rdev->bios = NULL; in si_fini()
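Apart from blank lines, si_fini() appears here in full; re-joined, with grouping comments added, the teardown mirrors si_init():

void si_fini(struct radeon_device *rdev)
{
	/* stop the engines and free accel state */
	radeon_pm_fini(rdev);
	si_cp_fini(rdev);
	cayman_dma_fini(rdev);
	si_fini_pg(rdev);
	si_fini_cg(rdev);
	si_irq_fini(rdev);
	sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_vm_manager_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	/* optional engines */
	if (rdev->has_uvd) {
		uvd_v1_0_fini(rdev);
		radeon_uvd_fini(rdev);
	}
	if (rdev->has_vce)
		radeon_vce_fini(rdev);
	/* memory management and core state last */
	si_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}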
6963 uint64_t si_get_gpu_clock_counter(struct radeon_device *rdev) in si_get_gpu_clock_counter() argument
6967 mutex_lock(&rdev->gpu_clock_mutex); in si_get_gpu_clock_counter()
6971 mutex_unlock(&rdev->gpu_clock_mutex); in si_get_gpu_clock_counter()
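The mutex here serializes a two-register read of a 64-bit counter that the hardware latches on a capture write; a sketch, with the register names recalled from sid.h (treat them as assumptions):

uint64_t si_get_gpu_clock_counter(struct radeon_device *rdev)
{
	uint64_t clock;

	mutex_lock(&rdev->gpu_clock_mutex);
	/* latch the current GPU clock count, then read both halves */
	WREG32(RLC_CAPTURE_GPU_CLOCK_COUNT, 1);
	clock = (uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_LSB) |
		((uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_MSB) << 32ULL);
	mutex_unlock(&rdev->gpu_clock_mutex);
	return clock;
}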
6975 int si_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk) in si_set_uvd_clocks() argument
6993 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000, in si_set_uvd_clocks()
7013 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); in si_set_uvd_clocks()
7050 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); in si_set_uvd_clocks()
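Only three calls in si_set_uvd_clocks() reference rdev, but they frame the whole sequence: compute the dividers, then run a control-request handshake on each side of the PLL reprogramming. An outline follows; the divider-limit arguments and the elided WREG32_P register work are assumptions, while 125000/250000 are the VCO bounds in 10 kHz units (1.25 to 2.5 GHz):

int si_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* pick feedback and post dividers that keep the VCO in range */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* ... bypass vclk/dclk, put the UPLL into bypass and reset ... */

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* ... program fb_div/vclk_div/dclk_div, release reset ... */

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* ... switch vclk/dclk back from bypass and let the PLL settle ... */

	return 0;
}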
7064 static void si_pcie_gen3_enable(struct radeon_device *rdev) in si_pcie_gen3_enable() argument
7066 struct pci_dev *root = rdev->pdev->bus->self; in si_pcie_gen3_enable()
7072 if (pci_is_root_bus(rdev->pdev->bus)) in si_pcie_gen3_enable()
7078 if (rdev->flags & RADEON_IS_IGP) in si_pcie_gen3_enable()
7081 if (!(rdev->flags & RADEON_IS_PCIE)) in si_pcie_gen3_enable()
7109 if (!pci_is_pcie(root) || !pci_is_pcie(rdev->pdev)) in si_pcie_gen3_enable()
7120 pcie_capability_set_word(rdev->pdev, PCI_EXP_LNKCTL, PCI_EXP_LNKCTL_HAWD); in si_pcie_gen3_enable()
7138 pcie_capability_read_word(rdev->pdev, in si_pcie_gen3_enable()
7146 pcie_capability_read_word(rdev->pdev, in si_pcie_gen3_enable()
7152 pcie_capability_read_word(rdev->pdev, in si_pcie_gen3_enable()
7171 pcie_capability_clear_and_set_word(rdev->pdev, PCI_EXP_LNKCTL, in si_pcie_gen3_enable()
7183 pcie_capability_clear_and_set_word(rdev->pdev, PCI_EXP_LNKCTL2, in si_pcie_gen3_enable()
7209 pcie_capability_clear_and_set_word(rdev->pdev, PCI_EXP_LNKCTL2, in si_pcie_gen3_enable()
7216 for (i = 0; i < rdev->usec_timeout; i++) { in si_pcie_gen3_enable()
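si_pcie_gen3_enable() is mostly guard clauses plus standard pcie_capability_*() accessors; a condensed sketch of that shape (the GEN2/GEN3 handshake against the root port is elided, and the LNKCTL2 target-speed value shown is illustrative, not the full speed-cap logic):

static void si_pcie_gen3_enable(struct radeon_device *rdev)
{
	struct pci_dev *root = rdev->pdev->bus->self;
	int i;

	if (pci_is_root_bus(rdev->pdev->bus))
		return;			/* no upstream port to train against */
	if (rdev->flags & RADEON_IS_IGP)
		return;			/* no external PCIe link on IGPs */
	if (!(rdev->flags & RADEON_IS_PCIE))
		return;
	if (!pci_is_pcie(root) || !pci_is_pcie(rdev->pdev))
		return;

	/* let the hardware adjust the link width on its own */
	pcie_capability_set_word(rdev->pdev, PCI_EXP_LNKCTL,
				 PCI_EXP_LNKCTL_HAWD);

	/* select the target link speed, then wait for retraining */
	pcie_capability_clear_and_set_word(rdev->pdev, PCI_EXP_LNKCTL2,
					   PCI_EXP_LNKCTL2_TLS,
					   PCI_EXP_LNKCTL2_TLS_8_0GT);
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* the real code polls the speed-change bit here and breaks */
		udelay(1);
	}
}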
7224 static void si_program_aspm(struct radeon_device *rdev) in si_program_aspm() argument
7233 if (!(rdev->flags & RADEON_IS_PCIE)) in si_program_aspm()
7291 if ((rdev->family != CHIP_OLAND) && (rdev->family != CHIP_HAINAN)) { in si_program_aspm()
7340 if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN)) in si_program_aspm()
7347 if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN)) in si_program_aspm()
7353 !pci_is_root_bus(rdev->pdev->bus)) { in si_program_aspm()
7354 struct pci_dev *root = rdev->pdev->bus->self; in si_program_aspm()
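si_program_aspm() is hundreds of lines of PCIe-port read-modify-write programming; the rdev references above are only its gating logic, sketched here (the radeon_aspm module-parameter check is an assumption, and everything inside the braces is elided):

static void si_program_aspm(struct radeon_device *rdev)
{
	if (radeon_aspm == 0)
		return;			/* assumed module-parameter override */
	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* Oland and Hainan take different settings in several blocks */
	if ((rdev->family != CHIP_OLAND) && (rdev->family != CHIP_HAINAN)) {
		/* ... PLL power-off-in-L1 programming for both ports ... */
	}

	/* L0s/L1 setup; when not directly on the root bus, the root port
	 * (rdev->pdev->bus->self) is also queried for clock-PM support */
}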
7429 static int si_vce_send_vcepll_ctlreq(struct radeon_device *rdev) in si_vce_send_vcepll_ctlreq() argument
7460 int si_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk) in si_set_vce_clocks() argument
7481 r = radeon_uvd_calc_upll_dividers(rdev, evclk, ecclk, 125000, 250000, in si_set_vce_clocks()
7504 r = si_vce_send_vcepll_ctlreq(rdev); in si_set_vce_clocks()
7536 r = si_vce_send_vcepll_ctlreq(rdev); in si_set_vce_clocks()
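si_set_vce_clocks() follows the same two-phase pattern as si_set_uvd_clocks(), with si_vce_send_vcepll_ctlreq() as the VCE-PLL analogue of radeon_uvd_send_upll_ctlreq(). An outline, under the same caveats (divider-limit arguments recalled, register work between the handshakes elided):

int si_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk)
{
	unsigned fb_div = 0, evclk_div = 0, ecclk_div = 0;
	int r;

	/* reuse the UPLL divider helper; VCO window again 1.25 to 2.5 GHz */
	r = radeon_uvd_calc_upll_dividers(rdev, evclk, ecclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &evclk_div, &ecclk_div);
	if (r)
		return r;

	/* ... put the VCE PLL into bypass and reset, then handshake ... */
	r = si_vce_send_vcepll_ctlreq(rdev);
	if (r)
		return r;

	/* ... program fb_div/evclk_div/ecclk_div, release reset ... */
	r = si_vce_send_vcepll_ctlreq(rdev);
	if (r)
		return r;

	/* ... switch eclk back from bypass to the PLL output ... */
	return 0;
}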