Lines matching refs: gpu  (identifier cross-reference hits from the MSM Adreno a6xx GPU driver)

19 static inline bool _a6xx_check_idle(struct msm_gpu *gpu)  in _a6xx_check_idle()  argument
21 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in _a6xx_check_idle()
29 if (gpu_read(gpu, REG_A6XX_RBBM_STATUS) & in _a6xx_check_idle()
33 return !(gpu_read(gpu, REG_A6XX_RBBM_INT_0_STATUS) & in _a6xx_check_idle()
37 static bool a6xx_idle(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in a6xx_idle() argument
40 if (!adreno_idle(gpu, ring)) in a6xx_idle()
43 if (spin_until(_a6xx_check_idle(gpu))) { in a6xx_idle()
45 gpu->name, __builtin_return_address(0), in a6xx_idle()
46 gpu_read(gpu, REG_A6XX_RBBM_STATUS), in a6xx_idle()
47 gpu_read(gpu, REG_A6XX_RBBM_INT_0_STATUS), in a6xx_idle()
48 gpu_read(gpu, REG_A6XX_CP_RB_RPTR), in a6xx_idle()
49 gpu_read(gpu, REG_A6XX_CP_RB_WPTR)); in a6xx_idle()
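
The idle check reads the RBBM status and pending-interrupt registers through gpu_read() and is polled from a6xx_idle() via the spin_until() busy-wait, which (per the error path above) returns 0 once its condition holds and nonzero on timeout. A minimal sketch of that polling pattern follows; the specific busy/hang-detect mask bits are assumptions, not verified driver masks.

/* Hedged sketch of the idle poll used by a6xx_idle(). The mask macros are
 * assumptions standing in for the driver's actual "ignore CX AHB master" and
 * hang-detect bits.
 */
static bool a6xx_wait_idle_sketch(struct msm_gpu *gpu)
{
        return spin_until(
                !(gpu_read(gpu, REG_A6XX_RBBM_STATUS) &
                  ~A6XX_RBBM_STATUS_CP_AHB_BUSY_CX_MASTER) &&
                !(gpu_read(gpu, REG_A6XX_RBBM_INT_0_STATUS) &
                  A6XX_RBBM_INT_0_MASK_RBBM_HANG_DETECT)) == 0;
}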
56 static void update_shadow_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in update_shadow_rptr() argument
58 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in update_shadow_rptr()
69 static void a6xx_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in a6xx_flush() argument
71 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_flush()
76 update_shadow_rptr(gpu, ring); in a6xx_flush()
89 gpu_write(gpu, REG_A6XX_CP_RB_WPTR, wptr); in a6xx_flush()
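
a6xx_flush() makes newly written ring contents visible to the CP and then rings the doorbell by writing REG_A6XX_CP_RB_WPTR. A sketch of that ordering is below; ring_cursor_to_wptr() is a hypothetical helper standing in for the driver's write-pointer calculation.

/* Hedged sketch of the flush/doorbell ordering seen in a6xx_flush(). */
static void a6xx_flush_sketch(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
{
        /* ring_cursor_to_wptr() is hypothetical: convert the ring cursor into
         * the dword offset the CP expects.
         */
        uint32_t wptr = ring_cursor_to_wptr(ring);

        /* Make sure the command stream is visible before the CP fetches it */
        wmb();
        gpu_write(gpu, REG_A6XX_CP_RB_WPTR, wptr);
}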
222 static void a6xx_submit(struct msm_gpu *gpu, struct msm_gem_submit *submit) in a6xx_submit() argument
225 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_submit()
278 update_shadow_rptr(gpu, ring); in a6xx_submit()
302 gpu_read64(gpu, REG_A6XX_CP_ALWAYS_ON_COUNTER)); in a6xx_submit()
304 a6xx_flush(gpu, ring); in a6xx_submit()
347 static void a7xx_submit(struct msm_gpu *gpu, struct msm_gem_submit *submit) in a7xx_submit() argument
350 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a7xx_submit()
370 if (gpu->nr_rings > 1) in a7xx_submit()
415 update_shadow_rptr(gpu, ring); in a7xx_submit()
484 if (gpu->nr_rings > 1) { in a7xx_submit()
503 gpu_read64(gpu, REG_A6XX_CP_ALWAYS_ON_COUNTER)); in a7xx_submit()
505 a6xx_flush(gpu, ring); in a7xx_submit()
508 a6xx_preempt_trigger(gpu); in a7xx_submit()
511 static void a6xx_set_hwcg(struct msm_gpu *gpu, bool state) in a6xx_set_hwcg() argument
513 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_set_hwcg()
544 gpu_write(gpu, REG_A7XX_RBBM_CLOCK_CNTL_GLOBAL, 1); in a6xx_set_hwcg()
545 gpu_write(gpu, REG_A7XX_RBBM_CGC_GLOBAL_LOAD_CMD, state ? 1 : 0); in a6xx_set_hwcg()
548 gpu_write(gpu, REG_A7XX_RBBM_CGC_P2S_TRIG_CMD, 1); in a6xx_set_hwcg()
550 if (gpu_poll_timeout(gpu, REG_A7XX_RBBM_CGC_P2S_STATUS, val, in a6xx_set_hwcg()
552 dev_err(&gpu->pdev->dev, "RBBM_CGC_P2S_STATUS TXDONE Poll failed\n"); in a6xx_set_hwcg()
556 gpu_write(gpu, REG_A7XX_RBBM_CLOCK_CNTL_GLOBAL, 0); in a6xx_set_hwcg()
562 val = gpu_read(gpu, REG_A6XX_RBBM_CLOCK_CNTL); in a6xx_set_hwcg()
573 gpu_write(gpu, reg->offset, state ? reg->value : 0); in a6xx_set_hwcg()
579 gpu_write(gpu, REG_A6XX_RBBM_CLOCK_CNTL, state ? clock_cntl_on : 0); in a6xx_set_hwcg()
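
On a7xx the clock-gating toggle triggers the CGC P2S handshake and waits for it to complete with gpu_poll_timeout(), which follows the usual readl_poll_timeout() shape. A sketch of that poll; the TXDONE bit macro and the 1 us / 10 us timing below are assumptions inferred from the error string, not verified values.

/* Hedged sketch: trigger the CGC P2S handshake and wait for TXDONE. */
static int a7xx_cgc_p2s_wait_sketch(struct msm_gpu *gpu)
{
        u32 val;

        gpu_write(gpu, REG_A7XX_RBBM_CGC_P2S_TRIG_CMD, 1);

        /* Assumed shape: gpu_poll_timeout(gpu, reg, val, cond, sleep_us, timeout_us) */
        if (gpu_poll_timeout(gpu, REG_A7XX_RBBM_CGC_P2S_STATUS, val,
                             val & A7XX_RBBM_CGC_P2S_STATUS_TXDONE, 1, 10)) {
                dev_err(&gpu->pdev->dev, "RBBM_CGC_P2S_STATUS TXDONE Poll failed\n");
                return -ETIMEDOUT;
        }

        return 0;
}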
582 static void a6xx_set_cp_protect(struct msm_gpu *gpu) in a6xx_set_cp_protect() argument
584 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_set_cp_protect()
593 gpu_write(gpu, REG_A6XX_CP_PROTECT_CNTL, in a6xx_set_cp_protect()
601 gpu_write(gpu, REG_A6XX_CP_PROTECT(i), protect->regs[i]); in a6xx_set_cp_protect()
604 gpu_write(gpu, REG_A6XX_CP_PROTECT(protect->count_max - 1), protect->regs[i]); in a6xx_set_cp_protect()
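
The CP-protect setup programs a per-chip table of protected register ranges into the REG_A6XX_CP_PROTECT(n) slots, with the final table entry forced into the last available hardware slot. A sketch of that loop; the table structure below is an assumed shape, not the driver's exact type.

/* Hedged sketch of the CP_PROTECT programming loop. */
struct a6xx_protect_sketch {
        const u32 *regs;        /* packed protect-range entries */
        u32 count;              /* entries actually used */
        u32 count_max;          /* hardware slots available */
};

static void a6xx_set_cp_protect_sketch(struct msm_gpu *gpu,
                                       const struct a6xx_protect_sketch *protect)
{
        unsigned int i;

        for (i = 0; i < protect->count - 1; i++)
                gpu_write(gpu, REG_A6XX_CP_PROTECT(i), protect->regs[i]);

        /* The last table entry always lands in the last hardware slot */
        gpu_write(gpu, REG_A6XX_CP_PROTECT(protect->count_max - 1), protect->regs[i]);
}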
607 static int a6xx_calc_ubwc_config(struct adreno_gpu *gpu) in a6xx_calc_ubwc_config() argument
610 struct qcom_ubwc_cfg_data *cfg = &gpu->_ubwc_config; in a6xx_calc_ubwc_config()
623 if (adreno_is_a610(gpu)) { in a6xx_calc_ubwc_config()
628 if (adreno_is_a618(gpu)) in a6xx_calc_ubwc_config()
631 if (adreno_is_a619(gpu)) in a6xx_calc_ubwc_config()
635 if (adreno_is_a619_holi(gpu)) in a6xx_calc_ubwc_config()
638 if (adreno_is_a621(gpu)) in a6xx_calc_ubwc_config()
641 if (adreno_is_a623(gpu)) in a6xx_calc_ubwc_config()
644 if (adreno_is_a650(gpu) || in a6xx_calc_ubwc_config()
645 adreno_is_a660(gpu) || in a6xx_calc_ubwc_config()
646 adreno_is_a690(gpu) || in a6xx_calc_ubwc_config()
647 adreno_is_a730(gpu) || in a6xx_calc_ubwc_config()
648 adreno_is_a740_family(gpu)) { in a6xx_calc_ubwc_config()
653 if (adreno_is_a663(gpu)) { in a6xx_calc_ubwc_config()
658 if (adreno_is_7c3(gpu)) in a6xx_calc_ubwc_config()
661 if (adreno_is_a702(gpu)) in a6xx_calc_ubwc_config()
672 gpu->ubwc_config = &gpu->_ubwc_config; in a6xx_calc_ubwc_config()
677 static void a6xx_set_ubwc_config(struct msm_gpu *gpu) in a6xx_set_ubwc_config() argument
679 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_set_ubwc_config()
703 gpu_write(gpu, REG_A6XX_RB_NC_MODE_CNTL, in a6xx_set_ubwc_config()
710 gpu_write(gpu, REG_A6XX_TPL1_NC_MODE_CNTL, in a6xx_set_ubwc_config()
715 gpu_write(gpu, REG_A6XX_SP_NC_MODE_CNTL, in a6xx_set_ubwc_config()
722 gpu_write(gpu, REG_A7XX_GRAS_NC_MODE_CNTL, in a6xx_set_ubwc_config()
725 gpu_write(gpu, REG_A6XX_UCHE_MODE_CNTL, in a6xx_set_ubwc_config()
728 gpu_write(gpu, REG_A6XX_RBBM_NC_MODE_CNTL, in a6xx_set_ubwc_config()
732 static void a7xx_patch_pwrup_reglist(struct msm_gpu *gpu) in a7xx_patch_pwrup_reglist() argument
734 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a7xx_patch_pwrup_reglist()
754 *dest++ = gpu_read(gpu, reglist->regs[i]); in a7xx_patch_pwrup_reglist()
775 static int a7xx_preempt_start(struct msm_gpu *gpu) in a7xx_preempt_start() argument
777 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a7xx_preempt_start()
779 struct msm_ringbuffer *ring = gpu->rb[0]; in a7xx_preempt_start()
781 if (gpu->nr_rings <= 1) in a7xx_preempt_start()
798 a6xx_flush(gpu, ring); in a7xx_preempt_start()
800 return a6xx_idle(gpu, ring) ? 0 : -EINVAL; in a7xx_preempt_start()
803 static int a6xx_cp_init(struct msm_gpu *gpu) in a6xx_cp_init() argument
805 struct msm_ringbuffer *ring = gpu->rb[0]; in a6xx_cp_init()
828 a6xx_flush(gpu, ring); in a6xx_cp_init()
829 return a6xx_idle(gpu, ring) ? 0 : -EINVAL; in a6xx_cp_init()
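
Both CP-init paths share one idiom: build a packet into ring 0 with OUT_PKT7()/OUT_RING(), push it with a6xx_flush(), then block in a6xx_idle() until the CP has consumed it. A minimal sketch of that submit-and-wait idiom; the CP_ME_INIT opcode is the conventional init packet but the payload here is a placeholder, not the real argument list.

/* Hedged sketch of the "emit a packet, flush, wait for idle" idiom used by
 * a6xx_cp_init()/a7xx_cp_init().
 */
static int a6xx_emit_and_wait_sketch(struct msm_gpu *gpu)
{
        struct msm_ringbuffer *ring = gpu->rb[0];

        OUT_PKT7(ring, CP_ME_INIT, 1);
        OUT_RING(ring, 0);      /* placeholder payload */

        a6xx_flush(gpu, ring);
        return a6xx_idle(gpu, ring) ? 0 : -EINVAL;
}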
832 static int a7xx_cp_init(struct msm_gpu *gpu) in a7xx_cp_init() argument
834 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a7xx_cp_init()
836 struct msm_ringbuffer *ring = gpu->rb[0]; in a7xx_cp_init()
879 a6xx_flush(gpu, ring); in a7xx_cp_init()
880 return a6xx_idle(gpu, ring) ? 0 : -EINVAL; in a7xx_cp_init()
891 struct msm_gpu *gpu = &adreno_gpu->base; in a6xx_ucode_check_version() local
931 DRM_DEV_ERROR(&gpu->pdev->dev, in a6xx_ucode_check_version()
940 DRM_DEV_ERROR(&gpu->pdev->dev, in a6xx_ucode_check_version()
946 DRM_DEV_ERROR(&gpu->pdev->dev, in a6xx_ucode_check_version()
954 static int a6xx_ucode_load(struct msm_gpu *gpu) in a6xx_ucode_load() argument
956 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_ucode_load()
960 a6xx_gpu->sqe_bo = adreno_fw_create_bo(gpu, in a6xx_ucode_load()
967 DRM_DEV_ERROR(&gpu->pdev->dev, in a6xx_ucode_load()
975 msm_gem_unpin_iova(a6xx_gpu->sqe_bo, gpu->vm); in a6xx_ucode_load()
989 a6xx_gpu->shadow = msm_gem_kernel_new(gpu->dev, in a6xx_ucode_load()
990 sizeof(u32) * gpu->nr_rings, in a6xx_ucode_load()
992 gpu->vm, &a6xx_gpu->shadow_bo, in a6xx_ucode_load()
1001 a6xx_gpu->pwrup_reglist_ptr = msm_gem_kernel_new(gpu->dev, PAGE_SIZE, in a6xx_ucode_load()
1003 gpu->vm, &a6xx_gpu->pwrup_reglist_bo, in a6xx_ucode_load()
1014 static int a6xx_zap_shader_init(struct msm_gpu *gpu) in a6xx_zap_shader_init() argument
1022 ret = adreno_zap_shader_load(gpu, GPU_PAS_ID); in a6xx_zap_shader_init()
1064 static int hw_init(struct msm_gpu *gpu) in hw_init() argument
1066 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in hw_init()
1082 gpu_write(gpu, REG_A6XX_GBIF_HALT, 0); in hw_init()
1083 gpu_read(gpu, REG_A6XX_GBIF_HALT); in hw_init()
1085 gpu_write(gpu, REG_A6XX_RBBM_GPR0_CNTL, 0); in hw_init()
1086 gpu_read(gpu, REG_A6XX_RBBM_GPR0_CNTL); in hw_init()
1088 gpu_write(gpu, REG_A6XX_GBIF_HALT, 0); in hw_init()
1089 gpu_read(gpu, REG_A6XX_GBIF_HALT); in hw_init()
1091 gpu_write(gpu, REG_A6XX_RBBM_GBIF_HALT, 0); in hw_init()
1092 gpu_read(gpu, REG_A6XX_RBBM_GBIF_HALT); in hw_init()
1095 gpu_write(gpu, REG_A6XX_RBBM_SECVID_TSB_CNTL, 0); in hw_init()
1105 gpu_write64(gpu, REG_A6XX_RBBM_SECVID_TSB_TRUSTED_BASE, 0x00000000); in hw_init()
1106 gpu_write(gpu, REG_A6XX_RBBM_SECVID_TSB_TRUSTED_SIZE, 0x00000000); in hw_init()
1110 gpu_write(gpu, REG_A6XX_CP_ADDR_MODE_CNTL, 0x1); in hw_init()
1111 gpu_write(gpu, REG_A6XX_VSC_ADDR_MODE_CNTL, 0x1); in hw_init()
1112 gpu_write(gpu, REG_A6XX_GRAS_ADDR_MODE_CNTL, 0x1); in hw_init()
1113 gpu_write(gpu, REG_A6XX_RB_ADDR_MODE_CNTL, 0x1); in hw_init()
1114 gpu_write(gpu, REG_A6XX_PC_ADDR_MODE_CNTL, 0x1); in hw_init()
1115 gpu_write(gpu, REG_A6XX_HLSQ_ADDR_MODE_CNTL, 0x1); in hw_init()
1116 gpu_write(gpu, REG_A6XX_VFD_ADDR_MODE_CNTL, 0x1); in hw_init()
1117 gpu_write(gpu, REG_A6XX_VPC_ADDR_MODE_CNTL, 0x1); in hw_init()
1118 gpu_write(gpu, REG_A6XX_UCHE_ADDR_MODE_CNTL, 0x1); in hw_init()
1119 gpu_write(gpu, REG_A6XX_SP_ADDR_MODE_CNTL, 0x1); in hw_init()
1120 gpu_write(gpu, REG_A6XX_TPL1_ADDR_MODE_CNTL, 0x1); in hw_init()
1121 gpu_write(gpu, REG_A6XX_RBBM_SECVID_TSB_ADDR_MODE_CNTL, 0x1); in hw_init()
1125 a6xx_set_hwcg(gpu, true); in hw_init()
1132 gpu_write(gpu, REG_A6XX_GBIF_QSB_SIDE0, 0x00071620); in hw_init()
1133 gpu_write(gpu, REG_A6XX_GBIF_QSB_SIDE1, 0x00071620); in hw_init()
1134 gpu_write(gpu, REG_A6XX_GBIF_QSB_SIDE2, 0x00071620); in hw_init()
1135 gpu_write(gpu, REG_A6XX_GBIF_QSB_SIDE3, 0x00071620); in hw_init()
1136 gpu_write(gpu, REG_A6XX_RBBM_GBIF_CLIENT_QOS_CNTL, in hw_init()
1139 gpu_write(gpu, REG_A6XX_RBBM_VBIF_CLIENT_QOS_CNTL, 0x3); in hw_init()
1143 gpu_write(gpu, REG_A6XX_VBIF_GATE_OFF_WRREQ_EN, 0x00000009); in hw_init()
1146 gpu_write(gpu, REG_A6XX_UCHE_GBIF_GX_CONFIG, 0x10240e0); in hw_init()
1149 gpu_write(gpu, REG_A6XX_RBBM_PERFCTR_GPU_BUSY_MASKED, 0xffffffff); in hw_init()
1153 gpu_write64(gpu, REG_A6XX_UCHE_TRAP_BASE, adreno_gpu->uche_trap_base); in hw_init()
1154 gpu_write64(gpu, REG_A6XX_UCHE_WRITE_THRU_BASE, adreno_gpu->uche_trap_base); in hw_init()
1156 gpu_write64(gpu, REG_A6XX_UCHE_WRITE_RANGE_MAX, adreno_gpu->uche_trap_base + 0xfc0); in hw_init()
1157 gpu_write64(gpu, REG_A6XX_UCHE_TRAP_BASE, adreno_gpu->uche_trap_base); in hw_init()
1158 gpu_write64(gpu, REG_A6XX_UCHE_WRITE_THRU_BASE, adreno_gpu->uche_trap_base); in hw_init()
1167 gpu_write64(gpu, REG_A6XX_UCHE_GMEM_RANGE_MIN, gmem_range_min); in hw_init()
1169 gpu_write64(gpu, REG_A6XX_UCHE_GMEM_RANGE_MAX, in hw_init()
1174 gpu_write(gpu, REG_A6XX_UCHE_CACHE_WAYS, BIT(23)); in hw_init()
1176 gpu_write(gpu, REG_A6XX_UCHE_FILTER_CNTL, 0x804); in hw_init()
1177 gpu_write(gpu, REG_A6XX_UCHE_CACHE_WAYS, 0x4); in hw_init()
1181 gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_2, 0x02000140); in hw_init()
1182 gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_1, 0x8040362c); in hw_init()
1184 gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_2, 0x00800060); in hw_init()
1185 gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_1, 0x40201b16); in hw_init()
1187 gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_2, 0x010000c0); in hw_init()
1188 gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_1, 0x8040362c); in hw_init()
1192 gpu_write(gpu, REG_A6XX_CP_LPAC_PROG_FIFO_SIZE, 0x00000020); in hw_init()
1196 gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, 48); in hw_init()
1197 gpu_write(gpu, REG_A6XX_CP_MEM_POOL_DBG_ADDR, 47); in hw_init()
1199 gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, 64); in hw_init()
1200 gpu_write(gpu, REG_A6XX_CP_MEM_POOL_DBG_ADDR, 63); in hw_init()
1202 gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, 128); in hw_init()
1207 gpu_write(gpu, REG_A6XX_PC_DBG_ECO_CNTL, in hw_init()
1211 gpu_write(gpu, REG_A6XX_CP_AHB_CNTL, 0x1); in hw_init()
1214 gpu_write(gpu, REG_A6XX_RBBM_PERFCTR_CNTL, 0x1); in hw_init()
1223 gpu_write(gpu, REG_A6XX_CP_PERFCTR_CP_SEL(0), PERF_CP_ALWAYS_COUNT); in hw_init()
1225 a6xx_set_ubwc_config(gpu); in hw_init()
1230 gpu_write(gpu, REG_A6XX_RBBM_INTERFACE_HANG_INT_CNTL, (1 << 30) | 0xcfffff); in hw_init()
1232 gpu_write(gpu, REG_A6XX_RBBM_INTERFACE_HANG_INT_CNTL, (1 << 30) | 0x4fffff); in hw_init()
1234 gpu_write(gpu, REG_A6XX_RBBM_INTERFACE_HANG_INT_CNTL, (1 << 30) | 0x3fffff); in hw_init()
1236 gpu_write(gpu, REG_A6XX_RBBM_INTERFACE_HANG_INT_CNTL, (1 << 30) | 0x3ffff); in hw_init()
1238 gpu_write(gpu, REG_A6XX_RBBM_INTERFACE_HANG_INT_CNTL, (1 << 30) | 0x1fffff); in hw_init()
1240 gpu_write(gpu, REG_A6XX_UCHE_CLIENT_PF, BIT(7) | 0x1); in hw_init()
1244 gpu_write(gpu, REG_A6XX_TPL1_BICUBIC_WEIGHTS_TABLE_0, 0); in hw_init()
1245 gpu_write(gpu, REG_A6XX_TPL1_BICUBIC_WEIGHTS_TABLE_1, in hw_init()
1247 gpu_write(gpu, REG_A6XX_TPL1_BICUBIC_WEIGHTS_TABLE_2, in hw_init()
1249 gpu_write(gpu, REG_A6XX_TPL1_BICUBIC_WEIGHTS_TABLE_3, in hw_init()
1251 gpu_write(gpu, REG_A6XX_TPL1_BICUBIC_WEIGHTS_TABLE_4, in hw_init()
1263 a6xx_set_cp_protect(gpu); in hw_init()
1267 gpu_write(gpu, REG_A6XX_CP_CHICKEN_DBG, 0x00028801); in hw_init()
1269 gpu_write(gpu, REG_A6XX_CP_CHICKEN_DBG, 0x1); in hw_init()
1270 gpu_write(gpu, REG_A6XX_RBBM_GBIF_CLIENT_QOS_CNTL, 0x0); in hw_init()
1273 gpu_write(gpu, REG_A6XX_CP_CHICKEN_DBG, BIT(24)); in hw_init()
1277 gpu_write(gpu, REG_A6XX_UCHE_CMDQ_CONFIG, 0x90); in hw_init()
1280 gpu_write(gpu, REG_A6XX_UCHE_CMDQ_CONFIG, 0x66906); in hw_init()
1282 gpu_write(gpu, REG_A6XX_UCHE_CMDQ_CONFIG, in hw_init()
1290 if (gpu->hw_apriv) { in hw_init()
1292 gpu_write(gpu, REG_A6XX_CP_APRIV_CNTL, in hw_init()
1294 gpu_write(gpu, REG_A7XX_CP_BV_APRIV_CNTL, in hw_init()
1296 gpu_write(gpu, REG_A7XX_CP_LPAC_APRIV_CNTL, in hw_init()
1299 gpu_write(gpu, REG_A6XX_CP_APRIV_CNTL, in hw_init()
1305 gpu_rmw(gpu, REG_A6XX_RB_CMP_DBG_ECO_CNTL, BIT(19), BIT(19)); in hw_init()
1308 gpu_write(gpu, REG_A6XX_TPL1_DBG_ECO_CNTL1, 0xc0700); in hw_init()
1311 gpu_rmw(gpu, REG_A6XX_RB_CMP_DBG_ECO_CNTL, BIT(11), BIT(11)); in hw_init()
1315 gpu_write(gpu, REG_A6XX_RBBM_INT_0_MASK, in hw_init()
1318 ret = adreno_hw_init(gpu); in hw_init()
1322 gpu_write64(gpu, REG_A6XX_CP_SQE_INSTR_BASE, a6xx_gpu->sqe_iova); in hw_init()
1325 gpu_write64(gpu, REG_A6XX_CP_RB_BASE, gpu->rb[0]->iova); in hw_init()
1332 gpu_write(gpu, REG_A6XX_CP_RB_CNTL, MSM_GPU_RB_CNTL_DEFAULT); in hw_init()
1334 gpu_write(gpu, REG_A6XX_CP_RB_CNTL, in hw_init()
1339 gpu_write64(gpu, REG_A6XX_CP_RB_RPTR_ADDR, in hw_init()
1340 shadowptr(a6xx_gpu, gpu->rb[0])); in hw_init()
1341 for (unsigned int i = 0; i < gpu->nr_rings; i++) in hw_init()
1347 gpu_write64(gpu, REG_A7XX_CP_BV_RB_RPTR_ADDR, in hw_init()
1348 rbmemptr(gpu->rb[0], bv_rptr)); in hw_init()
1351 a6xx_preempt_hw_init(gpu); in hw_init()
1354 a6xx_gpu->cur_ring = gpu->rb[0]; in hw_init()
1356 for (i = 0; i < gpu->nr_rings; i++) in hw_init()
1357 gpu->rb[i]->cur_ctx_seqno = 0; in hw_init()
1360 gpu_write(gpu, REG_A6XX_CP_SQE_CNTL, 1); in hw_init()
1363 a7xx_patch_pwrup_reglist(gpu); in hw_init()
1367 ret = adreno_is_a7xx(adreno_gpu) ? a7xx_cp_init(gpu) : a6xx_cp_init(gpu); in hw_init()
1378 ret = a6xx_zap_shader_init(gpu); in hw_init()
1380 OUT_PKT7(gpu->rb[0], CP_SET_SECURE_MODE, 1); in hw_init()
1381 OUT_RING(gpu->rb[0], 0x00000000); in hw_init()
1383 a6xx_flush(gpu, gpu->rb[0]); in hw_init()
1384 if (!a6xx_idle(gpu, gpu->rb[0])) in hw_init()
1393 dev_warn_once(gpu->dev->dev, in hw_init()
1395 gpu_write(gpu, REG_A6XX_RBBM_SECVID_TRUST_CNTL, 0x0); in hw_init()
1406 a7xx_preempt_start(gpu); in hw_init()
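
Near its end, hw_init() points the CP sequencer at the SQE firmware and ring 0, programs the default ring control value, tells the CP where to mirror the read pointer when a shadow buffer exists, and finally starts the sequencer. A sketch of that setup step, built only from the registers and helpers named in the listing above.

/* Hedged sketch of the CP/ring setup step of hw_init(). */
static void a6xx_setup_cp_ring_sketch(struct msm_gpu *gpu, struct a6xx_gpu *a6xx_gpu)
{
        /* Point the CP sequencer at the SQE firmware */
        gpu_write64(gpu, REG_A6XX_CP_SQE_INSTR_BASE, a6xx_gpu->sqe_iova);

        /* Program ring 0's base and the default ring control value */
        gpu_write64(gpu, REG_A6XX_CP_RB_BASE, gpu->rb[0]->iova);
        gpu_write(gpu, REG_A6XX_CP_RB_CNTL, MSM_GPU_RB_CNTL_DEFAULT);

        /* If a shadow buffer exists, tell the CP where to mirror the RPTR */
        if (a6xx_gpu->shadow_bo)
                gpu_write64(gpu, REG_A6XX_CP_RB_RPTR_ADDR,
                            shadowptr(a6xx_gpu, gpu->rb[0]));

        /* Finally start the CP sequencer */
        gpu_write(gpu, REG_A6XX_CP_SQE_CNTL, 1);
}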
1422 static int a6xx_hw_init(struct msm_gpu *gpu) in a6xx_hw_init() argument
1424 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_hw_init()
1429 ret = hw_init(gpu); in a6xx_hw_init()
1435 static void a6xx_dump(struct msm_gpu *gpu) in a6xx_dump() argument
1437 DRM_DEV_INFO(&gpu->pdev->dev, "status: %08x\n", in a6xx_dump()
1438 gpu_read(gpu, REG_A6XX_RBBM_STATUS)); in a6xx_dump()
1439 adreno_dump(gpu); in a6xx_dump()
1442 static void a6xx_recover(struct msm_gpu *gpu) in a6xx_recover() argument
1444 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_recover()
1449 adreno_dump_info(gpu); in a6xx_recover()
1452 DRM_DEV_INFO(&gpu->pdev->dev, "CP_SCRATCH_REG%d: %u\n", i, in a6xx_recover()
1453 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(i))); in a6xx_recover()
1456 a6xx_dump(gpu); in a6xx_recover()
1465 gpu_write(gpu, REG_A6XX_CP_SQE_CNTL, 3); in a6xx_recover()
1467 pm_runtime_dont_use_autosuspend(&gpu->pdev->dev); in a6xx_recover()
1470 mutex_lock(&gpu->active_lock); in a6xx_recover()
1471 active_submits = gpu->active_submits; in a6xx_recover()
1477 gpu->active_submits = 0; in a6xx_recover()
1484 a6xx_gpu_sw_reset(gpu, true); in a6xx_recover()
1485 a6xx_gpu_sw_reset(gpu, false); in a6xx_recover()
1494 pm_runtime_put(&gpu->pdev->dev); in a6xx_recover()
1497 pm_runtime_put_sync(&gpu->pdev->dev); in a6xx_recover()
1500 DRM_DEV_ERROR(&gpu->pdev->dev, "cx gdsc didn't collapse\n"); in a6xx_recover()
1504 pm_runtime_use_autosuspend(&gpu->pdev->dev); in a6xx_recover()
1507 pm_runtime_get(&gpu->pdev->dev); in a6xx_recover()
1509 pm_runtime_get_sync(&gpu->pdev->dev); in a6xx_recover()
1511 gpu->active_submits = active_submits; in a6xx_recover()
1512 mutex_unlock(&gpu->active_lock); in a6xx_recover()
1514 msm_gpu_hw_init(gpu); in a6xx_recover()
1518 static const char *a6xx_uche_fault_block(struct msm_gpu *gpu, u32 mid) in a6xx_uche_fault_block() argument
1520 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_uche_fault_block()
1538 val = gpu_read(gpu, REG_A6XX_UCHE_CLIENT_PF); in a6xx_uche_fault_block()
1615 static const char *a6xx_fault_block(struct msm_gpu *gpu, u32 id) in a6xx_fault_block() argument
1617 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_fault_block()
1630 return a6xx_uche_fault_block(gpu, id); in a6xx_fault_block()
1635 struct msm_gpu *gpu = arg; in a6xx_fault_handler() local
1640 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(4)), in a6xx_fault_handler()
1641 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(5)), in a6xx_fault_handler()
1642 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(6)), in a6xx_fault_handler()
1643 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(7)), in a6xx_fault_handler()
1647 block = a6xx_fault_block(gpu, info->fsynr1 & 0xff); in a6xx_fault_handler()
1649 return adreno_fault_handler(gpu, iova, flags, info, block, scratch); in a6xx_fault_handler()
1652 static void a6xx_cp_hw_err_irq(struct msm_gpu *gpu) in a6xx_cp_hw_err_irq() argument
1654 u32 status = gpu_read(gpu, REG_A6XX_CP_INTERRUPT_STATUS); in a6xx_cp_hw_err_irq()
1659 gpu_write(gpu, REG_A6XX_CP_SQE_STAT_ADDR, 1); in a6xx_cp_hw_err_irq()
1660 val = gpu_read(gpu, REG_A6XX_CP_SQE_STAT_DATA); in a6xx_cp_hw_err_irq()
1661 dev_err_ratelimited(&gpu->pdev->dev, in a6xx_cp_hw_err_irq()
1667 dev_err_ratelimited(&gpu->pdev->dev, in a6xx_cp_hw_err_irq()
1671 dev_err_ratelimited(&gpu->pdev->dev, "CP | HW fault | status=0x%8.8X\n", in a6xx_cp_hw_err_irq()
1672 gpu_read(gpu, REG_A6XX_CP_HW_FAULT)); in a6xx_cp_hw_err_irq()
1675 u32 val = gpu_read(gpu, REG_A6XX_CP_PROTECT_STATUS); in a6xx_cp_hw_err_irq()
1677 dev_err_ratelimited(&gpu->pdev->dev, in a6xx_cp_hw_err_irq()
1683 if (status & A6XX_CP_INT_CP_AHB_ERROR && !adreno_is_a7xx(to_adreno_gpu(gpu))) in a6xx_cp_hw_err_irq()
1684 dev_err_ratelimited(&gpu->pdev->dev, "CP AHB error interrupt\n"); in a6xx_cp_hw_err_irq()
1687 dev_err_ratelimited(&gpu->pdev->dev, "CP VSD decoder parity error\n"); in a6xx_cp_hw_err_irq()
1690 dev_err_ratelimited(&gpu->pdev->dev, "CP illegal instruction error\n"); in a6xx_cp_hw_err_irq()
1694 static void a6xx_fault_detect_irq(struct msm_gpu *gpu) in a6xx_fault_detect_irq() argument
1696 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_fault_detect_irq()
1698 struct msm_ringbuffer *ring = gpu->funcs->active_ring(gpu); in a6xx_fault_detect_irq()
1706 if (gpu_read(gpu, REG_A6XX_RBBM_STATUS3) & A6XX_RBBM_STATUS3_SMMU_STALLED_ON_FAULT) in a6xx_fault_detect_irq()
1716 DRM_DEV_ERROR(&gpu->pdev->dev, in a6xx_fault_detect_irq()
1719 gpu_read(gpu, REG_A6XX_RBBM_STATUS), in a6xx_fault_detect_irq()
1720 gpu_read(gpu, REG_A6XX_CP_RB_RPTR), in a6xx_fault_detect_irq()
1721 gpu_read(gpu, REG_A6XX_CP_RB_WPTR), in a6xx_fault_detect_irq()
1722 gpu_read64(gpu, REG_A6XX_CP_IB1_BASE), in a6xx_fault_detect_irq()
1723 gpu_read(gpu, REG_A6XX_CP_IB1_REM_SIZE), in a6xx_fault_detect_irq()
1724 gpu_read64(gpu, REG_A6XX_CP_IB2_BASE), in a6xx_fault_detect_irq()
1725 gpu_read(gpu, REG_A6XX_CP_IB2_REM_SIZE)); in a6xx_fault_detect_irq()
1728 timer_delete(&gpu->hangcheck_timer); in a6xx_fault_detect_irq()
1730 kthread_queue_work(gpu->worker, &gpu->recover_work); in a6xx_fault_detect_irq()
1733 static void a7xx_sw_fuse_violation_irq(struct msm_gpu *gpu) in a7xx_sw_fuse_violation_irq() argument
1737 status = gpu_read(gpu, REG_A7XX_RBBM_SW_FUSE_INT_STATUS); in a7xx_sw_fuse_violation_irq()
1738 gpu_write(gpu, REG_A7XX_RBBM_SW_FUSE_INT_MASK, 0); in a7xx_sw_fuse_violation_irq()
1740 dev_err_ratelimited(&gpu->pdev->dev, "SW fuse violation status=%8.8x\n", status); in a7xx_sw_fuse_violation_irq()
1748 timer_delete(&gpu->hangcheck_timer); in a7xx_sw_fuse_violation_irq()
1750 kthread_queue_work(gpu->worker, &gpu->recover_work); in a7xx_sw_fuse_violation_irq()
1754 static irqreturn_t a6xx_irq(struct msm_gpu *gpu) in a6xx_irq() argument
1756 struct msm_drm_private *priv = gpu->dev->dev_private; in a6xx_irq()
1757 u32 status = gpu_read(gpu, REG_A6XX_RBBM_INT_0_STATUS); in a6xx_irq()
1759 gpu_write(gpu, REG_A6XX_RBBM_INT_CLEAR_CMD, status); in a6xx_irq()
1765 a6xx_fault_detect_irq(gpu); in a6xx_irq()
1768 dev_err_ratelimited(&gpu->pdev->dev, "CP | AHB bus error\n"); in a6xx_irq()
1771 a6xx_cp_hw_err_irq(gpu); in a6xx_irq()
1774 dev_err_ratelimited(&gpu->pdev->dev, "RBBM | ATB ASYNC overflow\n"); in a6xx_irq()
1777 dev_err_ratelimited(&gpu->pdev->dev, "RBBM | ATB bus overflow\n"); in a6xx_irq()
1780 dev_err_ratelimited(&gpu->pdev->dev, "UCHE | Out of bounds access\n"); in a6xx_irq()
1783 a7xx_sw_fuse_violation_irq(gpu); in a6xx_irq()
1786 msm_gpu_retire(gpu); in a6xx_irq()
1787 a6xx_preempt_trigger(gpu); in a6xx_irq()
1791 a6xx_preempt_irq(gpu); in a6xx_irq()
1805 struct msm_gpu *gpu = &adreno_gpu->base; in a6xx_llc_activate() local
1822 gpu_rmw(gpu, REG_A6XX_GBIF_SCACHE_CNTL0, (0x1f << 10) | in a6xx_llc_activate()
1859 gpu_rmw(gpu, REG_A6XX_GBIF_SCACHE_CNTL1, GENMASK(24, 0), cntl1_regval); in a6xx_llc_activate()
1865 struct msm_gpu *gpu = &adreno_gpu->base; in a7xx_llc_activate() local
1875 gpu_write(gpu, REG_A6XX_GBIF_SCACHE_CNTL1, in a7xx_llc_activate()
1883 gpu_write(gpu, REG_A6XX_GBIF_SCACHE_CNTL0, in a7xx_llc_activate()
1934 struct msm_gpu *gpu = &adreno_gpu->base; in a7xx_cx_mem_init() local
1948 dev_warn_once(gpu->dev->dev, in a7xx_cx_mem_init()
1989 struct msm_gpu *gpu = &adreno_gpu->base; in a6xx_bus_clear_pending_transactions() local
1992 gpu_write(gpu, REG_A6XX_RBBM_GPR0_CNTL, GPR0_GBIF_HALT_REQUEST); in a6xx_bus_clear_pending_transactions()
1993 spin_until((gpu_read(gpu, REG_A6XX_RBBM_VBIF_GX_RESET_STATUS) & in a6xx_bus_clear_pending_transactions()
1996 gpu_write(gpu, REG_A6XX_VBIF_XIN_HALT_CTRL0, VBIF_XIN_HALT_CTRL0_MASK); in a6xx_bus_clear_pending_transactions()
1997 spin_until((gpu_read(gpu, REG_A6XX_VBIF_XIN_HALT_CTRL1) & in a6xx_bus_clear_pending_transactions()
1999 gpu_write(gpu, REG_A6XX_VBIF_XIN_HALT_CTRL0, 0); in a6xx_bus_clear_pending_transactions()
2006 gpu_write(gpu, REG_A6XX_RBBM_GBIF_HALT, 1); in a6xx_bus_clear_pending_transactions()
2007 spin_until(gpu_read(gpu, REG_A6XX_RBBM_GBIF_HALT_ACK) & 1); in a6xx_bus_clear_pending_transactions()
2011 gpu_write(gpu, REG_A6XX_GBIF_HALT, GBIF_CLIENT_HALT_MASK); in a6xx_bus_clear_pending_transactions()
2012 spin_until((gpu_read(gpu, REG_A6XX_GBIF_HALT_ACK) & in a6xx_bus_clear_pending_transactions()
2016 gpu_write(gpu, REG_A6XX_GBIF_HALT, GBIF_ARB_HALT_MASK); in a6xx_bus_clear_pending_transactions()
2017 spin_until((gpu_read(gpu, REG_A6XX_GBIF_HALT_ACK) & in a6xx_bus_clear_pending_transactions()
2021 gpu_write(gpu, REG_A6XX_GBIF_HALT, 0x0); in a6xx_bus_clear_pending_transactions()
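
Clearing pending bus transactions is a halt/acknowledge handshake: request a halt, spin until the matching ACK bits are set, then release the halt. A sketch of the GBIF side of that handshake, using only the registers and masks named above; the same pattern applies to the RBBM-side GBIF halt and to the VBIF registers used by older parts.

/* Hedged sketch of the GBIF client/arbiter halt handshake. */
static void a6xx_gbif_halt_sketch(struct msm_gpu *gpu)
{
        /* Halt GBIF client traffic and wait for the acknowledge bits */
        gpu_write(gpu, REG_A6XX_GBIF_HALT, GBIF_CLIENT_HALT_MASK);
        spin_until((gpu_read(gpu, REG_A6XX_GBIF_HALT_ACK) &
                    GBIF_CLIENT_HALT_MASK) == GBIF_CLIENT_HALT_MASK);

        /* Then halt the arbiter and wait again */
        gpu_write(gpu, REG_A6XX_GBIF_HALT, GBIF_ARB_HALT_MASK);
        spin_until((gpu_read(gpu, REG_A6XX_GBIF_HALT_ACK) &
                    GBIF_ARB_HALT_MASK) == GBIF_ARB_HALT_MASK);

        /* Finally release the halt */
        gpu_write(gpu, REG_A6XX_GBIF_HALT, 0x0);
}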
2024 void a6xx_gpu_sw_reset(struct msm_gpu *gpu, bool assert) in a6xx_gpu_sw_reset() argument
2027 if (adreno_is_a610(to_adreno_gpu(gpu))) in a6xx_gpu_sw_reset()
2030 gpu_write(gpu, REG_A6XX_RBBM_SW_RESET_CMD, assert); in a6xx_gpu_sw_reset()
2032 gpu_read(gpu, REG_A6XX_RBBM_SW_RESET_CMD); in a6xx_gpu_sw_reset()
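
The software reset helper writes RBBM_SW_RESET_CMD and immediately reads the same register back so the write is posted before the caller proceeds, and skips a610 entirely. A sketch of that posted-write pattern, limited to what the listed lines show.

/* Hedged sketch of the SW reset assert/deassert with a posted-write readback. */
static void a6xx_sw_reset_sketch(struct msm_gpu *gpu, bool assert)
{
        /* The listing shows a610 is skipped entirely */
        if (adreno_is_a610(to_adreno_gpu(gpu)))
                return;

        gpu_write(gpu, REG_A6XX_RBBM_SW_RESET_CMD, assert);
        /* Read back so the reset write has landed before the caller continues */
        gpu_read(gpu, REG_A6XX_RBBM_SW_RESET_CMD);
}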
2040 static int a6xx_gmu_pm_resume(struct msm_gpu *gpu) in a6xx_gmu_pm_resume() argument
2042 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_gmu_pm_resume()
2046 gpu->needs_hw_init = true; in a6xx_gmu_pm_resume()
2056 msm_devfreq_resume(gpu); in a6xx_gmu_pm_resume()
2063 static int a6xx_pm_resume(struct msm_gpu *gpu) in a6xx_pm_resume() argument
2065 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_pm_resume()
2068 unsigned long freq = gpu->fast_rate; in a6xx_pm_resume()
2072 gpu->needs_hw_init = true; in a6xx_pm_resume()
2078 opp = dev_pm_opp_find_freq_ceil(&gpu->pdev->dev, &freq); in a6xx_pm_resume()
2086 dev_pm_opp_set_opp(&gpu->pdev->dev, opp); in a6xx_pm_resume()
2091 ret = clk_bulk_prepare_enable(gpu->nr_clocks, gpu->grp_clks); in a6xx_pm_resume()
2103 dev_pm_opp_set_opp(&gpu->pdev->dev, NULL); in a6xx_pm_resume()
2109 msm_devfreq_resume(gpu); in a6xx_pm_resume()
2114 static int a6xx_gmu_pm_suspend(struct msm_gpu *gpu) in a6xx_gmu_pm_suspend() argument
2116 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_gmu_pm_suspend()
2124 msm_devfreq_suspend(gpu); in a6xx_gmu_pm_suspend()
2133 for (i = 0; i < gpu->nr_rings; i++) in a6xx_gmu_pm_suspend()
2136 gpu->suspend_count++; in a6xx_gmu_pm_suspend()
2141 static int a6xx_pm_suspend(struct msm_gpu *gpu) in a6xx_pm_suspend() argument
2143 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_pm_suspend()
2150 msm_devfreq_suspend(gpu); in a6xx_pm_suspend()
2160 clk_bulk_disable_unprepare(gpu->nr_clocks, gpu->grp_clks); in a6xx_pm_suspend()
2163 dev_pm_opp_set_opp(&gpu->pdev->dev, NULL); in a6xx_pm_suspend()
2169 for (i = 0; i < gpu->nr_rings; i++) in a6xx_pm_suspend()
2172 gpu->suspend_count++; in a6xx_pm_suspend()
2177 static int a6xx_gmu_get_timestamp(struct msm_gpu *gpu, uint64_t *value) in a6xx_gmu_get_timestamp() argument
2179 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_gmu_get_timestamp()
2187 *value = gpu_read64(gpu, REG_A6XX_CP_ALWAYS_ON_COUNTER); in a6xx_gmu_get_timestamp()
2196 static int a6xx_get_timestamp(struct msm_gpu *gpu, uint64_t *value) in a6xx_get_timestamp() argument
2198 *value = gpu_read64(gpu, REG_A6XX_CP_ALWAYS_ON_COUNTER); in a6xx_get_timestamp()
2202 static struct msm_ringbuffer *a6xx_active_ring(struct msm_gpu *gpu) in a6xx_active_ring() argument
2204 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_active_ring()
2210 static void a6xx_destroy(struct msm_gpu *gpu) in a6xx_destroy() argument
2212 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_destroy()
2216 msm_gem_unpin_iova(a6xx_gpu->sqe_bo, gpu->vm); in a6xx_destroy()
2221 msm_gem_unpin_iova(a6xx_gpu->shadow_bo, gpu->vm); in a6xx_destroy()
2234 static u64 a6xx_gpu_busy(struct msm_gpu *gpu, unsigned long *out_sample_rate) in a6xx_gpu_busy() argument
2236 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_gpu_busy()
2250 static void a6xx_gpu_set_freq(struct msm_gpu *gpu, struct dev_pm_opp *opp, in a6xx_gpu_set_freq() argument
2253 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_gpu_set_freq()
2257 a6xx_gmu_set_freq(gpu, opp, suspended); in a6xx_gpu_set_freq()
2262 a6xx_create_vm(struct msm_gpu *gpu, struct platform_device *pdev) in a6xx_create_vm() argument
2264 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_create_vm()
2276 return adreno_iommu_create_vm(gpu, pdev, quirks); in a6xx_create_vm()
2280 a6xx_create_private_vm(struct msm_gpu *gpu, bool kernel_managed) in a6xx_create_private_vm() argument
2284 mmu = msm_iommu_pagetable_create(to_msm_vm(gpu->vm)->mmu, kernel_managed); in a6xx_create_private_vm()
2289 return msm_gem_vm_create(gpu->dev, mmu, "gpu", ADRENO_VM_START, in a6xx_create_private_vm()
2290 adreno_private_vm_size(gpu), kernel_managed); in a6xx_create_private_vm()
2293 static uint32_t a6xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in a6xx_get_rptr() argument
2295 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_get_rptr()
2301 return ring->memptrs->rptr = gpu_read(gpu, REG_A6XX_CP_RB_RPTR); in a6xx_get_rptr()
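
a6xx_get_rptr() prefers the CP-maintained shadow read pointer when one exists and only falls back to a register read otherwise. A sketch of that fallback; the exact has-shadow condition is an assumption (the real driver keys this off hardware capabilities), and a6xx_gpu->shadow is assumed to be the per-ring array allocated in a6xx_ucode_load() above.

/* Hedged sketch: prefer the shadow RPTR, else read the register and cache it. */
static uint32_t a6xx_get_rptr_sketch(struct msm_gpu *gpu, struct msm_ringbuffer *ring,
                                     struct a6xx_gpu *a6xx_gpu)
{
        if (a6xx_gpu->shadow)
                return a6xx_gpu->shadow[ring->id];

        return ring->memptrs->rptr = gpu_read(gpu, REG_A6XX_CP_RB_RPTR);
}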
2304 static bool a6xx_progress(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in a6xx_progress() argument
2307 .ib1_base = gpu_read64(gpu, REG_A6XX_CP_IB1_BASE), in a6xx_progress()
2308 .ib2_base = gpu_read64(gpu, REG_A6XX_CP_IB2_BASE), in a6xx_progress()
2309 .ib1_rem = gpu_read(gpu, REG_A6XX_CP_IB1_REM_SIZE), in a6xx_progress()
2310 .ib2_rem = gpu_read(gpu, REG_A6XX_CP_IB2_REM_SIZE), in a6xx_progress()
2327 cp_state.ib1_rem += gpu_read(gpu, REG_A6XX_CP_ROQ_AVAIL_IB1) >> 16; in a6xx_progress()
2328 cp_state.ib2_rem += gpu_read(gpu, REG_A6XX_CP_ROQ_AVAIL_IB2) >> 16; in a6xx_progress()
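
a6xx_progress() decides whether the CP has made forward progress by snapshotting the IB1/IB2 base and remaining-size registers (folding in the dwords already fetched into the ROQ) and comparing against the snapshot from the previous hangcheck pass. A sketch of that compare-and-store idea; the msm_cp_state type and the last_cp_state field on the ring are assumptions about where the previous snapshot lives.

/* Hedged sketch of hangcheck progress detection: the GPU has progressed if
 * the sampled CP state differs from the last sample stored on the ring.
 */
static bool a6xx_progress_sketch(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
{
        struct msm_cp_state cp_state = {
                .ib1_base = gpu_read64(gpu, REG_A6XX_CP_IB1_BASE),
                .ib2_base = gpu_read64(gpu, REG_A6XX_CP_IB2_BASE),
                .ib1_rem  = gpu_read(gpu, REG_A6XX_CP_IB1_REM_SIZE),
                .ib2_rem  = gpu_read(gpu, REG_A6XX_CP_IB2_REM_SIZE),
        };
        bool progress;

        /* Fold in dwords already fetched into the ROQ (upper half of the register) */
        cp_state.ib1_rem += gpu_read(gpu, REG_A6XX_CP_ROQ_AVAIL_IB1) >> 16;
        cp_state.ib2_rem += gpu_read(gpu, REG_A6XX_CP_ROQ_AVAIL_IB2) >> 16;

        progress = !!memcmp(&cp_state, &ring->last_cp_state, sizeof(cp_state));
        ring->last_cp_state = cp_state;

        return progress;
}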
2483 struct msm_gpu *gpu; in a6xx_gpu_init() local
2493 gpu = &adreno_gpu->base; in a6xx_gpu_init()
2564 msm_mmu_set_fault_handler(to_msm_vm(gpu->vm)->mmu, gpu, in a6xx_gpu_init()
2574 a6xx_preempt_init(gpu); in a6xx_gpu_init()
2576 return gpu; in a6xx_gpu_init()