Lines matching references to idx in drivers/gpu/drm/radeon/r600_cs.c
839 r = radeon_cs_packet_parse(p, &wait_reg_mem, p->idx); in r600_cs_common_vline_parse()
850 wait_reg_mem_info = radeon_get_ib_value(p, wait_reg_mem.idx + 1); in r600_cs_common_vline_parse()
866 if ((radeon_get_ib_value(p, wait_reg_mem.idx + 2) << 2) != vline_status[0]) { in r600_cs_common_vline_parse()
871 if (radeon_get_ib_value(p, wait_reg_mem.idx + 5) != RADEON_VLINE_STAT) { in r600_cs_common_vline_parse()
877 r = radeon_cs_packet_parse(p, &p3reloc, p->idx + wait_reg_mem.count + 2); in r600_cs_common_vline_parse()
881 h_idx = p->idx - 2; in r600_cs_common_vline_parse()
882 p->idx += wait_reg_mem.count + 2; in r600_cs_common_vline_parse()
883 p->idx += p3reloc.count + 2; in r600_cs_common_vline_parse()
920 unsigned idx, unsigned reg) in r600_packet0_check() argument
929 idx, reg); in r600_packet0_check()
934 pr_err("Forbidden register 0x%04X in cs at %d\n", reg, idx); in r600_packet0_check()
944 unsigned idx; in r600_cs_parse_packet0() local
947 idx = pkt->idx + 1; in r600_cs_parse_packet0()
949 for (i = 0; i <= pkt->count; i++, idx++, reg += 4) { in r600_cs_parse_packet0()
950 r = r600_packet0_check(p, pkt, idx, reg); in r600_cs_parse_packet0()
968 static int r600_cs_check_reg(struct radeon_cs_parser *p, u32 reg, u32 idx) in r600_cs_check_reg() argument
977 dev_warn(p->dev, "forbidden register 0x%08x at %d\n", reg, idx); in r600_cs_check_reg()
1022 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1025 track->sq_config = radeon_get_ib_value(p, idx); in r600_cs_check_reg()
1028 track->db_depth_control = radeon_get_ib_value(p, idx); in r600_cs_check_reg()
1040 track->db_depth_info = radeon_get_ib_value(p, idx); in r600_cs_check_reg()
1041 ib[idx] &= C_028010_ARRAY_MODE; in r600_cs_check_reg()
1044 ib[idx] |= S_028010_ARRAY_MODE(V_028010_ARRAY_2D_TILED_THIN1); in r600_cs_check_reg()
1047 ib[idx] |= S_028010_ARRAY_MODE(V_028010_ARRAY_1D_TILED_THIN1); in r600_cs_check_reg()
1051 track->db_depth_info = radeon_get_ib_value(p, idx); in r600_cs_check_reg()
1056 track->db_depth_view = radeon_get_ib_value(p, idx); in r600_cs_check_reg()
1060 track->db_depth_size = radeon_get_ib_value(p, idx); in r600_cs_check_reg()
1061 track->db_depth_size_idx = idx; in r600_cs_check_reg()
1065 track->vgt_strmout_en = radeon_get_ib_value(p, idx); in r600_cs_check_reg()
1069 track->vgt_strmout_buffer_en = radeon_get_ib_value(p, idx); in r600_cs_check_reg()
1083 track->vgt_strmout_bo_offset[tmp] = radeon_get_ib_value(p, idx) << 8; in r600_cs_check_reg()
1084 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1095 track->vgt_strmout_size[tmp] = radeon_get_ib_value(p, idx) * 4; in r600_cs_check_reg()
1105 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1108 track->cb_target_mask = radeon_get_ib_value(p, idx); in r600_cs_check_reg()
1112 track->cb_shader_mask = radeon_get_ib_value(p, idx); in r600_cs_check_reg()
1115 tmp = G_028C04_MSAA_NUM_SAMPLES(radeon_get_ib_value(p, idx)); in r600_cs_check_reg()
1121 tmp = G_028808_SPECIAL_OP(radeon_get_ib_value(p, idx)); in r600_cs_check_reg()
1141 track->cb_color_info[tmp] = radeon_get_ib_value(p, idx); in r600_cs_check_reg()
1143 ib[idx] |= S_0280A0_ARRAY_MODE(V_0280A0_ARRAY_2D_TILED_THIN1); in r600_cs_check_reg()
1146 ib[idx] |= S_0280A0_ARRAY_MODE(V_0280A0_ARRAY_1D_TILED_THIN1); in r600_cs_check_reg()
1151 track->cb_color_info[tmp] = radeon_get_ib_value(p, idx); in r600_cs_check_reg()
1164 track->cb_color_view[tmp] = radeon_get_ib_value(p, idx); in r600_cs_check_reg()
1176 track->cb_color_size[tmp] = radeon_get_ib_value(p, idx); in r600_cs_check_reg()
1177 track->cb_color_size_idx[tmp] = idx; in r600_cs_check_reg()
1205 ib[idx] = track->cb_color_base_last[tmp]; in r600_cs_check_reg()
1213 track->cb_color_frag_offset[tmp] = (u64)ib[idx] << 8; in r600_cs_check_reg()
1214 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1236 ib[idx] = track->cb_color_base_last[tmp]; in r600_cs_check_reg()
1244 track->cb_color_tile_offset[tmp] = (u64)ib[idx] << 8; in r600_cs_check_reg()
1245 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1260 track->cb_color_mask[tmp] = radeon_get_ib_value(p, idx); in r600_cs_check_reg()
1280 track->cb_color_bo_offset[tmp] = radeon_get_ib_value(p, idx) << 8; in r600_cs_check_reg()
1281 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1282 track->cb_color_base_last[tmp] = ib[idx]; in r600_cs_check_reg()
1294 track->db_offset = radeon_get_ib_value(p, idx) << 8; in r600_cs_check_reg()
1295 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1307 track->htile_offset = radeon_get_ib_value(p, idx) << 8; in r600_cs_check_reg()
1308 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1313 track->htile_surface = radeon_get_ib_value(p, idx); in r600_cs_check_reg()
1315 ib[idx] |= 3; in r600_cs_check_reg()
1377 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1386 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1389 track->sx_misc_kill_all_prims = (radeon_get_ib_value(p, idx) & 0x1) != 0; in r600_cs_check_reg()
1392 dev_warn(p->dev, "forbidden register 0x%08x at %d\n", reg, idx); in r600_cs_check_reg()
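The r600_cs_check_reg() lines above mostly repeat one pattern: read the offset userspace wrote at dword idx, fetch the relocation describing the buffer object it refers to, and patch ib[idx] with the buffer's GPU address (shifted right by 8 for registers that take a 256-byte-aligned base). A trimmed sketch of one such case, modelled on the listed lines (error handling and tracker state cut down; the real function groups many registers into this case):

	case SQ_ESGS_RING_BASE:
		r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm);
		if (r) {
			dev_warn(p->dev, "bad SET_CONTEXT_REG 0x%04X\n", reg);
			return -EINVAL;
		}
		/* userspace supplied an offset into the bo; add its GPU base >> 8 */
		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
		break;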
1471 static int r600_check_texture_resource(struct radeon_cs_parser *p, u32 idx, in r600_check_texture_resource() argument
1496 word0 = radeon_get_ib_value(p, idx + 0); in r600_check_texture_resource()
1503 word1 = radeon_get_ib_value(p, idx + 1); in r600_check_texture_resource()
1504 word2 = radeon_get_ib_value(p, idx + 2) << 8; in r600_check_texture_resource()
1505 word3 = radeon_get_ib_value(p, idx + 3) << 8; in r600_check_texture_resource()
1506 word4 = radeon_get_ib_value(p, idx + 4); in r600_check_texture_resource()
1507 word5 = radeon_get_ib_value(p, idx + 5); in r600_check_texture_resource()
1612 static bool r600_is_safe_reg(struct radeon_cs_parser *p, u32 reg, u32 idx) in r600_is_safe_reg() argument
1618 dev_warn(p->dev, "forbidden register 0x%08x at %d\n", reg, idx); in r600_is_safe_reg()
1624 dev_warn(p->dev, "forbidden register 0x%08x at %d\n", reg, idx); in r600_is_safe_reg()
1634 unsigned idx; in r600_packet3_check() local
1642 idx = pkt->idx + 1; in r600_packet3_check()
1643 idx_value = radeon_get_ib_value(p, idx); in r600_packet3_check()
1657 tmp = radeon_get_ib_value(p, idx + 1); in r600_packet3_check()
1679 ib[idx + 0] = offset; in r600_packet3_check()
1680 ib[idx + 1] = (tmp & 0xffffff00) | (upper_32_bits(offset) & 0xff); in r600_packet3_check()
1718 ((u64)(radeon_get_ib_value(p, idx+1) & 0xff) << 32); in r600_packet3_check()
1720 ib[idx+0] = offset; in r600_packet3_check()
1721 ib[idx+1] = upper_32_bits(offset) & 0xff; in r600_packet3_check()
1737 dev_warn(p->dev, "%s:%d invalid cmd stream %d\n", __func__, __LINE__, idx); in r600_packet3_check()
1769 (radeon_get_ib_value(p, idx+1) & 0xfffffff0) + in r600_packet3_check()
1770 ((u64)(radeon_get_ib_value(p, idx+2) & 0xff) << 32); in r600_packet3_check()
1772 ib[idx+1] = (ib[idx+1] & 0x3) | (offset & 0xfffffff0); in r600_packet3_check()
1773 ib[idx+2] = upper_32_bits(offset) & 0xff; in r600_packet3_check()
1787 command = radeon_get_ib_value(p, idx+4); in r600_packet3_check()
1805 tmp = radeon_get_ib_value(p, idx) + in r600_packet3_check()
1806 ((u64)(radeon_get_ib_value(p, idx+1) & 0xff) << 32); in r600_packet3_check()
1816 ib[idx] = offset; in r600_packet3_check()
1817 ib[idx+1] = (ib[idx+1] & 0xffffff00) | (upper_32_bits(offset) & 0xff); in r600_packet3_check()
1835 tmp = radeon_get_ib_value(p, idx+2) + in r600_packet3_check()
1836 ((u64)(radeon_get_ib_value(p, idx+3) & 0xff) << 32); in r600_packet3_check()
1846 ib[idx+2] = offset; in r600_packet3_check()
1847 ib[idx+3] = upper_32_bits(offset) & 0xff; in r600_packet3_check()
1857 if (radeon_get_ib_value(p, idx + 1) != 0xffffffff || in r600_packet3_check()
1858 radeon_get_ib_value(p, idx + 2) != 0) { in r600_packet3_check()
1864 ib[idx+2] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_packet3_check()
1881 (radeon_get_ib_value(p, idx+1) & 0xfffffff8) + in r600_packet3_check()
1882 ((u64)(radeon_get_ib_value(p, idx+2) & 0xff) << 32); in r600_packet3_check()
1884 ib[idx+1] = offset & 0xfffffff8; in r600_packet3_check()
1885 ib[idx+2] = upper_32_bits(offset) & 0xff; in r600_packet3_check()
1903 (radeon_get_ib_value(p, idx+1) & 0xfffffffc) + in r600_packet3_check()
1904 ((u64)(radeon_get_ib_value(p, idx+2) & 0xff) << 32); in r600_packet3_check()
1906 ib[idx+1] = offset & 0xfffffffc; in r600_packet3_check()
1907 ib[idx+2] = (ib[idx+2] & 0xffffff00) | (upper_32_bits(offset) & 0xff); in r600_packet3_check()
1921 r = r600_cs_check_reg(p, reg, idx+1+i); in r600_packet3_check()
1937 r = r600_cs_check_reg(p, reg, idx+1+i); in r600_packet3_check()
1959 switch (G__SQ_VTX_CONSTANT_TYPE(radeon_get_ib_value(p, idx+(i*7)+6+1))) { in r600_packet3_check()
1970 ib[idx+1+(i*7)+0] |= S_038000_TILE_MODE(V_038000_ARRAY_2D_TILED_THIN1); in r600_packet3_check()
1972 ib[idx+1+(i*7)+0] |= S_038000_TILE_MODE(V_038000_ARRAY_1D_TILED_THIN1); in r600_packet3_check()
1983 r = r600_check_texture_resource(p, idx+(i*7)+1, in r600_packet3_check()
1985 base_offset + radeon_get_ib_value(p, idx+1+(i*7)+2), in r600_packet3_check()
1986 mip_offset + radeon_get_ib_value(p, idx+1+(i*7)+3), in r600_packet3_check()
1990 ib[idx+1+(i*7)+2] += base_offset; in r600_packet3_check()
1991 ib[idx+1+(i*7)+3] += mip_offset; in r600_packet3_check()
2002 offset = radeon_get_ib_value(p, idx+1+(i*7)+0); in r600_packet3_check()
2003 size = radeon_get_ib_value(p, idx+1+(i*7)+1) + 1; in r600_packet3_check()
2008 ib[idx+1+(i*7)+1] = radeon_bo_size(reloc->robj) - offset; in r600_packet3_check()
2012 ib[idx+1+(i*8)+0] = offset64; in r600_packet3_check()
2013 ib[idx+1+(i*8)+2] = (ib[idx+1+(i*8)+2] & 0xffffff00) | in r600_packet3_check()
2109 offset = radeon_get_ib_value(p, idx+1) << 8; in r600_packet3_check()
2121 ib[idx+1] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_packet3_check()
2147 offset = radeon_get_ib_value(p, idx+1); in r600_packet3_check()
2148 offset += ((u64)(radeon_get_ib_value(p, idx+2) & 0xff)) << 32; in r600_packet3_check()
2155 ib[idx+1] = offset; in r600_packet3_check()
2156 ib[idx+2] = upper_32_bits(offset) & 0xff; in r600_packet3_check()
2166 offset = radeon_get_ib_value(p, idx+3); in r600_packet3_check()
2167 offset += ((u64)(radeon_get_ib_value(p, idx+4) & 0xff)) << 32; in r600_packet3_check()
2174 ib[idx+3] = offset; in r600_packet3_check()
2175 ib[idx+4] = upper_32_bits(offset) & 0xff; in r600_packet3_check()
2191 offset = radeon_get_ib_value(p, idx+0); in r600_packet3_check()
2192 offset += ((u64)(radeon_get_ib_value(p, idx+1) & 0xff)) << 32UL; in r600_packet3_check()
2203 ib[idx+0] = offset; in r600_packet3_check()
2204 ib[idx+1] = upper_32_bits(offset) & 0xff; in r600_packet3_check()
2220 offset = radeon_get_ib_value(p, idx+1); in r600_packet3_check()
2221 offset += ((u64)(radeon_get_ib_value(p, idx+2) & 0xff)) << 32; in r600_packet3_check()
2228 ib[idx+1] = offset; in r600_packet3_check()
2229 ib[idx+2] = upper_32_bits(offset) & 0xff; in r600_packet3_check()
2232 reg = radeon_get_ib_value(p, idx+1) << 2; in r600_packet3_check()
2233 if (!r600_is_safe_reg(p, reg, idx+1)) in r600_packet3_check()
2244 offset = radeon_get_ib_value(p, idx+3); in r600_packet3_check()
2245 offset += ((u64)(radeon_get_ib_value(p, idx+4) & 0xff)) << 32; in r600_packet3_check()
2252 ib[idx+3] = offset; in r600_packet3_check()
2253 ib[idx+4] = upper_32_bits(offset) & 0xff; in r600_packet3_check()
2256 reg = radeon_get_ib_value(p, idx+3) << 2; in r600_packet3_check()
2257 if (!r600_is_safe_reg(p, reg, idx+3)) in r600_packet3_check()
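The idx+1+i arguments at lines 1921 and 1937 come from the SET_CONFIG_REG / SET_CONTEXT_REG handlers in r600_packet3_check(): the first payload dword selects the starting register, and each following dword at idx+1+i is validated against register start + 4*i. Sketch of that loop (assuming the PACKET3_SET_CONTEXT_REG_OFFSET constant from r600d.h; the range checks are omitted):

	case PACKET3_SET_CONTEXT_REG:
		start_reg = (idx_value << 2) + PACKET3_SET_CONTEXT_REG_OFFSET;
		for (i = 0; i < pkt->count; i++) {
			reg = start_reg + (4 * i);
			r = r600_cs_check_reg(p, reg, idx + 1 + i);
			if (r)
				return r;
		}
		break;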
2294 r = radeon_cs_packet_parse(p, &pkt, p->idx); in r600_cs_parse()
2300 p->idx += pkt.count + 2; in r600_cs_parse()
2321 } while (p->idx < p->chunk_ib->length_dw); in r600_cs_parse()
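Lines 2294-2321 are the outer loop of r600_cs_parse(): decode the PM4 packet header at p->idx, step p->idx past the whole packet (header plus count + 1 payload dwords, i.e. count + 2), then dispatch on the packet type. Condensed sketch (assuming the RADEON_PACKET_TYPE* constants from radeon.h; the real loop also frees the tracker and reports errors):

	do {
		r = radeon_cs_packet_parse(p, &pkt, p->idx);
		if (r)
			return r;
		p->idx += pkt.count + 2;       /* header + payload */
		switch (pkt.type) {
		case RADEON_PACKET_TYPE0:
			r = r600_cs_parse_packet0(p, &pkt);
			break;
		case RADEON_PACKET_TYPE2:
			break;                 /* padding NOP */
		case RADEON_PACKET_TYPE3:
			r = r600_packet3_check(p, &pkt);
			break;
		}
		if (r)
			return r;
	} while (p->idx < p->chunk_ib->length_dw);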
2347 unsigned idx; in r600_dma_cs_next_reloc() local
2354 idx = p->dma_reloc_idx; in r600_dma_cs_next_reloc()
2355 if (idx >= p->nrelocs) { in r600_dma_cs_next_reloc()
2357 idx, p->nrelocs); in r600_dma_cs_next_reloc()
2360 *cs_reloc = &p->relocs[idx]; in r600_dma_cs_next_reloc()
2384 u32 idx, idx_value; in r600_dma_cs_parse() local
2389 if (p->idx >= ib_chunk->length_dw) { in r600_dma_cs_parse()
2391 p->idx, ib_chunk->length_dw); in r600_dma_cs_parse()
2394 idx = p->idx; in r600_dma_cs_parse()
2395 header = radeon_get_ib_value(p, idx); in r600_dma_cs_parse()
2408 dst_offset = radeon_get_ib_value(p, idx+1); in r600_dma_cs_parse()
2411 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8); in r600_dma_cs_parse()
2412 p->idx += count + 5; in r600_dma_cs_parse()
2414 dst_offset = radeon_get_ib_value(p, idx+1); in r600_dma_cs_parse()
2415 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+2) & 0xff)) << 32; in r600_dma_cs_parse()
2417 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc); in r600_dma_cs_parse()
2418 ib[idx+2] += upper_32_bits(dst_reloc->gpu_offset) & 0xff; in r600_dma_cs_parse()
2419 p->idx += count + 3; in r600_dma_cs_parse()
2439 idx_value = radeon_get_ib_value(p, idx + 2); in r600_dma_cs_parse()
2443 src_offset = radeon_get_ib_value(p, idx+1); in r600_dma_cs_parse()
2445 ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8); in r600_dma_cs_parse()
2447 dst_offset = radeon_get_ib_value(p, idx+5); in r600_dma_cs_parse()
2448 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+6) & 0xff)) << 32; in r600_dma_cs_parse()
2449 ib[idx+5] += (u32)(dst_reloc->gpu_offset & 0xfffffffc); in r600_dma_cs_parse()
2450 ib[idx+6] += upper_32_bits(dst_reloc->gpu_offset) & 0xff; in r600_dma_cs_parse()
2453 src_offset = radeon_get_ib_value(p, idx+5); in r600_dma_cs_parse()
2454 src_offset |= ((u64)(radeon_get_ib_value(p, idx+6) & 0xff)) << 32; in r600_dma_cs_parse()
2455 ib[idx+5] += (u32)(src_reloc->gpu_offset & 0xfffffffc); in r600_dma_cs_parse()
2456 ib[idx+6] += upper_32_bits(src_reloc->gpu_offset) & 0xff; in r600_dma_cs_parse()
2458 dst_offset = radeon_get_ib_value(p, idx+1); in r600_dma_cs_parse()
2460 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8); in r600_dma_cs_parse()
2462 p->idx += 7; in r600_dma_cs_parse()
2465 src_offset = radeon_get_ib_value(p, idx+2); in r600_dma_cs_parse()
2466 src_offset |= ((u64)(radeon_get_ib_value(p, idx+4) & 0xff)) << 32; in r600_dma_cs_parse()
2467 dst_offset = radeon_get_ib_value(p, idx+1); in r600_dma_cs_parse()
2468 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+3) & 0xff)) << 32; in r600_dma_cs_parse()
2470 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc); in r600_dma_cs_parse()
2471 ib[idx+2] += (u32)(src_reloc->gpu_offset & 0xfffffffc); in r600_dma_cs_parse()
2472 ib[idx+3] += upper_32_bits(dst_reloc->gpu_offset) & 0xff; in r600_dma_cs_parse()
2473 ib[idx+4] += upper_32_bits(src_reloc->gpu_offset) & 0xff; in r600_dma_cs_parse()
2474 p->idx += 5; in r600_dma_cs_parse()
2476 src_offset = radeon_get_ib_value(p, idx+2); in r600_dma_cs_parse()
2477 src_offset |= ((u64)(radeon_get_ib_value(p, idx+3) & 0xff)) << 32; in r600_dma_cs_parse()
2478 dst_offset = radeon_get_ib_value(p, idx+1); in r600_dma_cs_parse()
2479 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+3) & 0xff0000)) << 16; in r600_dma_cs_parse()
2481 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc); in r600_dma_cs_parse()
2482 ib[idx+2] += (u32)(src_reloc->gpu_offset & 0xfffffffc); in r600_dma_cs_parse()
2483 ib[idx+3] += upper_32_bits(src_reloc->gpu_offset) & 0xff; in r600_dma_cs_parse()
2484 ib[idx+3] += (upper_32_bits(dst_reloc->gpu_offset) & 0xff) << 16; in r600_dma_cs_parse()
2485 p->idx += 4; in r600_dma_cs_parse()
2509 dst_offset = radeon_get_ib_value(p, idx+1); in r600_dma_cs_parse()
2510 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+3) & 0x00ff0000)) << 16; in r600_dma_cs_parse()
2516 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc); in r600_dma_cs_parse()
2517 ib[idx+3] += (upper_32_bits(dst_reloc->gpu_offset) << 16) & 0x00ff0000; in r600_dma_cs_parse()
2518 p->idx += 4; in r600_dma_cs_parse()
2521 p->idx += 1; in r600_dma_cs_parse()
2524 DRM_ERROR("Unknown packet type %d at %d !\n", cmd, idx); in r600_dma_cs_parse()
2527 } while (p->idx < p->chunk_ib->length_dw); in r600_dma_cs_parse()
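r600_dma_cs_parse() walks the DMA IB without a shared header parser: it reads the raw header dword at p->idx, extracts the opcode and count fields, patches the relocated addresses in place, and advances p->idx by that packet's size (count + 3, or count + 5 when tiled, for writes; 7/5/4 dwords for the copy variants; 4 for constant fill; 1 for NOP). Skeleton of the loop (assuming the GET_DMA_* field macros and DMA_PACKET_* opcodes from r600d.h; reloc lookups and bounds checks trimmed):

	do {
		idx    = p->idx;
		header = radeon_get_ib_value(p, idx);
		cmd    = GET_DMA_CMD(header);
		count  = GET_DMA_COUNT(header);

		switch (cmd) {
		case DMA_PACKET_WRITE:
			/* fetch dst reloc, patch ib[idx+1..], then skip the data */
			p->idx += count + 3;   /* count + 5 for the tiled form */
			break;
		case DMA_PACKET_COPY:
			/* fetch src and dst relocs, patch, then advance */
			p->idx += 7;           /* 5 or 4 dwords for the other variants */
			break;
		case DMA_PACKET_CONSTANT_FILL:
			p->idx += 4;
			break;
		case DMA_PACKET_NOP:
			p->idx += 1;
			break;
		default:
			DRM_ERROR("Unknown packet type %d at %d !\n", cmd, idx);
			return -EINVAL;
		}
	} while (p->idx < p->chunk_ib->length_dw);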