Lines matching references to reloc in r600_cs_check_reg() and r600_packet3_check()
969 struct radeon_bo_list *reloc; in r600_cs_check_reg() local
1014 r = radeon_cs_packet_next_reloc(p, &reloc, 0); in r600_cs_check_reg()
1020 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1032 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1041 if (reloc->tiling_flags & RADEON_TILING_MACRO) { in r600_cs_check_reg()
1074 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1082 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1083 track->vgt_strmout_bo[tmp] = reloc->robj; in r600_cs_check_reg()
1084 track->vgt_strmout_bo_mc[tmp] = reloc->gpu_offset; in r600_cs_check_reg()
1097 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1103 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1133 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1140 if (reloc->tiling_flags & RADEON_TILING_MACRO) { in r600_cs_check_reg()
1143 } else if (reloc->tiling_flags & RADEON_TILING_MICRO) { in r600_cs_check_reg()
1205 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1210 track->cb_color_frag_bo[tmp] = reloc->robj; in r600_cs_check_reg()
1212 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1236 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1241 track->cb_color_tile_bo[tmp] = reloc->robj; in r600_cs_check_reg()
1243 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1271 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1279 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1281 track->cb_color_bo[tmp] = reloc->robj; in r600_cs_check_reg()
1282 track->cb_color_bo_mc[tmp] = reloc->gpu_offset; in r600_cs_check_reg()
1286 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1293 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1294 track->db_bo = reloc->robj; in r600_cs_check_reg()
1295 track->db_bo_mc = reloc->gpu_offset; in r600_cs_check_reg()
1299 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1306 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1307 track->htile_bo = reloc->robj; in r600_cs_check_reg()
1369 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1375 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1378 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1384 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
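The r600_cs_check_reg() matches above repeat one pattern: fetch the relocation for the current packet with radeon_cs_packet_next_reloc(), then fold the buffer's GPU address, shifted right by 8 because these registers take the address in 256-byte units, into the 32-bit IB dword. A minimal standalone sketch of that arithmetic follows; fake_reloc is a simplified stand-in for struct radeon_bo_list, not the real radeon structure.

#include <stdint.h>
#include <stdio.h>

struct fake_reloc {                 /* simplified stand-in for struct radeon_bo_list */
	uint64_t gpu_offset;        /* GPU address of the relocated buffer object    */
};

/* Mirrors "ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);" above */
static void patch_reg_dword(uint32_t *ib, unsigned int idx,
			    const struct fake_reloc *reloc)
{
	ib[idx] += (uint32_t)((reloc->gpu_offset >> 8) & 0xffffffff);
}

int main(void)
{
	struct fake_reloc reloc = { .gpu_offset = 0x100000ULL }; /* 1 MiB, 256-byte aligned */
	uint32_t ib[1] = { 0 };                                  /* one fake IB dword       */

	patch_reg_dword(ib, 0, &reloc);
	printf("patched dword: 0x%08x\n", ib[0]);                /* prints 0x00001000       */
	return 0;
}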
1629 struct radeon_bo_list *reloc; in r600_packet3_check() local
1667 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1673 offset = reloc->gpu_offset + in r600_packet3_check()
1708 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1714 offset = reloc->gpu_offset + in r600_packet3_check()
1760 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1766 offset = reloc->gpu_offset + in r600_packet3_check()
1797 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1806 offset = reloc->gpu_offset + tmp; in r600_packet3_check()
1808 if ((tmp + size) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
1810 tmp + size, radeon_bo_size(reloc->robj)); in r600_packet3_check()
1827 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1836 offset = reloc->gpu_offset + tmp; in r600_packet3_check()
1838 if ((tmp + size) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
1840 tmp + size, radeon_bo_size(reloc->robj)); in r600_packet3_check()
1857 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1862 ib[idx+2] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_packet3_check()
1873 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1878 offset = reloc->gpu_offset + in r600_packet3_check()
1894 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1900 offset = reloc->gpu_offset + in r600_packet3_check()
1960 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1965 base_offset = (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_packet3_check()
1967 if (reloc->tiling_flags & RADEON_TILING_MACRO) in r600_packet3_check()
1969 else if (reloc->tiling_flags & RADEON_TILING_MICRO) in r600_packet3_check()
1972 texture = reloc->robj; in r600_packet3_check()
1974 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1979 mip_offset = (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_packet3_check()
1980 mipmap = reloc->robj; in r600_packet3_check()
1985 reloc->tiling_flags); in r600_packet3_check()
1995 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
2002 if (p->rdev && (size + offset) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
2005 size + offset, radeon_bo_size(reloc->robj)); in r600_packet3_check()
2006 ib[idx+1+(i*7)+1] = radeon_bo_size(reloc->robj) - offset; in r600_packet3_check()
2009 offset64 = reloc->gpu_offset + offset; in r600_packet3_check()
2096 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
2102 if (reloc->robj != track->vgt_strmout_bo[idx_value]) { in r600_packet3_check()
2114 if ((offset + 4) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
2116 offset + 4, radeon_bo_size(reloc->robj)); in r600_packet3_check()
2119 ib[idx+1] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_packet3_check()
2140 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
2147 if ((offset + 4) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
2149 offset + 4, radeon_bo_size(reloc->robj)); in r600_packet3_check()
2152 offset += reloc->gpu_offset; in r600_packet3_check()
2159 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
2166 if ((offset + 4) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
2168 offset + 4, radeon_bo_size(reloc->robj)); in r600_packet3_check()
2171 offset += reloc->gpu_offset; in r600_packet3_check()
2184 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
2195 if ((offset + 8) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
2197 offset + 8, radeon_bo_size(reloc->robj)); in r600_packet3_check()
2200 offset += reloc->gpu_offset; in r600_packet3_check()
2213 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
2220 if ((offset + 4) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
2222 offset + 4, radeon_bo_size(reloc->robj)); in r600_packet3_check()
2225 offset += reloc->gpu_offset; in r600_packet3_check()
2237 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
2244 if ((offset + 4) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
2246 offset + 4, radeon_bo_size(reloc->robj)); in r600_packet3_check()
2249 offset += reloc->gpu_offset; in r600_packet3_check()
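The r600_packet3_check() matches repeat a second pattern: before relocating a packet's address, the checker verifies that the offset plus the access size still fits inside the buffer object (compared against radeon_bo_size()), then adds the buffer's GPU offset and writes the result back as low and high IB dwords. A standalone sketch of that check, with plain integers standing in for the radeon objects; the error message and the 0xff mask on the upper dword follow the pattern in the matches above but are illustrative only.

#include <stdint.h>
#include <stdio.h>

/* Bounds-check an access of "size" bytes at "offset", then relocate it. */
static int patch_packet3_addr(uint32_t *ib, unsigned int idx,
			      uint64_t gpu_offset, uint64_t bo_size,
			      uint64_t offset, uint64_t size)
{
	if (offset + size > bo_size) {
		fprintf(stderr, "bad offset %llu, bo size %llu\n",
			(unsigned long long)(offset + size),
			(unsigned long long)bo_size);
		return -1;                 /* the kernel checker returns -EINVAL here */
	}

	offset += gpu_offset;              /* relocate into the GPU address space */
	ib[idx + 0] = (uint32_t)(offset & 0xffffffff);  /* low 32 bits of the address  */
	ib[idx + 1] = (uint32_t)(offset >> 32) & 0xff;  /* high bits kept by the packet */
	return 0;
}

int main(void)
{
	uint32_t ib[4] = { 0 };

	/* 4-byte access at offset 16 of a 4 KiB buffer mapped at 0x1_0000_0000 */
	if (!patch_packet3_addr(ib, 1, 0x100000000ULL, 4096, 16, 4))
		printf("low 0x%08x high 0x%08x\n", ib[1], ib[2]);
	return 0;
}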