Lines matching refs: batch (uses of the batch vma in the i915 request live selftests)

1008 static int emit_bb_start(struct i915_request *rq, struct i915_vma *batch)  in emit_bb_start()  argument
1011 i915_vma_offset(batch), in emit_bb_start()
1012 i915_vma_size(batch), in emit_bb_start()
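
The emit_bb_start() helper above (lines 1008-1012) simply forwards the pinned batch to the engine. A minimal sketch of how those references fit together, assuming the engine's emit_bb_start vfunc is invoked with no dispatch flags:

static int emit_bb_start(struct i915_request *rq, struct i915_vma *batch)
{
	/* Hand the batch to the engine's MI_BATCH_BUFFER_START emitter,
	 * using the vma's bound offset and size; flags assumed to be 0.
	 */
	return rq->engine->emit_bb_start(rq,
					 i915_vma_offset(batch),
					 i915_vma_size(batch),
					 0);
}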
1018 struct i915_vma *batch) in empty_request() argument
1027 err = emit_bb_start(request, batch); in empty_request()
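
empty_request() (lines 1018-1027) wraps that helper in a request. A hedged reconstruction, assuming the request is created on the engine's kernel context and a reference is kept for the caller only on success:

static struct i915_request *
empty_request(struct intel_engine_cs *engine,
	      struct i915_vma *batch)
{
	struct i915_request *request;
	int err;

	/* Assumption: the empty batch is submitted on the kernel context. */
	request = i915_request_create(engine->kernel_context);
	if (IS_ERR(request))
		return request;

	err = emit_bb_start(request, batch);
	if (err)
		goto out_request;

	i915_request_get(request);
out_request:
	i915_request_add(request);
	return err ? ERR_PTR(err) : request;
}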
1053 struct i915_vma *batch; in live_empty_request() local
1057 batch = empty_batch(engine->gt); in live_empty_request()
1058 if (IS_ERR(batch)) in live_empty_request()
1059 return PTR_ERR(batch); in live_empty_request()
1068 request = empty_request(engine, batch); in live_empty_request()
1081 request = empty_request(engine, batch); in live_empty_request()
1109 i915_vma_unpin(batch); in live_empty_request()
1110 i915_vma_put(batch); in live_empty_request()
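
live_empty_request() (lines 1053-1110) pins one empty batch per GT, submits it repeatedly through empty_request(), and finally unpins and releases the vma. The hypothetical helper below condenses that acquire/submit/release pattern; the latency measurement done by the real selftest between those steps is omitted:

/* Hypothetical condensation of the flow visible in the listing. */
static int empty_request_once(struct intel_engine_cs *engine)
{
	struct i915_request *request;
	struct i915_vma *batch;
	int err = 0;

	batch = empty_batch(engine->gt);
	if (IS_ERR(batch))
		return PTR_ERR(batch);

	request = empty_request(engine, batch);
	if (IS_ERR(request)) {
		err = PTR_ERR(request);
		goto out_batch;
	}
	i915_request_put(request);

out_batch:
	i915_vma_unpin(batch);
	i915_vma_put(batch);
	return err;
}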
1171 static int recursive_batch_resolve(struct i915_vma *batch) in recursive_batch_resolve() argument
1175 cmd = i915_gem_object_pin_map_unlocked(batch->obj, I915_MAP_WC); in recursive_batch_resolve()
1181 __i915_gem_object_flush_map(batch->obj, 0, sizeof(*cmd)); in recursive_batch_resolve()
1182 i915_gem_object_unpin_map(batch->obj); in recursive_batch_resolve()
1184 intel_gt_chipset_flush(batch->vm->gt); in recursive_batch_resolve()
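
recursive_batch_resolve() (lines 1171-1184) is what unblocks the self-referencing batches used by the live_all_engines and live_sequential_engines tests below: it maps the batch object, rewrites the first dword, flushes the write, and kicks the chipset. A reconstruction, assuming the dword written is MI_BATCH_BUFFER_END so the GPU drops out of the loop:

static int recursive_batch_resolve(struct i915_vma *batch)
{
	u32 *cmd;

	cmd = i915_gem_object_pin_map_unlocked(batch->obj, I915_MAP_WC);
	if (IS_ERR(cmd))
		return PTR_ERR(cmd);

	/* Assumption: overwrite the self-referencing batch-buffer start
	 * with MI_BATCH_BUFFER_END to terminate the infinite loop.
	 */
	*cmd = MI_BATCH_BUFFER_END;

	__i915_gem_object_flush_map(batch->obj, 0, sizeof(*cmd));
	i915_gem_object_unpin_map(batch->obj);

	intel_gt_chipset_flush(batch->vm->gt);

	return 0;
}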
1215 struct i915_vma *batch; in live_all_engines() local
1217 batch = recursive_batch(engine->gt); in live_all_engines()
1218 if (IS_ERR(batch)) { in live_all_engines()
1219 err = PTR_ERR(batch); in live_all_engines()
1225 i915_vma_lock(batch); in live_all_engines()
1233 GEM_BUG_ON(request[idx]->context->vm != batch->vm); in live_all_engines()
1235 err = i915_vma_move_to_active(batch, request[idx], 0); in live_all_engines()
1238 err = emit_bb_start(request[idx], batch); in live_all_engines()
1240 request[idx]->batch = batch; in live_all_engines()
1246 i915_vma_unlock(batch); in live_all_engines()
1264 err = recursive_batch_resolve(request[idx]->batch); in live_all_engines()
1288 i915_vma_unpin(rq->batch); in live_all_engines()
1289 i915_vma_put(rq->batch); in live_all_engines()
1305 if (rq->batch) { in live_all_engines()
1306 i915_vma_unpin(rq->batch); in live_all_engines()
1307 i915_vma_put(rq->batch); in live_all_engines()
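
live_all_engines() (lines 1215-1307) builds one recursive batch per engine, attaches it to a request under the vma lock, records it in request->batch, and later resolves and releases every batch. A sketch of the per-engine submission step as a hypothetical helper; request creation, error reporting, and the final unpin/put loop of the real selftest are left out:

static int __submit_recursive_batch(struct i915_request *rq,
				    struct i915_vma *batch)
{
	int err;

	/* Assumes rq and batch were created against the same GT/vm,
	 * matching the GEM_BUG_ON(rq->context->vm != batch->vm) above.
	 */
	i915_vma_lock(batch);
	err = i915_vma_move_to_active(batch, rq, 0);
	if (err == 0)
		err = emit_bb_start(rq, batch);
	i915_vma_unlock(batch);

	if (err == 0)
		rq->batch = batch;	/* remembered for resolve/cleanup */

	return err;
}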
1345 struct i915_vma *batch; in live_sequential_engines() local
1347 batch = recursive_batch(engine->gt); in live_sequential_engines()
1348 if (IS_ERR(batch)) { in live_sequential_engines()
1349 err = PTR_ERR(batch); in live_sequential_engines()
1355 i915_vma_lock(batch); in live_sequential_engines()
1363 GEM_BUG_ON(request[idx]->context->vm != batch->vm); in live_sequential_engines()
1376 err = i915_vma_move_to_active(batch, request[idx], 0); in live_sequential_engines()
1379 err = emit_bb_start(request[idx], batch); in live_sequential_engines()
1381 request[idx]->batch = batch; in live_sequential_engines()
1390 i915_vma_unlock(batch); in live_sequential_engines()
1406 err = recursive_batch_resolve(request[idx]->batch); in live_sequential_engines()
1436 cmd = i915_gem_object_pin_map_unlocked(request[idx]->batch->obj, in live_sequential_engines()
1441 __i915_gem_object_flush_map(request[idx]->batch->obj, in live_sequential_engines()
1443 i915_gem_object_unpin_map(request[idx]->batch->obj); in live_sequential_engines()
1448 i915_vma_put(request[idx]->batch); in live_sequential_engines()
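
live_sequential_engines() (lines 1345-1448) follows the same pattern but chains the requests so each engine waits on the previous one; on teardown (lines 1436-1448) it force-terminates any batch that is still spinning before dropping the vma reference. A hypothetical per-request teardown helper, sketching only what the listing shows:

static void __terminate_and_put_batch(struct i915_request *rq)
{
	u32 *cmd;

	/* Best effort: poke MI_BATCH_BUFFER_END into the batch in case
	 * the request never completed, then flush the CPU write.
	 */
	cmd = i915_gem_object_pin_map_unlocked(rq->batch->obj, I915_MAP_WC);
	if (!IS_ERR(cmd)) {
		*cmd = MI_BATCH_BUFFER_END;

		__i915_gem_object_flush_map(rq->batch->obj, 0, sizeof(*cmd));
		i915_gem_object_unpin_map(rq->batch->obj);

		intel_gt_chipset_flush(rq->batch->vm->gt);
	}

	i915_vma_put(rq->batch);
}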