Lines matching refs: engine (drivers/gpu/drm/i915/gt/intel_reset.c)

325 struct intel_engine_cs *engine; in __gen6_reset_engines() local
334 for_each_engine_masked(engine, gt, engine_mask, tmp) { in __gen6_reset_engines()
335 hw_mask |= engine->reset_domain; in __gen6_reset_engines()
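
The __gen6_reset_engines() lines above show the pattern used throughout this file: walk the engines selected by engine_mask and OR each engine's reset_domain into a single hardware mask, which the rest of the function then hands to the actual reset write. A minimal standalone sketch of that accumulation, using hypothetical simplified types rather than the real i915 structures:

#include <stdio.h>

/* Hypothetical, simplified stand-in for the i915 engine structure. */
struct engine {
        unsigned int mask;              /* bit identifying this engine in engine_mask */
        unsigned int reset_domain;      /* bit(s) to set in the hardware reset register */
};

/* Collect the reset domains of every engine selected by engine_mask. */
static unsigned int collect_reset_domains(const struct engine *engines,
                                          int count, unsigned int engine_mask)
{
        unsigned int hw_mask = 0;

        for (int i = 0; i < count; i++) {
                if (engines[i].mask & engine_mask)
                        hw_mask |= engines[i].reset_domain;
        }
        return hw_mask;
}

int main(void)
{
        const struct engine engines[] = {
                { .mask = 1u << 0, .reset_domain = 1u << 0 },   /* e.g. render */
                { .mask = 1u << 1, .reset_domain = 1u << 1 },   /* e.g. video  */
        };

        /* Resetting only the second engine yields only its reset domain. */
        printf("hw_mask = %#x\n", collect_reset_domains(engines, 2, 1u << 1));
        return 0;
}
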
356 static struct intel_engine_cs *find_sfc_paired_vecs_engine(struct intel_engine_cs *engine) in find_sfc_paired_vecs_engine() argument
360 GEM_BUG_ON(engine->class != VIDEO_DECODE_CLASS); in find_sfc_paired_vecs_engine()
362 vecs_id = _VECS((engine->instance) / 2); in find_sfc_paired_vecs_engine()
364 return engine->gt->engine[vecs_id]; in find_sfc_paired_vecs_engine()
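
find_sfc_paired_vecs_engine() maps a video decode engine to the video enhancement engine whose SFC it shares simply by halving the instance number, so with that pairing VCS0/VCS1 resolve to VECS0 and VCS2/VCS3 to VECS1. A tiny sketch of the arithmetic:

#include <stdio.h>

/* Two VCS instances share each VECS/SFC: the paired index is instance / 2. */
static int paired_vecs_instance(int vcs_instance)
{
        return vcs_instance / 2;
}

int main(void)
{
        for (int vcs = 0; vcs < 4; vcs++)
                printf("vcs%d -> vecs%d\n", vcs, paired_vecs_instance(vcs));
        return 0;
}
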
377 static void get_sfc_forced_lock_data(struct intel_engine_cs *engine, in get_sfc_forced_lock_data() argument
380 switch (engine->class) { in get_sfc_forced_lock_data()
382 MISSING_CASE(engine->class); in get_sfc_forced_lock_data()
385 sfc_lock->lock_reg = GEN11_VCS_SFC_FORCED_LOCK(engine->mmio_base); in get_sfc_forced_lock_data()
388 sfc_lock->ack_reg = GEN11_VCS_SFC_LOCK_STATUS(engine->mmio_base); in get_sfc_forced_lock_data()
391 sfc_lock->usage_reg = GEN11_VCS_SFC_LOCK_STATUS(engine->mmio_base); in get_sfc_forced_lock_data()
393 sfc_lock->reset_bit = GEN11_VCS_SFC_RESET_BIT(engine->instance); in get_sfc_forced_lock_data()
397 sfc_lock->lock_reg = GEN11_VECS_SFC_FORCED_LOCK(engine->mmio_base); in get_sfc_forced_lock_data()
400 sfc_lock->ack_reg = GEN11_VECS_SFC_LOCK_ACK(engine->mmio_base); in get_sfc_forced_lock_data()
403 sfc_lock->usage_reg = GEN11_VECS_SFC_USAGE(engine->mmio_base); in get_sfc_forced_lock_data()
405 sfc_lock->reset_bit = GEN11_VECS_SFC_RESET_BIT(engine->instance); in get_sfc_forced_lock_data()
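
get_sfc_forced_lock_data() switches on engine->class and fills a small descriptor with the forced-lock, ack, usage and reset-bit values for either the VCS or the VECS flavour of the SFC interface (for VCS the lock-status register serves as both ack and usage register in the lines above). A hedged sketch of that per-class descriptor fill, with made-up register offsets and bit positions:

#include <stdio.h>

/* Hypothetical engine classes and register layout, for illustration only. */
enum engine_class { VIDEO_DECODE, VIDEO_ENHANCEMENT };

struct sfc_lock_data {
        unsigned int lock_reg;
        unsigned int ack_reg;
        unsigned int usage_reg;
        unsigned int reset_bit;
};

/* Pick the per-class SFC register set, as get_sfc_forced_lock_data() does. */
static int get_sfc_lock_data(enum engine_class class, unsigned int mmio_base,
                             unsigned int instance, struct sfc_lock_data *out)
{
        switch (class) {
        case VIDEO_DECODE:
                out->lock_reg  = mmio_base + 0x100; /* placeholder offsets */
                out->ack_reg   = mmio_base + 0x104;
                out->usage_reg = mmio_base + 0x104; /* same reg as ack for VCS */
                out->reset_bit = 1u << instance;    /* placeholder bit layout */
                return 0;
        case VIDEO_ENHANCEMENT:
                out->lock_reg  = mmio_base + 0x100;
                out->ack_reg   = mmio_base + 0x108;
                out->usage_reg = mmio_base + 0x10c;
                out->reset_bit = 1u << (instance + 16);
                return 0;
        default:
                return -1;      /* unexpected class, mirrors MISSING_CASE() */
        }
}

int main(void)
{
        struct sfc_lock_data d;

        if (!get_sfc_lock_data(VIDEO_DECODE, 0x1c0000, 0, &d))
                printf("lock_reg=%#x reset_bit=%#x\n", d.lock_reg, d.reset_bit);
        return 0;
}
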
411 static int gen11_lock_sfc(struct intel_engine_cs *engine, in gen11_lock_sfc() argument
415 struct intel_uncore *uncore = engine->uncore; in gen11_lock_sfc()
416 u8 vdbox_sfc_access = engine->gt->info.vdbox_sfc_access; in gen11_lock_sfc()
421 switch (engine->class) { in gen11_lock_sfc()
423 if ((BIT(engine->instance) & vdbox_sfc_access) == 0) in gen11_lock_sfc()
428 get_sfc_forced_lock_data(engine, &sfc_lock); in gen11_lock_sfc()
438 if (engine->class != VIDEO_DECODE_CLASS || in gen11_lock_sfc()
439 GRAPHICS_VER(engine->i915) != 12) in gen11_lock_sfc()
450 GEN12_HCP_SFC_LOCK_STATUS(engine->mmio_base)) & in gen11_lock_sfc()
454 paired_vecs = find_sfc_paired_vecs_engine(engine); in gen11_lock_sfc()
459 *unlock_mask |= engine->mask; in gen11_lock_sfc()
495 ENGINE_TRACE(engine, "Wait for SFC forced lock ack failed\n"); in gen11_lock_sfc()
503 static void gen11_unlock_sfc(struct intel_engine_cs *engine) in gen11_unlock_sfc() argument
505 struct intel_uncore *uncore = engine->uncore; in gen11_unlock_sfc()
506 u8 vdbox_sfc_access = engine->gt->info.vdbox_sfc_access; in gen11_unlock_sfc()
509 if (engine->class != VIDEO_DECODE_CLASS && in gen11_unlock_sfc()
510 engine->class != VIDEO_ENHANCEMENT_CLASS) in gen11_unlock_sfc()
513 if (engine->class == VIDEO_DECODE_CLASS && in gen11_unlock_sfc()
514 (BIT(engine->instance) & vdbox_sfc_access) == 0) in gen11_unlock_sfc()
517 get_sfc_forced_lock_data(engine, &sfc_lock); in gen11_unlock_sfc()
526 struct intel_engine_cs *engine; in __gen11_reset_engines() local
535 for_each_engine_masked(engine, gt, engine_mask, tmp) { in __gen11_reset_engines()
536 reset_mask |= engine->reset_domain; in __gen11_reset_engines()
537 ret = gen11_lock_sfc(engine, &reset_mask, &unlock_mask); in __gen11_reset_engines()
557 for_each_engine_masked(engine, gt, unlock_mask, tmp) in __gen11_reset_engines()
558 gen11_unlock_sfc(engine); in __gen11_reset_engines()
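
__gen11_reset_engines() builds reset_mask from each engine's reset_domain, lets gen11_lock_sfc() pull any shared SFC unit into the reset (adding the engine to unlock_mask when it actually took the forced lock, as line 459 shows), performs the hardware reset, and then unlocks only the SFCs recorded in unlock_mask. A minimal sketch of that lock / reset / unlock-what-was-locked shape, with hypothetical helpers:

#include <stdio.h>

/* Hypothetical, simplified engine descriptor. */
struct engine {
        unsigned int mask;
        unsigned int reset_domain;
        int uses_sfc;           /* does this engine share an SFC unit? */
};

/* Pretend to take the SFC forced lock; record the engine in *unlock_mask. */
static void lock_sfc(const struct engine *e, unsigned int *reset_mask,
                     unsigned int *unlock_mask)
{
        if (!e->uses_sfc)
                return;
        *reset_mask |= 1u << 8;         /* hypothetical SFC reset-domain bit */
        *unlock_mask |= e->mask;        /* remember to unlock it afterwards */
}

static void unlock_sfc(const struct engine *e)
{
        printf("unlock SFC paired with engine %#x\n", e->mask);
}

int main(void)
{
        const struct engine engines[] = {
                { .mask = 1u << 1, .reset_domain = 1u << 1, .uses_sfc = 1 },
                { .mask = 1u << 2, .reset_domain = 1u << 2, .uses_sfc = 0 },
        };
        unsigned int engine_mask = (1u << 1) | (1u << 2);
        unsigned int reset_mask = 0, unlock_mask = 0;

        for (int i = 0; i < 2; i++) {
                if (!(engines[i].mask & engine_mask))
                        continue;
                reset_mask |= engines[i].reset_domain;
                lock_sfc(&engines[i], &reset_mask, &unlock_mask);
        }

        printf("hardware reset of domains %#x\n", reset_mask);

        /* Release only what was actually locked, as the unlock_mask loop does. */
        for (int i = 0; i < 2; i++)
                if (engines[i].mask & unlock_mask)
                        unlock_sfc(&engines[i]);

        return 0;
}
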
563 static int gen8_engine_reset_prepare(struct intel_engine_cs *engine) in gen8_engine_reset_prepare() argument
565 struct intel_uncore *uncore = engine->uncore; in gen8_engine_reset_prepare()
566 const i915_reg_t reg = RING_RESET_CTL(engine->mmio_base); in gen8_engine_reset_prepare()
570 if (I915_SELFTEST_ONLY(should_fail(&engine->reset_timeout, 1))) in gen8_engine_reset_prepare()
596 gt_err(engine->gt, in gen8_engine_reset_prepare()
598 engine->name, request, in gen8_engine_reset_prepare()
604 static void gen8_engine_reset_cancel(struct intel_engine_cs *engine) in gen8_engine_reset_cancel() argument
606 intel_uncore_write_fw(engine->uncore, in gen8_engine_reset_cancel()
607 RING_RESET_CTL(engine->mmio_base), in gen8_engine_reset_cancel()
615 struct intel_engine_cs *engine; in gen8_reset_engines() local
623 for_each_engine_masked(engine, gt, engine_mask, tmp) { in gen8_reset_engines()
624 ret = gen8_engine_reset_prepare(engine); in gen8_reset_engines()
658 for_each_engine_masked(engine, gt, engine_mask, tmp) in gen8_reset_engines()
659 gen8_engine_reset_cancel(engine); in gen8_reset_engines()
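
The gen8 path brackets the hardware reset with a per-engine handshake: gen8_engine_reset_prepare() raises a request in RING_RESET_CTL and waits for the engine to report it is ready (the gt_err at line 596 is the timeout path), gen8_engine_reset_cancel() withdraws that request, and gen8_reset_engines() runs prepare on each engine, performs the reset, then cancels on every engine on the way out. A hedged sketch of that request/ack/cancel handshake against a fake register:

#include <stdio.h>

/* Fake RING_RESET_CTL-style register: a request bit and a ready (ack) bit. */
#define RESET_CTL_REQUEST       (1u << 0)
#define RESET_CTL_READY         (1u << 1)

static unsigned int fake_reset_ctl;     /* stands in for the MMIO register */

/* Ask the engine to quiesce and wait until it reports ready for reset. */
static int engine_reset_prepare(void)
{
        fake_reset_ctl |= RESET_CTL_REQUEST;
        fake_reset_ctl |= RESET_CTL_READY;      /* the hardware would set this */
        return (fake_reset_ctl & RESET_CTL_READY) ? 0 : -1;     /* timeout */
}

/* Withdraw the reset request once the reset is done (or abandoned). */
static void engine_reset_cancel(void)
{
        fake_reset_ctl &= ~RESET_CTL_REQUEST;
}

int main(void)
{
        int ret = engine_reset_prepare();

        if (!ret)
                printf("engine ready, performing reset\n");

        /* Cancel runs on the way out even if prepare failed. */
        engine_reset_cancel();
        return ret;
}
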
738 if (engine_mask == ALL_ENGINES && first && intel_engine_is_idle(gt->engine[GSC0])) { in wa_14015076503_start()
831 static void reset_prepare_engine(struct intel_engine_cs *engine) in reset_prepare_engine() argument
840 intel_uncore_forcewake_get(engine->uncore, FORCEWAKE_ALL); in reset_prepare_engine()
841 if (engine->reset.prepare) in reset_prepare_engine()
842 engine->reset.prepare(engine); in reset_prepare_engine()
878 struct intel_engine_cs *engine; in reset_prepare() local
894 for_each_engine(engine, gt, id) { in reset_prepare()
895 if (intel_engine_pm_get_if_awake(engine)) in reset_prepare()
896 awake |= engine->mask; in reset_prepare()
897 reset_prepare_engine(engine); in reset_prepare()
910 struct intel_engine_cs *engine; in gt_reset() local
923 for_each_engine(engine, gt, id) in gt_reset()
924 __intel_engine_reset(engine, stalled_mask & engine->mask); in gt_reset()
934 static void reset_finish_engine(struct intel_engine_cs *engine) in reset_finish_engine() argument
936 if (engine->reset.finish) in reset_finish_engine()
937 engine->reset.finish(engine); in reset_finish_engine()
938 intel_uncore_forcewake_put(engine->uncore, FORCEWAKE_ALL); in reset_finish_engine()
940 intel_engine_signal_breadcrumbs(engine); in reset_finish_engine()
945 struct intel_engine_cs *engine; in reset_finish() local
948 for_each_engine(engine, gt, id) { in reset_finish()
949 reset_finish_engine(engine); in reset_finish()
950 if (awake & engine->mask) in reset_finish()
951 intel_engine_pm_put(engine); in reset_finish()
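
reset_prepare() takes a runtime-PM reference on every engine that is awake (collecting those engines in an awake mask) and calls each engine's reset.prepare hook under forcewake; gt_reset() then resets every engine with its stalled bit; reset_finish() runs the reset.finish hooks and drops the PM references only for engines recorded in the awake mask. A small sketch of that prepare/finish bracket around the awake mask, again with hypothetical types:

#include <stdio.h>

struct engine {
        unsigned int mask;
        int awake;      /* hypothetical: would pm_get_if_awake() succeed? */
};

/* Prepare every engine; remember which ones we took a wakeref on. */
static unsigned int reset_prepare(const struct engine *engines, int count)
{
        unsigned int awake = 0;

        for (int i = 0; i < count; i++) {
                if (engines[i].awake)
                        awake |= engines[i].mask;       /* wakeref taken */
                printf("prepare %#x\n", engines[i].mask);
        }
        return awake;
}

/* Finish every engine; release wakerefs only where we actually took one. */
static void reset_finish(const struct engine *engines, int count,
                         unsigned int awake)
{
        for (int i = 0; i < count; i++) {
                printf("finish %#x\n", engines[i].mask);
                if (awake & engines[i].mask)
                        printf("  put wakeref %#x\n", engines[i].mask);
        }
}

int main(void)
{
        const struct engine engines[] = {
                { .mask = 1u << 0, .awake = 1 },
                { .mask = 1u << 1, .awake = 0 },
        };
        unsigned int awake = reset_prepare(engines, 2);

        /* ... the per-engine resets of gt_reset() would happen here ... */

        reset_finish(engines, 2, awake);
        return 0;
}
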
964 intel_engine_signal_breadcrumbs(request->engine); in nop_submit_request()
972 struct intel_engine_cs *engine; in __intel_gt_set_wedged() local
992 for_each_engine(engine, gt, id) in __intel_gt_set_wedged()
993 engine->submit_request = nop_submit_request; in __intel_gt_set_wedged()
1005 for_each_engine(engine, gt, id) in __intel_gt_set_wedged()
1006 if (engine->reset.cancel) in __intel_gt_set_wedged()
1007 engine->reset.cancel(engine); in __intel_gt_set_wedged()
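
When the GT is wedged, __intel_gt_set_wedged() points every engine's submit_request hook at nop_submit_request(), which never touches the hardware and instead completes the request straight away by signalling its breadcrumbs, and then asks each backend's reset.cancel hook to flush what was already in flight. A sketch of swapping the submission hook for a failing no-op:

#include <stdio.h>

struct request {
        int error;
        int completed;
};

struct engine {
        const char *name;
        void (*submit_request)(struct engine *e, struct request *rq);
};

/* Normal path: hand the request to the hardware (stubbed out here). */
static void real_submit(struct engine *e, struct request *rq)
{
        (void)rq;       /* a real backend would queue it */
        printf("%s: submitting to hardware\n", e->name);
}

/* Wedged path: never touch the hardware, fail and complete the request. */
static void nop_submit(struct engine *e, struct request *rq)
{
        rq->error = -5;         /* -EIO: the device is wedged */
        rq->completed = 1;      /* "signal the breadcrumbs" */
        printf("%s: request completed with error %d\n", e->name, rq->error);
}

int main(void)
{
        struct engine rcs = { .name = "rcs0", .submit_request = real_submit };
        struct request rq = { 0 };

        /* Wedge: from now on every submission is a no-op failure. */
        rcs.submit_request = nop_submit;
        rcs.submit_request(&rcs, &rq);
        return rq.completed ? 0 : 1;
}
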
1038 struct intel_engine_cs *engine; in intel_gt_set_wedged() local
1042 for_each_engine(engine, gt, id) { in intel_gt_set_wedged()
1043 if (intel_engine_is_idle(engine)) in intel_gt_set_wedged()
1046 intel_engine_dump(engine, &p, "%s\n", engine->name); in intel_gt_set_wedged()
1167 struct intel_engine_cs *engine; in resume() local
1171 for_each_engine(engine, gt, id) { in resume()
1172 ret = intel_engine_resume(engine); in resume()
1325 int intel_gt_reset_engine(struct intel_engine_cs *engine) in intel_gt_reset_engine() argument
1327 return __intel_gt_reset(engine->gt, engine->mask); in intel_gt_reset_engine()
1330 int __intel_engine_reset_bh(struct intel_engine_cs *engine, const char *msg) in __intel_engine_reset_bh() argument
1332 struct intel_gt *gt = engine->gt; in __intel_engine_reset_bh()
1335 ENGINE_TRACE(engine, "flags=%lx\n", gt->reset.flags); in __intel_engine_reset_bh()
1336 GEM_BUG_ON(!test_bit(I915_RESET_ENGINE + engine->id, &gt->reset.flags)); in __intel_engine_reset_bh()
1338 if (intel_engine_uses_guc(engine)) in __intel_engine_reset_bh()
1341 if (!intel_engine_pm_get_if_awake(engine)) in __intel_engine_reset_bh()
1344 reset_prepare_engine(engine); in __intel_engine_reset_bh()
1347 drm_notice(&engine->i915->drm, in __intel_engine_reset_bh()
1348 "Resetting %s for %s\n", engine->name, msg); in __intel_engine_reset_bh()
1349 i915_increase_reset_engine_count(&engine->i915->gpu_error, engine); in __intel_engine_reset_bh()
1351 ret = intel_gt_reset_engine(engine); in __intel_engine_reset_bh()
1354 ENGINE_TRACE(engine, "Failed to reset %s, err: %d\n", engine->name, ret); in __intel_engine_reset_bh()
1363 __intel_engine_reset(engine, true); in __intel_engine_reset_bh()
1370 ret = intel_engine_resume(engine); in __intel_engine_reset_bh()
1373 intel_engine_cancel_stop_cs(engine); in __intel_engine_reset_bh()
1374 reset_finish_engine(engine); in __intel_engine_reset_bh()
1375 intel_engine_pm_put_async(engine); in __intel_engine_reset_bh()
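
__intel_engine_reset_bh() is the single-engine path: it backs off when GuC owns engine resets, does nothing if the engine is not even awake, then runs reset_prepare_engine(), the hardware reset via intel_gt_reset_engine(), a request replay through __intel_engine_reset(), intel_engine_resume(), and finally unwinds with reset_finish_engine() and the PM put, taking the same unwind path when the hardware reset fails. A compressed sketch of that ordered sequence with goto-style unwinding and hypothetical step helpers:

#include <stdio.h>

/* Hypothetical step helpers; the int-returning ones yield 0 on success. */
static int  pm_get_if_awake(void)       { return 1; }   /* nonzero: got wakeref */
static void prepare(void)               { puts("prepare"); }
static int  hw_reset(void)              { puts("hw reset"); return 0; }
static void replay_requests(void)       { puts("replay requests"); }
static int  resume_ring(void)           { puts("resume"); return 0; }
static void finish(void)                { puts("finish"); }
static void pm_put(void)                { puts("put wakeref"); }

static int engine_reset(void)
{
        int ret;

        if (!pm_get_if_awake())
                return 0;       /* nothing to do: the engine is parked */

        prepare();

        ret = hw_reset();
        if (ret)
                goto out;       /* leave it for a full GT reset */

        /* On success, put the engine back into a runnable state. */
        replay_requests();
        ret = resume_ring();

out:
        finish();
        pm_put();
        return ret;
}

int main(void)
{
        return engine_reset();
}
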
1392 int intel_engine_reset(struct intel_engine_cs *engine, const char *msg) in intel_engine_reset() argument
1397 err = __intel_engine_reset_bh(engine, msg); in intel_engine_reset()
1473 struct intel_engine_cs *engine; in intel_gt_handle_error() local
1512 for_each_engine_masked(engine, gt, engine_mask, tmp) { in intel_gt_handle_error()
1514 if (test_and_set_bit(I915_RESET_ENGINE + engine->id, in intel_gt_handle_error()
1518 if (__intel_engine_reset_bh(engine, msg) == 0) in intel_gt_handle_error()
1519 engine_mask &= ~engine->mask; in intel_gt_handle_error()
1521 clear_and_wake_up_bit(I915_RESET_ENGINE + engine->id, in intel_gt_handle_error()
1545 for_each_engine(engine, gt, tmp) { in intel_gt_handle_error()
1546 while (test_and_set_bit(I915_RESET_ENGINE + engine->id, in intel_gt_handle_error()
1549 I915_RESET_ENGINE + engine->id, in intel_gt_handle_error()
1560 for_each_engine(engine, gt, tmp) in intel_gt_handle_error()
1561 clear_bit_unlock(I915_RESET_ENGINE + engine->id, in intel_gt_handle_error()
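
intel_gt_handle_error() first tries to recover each guilty engine on its own: for every engine in engine_mask it atomically claims the per-engine I915_RESET_ENGINE flag bit, attempts __intel_engine_reset_bh(), clears the engine from engine_mask on success, and releases the bit; only the engines still left in engine_mask escalate to the full GT reset, for which every per-engine bit is taken first and released afterwards. A hedged sketch of that claim-bit / try / clear pattern with plain (non-atomic) flags:

#include <stdio.h>

#define NUM_ENGINES 3

/* Hypothetical per-engine "reset in progress" flag bits (non-atomic here). */
static unsigned int reset_flags;

static int test_and_set_flag(int id)
{
        int was_set = !!(reset_flags & (1u << id));

        reset_flags |= 1u << id;
        return was_set;
}

static void clear_flag(int id)
{
        reset_flags &= ~(1u << id);
}

/* Pretend per-engine reset: succeeds for every engine except engine 2. */
static int try_engine_reset(int id)
{
        return id == 2 ? -1 : 0;
}

int main(void)
{
        unsigned int engine_mask = (1u << 0) | (1u << 1) | (1u << 2);

        for (int id = 0; id < NUM_ENGINES; id++) {
                if (!(engine_mask & (1u << id)))
                        continue;
                if (test_and_set_flag(id))
                        continue;       /* someone else is resetting it */
                if (try_engine_reset(id) == 0)
                        engine_mask &= ~(1u << id);     /* recovered */
                clear_flag(id);
        }

        if (engine_mask)
                printf("escalating to full GT reset for %#x\n", engine_mask);
        return 0;
}
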