/drivers/gpu/drm/i915/
i915_scheduler.c
      67  assert_priolists(sched_engine);   in i915_sched_lookup_priolist()
     144  while (locked != (sched_engine = READ_ONCE(rq->engine)->sched_engine)) {   in lock_sched_engine()
     148  locked = sched_engine;   in lock_sched_engine()
     234  sched_engine = node_to_request(node)->engine->sched_engine;   in __i915_schedule()
     238  sched_engine = lock_sched_engine(node, sched_engine, &cache);   in __i915_schedule()
     246  sched_engine = lock_sched_engine(node, sched_engine, &cache);   in __i915_schedule()
     254  sched_engine);   in __i915_schedule()
     443  kfree(sched_engine);   in default_destroy()
     456  sched_engine = kzalloc(sizeof(*sched_engine), GFP_KERNEL);   in i915_sched_engine_create()
     457  if (!sched_engine)   in i915_sched_engine_create()
     [all …]
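The lock_sched_engine() hits at lines 144 and 148 outline a lock-chasing idiom: rq->engine is only stable while its sched_engine->lock is held (a request on a virtual engine may migrate), so the scheduler keeps re-reading it and swapping locks until the two agree. A minimal sketch of that loop, with the sched_cache bookkeeping and GEM_BUG_ON asserts of the real function left out:

```c
/* Sketch of the lock-chasing loop suggested by the lock_sched_engine()
 * hits above; not the verbatim upstream function. */
static struct i915_sched_engine *
lock_sched_engine(struct i915_sched_node *node,
		  struct i915_sched_engine *locked)
{
	const struct i915_request *rq = node_to_request(node);
	struct i915_sched_engine *sched_engine;

	/*
	 * rq->engine is only stable under its sched_engine->lock, so keep
	 * swapping locks until the engine we locked is still the engine
	 * the request belongs to.
	 */
	while (locked != (sched_engine = READ_ONCE(rq->engine)->sched_engine)) {
		spin_unlock(&locked->lock);
		spin_lock(&sched_engine->lock);
		locked = sched_engine;
	}

	return locked;
}
```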
i915_scheduler.h
      56  i915_sched_engine_get(struct i915_sched_engine *sched_engine)   in i915_sched_engine_get() (argument)
      58  kref_get(&sched_engine->ref);   in i915_sched_engine_get()
      59  return sched_engine;   in i915_sched_engine_get()
      63  i915_sched_engine_put(struct i915_sched_engine *sched_engine)   in i915_sched_engine_put() (argument)
      65  kref_put(&sched_engine->ref, sched_engine->destroy);   in i915_sched_engine_put()
      71  return RB_EMPTY_ROOT(&sched_engine->queue.rb_root);   in i915_sched_engine_is_empty()
      77  if (i915_sched_engine_is_empty(sched_engine))   in i915_sched_engine_reset_on_empty()
      78  sched_engine->no_priolist = false;   in i915_sched_engine_reset_on_empty()
      85  tasklet_lock(&sched_engine->tasklet);   in i915_sched_engine_active_lock_bh()
      91  tasklet_unlock(&sched_engine->tasklet);   in i915_sched_engine_active_unlock_bh()
      [all …]
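Those header fragments are nearly complete on their own; reassembled, the inline lifetime and queue-state helpers look roughly as below. The field names (ref, destroy, queue.rb_root, no_priolist, tasklet) come straight from the snippets, while the local_bh_disable()/enable() pairing around the tasklet lock is an inference and is marked as such.

```c
/* Reassembled from the i915_scheduler.h fragments above; assumes the
 * struct i915_sched_engine definition from i915_scheduler_types.h and
 * <linux/kref.h>, <linux/rbtree.h>, <linux/interrupt.h>. */

static inline struct i915_sched_engine *
i915_sched_engine_get(struct i915_sched_engine *sched_engine)
{
	kref_get(&sched_engine->ref);
	return sched_engine;
}

static inline void
i915_sched_engine_put(struct i915_sched_engine *sched_engine)
{
	kref_put(&sched_engine->ref, sched_engine->destroy);
}

static inline bool
i915_sched_engine_is_empty(struct i915_sched_engine *sched_engine)
{
	return RB_EMPTY_ROOT(&sched_engine->queue.rb_root);
}

static inline void
i915_sched_engine_reset_on_empty(struct i915_sched_engine *sched_engine)
{
	/* once the priority rbtree drains, allow the priolist cache to rebuild */
	if (i915_sched_engine_is_empty(sched_engine))
		sched_engine->no_priolist = false;
}

static inline void
i915_sched_engine_active_lock_bh(struct i915_sched_engine *sched_engine)
{
	local_bh_disable(); /* inferred: keep the submission tasklet off this CPU */
	tasklet_lock(&sched_engine->tasklet);
}

static inline void
i915_sched_engine_active_unlock_bh(struct i915_sched_engine *sched_engine)
{
	tasklet_unlock(&sched_engine->tasklet);
	local_bh_enable(); /* inferred counterpart of the disable above */
}
```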
i915_request.c
     258  spin_lock_irq(&locked->sched_engine->lock);   in i915_request_active_engine()
     260  spin_unlock(&locked->sched_engine->lock);   in i915_request_active_engine()
     262  spin_lock(&locked->sched_engine->lock);   in i915_request_active_engine()
     271  spin_unlock_irq(&locked->sched_engine->lock);   in i915_request_active_engine()
     612  lockdep_assert_held(&engine->sched_engine->lock);   in __i915_request_submit()
    1322  if (to->engine->sched_engine->schedule) {   in __i915_request_await_execution()
    1480  if (to->engine->sched_engine->schedule) {   in i915_request_await_request()
    1649  if (rq->engine->sched_engine->schedule)   in __i915_request_ensure_parallel_ordering()
    1704  if (rq->engine->sched_engine->schedule)   in __i915_request_ensure_ordering()
    1836  if (attr && rq->engine->sched_engine->schedule)   in __i915_request_queue()
    [all …]
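Two patterns recur in these hits: __i915_request_submit() and the await/queue paths check the optional schedule() callback before calling it, and i915_request_active_engine() (lines 258 to 271) performs the same engine-chasing dance as lock_sched_engine(), this time with interrupts disabled at the outer level. A simplified, hypothetically named skeleton of the latter:

```c
/* Skeleton of the locking seen in i915_request_active_engine() above;
 * the checks on the request itself are elided. */
static bool request_engine_skeleton(struct i915_request *rq,
				    struct intel_engine_cs **active)
{
	struct intel_engine_cs *engine, *locked;
	bool ret = false;

	locked = READ_ONCE(rq->engine);
	spin_lock_irq(&locked->sched_engine->lock);		/* line 258 */
	while (unlikely(locked != (engine = READ_ONCE(rq->engine)))) {
		spin_unlock(&locked->sched_engine->lock);	/* line 260 */
		locked = engine;
		spin_lock(&locked->sched_engine->lock);		/* line 262 */
	}

	/* ... with the final engine locked, report whether rq is inflight ... */

	spin_unlock_irq(&locked->sched_engine->lock);		/* line 271 */
	return ret;
}
```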
i915_scheduler_types.h
     174  bool (*disabled)(struct i915_sched_engine *sched_engine);
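Only the disabled() vfunc shows up here, but the members referenced throughout this listing are enough for a partial picture of the structure. The sketch below collects just those fields; the real i915_scheduler_types.h declares more, in a different order and with kerneldoc, and the exact field types are inferred from how they are used above.

```c
/* Partial reconstruction of struct i915_sched_engine, limited to members
 * seen in this listing. Assumes <linux/kref.h>, <linux/spinlock.h>,
 * <linux/list.h>, <linux/rbtree.h>, <linux/interrupt.h> and the i915
 * i915_request / i915_sched_attr types. */
struct i915_sched_engine {
	struct kref ref;		/* i915_sched_engine_get()/put() */
	spinlock_t lock;		/* protects requests and queue */
	struct list_head requests;	/* submitted, in-flight requests */
	struct rb_root_cached queue;	/* priority-sorted submission queue */
	int queue_priority_hint;	/* best priority left in the queue */
	bool no_priolist;		/* cleared once the queue drains */
	struct tasklet_struct tasklet;	/* backend submission tasklet */
	void *private_data;		/* owning engine, or the GuC */

	void (*destroy)(struct kref *kref);
	bool (*disabled)(struct i915_sched_engine *sched_engine);
	void (*schedule)(struct i915_request *request,
			 const struct i915_sched_attr *attr);
	void (*retire_inflight_request_prio)(struct i915_request *rq);
};
```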
i915_request.h
     689  lockdep_is_held(&rq->engine->sched_engine->lock));   in i915_request_active_timeline()
/drivers/gpu/drm/i915/gt/
intel_execlists_submission.c
     376  &engine->sched_engine->requests,   in __unwind_incomplete_requests()
    1267  struct i915_sched_engine * const sched_engine = engine->sched_engine;   in execlists_dequeue() (local)
    1297  spin_lock(&sched_engine->lock);   in execlists_dequeue()
    1583  sched_engine->queue_priority_hint = queue_prio(sched_engine);   in execlists_dequeue()
    1585  spin_unlock(&sched_engine->lock);   in execlists_dequeue()
    2554  struct i915_sched_engine *sched_engine = engine->sched_engine;   in submit_queue() (local)
    3141  struct i915_sched_engine * const sched_engine = engine->sched_engine;   in execlists_reset_cancel() (local)
    3315  struct i915_sched_engine *sched_engine = engine->sched_engine;   in kick_execlists() (local)
    3649  if (ve->base.sched_engine)   in rcu_virtual_context_destroy()
    3965  if (!ve->base.sched_engine) {   in execlists_create_virtual()
    [all …]
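The execlists_dequeue() hits show the backend's locking discipline: the whole queue walk runs under sched_engine->lock, and queue_priority_hint is refreshed just before the lock is dropped so submit_queue() and kick_execlists() can cheaply decide whether a new request needs to reschedule the tasklet. A rough skeleton of that shape, not the actual function:

```c
static void execlists_dequeue_skeleton(struct intel_engine_cs *engine)
{
	struct i915_sched_engine * const sched_engine = engine->sched_engine;

	spin_lock(&sched_engine->lock);				/* line 1297 */

	/*
	 * ... walk the priority rbtree, promote runnable requests into the
	 * ELSP ports, unwinding incomplete requests on preemption ...
	 */

	/* best priority still waiting; used to gate future tasklet kicks */
	sched_engine->queue_priority_hint = queue_prio(sched_engine);	/* line 1583 */

	spin_unlock(&sched_engine->lock);			/* line 1585 */
}
```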
mock_engine.c
     258  lockdep_assert_held(&rq->engine->sched_engine->lock);   in mock_add_to_engine()
     274  spin_lock_irq(&locked->sched_engine->lock);   in mock_remove_from_engine()
     276  spin_unlock(&locked->sched_engine->lock);   in mock_remove_from_engine()
     277  spin_lock(&engine->sched_engine->lock);   in mock_remove_from_engine()
     281  spin_unlock_irq(&locked->sched_engine->lock);   in mock_remove_from_engine()
     302  spin_lock_irqsave(&engine->sched_engine->lock, flags);   in mock_reset_cancel()
     318  spin_unlock_irqrestore(&engine->sched_engine->lock, flags);   in mock_reset_cancel()
     332  i915_sched_engine_put(engine->sched_engine);   in mock_engine_release()
     400  if (!engine->sched_engine)   in mock_engine_init()
     402  engine->sched_engine->private_data = engine;   in mock_engine_init()
     [all …]
intel_ring_submission.c
     394  spin_lock_irqsave(&engine->sched_engine->lock, flags);   in reset_rewind()
     396  list_for_each_entry(pos, &engine->sched_engine->requests, sched.link) {   in reset_rewind()
     451  spin_unlock_irqrestore(&engine->sched_engine->lock, flags);   in reset_rewind()
     463  spin_lock_irqsave(&engine->sched_engine->lock, flags);   in reset_cancel()
     472  spin_unlock_irqrestore(&engine->sched_engine->lock, flags);   in reset_cancel()
     651  lockdep_assert_held(&engine->sched_engine->lock);   in ring_context_revoke()
     652  list_for_each_entry_continue(rq, &engine->sched_engine->requests,   in ring_context_revoke()
    1137  lockdep_assert_held(&rq->engine->sched_engine->lock);   in add_to_engine()
    1138  list_move_tail(&rq->sched.link, &rq->engine->sched_engine->requests);   in add_to_engine()
    1143  spin_lock_irq(&rq->engine->sched_engine->lock);   in remove_from_engine()
    [all …]
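The add_to_engine()/remove_from_engine() hits (lines 1137 to 1143) show the two sides of request tracking on the ring backend: the add path is called with sched_engine->lock already held and just moves the request onto sched_engine->requests, while the remove path takes the lock itself. A small sketch; what the remove path does under the lock beyond the list removal is an assumption:

```c
static void add_to_engine_sketch(struct i915_request *rq)
{
	lockdep_assert_held(&rq->engine->sched_engine->lock);	/* line 1137 */
	list_move_tail(&rq->sched.link,
		       &rq->engine->sched_engine->requests);	/* line 1138 */
}

static void remove_from_engine_sketch(struct i915_request *rq)
{
	spin_lock_irq(&rq->engine->sched_engine->lock);		/* line 1143 */
	list_del_init(&rq->sched.link);	/* assumed; not shown in the hits */
	spin_unlock_irq(&rq->engine->sched_engine->lock);
}
```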
intel_engine_cs.c
    1266  if (!engine->sched_engine) {   in engine_setup_common()
    1270  engine->sched_engine->private_data = engine;   in engine_setup_common()
    1294  i915_sched_engine_put(engine->sched_engine);   in engine_setup_common()
    1335  spin_lock_irq(&engine->sched_engine->lock);   in measure_breadcrumb_dw()
    1339  spin_unlock_irq(&engine->sched_engine->lock);   in measure_breadcrumb_dw()
    1542  GEM_BUG_ON(!list_empty(&engine->sched_engine->requests));   in intel_engine_cleanup_common()
    1544  i915_sched_engine_put(engine->sched_engine);   in intel_engine_cleanup_common()
    1908  if (!i915_sched_engine_is_empty(engine->sched_engine))   in intel_engine_is_idle()
    2175  i915_sched_engine_active_lock_bh(engine->sched_engine);   in intel_engine_print_registers()
    2485  lockdep_assert_held(&engine->sched_engine->lock);   in engine_execlist_find_hung_request()
    [all …]
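engine_setup_common() allocates the sched_engine, stores the owning engine in private_data so backend callbacks can get back to it, and drops the reference on the error path; intel_engine_cleanup_common() asserts the request list is empty before dropping the final reference. A sketch of the setup side (the i915_sched_engine_create() argument is an assumption, and the remaining setup steps are summarised as a comment):

```c
static int engine_setup_common_sketch(struct intel_engine_cs *engine)
{
	/* subclass argument assumed; only the NULL check is shown above */
	engine->sched_engine = i915_sched_engine_create(ENGINE_PHYSICAL);
	if (!engine->sched_engine)				/* line 1266 */
		return -ENOMEM;
	engine->sched_engine->private_data = engine;		/* line 1270 */

	/*
	 * ... remaining setup (breadcrumbs, timelines, ...); on failure
	 * that path unwinds with:
	 *
	 *	i915_sched_engine_put(engine->sched_engine);	// line 1294
	 */

	return 0;
}
```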
intel_engine_heartbeat.c
     160  if (i915_sched_engine_disabled(engine->sched_engine)) {   in heartbeat()
     184  } else if (engine->sched_engine->schedule &&   in heartbeat()
     199  engine->sched_engine->schedule(rq, &attr);   in heartbeat()
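heartbeat() first bails out when the scheduler is disabled (the disabled() vfunc seen in i915_scheduler_types.h), and otherwise escalates a stuck heartbeat request through the optional schedule() callback. A compressed sketch; the actual function escalates in several stages, and the priority value here is purely illustrative:

```c
static void heartbeat_sketch(struct intel_engine_cs *engine,
			     struct i915_request *rq)
{
	struct i915_sched_attr attr = {
		.priority = I915_PRIORITY_BARRIER,	/* illustrative value */
	};

	if (i915_sched_engine_disabled(engine->sched_engine))	/* line 160 */
		return;

	if (engine->sched_engine->schedule)			/* line 184 */
		engine->sched_engine->schedule(rq, &attr);	/* line 199 */
}
```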
selftest_execlists.c
      47  tasklet_hi_schedule(&engine->sched_engine->tasklet);   in wait_for_submit()
     275  engine->sched_engine->schedule(rq[1], &attr);   in live_unlite_restore()
     630  engine->sched_engine->tasklet.callback(&engine->sched_engine->tasklet);   in live_hold_reset()
     919  engine->sched_engine->schedule(rq, &attr);   in release_queue()
    1344  engine->sched_engine->schedule(rq, &attr);   in live_timeslice_queue()
    1890  engine->sched_engine->schedule(rq, &attr);   in live_late_preempt()
    2641  engine->sched_engine->schedule(rq, &attr);   in live_chain_preempt()
    2996  engine->sched_engine->schedule(rq, &attr);   in live_preempt_gang()
    3238  engine->sched_engine->schedule(rq, &attr);   in preempt_user()
    4380  engine->sched_engine->tasklet.callback(&engine->sched_engine->tasklet);   in reset_virtual_engine()
    [all …]
intel_breadcrumbs.c
     254  if (rq->engine->sched_engine->retire_inflight_request_prio)   in signal_irq_work()
     255  rq->engine->sched_engine->retire_inflight_request_prio(rq);   in signal_irq_work()
intel_engine_types.h
     422  struct i915_sched_engine *sched_engine;   (member)
intel_engine_user.c
     114  if (engine->sched_engine->schedule)   in set_scheduler_caps()
selftest_reset.c
     330  struct tasklet_struct *t = &engine->sched_engine->tasklet;   in igt_atomic_engine_reset()
selftest_lrc.c
      58  tasklet_hi_schedule(&engine->sched_engine->tasklet);   in wait_for_submit()
    1758  tasklet_disable(&engine->sched_engine->tasklet);   in garbage_reset()
    1763  tasklet_enable(&engine->sched_engine->tasklet);   in garbage_reset()
selftest_hangcheck.c
     929  if (engine->sched_engine->schedule && arg->flags & TEST_PRIORITY) {   in active_engine()
     934  engine->sched_engine->schedule(rq[idx], &attr);   in active_engine()
    1880  struct tasklet_struct * const t = &engine->sched_engine->tasklet;   in __igt_atomic_reset_engine()
/drivers/gpu/drm/i915/gt/uc/
intel_guc_submission.c
     929  struct i915_sched_engine * const sched_engine = guc->sched_engine;   in guc_dequeue_one_context() (local)
    1673  struct i915_sched_engine * const sched_engine = guc->sched_engine;   in submission_disabled() (local)
    1682  struct i915_sched_engine * const sched_engine = guc->sched_engine;   in disable_submission() (local)
    1693  struct i915_sched_engine * const sched_engine = guc->sched_engine;   in enable_submission() (local)
    1711  struct i915_sched_engine * const sched_engine = guc->sched_engine;   in guc_flush_submissions() (local)
    2240  struct i915_sched_engine *sched_engine = rq->engine->sched_engine;   in need_tasklet() (local)
    2250  struct i915_sched_engine *sched_engine = rq->engine->sched_engine;   in guc_submit_request() (local)
    4658  engine->sched_engine = i915_sched_engine_get(guc->sched_engine);   in intel_guc_submission_setup()
    5223  struct i915_sched_engine *sched_engine = ce->engine->sched_engine;   in guc_context_replay() (local)
    5496  struct i915_sched_engine *sched_engine = guc->sched_engine;   in intel_guc_submission_print_info() (local)
    [all …]
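The GuC backend differs from execlists in that there is one i915_sched_engine for the whole GT, hanging off struct intel_guc (the member at intel_guc.h:49 below); line 4658 shows each engine taking a reference to it rather than creating its own. A sketch of that wiring, with the create() argument and the callback installation treated as assumptions:

```c
static int guc_sched_engine_wiring_sketch(struct intel_engine_cs *engine)
{
	struct intel_guc *guc = &engine->gt->uc.guc;

	if (!guc->sched_engine) {
		/* subclass argument assumed */
		guc->sched_engine = i915_sched_engine_create(ENGINE_VIRTUAL);
		if (!guc->sched_engine)
			return -ENOMEM;
		/* callbacks (schedule, disabled, tasklet, ...) installed here */
		guc->sched_engine->private_data = guc;
	}

	/* every engine shares the GuC's scheduler, pinned by a reference */
	engine->sched_engine = i915_sched_engine_get(guc->sched_engine);	/* line 4658 */
	return 0;
}
```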
intel_guc.h
      49  struct i915_sched_engine *sched_engine;   (member)
/drivers/gpu/drm/i915/gem/
i915_gem_wait.c
     104  if (engine->sched_engine->schedule)   in fence_set_priority()
     105  engine->sched_engine->schedule(rq, attr);   in fence_set_priority()
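At the GEM level, fence_set_priority() is where a waiter's priority reaches the scheduler: if the fence resolves to an i915 request and the backend installed a schedule() callback, the attribute is simply forwarded. A minimal sketch built around the two lines shown; the guard clauses are assumptions:

```c
static void fence_set_priority_sketch(struct dma_fence *fence,
				      const struct i915_sched_attr *attr)
{
	struct i915_request *rq;
	struct intel_engine_cs *engine;

	/* assumed guards: only live i915 fences map onto requests */
	if (dma_fence_is_signaled(fence) || !dma_fence_is_i915(fence))
		return;

	rq = to_request(fence);
	engine = rq->engine;

	if (engine->sched_engine->schedule)		/* line 104 */
		engine->sched_engine->schedule(rq, attr);	/* line 105 */
}
```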