Searched refs:gt (Results 1 – 25 of 389) sorted by relevance

/drivers/gpu/drm/xe/
xe_gt_sriov_pf_control.c
216 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_pick_vf_control()
217 xe_gt_assert(gt, vfid <= xe_gt_sriov_pf_get_totalvfs(gt)); in pf_pick_vf_control()
343 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_queue_vf()
536 pf_queue_vf(gt, vfid); in pf_enter_vf_pause_send_pause()
696 pf_queue_vf(gt, vfid); in pf_enter_vf_resume_send_resume()
1068 if (xe_tile_is_root(gt->tile) && xe_gt_is_main_type(gt)) in pf_exit_vf_flr_reset_data()
1416 xe_gt_assert(gt, vfid <= xe_gt_sriov_pf_get_totalvfs(gt)); in pf_worker_find_work()
1429 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in control_worker_func()
1435 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_stop_worker()
1443 pf_stop_worker(gt); in control_fini_action()
[all …]
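
Nearly every hit in this file is the same guard pattern: PF-only entry points assert both the SR-IOV mode and the VF identifier range before touching per-VF state. A minimal sketch of that pattern, assuming xe's internal headers; the helper name and the metadata type are stand-ins, and vfid 0 addressing the PF itself is suggested only by the "1 + num_vfs" allocation in xe_gt_sriov_pf.c below:

    /* Minimal sketch of the guard pattern above; assumes xe internals.
     * pf_pick_vf_data() and struct xe_gt_sriov_metadata are stand-ins. */
    static struct xe_gt_sriov_metadata *pf_pick_vf_data(struct xe_gt *gt, unsigned int vfid)
    {
        xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));    /* PF-only code path */
        xe_gt_assert(gt, vfid <= xe_gt_sriov_pf_get_totalvfs(gt)); /* <=, as vfid 0 is the PF */

        return &gt->sriov.pf.vfs[vfid];  /* per-VF metadata array */
    }
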
xe_gt.c
80 if (!gt) in xe_gt_alloc()
91 return gt; in xe_gt_alloc()
400 xe_reg_sr_init(&gt->reg_sr, "GT", gt_to_xe(gt)); in xe_gt_init_early()
412 xe_force_wake_init_gt(gt, gt_to_fw(gt)); in xe_gt_init_early()
529 xe_reg_sr_apply_mmio(&gt->reg_sr, gt); in gt_init_with_all_forcewake()
621 gt->ring_ops[i] = xe_ring_ops_get(gt, i); in xe_gt_init()
649 xe_force_wake_init_engines(gt, gt_to_fw(gt)); in xe_gt_init()
687 gt->mmio.sriov_vf_gt = gt; in xe_gt_mmio_init()
707 xe_gt_assert(gt, (gt->user_engines.mask | gt->info.engine_mask) in xe_gt_record_user_engines()
764 xe_reg_sr_apply_mmio(&gt->reg_sr, gt); in do_gt_restart()
[all …]
xe_gt_sriov_pf_policy.c
141 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_bulk_reset_sched_priority()
152 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_provision_sched_if_idle()
167 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_reprovision_sched_if_idle()
175 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_sanitize_sched_if_idle()
213 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in xe_gt_sriov_pf_policy_get_sched_if_idle()
224 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_provision_reset_engine()
233 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_reprovision_reset_engine()
241 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_sanitize_reset_engine()
279 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in xe_gt_sriov_pf_policy_get_reset_engine()
290 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_provision_sample_period()
[all …]
xe_gt_sriov_pf_config.c
233 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_pick_vf_config()
399 xe_gt_assert(gt, xe_gt_is_main_type(gt)); in pf_get_spare_ggtt()
400 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_get_spare_ggtt()
411 xe_gt_assert(gt, xe_gt_is_main_type(gt)); in pf_set_spare_ggtt()
466 xe_gt_assert(gt, xe_gt_is_main_type(gt)); in pf_provision_vf_ggtt()
515 xe_gt_assert(gt, xe_gt_is_main_type(gt)); in pf_get_vf_config_ggtt()
583 xe_gt_assert(gt, xe_gt_is_main_type(gt)); in xe_gt_sriov_pf_config_set_ggtt()
645 xe_gt_assert(gt, xe_gt_is_main_type(gt)); in xe_gt_sriov_pf_config_bulk_set_ggtt()
716 xe_gt_assert(gt, xe_gt_is_main_type(gt)); in xe_gt_sriov_pf_config_set_fair_ggtt()
1438 xe_gt_assert(gt, IS_DGFX(gt_to_xe(gt))); in pf_release_vf_config_lmem()
[all …]
xe_gt_mcr.c
390 gt->steering_dss_per_grp = dss_per_group(gt); in init_steering_dss()
720 xe_gt_assert(gt, !IS_SRIOV_VF(gt_to_xe(gt))); in xe_gt_mcr_unicast_read_any()
726 mcr_lock(gt); in xe_gt_mcr_unicast_read_any()
753 xe_gt_assert(gt, !IS_SRIOV_VF(gt_to_xe(gt))); in xe_gt_mcr_unicast_read()
755 mcr_lock(gt); in xe_gt_mcr_unicast_read()
757 mcr_unlock(gt); in xe_gt_mcr_unicast_read()
776 xe_gt_assert(gt, !IS_SRIOV_VF(gt_to_xe(gt))); in xe_gt_mcr_unicast_write()
778 mcr_lock(gt); in xe_gt_mcr_unicast_write()
780 mcr_unlock(gt); in xe_gt_mcr_unicast_write()
796 xe_gt_assert(gt, !IS_SRIOV_VF(gt_to_xe(gt))); in xe_gt_mcr_multicast_write()
[all …]
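
The pattern in this file: steered (unicast) MCR access is asserted to never run on a VF, which has no steering control, and every access is bracketed by the file-local MCR lock. A hedged sketch; rw_steered() is a hypothetical stand-in for the actual steered read, while mcr_lock()/mcr_unlock() are the helpers named in the hits:

    /* Sketch of the lock bracket above; rw_steered() is hypothetical. */
    static u32 unicast_read_sketch(struct xe_gt *gt, struct xe_reg_mcr reg_mcr,
                                   int group, int instance)
    {
        u32 val;

        xe_gt_assert(gt, !IS_SRIOV_VF(gt_to_xe(gt))); /* VFs cannot steer MCR registers */

        mcr_lock(gt);
        val = rw_steered(gt, reg_mcr, group, instance); /* hypothetical steered access */
        mcr_unlock(gt);

        return val;
    }
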
xe_gt_sriov_pf.c
41 gt->sriov.pf.vfs = drmm_kcalloc(&gt_to_xe(gt)->drm, 1 + num_vfs, in pf_alloc_metadata()
43 if (!gt->sriov.pf.vfs) in pf_alloc_metadata()
51 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_init_workers()
57 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_fini_workers()
85 pf_init_workers(gt); in xe_gt_sriov_pf_init_early()
94 pf_fini_workers(gt); in pf_fini_action()
201 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in xe_gt_sriov_pf_sanitize_hw()
208 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_cancel_restart()
239 struct xe_gt *gt = container_of(w, typeof(*gt), sriov.pf.workers.restart); in pf_worker_restart_func() local
241 pf_restart(gt); in pf_worker_restart_func()
[all …]
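
Source line 239 shows the standard kernel idiom for deferred work: the worker receives only a pointer to its embedded work item, and container_of() recovers the enclosing struct xe_gt. A stand-alone illustration with simplified stand-in types (plain C, compiles and runs):

    #include <stddef.h>
    #include <stdio.h>

    /* Toy container_of(): same arithmetic as the kernel macro. */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct work { int pending; };

    struct gt {                                   /* stand-in for struct xe_gt */
        int id;
        struct { struct work restart; } workers;  /* mirrors sriov.pf.workers.restart */
    };

    static void restart_worker(struct work *w)
    {
        struct gt *gt = container_of(w, struct gt, workers.restart);
        printf("restart worker runs for gt%d\n", gt->id);
    }

    int main(void)
    {
        struct gt g = { .id = 0 };
        restart_worker(&g.workers.restart); /* the scheduler passes only the work pointer */
        return 0;
    }
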
xe_gt_debugfs.c
86 return print(gt, &p); in xe_gt_debugfs_simple_show()
104 for_each_hw_engine(hwe, gt, id) in hw_engines()
140 xe_gt_topology_dump(gt, p); in topology()
149 xe_gt_mcr_steering_dump(gt, p); in steering()
198 xe_wa_dump(gt, p); in workarounds()
207 xe_tuning_dump(gt, p); in tunings()
216 xe_pat_dump(gt, p); in pat()
225 xe_mocs_dump(gt, p); in mocs()
327 call(gt); in write_to_gt_call()
336 xe_gt_reset_async(gt); in force_reset()
[all …]
xe_gt_sriov_pf_migration.c
118 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_migration_supported()
124 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_migration_mutex()
131 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_pick_vf_snapshot()
132 xe_gt_assert(gt, vfid <= xe_sriov_pf_get_totalvfs(gt_to_xe(gt))); in pf_pick_vf_snapshot()
237 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in xe_gt_sriov_pf_migration_save_guc_state()
239 xe_gt_assert(gt, vfid <= xe_sriov_pf_get_totalvfs(gt_to_xe(gt))); in xe_gt_sriov_pf_migration_save_guc_state()
286 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in xe_gt_sriov_pf_migration_restore_guc_state()
288 xe_gt_assert(gt, vfid <= xe_sriov_pf_get_totalvfs(gt_to_xe(gt))); in xe_gt_sriov_pf_migration_restore_guc_state()
322 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in xe_gt_sriov_pf_migration_read_guc_state()
324 xe_gt_assert(gt, vfid <= xe_sriov_pf_get_totalvfs(gt_to_xe(gt))); in xe_gt_sriov_pf_migration_read_guc_state()
[all …]
xe_gt_sriov_vf.c
167 xe_gt_assert(gt, IS_SRIOV_VF(gt_to_xe(gt))); in vf_handshake_with_guc()
284 xe_gt_assert(gt, IS_SRIOV_VF(gt_to_xe(gt))); in xe_gt_sriov_vf_guc_versions()
320 xe_gt_assert(gt, IS_SRIOV_VF(gt_to_xe(gt))); in xe_gt_sriov_vf_notify_resfix_done()
418 xe_gt_assert(gt, IS_SRIOV_VF(gt_to_xe(gt))); in xe_gt_sriov_vf_gmdid()
419 xe_gt_assert(gt, !GRAPHICS_VERx100(gt_to_xe(gt)) || has_gmdid(gt_to_xe(gt))); in xe_gt_sriov_vf_gmdid()
440 xe_gt_assert(gt, IS_SRIOV_VF(gt_to_xe(gt))); in vf_get_ggtt_info()
474 xe_gt_assert(gt, IS_SRIOV_VF(gt_to_xe(gt))); in vf_get_lmem_info()
501 xe_gt_assert(gt, IS_SRIOV_VF(gt_to_xe(gt))); in vf_get_submission_cfg()
532 xe_gt_assert(gt, has_gmdid(gt_to_xe(gt))); in vf_cache_gmdid()
533 xe_gt_assert(gt, IS_SRIOV_VF(gt_to_xe(gt))); in vf_cache_gmdid()
[all …]
xe_gt_tlb_invalidation.c
42 if (WARN_ON_ONCE(!fence->gt)) in xe_gt_tlb_invalidation_fence_fini()
70 if (WARN_ON_ONCE(!fence->gt)) in xe_gt_tlb_invalidation_fence_signal()
104 tlb_timeout_jiffies(gt)); in xe_gt_tlb_fence_timeout()
119 gt->tlb_invalidation.seqno = 1; in xe_gt_tlb_invalidation_init_early()
153 mutex_lock(&gt->uc.guc.ct.lock); in xe_gt_tlb_invalidation_reset()
198 xe_gt_assert(gt, fence); in send_tlb_invalidation()
238 gt->tlb_invalidation.seqno = (gt->tlb_invalidation.seqno + 1) % in send_tlb_invalidation()
358 xe_gt_assert(gt, gt == fence->gt); in xe_gt_tlb_invalidation_all()
401 xe_gt_assert(gt, fence); in xe_gt_tlb_invalidation_range()
543 tlb_timeout_jiffies(gt)); in xe_guc_tlb_invalidation_done_handler()
[all …]
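
Source lines 119 and 238 show the invalidation sequence-number scheme: the counter starts at 1 and advances modulo a fixed maximum. A stand-alone sketch of that arithmetic; TLB_SEQNO_MAX and the skip-zero-on-wrap step are assumptions, not taken from the hits:

    #include <stdio.h>

    #define TLB_SEQNO_MAX (1 << 16)  /* assumed; stand-in for the driver's constant */

    static int next_seqno(int seqno)
    {
        seqno = (seqno + 1) % TLB_SEQNO_MAX;
        if (!seqno)                  /* assumed: keep 0 free as an "invalid" marker */
            seqno = 1;
        return seqno;
    }

    int main(void)
    {
        printf("%d\n", next_seqno(5));                 /* 6 */
        printf("%d\n", next_seqno(TLB_SEQNO_MAX - 1)); /* wraps to 1, never 0 */
        return 0;
    }
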
xe_hw_engine.c
372 struct xe_gt *gt = hwe->gt; in xe_hw_engine_setup_default_lrc_state() local
409 struct xe_gt *gt = hwe->gt; in hw_engine_setup_default_state() local
424 gt->mocs.wb_index : gt->mocs.uc_index; in hw_engine_setup_default_state()
507 xe_gt_assert(gt, !hwe->gt); in hw_engine_init_early()
509 hwe->gt = gt; in hw_engine_init_early()
573 struct xe_gt *gt = hwe->gt; in adjust_idledly() local
606 xe_gt_assert(gt, gt->info.engine_mask & BIT(id)); in hw_engine_init()
841 hw_engine_init_early(gt, &gt->hw_engines[i], i); in xe_hw_engines_init_early()
948 gt = snapshot->hwe->gt; in xe_hw_engine_snapshot_free()
992 struct xe_gt *gt = hwe->gt; in xe_hw_engine_is_reserved() local
[all …]
xe_gt_sriov_pf_config.h
15 u64 xe_gt_sriov_pf_config_get_ggtt(struct xe_gt *gt, unsigned int vfid);
16 int xe_gt_sriov_pf_config_set_ggtt(struct xe_gt *gt, unsigned int vfid, u64 size);
17 int xe_gt_sriov_pf_config_set_fair_ggtt(struct xe_gt *gt,
19 int xe_gt_sriov_pf_config_bulk_set_ggtt(struct xe_gt *gt,
22 u32 xe_gt_sriov_pf_config_get_ctxs(struct xe_gt *gt, unsigned int vfid);
28 u32 xe_gt_sriov_pf_config_get_dbs(struct xe_gt *gt, unsigned int vfid);
34 u64 xe_gt_sriov_pf_config_get_lmem(struct xe_gt *gt, unsigned int vfid);
61 int xe_gt_sriov_pf_config_restore(struct xe_gt *gt, unsigned int vfid,
64 bool xe_gt_sriov_pf_config_is_empty(struct xe_gt *gt, unsigned int vfid);
66 int xe_gt_sriov_pf_config_init(struct xe_gt *gt);
[all …]
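
The header gives the shape of the PF provisioning API: per-resource get/set pairs (GGTT, contexts, doorbells, LMEM) plus bulk and "fair" variants, all keyed by (gt, vfid). A usage sketch built only from the prototypes above; the early-out policy is invented for illustration:

    /* Usage sketch from the prototypes above; the policy is illustrative. */
    static int resize_vf_ggtt_sketch(struct xe_gt *gt, unsigned int vfid, u64 new_size)
    {
        u64 cur = xe_gt_sriov_pf_config_get_ggtt(gt, vfid);

        if (cur == new_size)
            return 0;        /* nothing to change */

        return xe_gt_sriov_pf_config_set_ggtt(gt, vfid, new_size);
    }
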
xe_gt_throttle.c
43 xe_pm_runtime_get(gt_to_xe(gt)); in xe_gt_throttle_get_limit_reasons()
44 if (xe_gt_is_media_type(gt)) in xe_gt_throttle_get_limit_reasons()
48 xe_pm_runtime_put(gt_to_xe(gt)); in xe_gt_throttle_get_limit_reasons()
121 struct xe_gt *gt = dev_to_gt(dev); in status_show() local
122 bool status = !!read_status(gt); in status_show()
132 struct xe_gt *gt = dev_to_gt(dev); in reason_pl1_show() local
133 bool pl1 = !!read_reason_pl1(gt); in reason_pl1_show()
143 struct xe_gt *gt = dev_to_gt(dev); in reason_pl2_show() local
144 bool pl2 = !!read_reason_pl2(gt); in reason_pl2_show()
155 bool pl4 = !!read_reason_pl4(gt); in reason_pl4_show()
[all …]
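
The first hits show the runtime-PM bracket: wake the device before the MMIO read, pick the media or primary register, then drop the reference. A hedged sketch; read_limit_reasons_reg() is a hypothetical stand-in for the register read:

    /* Sketch of the runtime-PM bracket above; the read helper is hypothetical. */
    static u32 limit_reasons_sketch(struct xe_gt *gt)
    {
        u32 reasons;

        xe_pm_runtime_get(gt_to_xe(gt));  /* device must be awake for MMIO */
        reasons = read_limit_reasons_reg(gt, xe_gt_is_media_type(gt));
        xe_pm_runtime_put(gt_to_xe(gt));

        return reasons;
    }
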
xe_gt.h
22 #define CCS_MASK(gt) (((gt)->info.engine_mask & XE_HW_ENGINE_CCS_MASK) >> XE_HW_ENGINE_CCS0) argument
31 int xe_gt_init_early(struct xe_gt *gt);
32 int xe_gt_init(struct xe_gt *gt);
33 void xe_gt_mmio_init(struct xe_gt *gt);
50 int xe_gt_suspend(struct xe_gt *gt);
51 void xe_gt_shutdown(struct xe_gt *gt);
52 int xe_gt_resume(struct xe_gt *gt);
54 void xe_gt_sanitize(struct xe_gt *gt);
66 return flush_work(&gt->reset.worker); in xe_gt_wait_for_reset()
78 xe_gt_reset_async(gt); in xe_gt_reset()
[all …]
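
Source line 22 defines CCS_MASK() as an extract-and-shift: mask the compute-engine bits out of the full engine mask, then shift so the first compute engine lands at bit 0. A stand-alone demo with illustrative bit positions (the real XE_HW_ENGINE_* values differ):

    #include <stdio.h>

    #define CCS0      8                             /* assumed first compute-engine bit */
    #define CCS_COUNT 4
    #define CCS_BITS  (((1u << CCS_COUNT) - 1) << CCS0)

    #define CCS_MASK(engine_mask) (((engine_mask) & CCS_BITS) >> CCS0)

    int main(void)
    {
        unsigned int engine_mask = (1u << 8) | (1u << 10);  /* CCS0 and CCS2 present */
        printf("ccs mask = 0x%x\n", CCS_MASK(engine_mask)); /* prints 0x5 */
        return 0;
    }
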
xe_eu_stall.c
267 props->gt = gt; in set_prop_eu_stall_gt_id()
383 struct xe_gt *gt = stream->gt; in eu_stall_data_buf_poll() local
515 struct xe_gt *gt = stream->gt; in xe_eu_stall_stream_read_locked() local
554 struct xe_gt *gt = stream->gt; in xe_eu_stall_stream_read() local
594 struct xe_gt *gt = stream->gt; in xe_eu_stall_stream_free() local
638 struct xe_gt *gt = stream->gt; in xe_eu_stall_stream_enable() local
695 struct xe_gt *gt = stream->gt; in eu_stall_data_buf_poll_work_fn() local
711 struct xe_gt *gt = stream->gt; in xe_eu_stall_stream_init() local
767 struct xe_gt *gt = stream->gt; in xe_eu_stall_stream_poll() local
875 struct xe_gt *gt = props->gt; in xe_eu_stall_stream_open_locked() local
[all …]
/drivers/gpu/drm/i915/gt/
intel_gt_pm.c
64 gt->stats.total = in runtime_end()
73 struct intel_gt *gt = container_of(wf, typeof(*gt), wakeref); in __gt_unpark() local
77 GT_TRACE(gt, "\n"); in __gt_unpark()
99 runtime_begin(gt); in __gt_unpark()
106 struct intel_gt *gt = container_of(wf, typeof(*gt), wakeref); in __gt_park() local
111 GT_TRACE(gt, "\n"); in __gt_park()
113 runtime_end(gt); in __gt_park()
117 i915_vma_parked(gt); in __gt_park()
146 intel_wakeref_init(&gt->wakeref, gt->i915, &wf_ops, "GT"); in intel_gt_pm_init_early()
147 seqcount_mutex_init(&gt->stats.lock, &gt->wakeref.mutex); in intel_gt_pm_init_early()
[all …]
intel_gt.c
68 if (!gt) in intel_root_gt_init_early()
71 i915->gt[0] = gt; in intel_root_gt_init_early()
119 gt->ggtt = to_gt(gt->i915)->ggtt; in intel_gt_assign_ggtt()
121 gt->ggtt = i915_ggtt_create(gt->i915); in intel_gt_assign_ggtt()
455 intel_gsc_init(&gt->gsc, gt->i915); in intel_gt_driver_register()
711 gt->vm = kernel_vm(gt); in intel_gt_init()
749 intel_migrate_init(&gt->migrate, gt); in intel_gt_init()
880 intel_uncore_init_early(gt->uncore, gt); in intel_gt_tile_setup()
913 gt_dbg(gt, "Setting up %s\n", gt->name); in intel_gt_probe_all()
936 gt_dbg(gt, "Setting up %s\n", gt->name); in intel_gt_probe_all()
[all …]
intel_reset.c
308 GT_TRACE(gt, in gen6_hw_domain_reset()
650 __gen11_reset_engines(gt, gt->info.engine_mask, 0); in gen8_reset_engines()
739 __reset_guc(gt); in wa_14015076503_start()
1212 GT_TRACE(gt, "flags=%lx\n", gt->reset.flags); in intel_gt_reset()
1221 gt_revoke(gt); in intel_gt_reset()
1332 struct intel_gt *gt = engine->gt; in __intel_engine_reset_bh() local
1439 gt); in intel_gt_reset_global()
1510 intel_has_reset_engine(gt) && !intel_gt_is_wedged(gt)) { in intel_gt_handle_error()
1669 i915_gem_shrinker_taints_mutex(gt->i915, &gt->reset.mutex); in intel_gt_init_reset()
1693 w->gt = gt; in __intel_init_wedge()
[all …]
intel_gt_irq.c
133 gt = pick_gt(gt, class, instance); in gen11_gt_identity_handler()
228 if (HAS_ENGINE(gt, BCS1) || HAS_ENGINE(gt, BCS2)) in gen11_gt_irq_reset()
230 if (HAS_ENGINE(gt, BCS3) || HAS_ENGINE(gt, BCS4)) in gen11_gt_irq_reset()
232 if (HAS_ENGINE(gt, BCS5) || HAS_ENGINE(gt, BCS6)) in gen11_gt_irq_reset()
234 if (HAS_ENGINE(gt, BCS7) || HAS_ENGINE(gt, BCS8)) in gen11_gt_irq_reset()
238 if (HAS_ENGINE(gt, VCS4) || HAS_ENGINE(gt, VCS5)) in gen11_gt_irq_reset()
240 if (HAS_ENGINE(gt, VCS6) || HAS_ENGINE(gt, VCS7)) in gen11_gt_irq_reset()
245 if (HAS_ENGINE(gt, CCS0) || HAS_ENGINE(gt, CCS1)) in gen11_gt_irq_reset()
348 gt->pm_imr = ~gt->pm_ier; in gen11_gt_irq_postinstall()
478 gt->pm_imr = ~gt->pm_ier; in gen8_gt_irq_postinstall()
[all …]
intel_gt_mcr.c
114 spin_lock_init(&gt->mcr_lock); in intel_gt_mcr_init()
121 gt->info.mslice_mask = in intel_gt_mcr_init()
165 gt->info.l3bank_mask = in intel_gt_mcr_init()
299 intel_gt_mcr_lock(gt, &flags); in rw_with_mcr_steering()
307 intel_gt_mcr_unlock(gt, flags); in rw_with_mcr_steering()
327 __acquires(&gt->mcr_lock) in intel_gt_mcr_lock()
390 __releases(&gt->mcr_lock) in intel_gt_mcr_unlock()
476 intel_gt_mcr_lock(gt, &flags); in intel_gt_mcr_multicast_write()
635 if ((VDBOX_MASK(gt) | VEBOX_MASK(gt) | gt->info.sfc_mask) & BIT(0)) in get_nonterminated_steering()
742 struct intel_gt *gt, in report_steering_type() argument
[all …]
intel_gt_pm.h
21 intel_wakeref_get(&gt->wakeref); in intel_gt_pm_get_untracked()
26 intel_gt_pm_get_untracked(gt); in intel_gt_pm_get()
32 __intel_wakeref_get(&gt->wakeref); in __intel_gt_pm_get()
45 intel_wakeref_might_get(&gt->wakeref); in intel_gt_pm_might_get()
50 intel_wakeref_put(&gt->wakeref); in intel_gt_pm_put_untracked()
56 intel_gt_pm_put_untracked(gt); in intel_gt_pm_put()
61 intel_wakeref_put_async(&gt->wakeref); in intel_gt_pm_put_async_untracked()
72 intel_gt_pm_put_async_untracked(gt); in intel_gt_pm_put_async()
75 #define with_intel_gt_pm(gt, wf) \ argument
76 for ((wf) = intel_gt_pm_get(gt); (wf); intel_gt_pm_put((gt), (wf)), (wf) = NULL)
[all …]
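
Source lines 75–76 define with_intel_gt_pm() as a for-loop that takes a wakeref on entry, runs its body exactly once, and puts the wakeref on exit, so callers cannot forget the release. A stand-alone demo of the same trick with toy get/put functions:

    #include <stdio.h>

    static int pm_get(void)    { puts("get wakeref"); return 1; }
    static void pm_put(int wf) { (void)wf; puts("put wakeref"); }

    /* Same shape as with_intel_gt_pm(): the init clause acquires, the
     * "increment" clause releases and clears the cookie, so the body
     * runs exactly once with the reference held. */
    #define with_pm(wf) \
        for ((wf) = pm_get(); (wf); pm_put(wf), (wf) = 0)

    int main(void)
    {
        int wf;

        with_pm(wf)
            puts("body runs with the device awake");
        return 0;
    }
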
intel_gt.h
20 #define IS_GFX_GT_IP_RANGE(gt, from, until) ( \ argument
23 ((gt)->type != GT_MEDIA && \
24 GRAPHICS_VER_FULL((gt)->i915) >= (from) && \
25 GRAPHICS_VER_FULL((gt)->i915) <= (until)))
37 ((gt) && (gt)->type == GT_MEDIA && \
38 MEDIA_VER_FULL((gt)->i915) >= (from) && \
39 MEDIA_VER_FULL((gt)->i915) <= (until)))
79 #define GT_TRACE(gt, fmt, ...) do { \ argument
87 return !gt->info.id; in gt_is_root()
129 return &gt->uc.guc; in gt_to_guc()
[all …]
selftest_reset.c
51 igt_global_reset_lock(gt); in __igt_reset_stolen()
192 struct intel_gt *gt = arg; in igt_reset_engines_stolen() local
211 struct intel_gt *gt = arg; in igt_global_reset() local
218 igt_global_reset_lock(gt); in igt_global_reset()
241 struct intel_gt *gt = arg; in igt_wedged_reset() local
249 intel_gt_set_wedged(gt); in igt_wedged_reset()
273 if (!igt_force_reset(gt)) in igt_atomic_reset()
287 reset_finish(gt, awake); in igt_atomic_reset()
296 igt_force_reset(gt); in igt_atomic_reset()
326 if (!igt_force_reset(gt)) in igt_atomic_engine_reset()
[all …]
intel_gt_pm_irq.c
17 u32 mask = gt->pm_imr; in write_pm_imr()
40 lockdep_assert_held(gt->irq_lock); in gen6_gt_pm_update_irq()
42 new_val = gt->pm_imr; in gen6_gt_pm_update_irq()
46 if (new_val != gt->pm_imr) { in gen6_gt_pm_update_irq()
47 gt->pm_imr = new_val; in gen6_gt_pm_update_irq()
48 write_pm_imr(gt); in gen6_gt_pm_update_irq()
78 u32 mask = gt->pm_ier; in write_pm_ier()
97 gt->pm_ier |= enable_mask; in gen6_gt_pm_enable_irq()
98 write_pm_ier(gt); in gen6_gt_pm_enable_irq()
106 gt->pm_ier &= ~disable_mask; in gen6_gt_pm_disable_irq()
[all …]
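
gen6_gt_pm_update_irq() and write_pm_imr() show the shadow-register pattern: keep a software copy of the interrupt mask (pm_imr), compute the new value, and only touch the hardware when the value actually changed. A stand-alone sketch; the printf stands in for the MMIO write and the mask computation is elided:

    #include <stdio.h>

    struct gt_pm {
        unsigned int pm_imr;  /* software copy of the interrupt mask register */
    };

    static void write_pm_imr(struct gt_pm *gt)
    {
        printf("MMIO write: PM_IMR = 0x%08x\n", gt->pm_imr); /* MMIO stand-in */
    }

    static void pm_set_imr(struct gt_pm *gt, unsigned int new_val)
    {
        if (new_val != gt->pm_imr) {  /* skip redundant hardware writes */
            gt->pm_imr = new_val;
            write_pm_imr(gt);
        }
    }

    int main(void)
    {
        struct gt_pm gt = { .pm_imr = ~0u };

        pm_set_imr(&gt, 0xfffffffcu); /* unmask two interrupts: one write */
        pm_set_imr(&gt, 0xfffffffcu); /* same value again: no write */
        return 0;
    }
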
intel_gt_sysfs_pm.c
31 struct intel_gt *gt; in sysfs_gt_attribute_w_func() local
39 for_each_gt(gt, i915, i) { in sysfs_gt_attribute_w_func()
40 ret = func(gt, val); in sysfs_gt_attribute_w_func()
46 ret = func(gt, val); in sysfs_gt_attribute_w_func()
57 struct intel_gt *gt; in sysfs_gt_attribute_r_func() local
68 u32 val = func(gt); in sysfs_gt_attribute_r_func()
84 ret = func(gt); in sysfs_gt_attribute_r_func()
306 if (IS_VALLEYVIEW(gt->i915) || IS_CHERRYVIEW(gt->i915)) { in intel_sysfs_rc6_init()
521 bool val = rps_read_mask_mmio(&gt->rps, t_attr->reg32(gt), t_attr->mask); in throttle_reason_bool_show()
871 if (IS_VALLEYVIEW(gt->i915) || IS_CHERRYVIEW(gt->i915)) in intel_sysfs_rps_init()
[all …]
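
sysfs_gt_attribute_w_func() shows the multi-GT store pattern: a write to a device-level sysfs node loops over every GT with for_each_gt() and applies the setter, stopping at the first error. A stand-alone sketch with a plain array standing in for the GT iterator:

    #include <stdio.h>

    struct gt { int id; };  /* stand-in for struct intel_gt */

    static int set_freq(struct gt *gt, unsigned int val)
    {
        printf("gt%d <- %u\n", gt->id, val);
        return 0;            /* 0 on success, negative errno on failure */
    }

    static int store_all_gts(struct gt *gts, int count, unsigned int val)
    {
        int i, ret = 0;

        for (i = 0; i < count; i++) {  /* mirrors for_each_gt(gt, i915, i) */
            ret = set_freq(&gts[i], val);
            if (ret)
                break;                 /* first error wins */
        }
        return ret;
    }

    int main(void)
    {
        struct gt gts[] = { { 0 }, { 1 } };
        return store_all_gts(gts, 2, 300);
    }
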

Completed in 682 milliseconds
