
Searched refs:xe (Results 1 – 25 of 252) sorted by relevance

Pages: 1 2 3 4 5 6 7 8 9 10 11

/drivers/gpu/drm/xe/
xe_pm.c
146 xe_irq_suspend(xe); in xe_pm_suspend()
196 xe_i2c_pm_resume(xe, xe->d3cold.allowed); in xe_pm_resume()
198 xe_irq_resume(xe); in xe_pm_resume()
254 if (IS_DGFX(xe)) in xe_pm_runtime_init()
271 err = drmm_mutex_init(&xe->drm, &xe->mem_access.vram_userfault.lock); in xe_pm_init_early()
275 err = drmm_mutex_init(&xe->drm, &xe->d3cold.lock); in xe_pm_init_early()
279 xe->d3cold.capable = xe_pm_pci_d3cold_capable(xe); in xe_pm_init_early()
491 xe_irq_suspend(xe); in xe_pm_runtime_suspend()
544 xe_i2c_pm_resume(xe, xe->d3cold.allowed); in xe_pm_runtime_resume()
546 xe_irq_resume(xe); in xe_pm_runtime_resume()
[all …]
xe_device.c
94 xef->xe = xe; in xe_file_open()
427 if (IS_ERR(xe)) in xe_device_create()
428 return xe; in xe_device_create()
430 err = ttm_device_init(&xe->ttm, &xe_ttm_funcs, xe->drm.dev, in xe_device_create()
482 if (!xe->ordered_wq || !xe->unordered_wq || in xe_device_create()
483 !xe->preempt_fence_wq || !xe->destroy_wq) { in xe_device_create()
493 err = drmm_mutex_init(&xe->drm, &xe->pmt.lock); in xe_device_create()
497 return xe; in xe_device_create()
749 if (GRAPHICS_VER(xe) < 20 || !xe->info.has_flat_ccs || IS_SRIOV_VF(xe)) in probe_has_flat_ccs()
1062 if (!IS_DGFX(xe) || GRAPHICS_VER(xe) < 20) in xe_device_td_flush()
[all …]
xe_bo_evict.c
68 ret = xe_bo_apply_to_pinned(xe, &xe->pinned.early.kernel_bo_present, in xe_bo_notifier_prepare_all_pinned()
72 ret = xe_bo_apply_to_pinned(xe, &xe->pinned.late.kernel_bo_present, in xe_bo_notifier_prepare_all_pinned()
89 (void)xe_bo_apply_to_pinned(xe, &xe->pinned.early.kernel_bo_present, in xe_bo_notifier_unprepare_all_pinned()
93 (void)xe_bo_apply_to_pinned(xe, &xe->pinned.late.kernel_bo_present, in xe_bo_notifier_unprepare_all_pinned()
161 ret = xe_bo_apply_to_pinned(xe, &xe->pinned.late.kernel_bo_present, in xe_bo_evict_all()
165 ret = xe_bo_apply_to_pinned(xe, &xe->pinned.late.kernel_bo_present, in xe_bo_evict_all()
227 return xe_bo_apply_to_pinned(xe, &xe->pinned.early.evicted, in xe_bo_restore_early()
247 ret = xe_bo_apply_to_pinned(xe, &xe->pinned.late.evicted, in xe_bo_restore_late()
257 if (!IS_DGFX(xe)) in xe_bo_restore_late()
261 ret = xe_bo_apply_to_pinned(xe, &xe->pinned.late.external, in xe_bo_restore_late()
[all …]
xe_irq.c
579 xe_assert(xe, IS_SRIOV_VF(xe)); in vf_irq_reset()
584 xe_assert(xe, xe_device_has_memirq(xe)); in vf_irq_reset()
641 xe_assert(xe, xe_device_has_memirq(xe)); in vf_irq_postinstall()
657 xe_display_irq_postinstall(xe, xe_root_mmio_gt(xe)); in xe_irq_postinstall()
690 if (IS_SRIOV_VF(xe) && xe_device_has_memirq(xe)) in xe_irq_handler()
737 xe_irq_reset(xe); in irq_uninstall()
759 xe_irq_reset(xe); in xe_irq_install()
772 err = xe_device_has_msix(xe) ? xe_irq_msix_request_irqs(xe) : in xe_irq_install()
781 return devm_add_action_or_reset(xe->drm.dev, irq_uninstall, xe); in xe_irq_install()
812 xe_irq_reset(xe); in xe_irq_resume()
[all …]
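The tail of the xe_irq.c hits shows the managed-teardown idiom recurring in these results: xe_irq_install() registers irq_uninstall as a devm action, so the unwind runs automatically on driver detach, and immediately if registration itself fails. A minimal sketch of the same idiom, assuming reset is the only unwind step (example_uninstall/example_install are hypothetical; irq_uninstall's real body is truncated above):

	static void example_uninstall(void *arg)
	{
		struct xe_device *xe = arg;

		xe_irq_reset(xe);	/* same unwind the hits perform at line 737 */
	}

	static int example_install(struct xe_device *xe)
	{
		/* ... request IRQs as in xe_irq_install() ... */
		return devm_add_action_or_reset(xe->drm.dev, example_uninstall, xe);
	}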
xe_pci_sriov.c
40 for_each_gt(gt, xe, id) { in pf_provision_vfs()
56 for_each_gt(gt, xe, id) in pf_unprovision_vfs()
67 for_each_gt(gt, xe, id) in pf_reset_vfs()
76 xe_assert(xe, IS_SRIOV_PF(xe)); in xe_pci_pf_get_vf_dev()
153 xe_assert(xe, IS_SRIOV_PF(xe)); in pf_enable_vfs()
177 if (IS_DGFX(xe)) { in pf_enable_vfs()
198 xe_pm_runtime_put(xe); in pf_enable_vfs()
211 xe_assert(xe, IS_SRIOV_PF(xe)); in pf_disable_vfs()
226 xe_pm_runtime_put(xe); in pf_disable_vfs()
249 if (!IS_SRIOV_PF(xe)) in xe_pci_sriov_configure()
[all …]
xe_pat.c
380 if (GRAPHICS_VER(xe) == 30 || GRAPHICS_VER(xe) == 20) { in xe_pat_init_early()
384 if (IS_DGFX(xe)) in xe_pat_init_early()
423 WARN_ON_ONCE(!IS_DGFX(xe) && !xe->info.has_llc); in xe_pat_init_early()
440 GRAPHICS_VER(xe), GRAPHICS_VERx100(xe) % 100); in xe_pat_init_early()
447 xe_assert(xe, !xe->pat.ops || xe->pat.ops->dump); in xe_pat_init_early()
448 xe_assert(xe, !xe->pat.ops || xe->pat.ops->program_graphics); in xe_pat_init_early()
449 xe_assert(xe, !xe->pat.ops || MEDIA_VER(xe) < 13 || xe->pat.ops->program_media); in xe_pat_init_early()
456 if (!xe->pat.ops) in xe_pat_init()
460 xe->pat.ops->program_media(gt, xe->pat.table, xe->pat.n_entries); in xe_pat_init()
462 xe->pat.ops->program_graphics(gt, xe->pat.table, xe->pat.n_entries); in xe_pat_init()
[all …]
xe_sriov_pf.c
41 pf_reduce_totalvfs(xe, 0); in pf_continue_as_native()
66 if (!xe_device_uc_enabled(xe)) in xe_sriov_pf_readiness()
90 xe_assert(xe, IS_SRIOV_PF(xe)); in xe_sriov_pf_init_early()
92 xe->sriov.pf.vfs = drmm_kcalloc(&xe->drm, 1 + xe_sriov_pf_get_totalvfs(xe), in xe_sriov_pf_init_early()
94 if (!xe->sriov.pf.vfs) in xe_sriov_pf_init_early()
97 err = drmm_mutex_init(&xe->drm, &xe->sriov.pf.master_lock); in xe_sriov_pf_init_early()
101 xe_sriov_pf_service_init(xe); in xe_sriov_pf_init_early()
120 if (xe_device_wedged(xe)) in xe_sriov_pf_wait_ready()
123 for_each_gt(gt, xe, id) { in xe_sriov_pf_wait_ready()
143 xe_assert(xe, IS_SRIOV_PF(xe)); in xe_sriov_pf_print_vfs_summary()
[all …]
xe_sriov.c
64 if (test_is_vf(xe)) in xe_sriov_probe_early()
80 xe_assert(xe, !xe->sriov.__mode); in xe_sriov_probe_early()
82 xe_assert(xe, xe->sriov.__mode); in xe_sriov_probe_early()
84 if (IS_SRIOV(xe)) in xe_sriov_probe_early()
94 xe->sriov.wq = NULL; in fini_sriov()
108 if (!IS_SRIOV(xe)) in xe_sriov_init()
111 if (IS_SRIOV_PF(xe)) { in xe_sriov_init()
118 if (IS_SRIOV_VF(xe)) in xe_sriov_init()
121 xe_assert(xe, !xe->sriov.wq); in xe_sriov_init()
123 if (!xe->sriov.wq) in xe_sriov_init()
[all …]
xe_device.h
36 return (struct xe_device *)xe; in xe_device_const_cast()
60 return &xe->tiles[0]; in xe_device_get_root_tile()
75 if (gt_id >= xe->info.tile_count * xe->info.max_gt_per_tile) in xe_device_get_gt()
78 tile = &xe->tiles[gt_id / xe->info.max_gt_per_tile]; in xe_device_get_gt()
81 xe_assert(xe, false); in xe_device_get_gt()
119 return !xe->info.force_execlist; in xe_device_uc_enabled()
147 return xe->info.has_flat_ccs; in xe_device_has_flat_ccs()
152 return xe->info.has_sriov; in xe_device_has_sriov()
157 return xe->irq.msix.nvec > 0; in xe_device_has_msix()
167 return xe_device_has_memirq(xe) && (IS_SRIOV_VF(xe) || xe_device_has_msix(xe)); in xe_device_uses_memirq()
[all …]
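The xe_device.h hits sketch the device-wide GT lookup: a flat gt_id is range-checked against tile_count * max_gt_per_tile, then split into a tile index by integer division. A hedged reconstruction of that arithmetic (the per-tile step is truncated in the results, so tile_to_gt() below is a hypothetical stand-in):

	static struct xe_gt *example_get_gt(struct xe_device *xe, u8 gt_id)
	{
		struct xe_tile *tile;

		if (gt_id >= xe->info.tile_count * xe->info.max_gt_per_tile)
			return NULL;	/* the real code asserts here, per line 81 */

		tile = &xe->tiles[gt_id / xe->info.max_gt_per_tile];
		/* tile_to_gt() is hypothetical; the actual per-tile indexing is elided */
		return tile_to_gt(tile, gt_id % xe->info.max_gt_per_tile);
	}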
xe_pci.c
594 err = xe_tile_init_early(xe_device_get_root_tile(xe), xe, 0); in xe_info_init_early()
629 xe->info.step = xe_step_pre_gmdid_get(xe); in xe_info_init()
634 xe->info.step = xe_step_gmdid_get(xe, in xe_info_init()
726 if (IS_SRIOV_PF(xe)) in xe_pci_remove()
733 xe_pm_fini(xe); in xe_pci_remove()
788 if (IS_ERR(xe)) in xe_pci_probe()
789 return PTR_ERR(xe); in xe_pci_probe()
826 xe->info.devid, xe->info.revid, in xe_pci_probe()
827 xe->info.is_dgfx, in xe_pci_probe()
835 xe->info.dma_mask_size, xe->info.tile_count, in xe_pci_probe()
[all …]
xe_debugfs.c
47 xe_pm_runtime_get(xe); in info()
58 xe->info.subplatform > XE_SUBPLATFORM_NONE ? xe->info.subplatform : 0); in info()
74 xe_pm_runtime_put(xe); in info()
89 xe_pm_runtime_get(xe); in workarounds()
91 xe_pm_runtime_put(xe); in workarounds()
101 workarounds(xe, &p); in workaround_info()
118 xe_pm_runtime_get(xe); in forcewake_open()
139 xe_pm_runtime_put(xe); in forcewake_open()
151 xe_pm_runtime_put(xe); in forcewake_release()
195 xe_pm_runtime_get(xe); in wedged_mode_set()
[all …]
xe_sriov_pf_service.c
29 xe_assert(xe, IS_SRIOV_PF(xe)); in xe_sriov_pf_service_init()
48 xe_assert(xe, IS_SRIOV_PF(xe)); in pf_negotiate_version()
49 xe_assert(xe, base.major); in pf_negotiate_version()
89 xe_sriov_pf_assert_vfid(xe, vfid); in pf_connect()
90 xe_assert(xe, major || minor); in pf_connect()
98 xe_sriov_pf_assert_vfid(xe, vfid); in pf_disconnect()
119 xe_sriov_pf_assert_vfid(xe, vfid); in xe_sriov_pf_service_is_negotiated()
154 pf_disconnect(xe, vfid); in xe_sriov_pf_service_handshake_vf()
158 pf_connect(xe, vfid, *major, *minor); in xe_sriov_pf_service_handshake_vf()
178 pf_disconnect(xe, vfid); in xe_sriov_pf_service_reset_vf()
[all …]
xe_rtp.c
59 if (drm_WARN_ON(&xe->drm, !gt)) in rule_matches()
66 if (drm_WARN_ON(&xe->drm, !gt)) in rule_matches()
117 match = !xe->info.is_dgfx; in rule_matches()
120 match = xe->info.is_dgfx; in rule_matches()
213 struct xe_device **xe) in rtp_get_context() argument
219 *xe = ctx->xe; in rtp_get_context()
224 *xe = gt_to_xe(*gt); in rtp_get_context()
229 *xe = gt_to_xe(*gt); in rtp_get_context()
289 struct xe_device *xe = NULL; in xe_rtp_process_to_sr() local
293 xe_assert(xe, entries); in xe_rtp_process_to_sr()
[all …]
xe_sriov_vf.c
146 if (!vf_migration_supported(xe)) in xe_sriov_vf_init_early()
166 for_each_gt(gt, xe, id) { in vf_post_migration_notify_resfix_done()
188 for_each_gt(gt, xe, id) { in vf_get_next_migrated_gt_id()
232 xe_pm_runtime_get(xe); in vf_post_migration_recovery()
254 xe_pm_runtime_put(xe); in vf_post_migration_recovery()
258 xe_pm_runtime_put(xe); in vf_post_migration_recovery()
260 xe_device_declare_wedged(xe); in vf_post_migration_recovery()
268 vf_post_migration_recovery(xe); in migration_worker_func()
283 for_each_gt(gt, xe, id) { in vf_ready_to_recovery_on_any_gts()
300 xe_assert(xe, IS_SRIOV_VF(xe)); in xe_sriov_vf_start_migration_recovery()
[all …]
xe_ttm_stolen_mgr.c
57 return GRAPHICS_VERx100(xe) < 1270 && !IS_DGFX(xe); in xe_ttm_stolen_cpu_access_needs_ggtt()
101 wopcm_size = get_wopcm_size(xe); in detect_bar2_dgfx()
102 if (drm_WARN_ON(&xe->drm, !wopcm_size)) in detect_bar2_dgfx()
161 wopcm_size = get_wopcm_size(xe); in detect_bar2_integrated()
162 if (drm_WARN_ON(&xe->drm, !wopcm_size)) in detect_bar2_integrated()
221 if (IS_SRIOV_VF(xe)) in xe_ttm_stolen_mgr_init()
223 else if (IS_DGFX(xe)) in xe_ttm_stolen_mgr_init()
225 else if (GRAPHICS_VERx100(xe) >= 1270) in xe_ttm_stolen_mgr_init()
228 stolen_size = detect_stolen(xe, mgr); in xe_ttm_stolen_mgr_init()
307 XE_WARN_ON(IS_DGFX(xe)); in __xe_ttm_stolen_io_mem_reserve_stolen()
[all …]
xe_vram.c
81 drm_info(&xe->drm, in resize_vram_bar()
148 resize_vram_bar(xe); in determine_lmem_bar_size()
152 if (!xe->mem.vram.io_size) in determine_lmem_bar_size()
159 xe->mem.vram.mapping = ioremap_wc(xe->mem.vram.io_start, xe->mem.vram.io_size); in determine_lmem_bar_size()
230 if (IS_SRIOV_VF(xe)) { in tile_vram_size()
235 for_each_tile(t, xe, id) in tile_vram_size()
310 if (!IS_DGFX(xe)) in xe_vram_probe()
323 drm_info(&xe->drm, "VISIBLE VRAM: %pa, %pa\n", &xe->mem.vram.io_start, in xe_vram_probe()
369 drm_info(&xe->drm, "Total VRAM: %pa, %pa\n", &xe->mem.vram.io_start, in xe_vram_probe()
371 drm_info(&xe->drm, "Available VRAM: %pa, %pa\n", &xe->mem.vram.io_start, in xe_vram_probe()
[all …]
xe_pxp_submit.c
37 struct xe_device *xe = pxp->xe; in allocate_vcs_execution_resources() local
101 xe_assert(xe, hwe); in allocate_gsc_client_resources()
223 emit_cmd(xe, batch, offset++, 0); in pxp_emit_wait()
235 emit_cmd(xe, batch, offset++, 0); in pxp_emit_session_selection()
236 emit_cmd(xe, batch, offset++, 0); in pxp_emit_session_selection()
403 struct xe_device *xe = gsc_res->vm->xe; in gsccs_send_message() local
438 xe_device_wmb(xe); in gsccs_send_message()
505 struct xe_device *xe = gsc_res->vm->xe; in xe_pxp_submit_session_init() local
528 drm_info_once(&xe->drm, in xe_pxp_submit_session_init()
552 struct xe_device *xe = gsc_res->vm->xe; in xe_pxp_submit_session_invalidation() local
[all …]
xe_sriov_printk.h
14 #define xe_sriov_printk_prefix(xe) \ argument
18 #define xe_sriov_printk(xe, _level, fmt, ...) \ argument
19 drm_##_level(&(xe)->drm, "%s" fmt, xe_sriov_printk_prefix(xe), ##__VA_ARGS__)
21 #define xe_sriov_err(xe, fmt, ...) \ argument
22 xe_sriov_printk((xe), err, fmt, ##__VA_ARGS__)
27 #define xe_sriov_warn(xe, fmt, ...) \ argument
30 #define xe_sriov_notice(xe, fmt, ...) \ argument
33 #define xe_sriov_info(xe, fmt, ...) \ argument
36 #define xe_sriov_dbg(xe, fmt, ...) \ argument
41 #define xe_sriov_dbg_verbose(xe, fmt, ...) xe_sriov_dbg(xe, fmt, ##__VA_ARGS__) argument
[all …]
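These wrappers layer an SR-IOV prefix onto the stock drm_* loggers: each xe_sriov_<level>() forwards to xe_sriov_printk(), which pastes the level into drm_##_level and prepends xe_sriov_printk_prefix(xe) via the leading "%s". A hedged expansion of one call, using only the macros visible above (the prefix body is truncated in the results, so its exact string is an assumption):

	xe_sriov_err(xe, "VF%u rejected\n", vfid);
	/* expands, per the macros above, to roughly:
	 *   drm_err(&(xe)->drm, "%s" "VF%u rejected\n",
	 *           xe_sriov_printk_prefix(xe), vfid);
	 * i.e. the message comes out tagged with a PF/VF mode prefix
	 * (assumed to be something like "PF: "). */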
xe_device_sysfs.c
43 xe_pm_runtime_get(xe); in vram_d3cold_threshold_show()
45 xe_pm_runtime_put(xe); in vram_d3cold_threshold_show()
65 xe_pm_runtime_get(xe); in vram_d3cold_threshold_store()
67 xe_pm_runtime_put(xe); in vram_d3cold_threshold_store()
83 xe_pm_runtime_get(xe); in lb_fan_control_version_show()
107 xe_pm_runtime_put(xe); in lb_fan_control_version_show()
122 xe_pm_runtime_get(xe); in lb_voltage_regulator_version_show()
146 xe_pm_runtime_put(xe); in lb_voltage_regulator_version_show()
159 xe_pm_runtime_get(xe); in late_bind_create_files()
180 xe_pm_runtime_put(xe); in late_bind_create_files()
[all …]
xe_sriov.h
18 void xe_sriov_probe_early(struct xe_device *xe);
20 int xe_sriov_init(struct xe_device *xe);
24 xe_assert(xe, xe->sriov.__mode); in xe_device_sriov_mode()
25 return xe->sriov.__mode; in xe_device_sriov_mode()
28 static inline bool xe_device_is_sriov_pf(const struct xe_device *xe) in xe_device_is_sriov_pf() argument
30 return xe_device_sriov_mode(xe) == XE_SRIOV_MODE_PF; in xe_device_is_sriov_pf()
35 return xe_device_sriov_mode(xe) == XE_SRIOV_MODE_VF; in xe_device_is_sriov_vf()
39 #define IS_SRIOV_PF(xe) xe_device_is_sriov_pf(xe) argument
41 #define IS_SRIOV_PF(xe) (typecheck(struct xe_device *, (xe)) && false) argument
43 #define IS_SRIOV_VF(xe) xe_device_is_sriov_vf(xe) argument
[all …]
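Note the two definitions of IS_SRIOV_PF() in the hits: with SR-IOV support built in it calls xe_device_is_sriov_pf(); otherwise it collapses to typecheck(struct xe_device *, (xe)) && false, which keeps the argument type-checked while letting the compiler discard PF-only branches. A minimal sketch of the calling pattern this enables (example_pf_work() is hypothetical):

	static void example_pf_work(struct xe_device *xe)
	{
		if (!IS_SRIOV_PF(xe))	/* constant false when PF support is compiled out */
			return;

		/* PF-only code here still parses and type-checks, but is
		 * eliminated as dead code in non-PF builds. */
	}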
xe_pm.h
15 int xe_pm_suspend(struct xe_device *xe);
16 int xe_pm_resume(struct xe_device *xe);
18 int xe_pm_init_early(struct xe_device *xe);
19 int xe_pm_init(struct xe_device *xe);
20 void xe_pm_fini(struct xe_device *xe);
21 bool xe_pm_runtime_suspended(struct xe_device *xe);
22 int xe_pm_runtime_suspend(struct xe_device *xe);
23 int xe_pm_runtime_resume(struct xe_device *xe);
24 void xe_pm_runtime_get(struct xe_device *xe);
25 int xe_pm_runtime_get_ioctl(struct xe_device *xe);
[all …]
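Several .c hits above (xe_debugfs.c's info(), xe_device_sysfs.c's show/store callbacks) bracket hardware access between xe_pm_runtime_get() and xe_pm_runtime_put(). A short sketch of that pattern, assuming a hypothetical printer callback (drm_printf/struct drm_printer are the stock DRM print helpers):

	static int example_show(struct xe_device *xe, struct drm_printer *p)
	{
		xe_pm_runtime_get(xe);	/* wake the device and hold a runtime-PM ref */
		drm_printf(p, "devid 0x%x\n", xe->info.devid);
		xe_pm_runtime_put(xe);	/* drop the ref so runtime suspend can proceed */
		return 0;
	}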
/drivers/gpu/drm/xe/display/
xe_display.c
141 return devm_add_action_or_reset(xe->drm.dev, xe_display_fini_early, xe); in xe_display_init_early()
173 return devm_add_action_or_reset(xe->drm.dev, xe_display_fini, xe); in xe_display_init()
292 if (has_display(xe)) in xe_display_enable_d3cold()
305 if (has_display(xe)) in xe_display_disable_d3cold()
312 if (has_display(xe)) in xe_display_disable_d3cold()
335 if (has_display(xe)) { in xe_display_pm_suspend()
375 if (has_display(xe)) in xe_display_pm_shutdown()
464 if (has_display(xe)) in xe_display_pm_resume()
469 if (has_display(xe)) in xe_display_pm_resume()
480 if (has_display(xe)) in xe_display_pm_resume()
[all …]
xe_display.h
18 int xe_display_probe(struct xe_device *xe);
20 int xe_display_init_early(struct xe_device *xe);
21 int xe_display_init(struct xe_device *xe);
23 void xe_display_register(struct xe_device *xe);
24 void xe_display_unregister(struct xe_device *xe);
28 void xe_display_irq_reset(struct xe_device *xe);
31 void xe_display_pm_suspend(struct xe_device *xe);
32 void xe_display_pm_shutdown(struct xe_device *xe);
33 void xe_display_pm_suspend_late(struct xe_device *xe);
35 void xe_display_pm_resume_early(struct xe_device *xe);
[all …]
/drivers/gpu/drm/xe/tests/
xe_sriov_pf_service_kunit.c
19 struct xe_device *xe; in pf_service_test_init() local
24 xe = test->priv; in pf_service_test_init()
27 xe_sriov_pf_service_init(xe); in pf_service_test_init()
37 if (xe->sriov.pf.service.version.base.major == xe->sriov.pf.service.version.latest.major) in pf_service_test_init()
62 pf_negotiate_version(xe, in pf_negotiate_base_match()
76 pf_negotiate_version(xe, in pf_negotiate_base_newer()
94 pf_negotiate_version(xe, in pf_negotiate_base_next()
114 pf_negotiate_version(xe, in pf_negotiate_base_older()
126 pf_negotiate_version(xe, in pf_negotiate_base_prev()
137 pf_negotiate_version(xe, in pf_negotiate_latest_match()
[all …]
xe_migrate.c
268 IS_DGFX(xe)); in xe_migrate_sanity_test()
294 if (xe->info.tile_count > 1) { in xe_migrate_sanity_test()
316 if (xe->info.tile_count > 1) { in xe_migrate_sanity_test()
342 xe_pm_runtime_get(xe); in migrate_test_run_device()
344 for_each_tile(tile, xe, id) { in migrate_test_run_device()
353 xe_pm_runtime_put(xe); in migrate_test_run_device()
362 migrate_test_run_device(xe); in xe_migrate_sanity_kunit()
749 if (!(GRAPHICS_VER(xe) >= 20 && IS_DGFX(xe))) { in validate_ccs_test_run_device()
754 xe_pm_runtime_get(xe); in validate_ccs_test_run_device()
756 for_each_tile(tile, xe, id) in validate_ccs_test_run_device()
[all …]

Completed in 76 milliseconds
