
Searched refs:guc (Results 1 – 25 of 92) sorted by relevance


/linux/drivers/gpu/drm/i915/gt/uc/
intel_guc.c
75 guc_send_reg(guc, i), in intel_guc_init_send_regs()
276 u32 ads = intel_guc_ggtt_offset(guc, guc->ads_vma) >> PAGE_SHIFT; in guc_ctl_ads_flags()
347 u32 *params = guc->params; in guc_init_params()
425 GEM_BUG_ON(!guc->ads_vma); in intel_guc_init()
448 guc_init_params(guc); in intel_guc_init()
459 intel_guc_ads_destroy(guc); in intel_guc_init()
465 intel_uc_fw_fini(&guc->fw); in intel_guc_init()
487 intel_guc_ads_destroy(guc); in intel_guc_fini()
490 intel_uc_fw_fini(&guc->fw); in intel_guc_fini()
519 intel_guc_notify(guc); in intel_guc_send_mmio()
[all …]
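The intel_guc.c hits above show the firmware boot parameters being assembled: guc_ctl_ads_flags() packs the page-shifted ADS offset into a dword and guc_init_params() gathers such dwords into guc->params. A minimal sketch of that packing, assuming the GUC_CTL_ADS slot index from i915's fwif headers:

	/* Sketch only: the firmware takes the ADS address in page units. */
	static void guc_init_params_sketch(struct intel_guc *guc)
	{
		u32 *params = guc->params;

		params[GUC_CTL_ADS] =
			intel_guc_ggtt_offset(guc, guc->ads_vma) >> PAGE_SHIFT;
	}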
intel_guc.h
327 #define GUC_SUBMIT_VER(guc) MAKE_GUC_VER_STRUCT((guc)->submission_version) argument
328 #define GUC_FIRMWARE_VER(guc) MAKE_GUC_VER_STRUCT((guc)->fw.file_selected.ver) argument
396 if (guc->interrupts.enabled) in intel_guc_to_host_event_handler()
470 return intel_guc_is_fw_running(guc) && intel_guc_ct_enabled(&guc->ct); in intel_guc_is_ready()
475 guc->interrupts.reset(guc); in intel_guc_reset_interrupts()
480 guc->interrupts.enable(guc); in intel_guc_enable_interrupts()
485 guc->interrupts.disable(guc); in intel_guc_disable_interrupts()
493 guc->mmio_msg = 0; in intel_guc_sanitize()
500 spin_lock_irq(&guc->irq_lock); in intel_guc_enable_msg()
501 guc->msg_enabled_mask |= mask; in intel_guc_enable_msg()
[all …]
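Assembled from the intel_guc.h hits, the readiness predicate and the message-mask helper form a coherent pair; a sketch, with the unlock assumed (only the lock and OR lines appear in the hit):

	/* Ready means the firmware runs and the CT channel is up. */
	static inline bool intel_guc_is_ready(struct intel_guc *guc)
	{
		return intel_guc_is_fw_running(guc) && intel_guc_ct_enabled(&guc->ct);
	}

	/* msg_enabled_mask is serialised by irq_lock. */
	static void intel_guc_enable_msg(struct intel_guc *guc, u32 mask)
	{
		spin_lock_irq(&guc->irq_lock);
		guc->msg_enabled_mask |= mask;
		spin_unlock_irq(&guc->irq_lock);
	}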
intel_guc_ads.c
473 addr_ggtt = intel_guc_ggtt_offset(guc, guc->ads_vma) + offset; in guc_mmio_reg_state_init()
475 iosys_map_memcpy_to(&guc->ads_map, offset, guc->ads_regset, in guc_mmio_reg_state_init()
552 addr_ggtt = intel_guc_ggtt_offset(guc, guc->ads_vma) + offset; in guc_prep_golden_context()
636 addr_ggtt = intel_guc_ggtt_offset(guc, guc->ads_vma) + offset; in guc_init_golden_context()
718 ads_ggtt = intel_guc_ggtt_offset(guc, guc->ads_vma); in guc_capture_prep_lists()
872 addr_ggtt = intel_guc_ggtt_offset(guc, guc->ads_vma) + offset; in guc_waklv_init()
918 base = intel_guc_ggtt_offset(guc, guc->ads_vma); in __guc_ads_init()
987 ret = intel_guc_allocate_and_map_vma(guc, size, &guc->ads_vma, in intel_guc_ads_create()
1029 iosys_map_memset(&guc->ads_map, guc_ads_private_data_offset(guc), in guc_ads_private_data_reset()
1043 if (!guc->ads_vma) in intel_guc_ads_reset()
[all …]
intel_uc.c
155 struct intel_guc *guc = &uc->guc; in __uc_capture_load_err_log() local
316 struct intel_guc *guc = &uc->guc; in __uc_init() local
350 struct intel_guc *guc = &uc->guc; in __uc_sanitize() local
456 struct intel_guc *guc = &uc->guc; in __uc_init_hw() local
585 struct intel_guc *guc = &uc->guc; in __uc_fini_hw() local
604 struct intel_guc *guc = &uc->guc; in intel_uc_reset_prepare() local
625 struct intel_guc *guc = &uc->guc; in intel_uc_reset() local
634 struct intel_guc *guc = &uc->guc; in intel_uc_reset_finish() local
649 struct intel_guc *guc = &uc->guc; in intel_uc_cancel_requests() local
658 struct intel_guc *guc = &uc->guc; in intel_uc_runtime_suspend() local
[all …]
intel_guc_debugfs.c
19 struct intel_guc *guc = m->private; in guc_info_show() local
22 if (!intel_guc_is_supported(guc)) in guc_info_show()
25 intel_guc_load_status(guc, &p); in guc_info_show()
27 intel_guc_log_info(&guc->log, &p); in guc_info_show()
42 struct intel_guc *guc = m->private; in guc_registered_contexts_show() local
56 struct intel_guc *guc = m->private; in guc_slpc_info_show() local
60 if (!intel_guc_slpc_is_used(guc)) in guc_slpc_info_show()
76 struct intel_guc *guc = data; in guc_sched_disable_delay_ms_get() local
88 struct intel_guc *guc = data; in guc_sched_disable_delay_ms_set() local
104 struct intel_guc *guc = data; in guc_sched_disable_gucid_threshold_get() local
[all …]
intel_guc_submission.c
1481 struct intel_guc *guc = container_of(wrk, typeof(*guc), in guc_timestamp_ping() local
1483 struct intel_uc *uc = container_of(guc, typeof(*uc), guc); in guc_timestamp_ping()
1684 guc->interrupts.disable(guc); in intel_guc_submission_reset_prepare()
2079 wait = xa_load(&guc->tlb_lookup, guc->serial_slot); in fini_tlb_lookup()
2185 return submission_disabled(guc) || guc->stalled_request || in need_tasklet()
3631 struct intel_guc *guc = &ce->engine->gt->uc.guc; in update_context_prio() local
4579 guc->sched_engine->private_data = guc; in intel_guc_submission_setup()
4809 guc->submission_selected = __guc_submission_selected(guc); in intel_guc_submission_init_early()
4847 guc_dbg(guc, in wait_wake_outstanding_tlb_g2h()
4927 wq = xa_load(&guc->tlb_lookup, guc->serial_slot); in guc_send_invalidate_tlb()
[all …]
intel_guc_rc.c
16 return guc->submission_supported && in __guc_rc_supported()
17 GRAPHICS_VER(guc_to_i915(guc)) >= 12; in __guc_rc_supported()
22 if (!intel_guc_rc_is_supported(guc)) in __guc_rc_selected()
25 return guc->submission_selected; in __guc_rc_selected()
30 guc->rc_supported = __guc_rc_supported(guc); in intel_guc_rc_init_early()
31 guc->rc_selected = __guc_rc_selected(guc); in intel_guc_rc_init_early()
52 struct intel_gt *gt = guc_to_gt(guc); in __guc_rc_control()
58 if (!intel_guc_is_ready(guc)) in __guc_rc_control()
61 ret = guc_action_control_gucrc(guc, enable); in __guc_rc_control()
75 return __guc_rc_control(guc, true); in intel_guc_rc_enable()
[all …]
intel_guc_capture.c
619 if (!guc->capture) in guc_capture_output_min_size_est()
1093 guc->capture->max_mmio_per_node = guc_get_max_reglist_count(guc); in guc_capture_create_prealloc_nodes()
1296 struct intel_uc *uc = container_of(guc, typeof(*uc), guc); in __guc_capture_process_output()
1542 guc = gt_to_guc(gt); in intel_guc_capture_is_matching_engine()
1543 if (!guc->capture) in intel_guc_capture_is_matching_engine()
1573 if (!guc->capture) in intel_guc_capture_get_matching_node()
1602 if (guc->capture) in intel_guc_capture_process()
1626 if (!guc->capture) in intel_guc_capture_destroy()
1642 guc->capture = kzalloc(sizeof(*guc->capture), GFP_KERNEL); in intel_guc_capture_init()
1643 if (!guc->capture) in intel_guc_capture_init()
[all …]
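A recurring pattern in the intel_guc_capture.c hits: every entry point bails when guc->capture is NULL, so a failed allocation at init degrades error capture to a no-op rather than an oops. A minimal sketch of the init side (the real intel_guc_capture_init() does more than this):

	int intel_guc_capture_init_sketch(struct intel_guc *guc)
	{
		guc->capture = kzalloc(sizeof(*guc->capture), GFP_KERNEL);
		if (!guc->capture)
			return -ENOMEM;

		return 0;
	}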
intel_guc_submission.h
16 void intel_guc_submission_init_early(struct intel_guc *guc);
17 int intel_guc_submission_init(struct intel_guc *guc);
18 int intel_guc_submission_enable(struct intel_guc *guc);
19 void intel_guc_submission_disable(struct intel_guc *guc);
20 void intel_guc_submission_fini(struct intel_guc *guc);
21 int intel_guc_preempt_work_create(struct intel_guc *guc);
22 void intel_guc_preempt_work_destroy(struct intel_guc *guc);
36 int intel_guc_wait_for_pending_msg(struct intel_guc *guc,
45 return guc->submission_supported; in intel_guc_submission_is_supported()
50 return guc->submission_selected; in intel_guc_submission_is_wanted()
[all …]
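The declarations above imply the usual uC lifecycle split: early setup at probe, allocation at init, enable only after the firmware is up, teardown in reverse. A hedged ordering sketch; the real call sites are in intel_uc.c and error unwinding is elided:

	intel_guc_submission_init_early(guc);   /* probe: latch supported/selected */
	err = intel_guc_submission_init(guc);   /* allocate submission state */
	err = intel_guc_submission_enable(guc); /* after firmware upload */
	/* ... runtime ... */
	intel_guc_submission_disable(guc);
	intel_guc_submission_fini(guc);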
intel_guc_rc.h
11 void intel_guc_rc_init_early(struct intel_guc *guc);
13 static inline bool intel_guc_rc_is_supported(struct intel_guc *guc) in intel_guc_rc_is_supported() argument
15 return guc->rc_supported; in intel_guc_rc_is_supported()
18 static inline bool intel_guc_rc_is_wanted(struct intel_guc *guc) in intel_guc_rc_is_wanted() argument
20 return guc->submission_selected && intel_guc_rc_is_supported(guc); in intel_guc_rc_is_wanted()
23 static inline bool intel_guc_rc_is_used(struct intel_guc *guc) in intel_guc_rc_is_used() argument
25 return intel_guc_submission_is_used(guc) && intel_guc_rc_is_wanted(guc); in intel_guc_rc_is_used()
28 int intel_guc_rc_enable(struct intel_guc *guc);
29 int intel_guc_rc_disable(struct intel_guc *guc);
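intel_guc_rc.c and intel_guc_rc.h together encode a three-level gate: supported (the platform can), wanted (policy selects it), used (GuC submission is actually active). A usage sketch; example_enable_gucrc() is a hypothetical call site:

	static int example_enable_gucrc(struct intel_guc *guc)
	{
		/* Latches rc_supported/rc_selected. */
		intel_guc_rc_init_early(guc);

		/* Needs GuC submission in use plus RC selected. */
		if (!intel_guc_rc_is_used(guc))
			return 0;

		/* Issues the GUCRC control action to the firmware. */
		return intel_guc_rc_enable(guc);
	}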
intel_guc_fw.c
74 intel_guc_ggtt_offset(guc, guc_fw->rsa_data)); in guc_xfer_rsa_vma()
154 static int guc_wait_ucode(struct intel_guc *guc) in guc_wait_ucode() argument
156 struct intel_gt *gt = guc_to_gt(guc); in guc_wait_ucode()
235 guc_info(guc, "firmware exception. EIP: %#x\n", in guc_wait_ucode()
246 guc_info(guc, "invalid w/a KLV entry\n"); in guc_wait_ucode()
251 guc_info(guc, "still extracting hwconfig table.\n"); in guc_wait_ucode()
286 int intel_guc_fw_upload(struct intel_guc *guc) in intel_guc_fw_upload() argument
288 struct intel_gt *gt = guc_to_gt(guc); in intel_guc_fw_upload()
302 ret = guc_xfer_rsa(&guc->fw, uncore); in intel_guc_fw_upload()
310 ret = intel_uc_fw_upload(&guc->fw, 0x2000, UOS_MOVE); in intel_guc_fw_upload()
[all …]
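intel_guc_fw_upload() above decomposes into three visible steps: stage the RSA signature, DMA the image (the 0x2000 offset and UOS_MOVE flag are taken straight from the hit), then poll for the ucode to report ready. A hedged condensation:

	ret = guc_xfer_rsa(&guc->fw, uncore);   /* stage the RSA key */
	if (!ret)
		ret = intel_uc_fw_upload(&guc->fw, 0x2000, UOS_MOVE);
	if (!ret)
		ret = guc_wait_ucode(guc);      /* poll the load status */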
intel_guc_log.c
42 struct intel_guc *guc = log_to_guc(log); in _guc_log_init_sizes() local
545 if (!guc->dbgfs_node) in guc_log_relay_create()
549 guc->dbgfs_node, in guc_log_relay_create()
586 guc_action_flush_log_complete(guc); in guc_log_copy_debuglogs_for_relay()
687 ret = guc_action_control_log(guc, in intel_guc_log_set_level()
785 guc_action_flush_log(guc); in intel_guc_log_relay_flush()
885 struct intel_uc *uc = container_of(guc, struct intel_uc, guc); in intel_guc_log_dump()
891 if (!intel_guc_is_supported(guc)) in intel_guc_log_dump()
896 else if (guc->log.vma) in intel_guc_log_dump()
897 obj = guc->log.vma->obj; in intel_guc_log_dump()
[all …]
intel_guc_slpc.c
36 return guc->submission_supported && in __detect_slpc_supported()
37 GRAPHICS_VER(guc_to_i915(guc)) >= 12; in __detect_slpc_supported()
42 if (!intel_guc_slpc_is_supported(guc)) in __guc_slpc_selected()
45 return guc->submission_selected; in __guc_slpc_selected()
50 struct intel_guc *guc = slpc_to_guc(slpc); in intel_guc_slpc_init_early() local
53 slpc->selected = __guc_slpc_selected(guc); in intel_guc_slpc_init_early()
119 struct intel_guc *guc = slpc_to_guc(slpc); in slpc_set_param_nb() local
163 struct intel_guc *guc = slpc_to_guc(slpc); in slpc_query_task_state() local
167 ret = guc_action_slpc_query(guc, offset); in slpc_query_task_state()
178 struct intel_guc *guc = slpc_to_guc(slpc); in slpc_set_param() local
[all …]
selftest_guc.c
147 struct intel_guc *guc = gt_to_guc(gt); in intel_guc_steal_guc_ids() local
158 guc_err(guc, "Context array allocation failed\n"); in intel_guc_steal_guc_ids()
164 sv = guc->submission_state.num_guc_ids; in intel_guc_steal_guc_ids()
165 guc->submission_state.num_guc_ids = 512; in intel_guc_steal_guc_ids()
264 guc_err(guc, "No guc_id was stolen"); in intel_guc_steal_guc_ids()
284 guc->submission_state.num_guc_ids = sv; in intel_guc_steal_guc_ids()
293 static int bad_h2g(struct intel_guc *guc) in bad_h2g() argument
354 gt->uc.guc.fast_response_selftest = 1; in intel_guc_fast_request()
356 ret = bad_h2g(&gt->uc.guc); in intel_guc_fast_request()
375 if (gt->uc.guc.fast_response_selftest != 2) { in intel_guc_fast_request()
[all …]
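The guc_id-stealing selftest uses a save/override/restore idiom on the id pool so contention can be forced deterministically; sketched from the fragments above:

	sv = guc->submission_state.num_guc_ids;  /* save the real pool size */
	guc->submission_state.num_guc_ids = 512; /* shrink it to force stealing */
	/* ... create more contexts than ids, check one id gets stolen ... */
	guc->submission_state.num_guc_ids = sv;  /* restore before exiting */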
intel_guc_hwconfig.c
34 static int __guc_action_get_hwconfig(struct intel_guc *guc, in __guc_action_get_hwconfig() argument
45 ret = intel_guc_send_mmio(guc, action, ARRAY_SIZE(action), NULL, 0); in __guc_action_get_hwconfig()
52 static int guc_hwconfig_discover_size(struct intel_guc *guc, struct intel_hwconfig *hwconfig) in guc_hwconfig_discover_size() argument
60 ret = __guc_action_get_hwconfig(guc, 0, 0); in guc_hwconfig_discover_size()
71 static int guc_hwconfig_fill_buffer(struct intel_guc *guc, struct intel_hwconfig *hwconfig) in guc_hwconfig_fill_buffer() argument
80 ret = intel_guc_allocate_and_map_vma(guc, hwconfig->size, &vma, &vaddr); in guc_hwconfig_fill_buffer()
84 ggtt_offset = intel_guc_ggtt_offset(guc, vma); in guc_hwconfig_fill_buffer()
86 ret = __guc_action_get_hwconfig(guc, ggtt_offset, hwconfig->size); in guc_hwconfig_fill_buffer()
114 struct intel_guc *guc = gt_to_guc(gt); in guc_hwconfig_init() local
120 ret = guc_hwconfig_discover_size(guc, hwconfig); in guc_hwconfig_init()
[all …]
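The hwconfig hits sketch a two-step handshake: query with a zero offset/size so the firmware reports the table size, then allocate a GGTT-mapped buffer and query again to fill it. A sketch under that reading (the size-as-return convention is an assumption from the fragments; vma cleanup elided):

	static int example_get_hwconfig(struct intel_guc *guc,
					struct intel_hwconfig *hwconfig)
	{
		struct i915_vma *vma;
		void *vaddr;
		int ret;

		/* Step 1: offset/size of 0 asks GuC for the table size. */
		ret = __guc_action_get_hwconfig(guc, 0, 0);
		if (ret <= 0)
			return ret < 0 ? ret : -EINVAL;
		hwconfig->size = ret;

		/* Step 2: allocate a buffer GuC can see and fetch the table. */
		ret = intel_guc_allocate_and_map_vma(guc, hwconfig->size,
						     &vma, &vaddr);
		if (ret)
			return ret;

		return __guc_action_get_hwconfig(guc,
						 intel_guc_ggtt_offset(guc, vma),
						 hwconfig->size);
	}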
intel_guc_ads.h
16 int intel_guc_ads_create(struct intel_guc *guc);
17 void intel_guc_ads_destroy(struct intel_guc *guc);
18 void intel_guc_ads_init_late(struct intel_guc *guc);
19 void intel_guc_ads_reset(struct intel_guc *guc);
20 void intel_guc_ads_print_policy_info(struct intel_guc *guc,
23 u32 intel_guc_engine_usage_offset(struct intel_guc *guc);
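A hedged lifecycle reading of the ADS declarations, combined with the intel_guc_ads.c hits: the vma is created once, late-initialised when the GT is up, re-initialised (not reallocated) across resets, and destroyed on unload:

	err = intel_guc_ads_create(guc);  /* allocate + map guc->ads_vma */
	intel_guc_ads_init_late(guc);     /* late init once the GT is live (assumed) */
	/* ... on GT reset ... */
	intel_guc_ads_reset(guc);         /* rewrite contents, keep the vma */
	/* ... on unload ... */
	intel_guc_ads_destroy(guc);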
intel_guc_slpc.h
17 static inline bool intel_guc_slpc_is_supported(struct intel_guc *guc) in intel_guc_slpc_is_supported() argument
19 return guc->slpc.supported; in intel_guc_slpc_is_supported()
22 static inline bool intel_guc_slpc_is_wanted(struct intel_guc *guc) in intel_guc_slpc_is_wanted() argument
24 return guc->slpc.selected; in intel_guc_slpc_is_wanted()
27 static inline bool intel_guc_slpc_is_used(struct intel_guc *guc) in intel_guc_slpc_is_used() argument
29 return intel_guc_submission_is_used(guc) && intel_guc_slpc_is_wanted(guc); in intel_guc_slpc_is_used()
/linux/drivers/gpu/drm/xe/
xe_guc.c
81 u32 offset = guc_bo_ggtt_addr(guc, guc->log.bo) >> PAGE_SHIFT; in guc_ctl_log_params_flags()
134 u32 ads = guc_bo_ggtt_addr(guc, guc->ads.bo) >> PAGE_SHIFT; in guc_ctl_ads_flags()
210 guc_print_params(guc); in guc_init_params()
224 guc_print_params(guc); in guc_init_params_post_hwconfig()
359 guc_init_params(guc); in xe_guc_init()
374 err = xe_guc_submit_init(guc, xe_gt_sriov_vf_guc_ids(guc_to_gt(guc))); in vf_guc_init_post_hwconfig()
731 guc_wait_ucode(guc); in __xe_guc_upload()
870 guc_enable_irq(guc); in xe_guc_enable_communication()
896 xe_guc_sanitize(guc); in xe_guc_suspend()
960 xe_guc_notify(guc); in xe_guc_mmio_send_recv()
[all …]
xe_guc.h
21 #define GUC_SUBMIT_VER(guc) \ argument
23 #define GUC_FIRMWARE_VER(guc) \ argument
29 int xe_guc_init(struct xe_guc *guc);
32 int xe_guc_reset(struct xe_guc *guc);
33 int xe_guc_upload(struct xe_guc *guc);
36 int xe_guc_suspend(struct xe_guc *guc);
37 void xe_guc_notify(struct xe_guc *guc);
50 void xe_guc_stop(struct xe_guc *guc);
51 int xe_guc_start(struct xe_guc *guc);
77 return container_of(guc, struct xe_gt, uc.guc); in guc_to_gt()
[all …]
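guc_to_gt() above recovers the containing xe_gt from the embedded xe_guc via container_of(); a guc_to_xe() helper built the same way (composition assumed, gt_to_xe() taken as available) completes the chain seen throughout the xe hits:

	static inline struct xe_gt *guc_to_gt(struct xe_guc *guc)
	{
		/* xe_guc is embedded at gt->uc.guc, so walk back up. */
		return container_of(guc, struct xe_gt, uc.guc);
	}

	static inline struct xe_device *guc_to_xe(struct xe_guc *guc)
	{
		return gt_to_xe(guc_to_gt(guc));
	}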
xe_guc_submit.c
575 CIRC_SPACE(q->guc->wqi_tail, q->guc->wqi_head, WQ_SIZE) in wq_wait_for_space()
808 queue_work(guc_to_gt(guc)->ordered_wq, &q->guc->lr_tdr); in xe_guc_exec_queue_trigger_cleanup()
827 xe_gt_assert(guc_to_gt(guc), guc_to_xe(guc)->wedged.mode); in xe_guc_submit_wedge()
1010 q->guc->id, in __deregister_exec_queue()
1406 q->guc = ge; in guc_exec_queue_init()
1818 q->guc->id, in deregister_exec_queue()
1883 atomic_read(&q->guc->state), q->guc->id, in xe_guc_sched_done_handler()
1924 atomic_read(&q->guc->state), q->guc->id); in xe_guc_deregister_done_handler()
2032 snapshot->guc.wqi_head = q->guc->wqi_head; in guc_exec_queue_wq_snapshot_capture()
2033 snapshot->guc.wqi_tail = q->guc->wqi_tail; in guc_exec_queue_wq_snapshot_capture()
[all …]
xe_guc_hwconfig.c
43 int ret = send_get_hwconfig(guc, xe_bo_ggtt_addr(guc->hwconfig.bo), in guc_hwconfig_copy()
44 guc->hwconfig.size); in guc_hwconfig_copy()
55 struct xe_gt *gt = guc_to_gt(guc); in xe_guc_hwconfig_init()
62 if (guc->hwconfig.bo) in xe_guc_hwconfig_init()
75 err = guc_hwconfig_size(guc, &size); in xe_guc_hwconfig_init()
87 guc->hwconfig.bo = bo; in xe_guc_hwconfig_init()
88 guc->hwconfig.size = size; in xe_guc_hwconfig_init()
90 return guc_hwconfig_copy(guc); in xe_guc_hwconfig_init()
95 return !guc->hwconfig.bo ? 0 : guc->hwconfig.size; in xe_guc_hwconfig_size()
102 XE_WARN_ON(!guc->hwconfig.bo); in xe_guc_hwconfig_copy()
[all …]
xe_guc_submit.h
15 int xe_guc_submit_init(struct xe_guc *guc, unsigned int num_ids);
17 int xe_guc_submit_reset_prepare(struct xe_guc *guc);
18 void xe_guc_submit_reset_wait(struct xe_guc *guc);
19 void xe_guc_submit_stop(struct xe_guc *guc);
20 int xe_guc_submit_start(struct xe_guc *guc);
21 void xe_guc_submit_wedge(struct xe_guc *guc);
23 int xe_guc_sched_done_handler(struct xe_guc *guc, u32 *msg, u32 len);
24 int xe_guc_deregister_done_handler(struct xe_guc *guc, u32 *msg, u32 len);
25 int xe_guc_exec_queue_reset_handler(struct xe_guc *guc, u32 *msg, u32 len);
26 int xe_guc_exec_queue_memory_cat_error_handler(struct xe_guc *guc, u32 *msg,
[all …]
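All the G2H event handlers declared above share one (guc, msg, len) signature, which lets the CT layer dispatch on action id without per-handler glue. A minimal hypothetical handler following that contract:

	int example_g2h_handler(struct xe_guc *guc, u32 *msg, u32 len)
	{
		if (len < 1)    /* payload layout is action-specific */
			return -EPROTO;

		/* msg[0..len-1] carries the payload, e.g. a guc_id to look up. */
		return 0;
	}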
xe_uc.c
43 ret = xe_guc_init(&uc->guc); in xe_uc_init()
106 ret = xe_guc_reset(&uc->guc); in uc_reset()
118 xe_guc_sanitize(&uc->guc); in xe_uc_sanitize()
193 ret = xe_guc_upload(&uc->guc); in xe_uc_init_hw()
209 ret = xe_guc_pc_start(&uc->guc.pc); in xe_uc_init_hw()
245 xe_guc_stop_prepare(&uc->guc); in xe_uc_stop_prepare()
254 xe_guc_stop(&uc->guc); in xe_uc_stop()
263 return xe_guc_start(&uc->guc); in xe_uc_start()
271 xe_guc_reset_wait(&uc->guc); in uc_reset_wait()
288 return xe_guc_suspend(&uc->guc); in xe_uc_suspend()
[all …]
xe_guc_debugfs.c
26 struct xe_guc *guc = node_to_guc(m->private); in guc_info() local
27 struct xe_device *xe = guc_to_xe(guc); in guc_info()
31 xe_guc_print_info(guc, &p); in guc_info()
39 struct xe_guc *guc = node_to_guc(m->private); in guc_log() local
40 struct xe_device *xe = guc_to_xe(guc); in guc_log()
44 xe_guc_log_print(&guc->log, &p); in guc_log()
55 void xe_guc_debugfs_register(struct xe_guc *guc, struct dentry *parent) in xe_guc_debugfs_register() argument
57 struct drm_minor *minor = guc_to_xe(guc)->drm.primary; in xe_guc_debugfs_register()
62 local = drmm_kmalloc(&guc_to_xe(guc)->drm, DEBUGFS_SIZE, GFP_KERNEL); in xe_guc_debugfs_register()
70 local[i].data = guc; in xe_guc_debugfs_register()
xe_gt_sriov_pf_policy.c
42 struct xe_guc *guc = &gt->uc.guc; in pf_send_policy_klvs() local
144 &gt->sriov.pf.policy.guc.sched_if_idle, in pf_provision_sched_if_idle()
161 gt->sriov.pf.policy.guc.sched_if_idle = false; in pf_sanitize_sched_if_idle()
199 enable = gt->sriov.pf.policy.guc.sched_if_idle; in xe_gt_sriov_pf_policy_get_sched_if_idle()
211 &gt->sriov.pf.policy.guc.reset_engine, enable); in pf_provision_reset_engine()
227 gt->sriov.pf.policy.guc.reset_engine = false; in pf_sanitize_reset_engine()
265 enable = gt->sriov.pf.policy.guc.reset_engine; in xe_gt_sriov_pf_policy_get_reset_engine()
277 &gt->sriov.pf.policy.guc.sample_period, value); in pf_provision_sample_period()
293 gt->sriov.pf.policy.guc.sample_period = 0; in pf_sanitize_sample_period()
331 value = gt->sriov.pf.policy.guc.sample_period; in xe_gt_sriov_pf_policy_get_sample_period()
[all …]

Completed in 73 milliseconds
