
Searched refs:guc (Results 1 – 25 of 46) sorted by relevance


/linux-6.3-rc2/drivers/gpu/drm/i915/gt/uc/
intel_guc.c
74 guc_send_reg(guc, i), in intel_guc_init_send_regs()
258 u32 ads = intel_guc_ggtt_offset(guc, guc->ads_vma) >> PAGE_SHIFT; in guc_ctl_ads_flags()
333 u32 *params = guc->params; in guc_init_params()
411 GEM_BUG_ON(!guc->ads_vma); in intel_guc_init()
434 guc_init_params(guc); in intel_guc_init()
445 intel_guc_ads_destroy(guc); in intel_guc_init()
451 intel_uc_fw_fini(&guc->fw); in intel_guc_init()
471 intel_guc_ads_destroy(guc); in intel_guc_fini()
474 intel_uc_fw_fini(&guc->fw); in intel_guc_fini()
503 intel_guc_notify(guc); in intel_guc_send_mmio()
[all …]
intel_guc.h
280 #define GUC_SUBMIT_VER(guc) MAKE_GUC_VER_STRUCT((guc)->submission_version) argument
348 if (guc->interrupts.enabled) in intel_guc_to_host_event_handler()
422 return intel_guc_is_fw_running(guc) && intel_guc_ct_enabled(&guc->ct); in intel_guc_is_ready()
427 guc->interrupts.reset(guc); in intel_guc_reset_interrupts()
432 guc->interrupts.enable(guc); in intel_guc_enable_interrupts()
437 guc->interrupts.disable(guc); in intel_guc_disable_interrupts()
442 intel_uc_fw_sanitize(&guc->fw); in intel_guc_sanitize()
445 guc->mmio_msg = 0; in intel_guc_sanitize()
452 spin_lock_irq(&guc->irq_lock); in intel_guc_enable_msg()
453 guc->msg_enabled_mask |= mask; in intel_guc_enable_msg()
[all …]
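
Note: the intel_guc.h hits above are thin inline wrappers, either dispatching through per-platform callbacks stored on struct intel_guc or updating a mask under guc->irq_lock. A minimal sketch of that pattern, reconstructed from the hit lines (the unlock and the surrounding struct layout are assumed, not shown in the hits):

    static inline void intel_guc_enable_interrupts(struct intel_guc *guc)
    {
            guc->interrupts.enable(guc);            /* per-platform callback, hit at line 432 */
    }

    static inline void intel_guc_enable_msg(struct intel_guc *guc, u32 mask)
    {
            spin_lock_irq(&guc->irq_lock);          /* hit at line 452 */
            guc->msg_enabled_mask |= mask;          /* hit at line 453 */
            spin_unlock_irq(&guc->irq_lock);        /* assumed counterpart, not in the hits */
    }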
intel_guc_ads.c
449 addr_ggtt = intel_guc_ggtt_offset(guc, guc->ads_vma) + offset; in guc_mmio_reg_state_init()
451 iosys_map_memcpy_to(&guc->ads_map, offset, guc->ads_regset, in guc_mmio_reg_state_init()
528 addr_ggtt = intel_guc_ggtt_offset(guc, guc->ads_vma) + offset; in guc_prep_golden_context()
612 addr_ggtt = intel_guc_ggtt_offset(guc, guc->ads_vma) + offset; in guc_init_golden_context()
661 ads_ggtt = intel_guc_ggtt_offset(guc, guc->ads_vma); in guc_capture_prep_lists()
793 base = intel_guc_ggtt_offset(guc, guc->ads_vma); in __guc_ads_init()
853 ret = intel_guc_allocate_and_map_vma(guc, size, &guc->ads_vma, in intel_guc_ads_create()
895 iosys_map_memset(&guc->ads_map, guc_ads_private_data_offset(guc), in guc_ads_private_data_reset()
909 if (!guc->ads_vma) in intel_guc_ads_reset()
919 return intel_guc_ggtt_offset(guc, guc->ads_vma) + in intel_guc_engine_usage_offset()
[all …]
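
Note: the recurring idiom in the intel_guc_ads.c hits is that the ADS lives in a single GGTT-mapped vma (guc->ads_vma); GuC-visible addresses are the vma's GGTT offset plus a block offset, while CPU-side writes to the same block go through the iosys_map view. A hedged sketch of the two halves (src and size are placeholders here; the hit at line 451 uses guc->ads_regset):

    /* GPU-visible address of a sub-block inside the ADS */
    u32 addr_ggtt = intel_guc_ggtt_offset(guc, guc->ads_vma) + offset;

    /* CPU write to the same sub-block through the iosys_map view */
    iosys_map_memcpy_to(&guc->ads_map, offset, src, size);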
intel_guc_debugfs.c
19 struct intel_guc *guc = m->private; in guc_info_show() local
22 if (!intel_guc_is_supported(guc)) in guc_info_show()
25 intel_guc_load_status(guc, &p); in guc_info_show()
27 intel_guc_log_info(&guc->log, &p); in guc_info_show()
42 struct intel_guc *guc = m->private; in guc_registered_contexts_show() local
56 struct intel_guc *guc = m->private; in guc_slpc_info_show() local
60 if (!intel_guc_slpc_is_used(guc)) in guc_slpc_info_show()
76 struct intel_guc *guc = data; in guc_sched_disable_delay_ms_get() local
88 struct intel_guc *guc = data; in guc_sched_disable_delay_ms_set() local
104 struct intel_guc *guc = data; in guc_sched_disable_gucid_threshold_get() local
[all …]
intel_uc.c
166 struct intel_guc *guc = &uc->guc; in __uc_capture_load_err_log() local
327 struct intel_guc *guc = &uc->guc; in __uc_init() local
361 struct intel_guc *guc = &uc->guc; in __uc_sanitize() local
464 struct intel_guc *guc = &uc->guc; in __uc_init_hw() local
586 struct intel_guc *guc = &uc->guc; in __uc_fini_hw() local
605 struct intel_guc *guc = &uc->guc; in intel_uc_reset_prepare() local
626 struct intel_guc *guc = &uc->guc; in intel_uc_reset() local
635 struct intel_guc *guc = &uc->guc; in intel_uc_reset_finish() local
646 struct intel_guc *guc = &uc->guc; in intel_uc_cancel_requests() local
655 struct intel_guc *guc = &uc->guc; in intel_uc_runtime_suspend() local
[all …]
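
Note: almost every intel_uc.c entry point above begins by taking a pointer to the embedded GuC, and several other files in this listing (intel_guc_log.c, intel_guc_ct.c, intel_guc_log_debugfs.c) go back the other way with container_of(). Both directions, exactly as they appear in the hits:

    struct intel_guc *guc = &uc->guc;                               /* uc -> guc */
    struct intel_uc *uc = container_of(guc, struct intel_uc, guc);  /* guc -> uc */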
intel_guc_submission.c
1235 struct intel_guc *guc = &engine->gt->uc.guc; in guc_update_engine_gt_clks() local
1300 struct intel_guc *guc = &gt->uc.guc; in guc_engine_busyness() local
1453 struct intel_guc *guc = &gt->uc.guc; in intel_guc_busyness_park() local
1480 struct intel_guc *guc = &gt->uc.guc; in intel_guc_busyness_unpark() local
1553 guc->interrupts.disable(guc); in intel_guc_submission_reset_prepare()
2447 struct intel_guc *guc = &engine->gt->uc.guc; in guc_context_policy_init_v70() local
2523 struct intel_guc *guc = &engine->gt->uc.guc; in prepare_context_registration_info_v69() local
2592 struct intel_guc *guc = &engine->gt->uc.guc; in prepare_context_registration_info_v70() local
2655 struct intel_guc *guc = &engine->gt->uc.guc; in try_context_registration() local
4277 guc->sched_engine->private_data = guc; in intel_guc_submission_setup()
[all …]
intel_guc_rc.c
14 struct intel_gt *gt = guc_to_gt(guc); in __guc_rc_supported()
26 return guc->submission_supported && in __guc_rc_supported()
32 if (!intel_guc_rc_is_supported(guc)) in __guc_rc_selected()
35 return guc->submission_selected; in __guc_rc_selected()
40 guc->rc_supported = __guc_rc_supported(guc); in intel_guc_rc_init_early()
41 guc->rc_selected = __guc_rc_selected(guc); in intel_guc_rc_init_early()
62 struct intel_gt *gt = guc_to_gt(guc); in __guc_rc_control()
68 if (!intel_guc_is_ready(guc)) in __guc_rc_control()
71 ret = guc_action_control_gucrc(guc, enable); in __guc_rc_control()
86 return __guc_rc_control(guc, true); in intel_guc_rc_enable()
[all …]
intel_guc_capture.c
688 if (!guc->capture) in guc_capture_output_min_size_est()
1165 guc->capture->max_mmio_per_node = guc_get_max_reglist_count(guc); in guc_capture_create_prealloc_nodes()
1369 struct intel_uc *uc = container_of(guc, typeof(*uc), guc); in __guc_capture_process_output()
1513 guc = &ee->engine->gt->uc.guc; in intel_guc_capture_print_engine_node()
1596 guc = &gt->uc.guc; in intel_guc_capture_get_matching_node()
1597 if (!guc->capture) in intel_guc_capture_get_matching_node()
1623 if (guc->capture) in intel_guc_capture_process()
1647 if (!guc->capture) in intel_guc_capture_destroy()
1663 guc->capture = kzalloc(sizeof(*guc->capture), GFP_KERNEL); in intel_guc_capture_init()
1664 if (!guc->capture) in intel_guc_capture_init()
[all …]
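
Note: the intel_guc_capture.c hits show guc->capture being allocated lazily at init and every other entry point bailing out when it is absent. A minimal sketch of that guard pattern (only the kzalloc and the NULL checks come from the hits; the error value and the elided bodies are assumptions):

    int intel_guc_capture_init(struct intel_guc *guc)
    {
            guc->capture = kzalloc(sizeof(*guc->capture), GFP_KERNEL);  /* hit at line 1663 */
            if (!guc->capture)
                    return -ENOMEM;                 /* error value assumed */
            return 0;                               /* remaining setup elided */
    }

    void intel_guc_capture_process(struct intel_guc *guc)
    {
            if (!guc->capture)                      /* same guard as lines 1597/1623/1647 */
                    return;
            /* process capture output, elided */
    }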
intel_guc_submission.h
16 void intel_guc_submission_init_early(struct intel_guc *guc);
17 int intel_guc_submission_init(struct intel_guc *guc);
18 void intel_guc_submission_enable(struct intel_guc *guc);
19 void intel_guc_submission_disable(struct intel_guc *guc);
20 void intel_guc_submission_fini(struct intel_guc *guc);
21 int intel_guc_preempt_work_create(struct intel_guc *guc);
22 void intel_guc_preempt_work_destroy(struct intel_guc *guc);
36 int intel_guc_wait_for_pending_msg(struct intel_guc *guc,
43 return guc->submission_supported; in intel_guc_submission_is_supported()
48 return guc->submission_selected; in intel_guc_submission_is_wanted()
[all …]
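
Note: the intel_guc_submission.h hits outline the submission lifecycle (init_early, init, enable, disable, fini) plus two predicates returning cached flags. The predicates reconstructed as inline helpers (the "static inline bool" wrappers are assumed from the matching pattern in intel_guc_rc.h below; the return expressions are taken from the hits at lines 43 and 48):

    static inline bool intel_guc_submission_is_supported(struct intel_guc *guc)
    {
            return guc->submission_supported;
    }

    static inline bool intel_guc_submission_is_wanted(struct intel_guc *guc)
    {
            return guc->submission_selected;
    }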
intel_guc_rc.h
11 void intel_guc_rc_init_early(struct intel_guc *guc);
13 static inline bool intel_guc_rc_is_supported(struct intel_guc *guc) in intel_guc_rc_is_supported() argument
15 return guc->rc_supported; in intel_guc_rc_is_supported()
18 static inline bool intel_guc_rc_is_wanted(struct intel_guc *guc) in intel_guc_rc_is_wanted() argument
20 return guc->submission_selected && intel_guc_rc_is_supported(guc); in intel_guc_rc_is_wanted()
23 static inline bool intel_guc_rc_is_used(struct intel_guc *guc) in intel_guc_rc_is_used() argument
25 return intel_guc_submission_is_used(guc) && intel_guc_rc_is_wanted(guc); in intel_guc_rc_is_used()
28 int intel_guc_rc_enable(struct intel_guc *guc);
29 int intel_guc_rc_disable(struct intel_guc *guc);
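
Note: intel_guc_rc.h appears nearly in full above: three layered predicates, where intel_guc_rc_is_used() requires submission to be in use and RC to be wanted. A hypothetical call site gating on the final predicate (illustrative only, not taken from the source; guc_info() is borrowed from the intel_guc_fw.c hits):

    if (intel_guc_rc_is_used(guc)) {
            int err = intel_guc_rc_enable(guc);

            if (err)
                    guc_info(guc, "GuC RC enable failed: %d\n", err);
    }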
intel_guc_fw.c
73 intel_guc_ggtt_offset(guc, guc_fw->rsa_data)); in guc_xfer_rsa_vma()
106 static int guc_wait_ucode(struct intel_guc *guc) in guc_wait_ucode() argument
108 struct intel_gt *gt = guc_to_gt(guc); in guc_wait_ucode()
132 guc_info(guc, "load failed: status = 0x%08X\n", status); in guc_wait_ucode()
133 guc_info(guc, "load failed: status: Reset = %d, " in guc_wait_ucode()
148 guc_info(guc, "firmware exception. EIP: %#x\n", in guc_wait_ucode()
169 int intel_guc_fw_upload(struct intel_guc *guc) in intel_guc_fw_upload() argument
171 struct intel_gt *gt = guc_to_gt(guc); in intel_guc_fw_upload()
185 ret = guc_xfer_rsa(&guc->fw, uncore); in intel_guc_fw_upload()
193 ret = intel_uc_fw_upload(&guc->fw, 0x2000, UOS_MOVE); in intel_guc_fw_upload()
[all …]
intel_guc_log.c
42 struct intel_guc *guc = log_to_guc(log); in _guc_log_init_sizes() local
377 struct intel_guc *guc = log_to_guc(log); in _guc_log_copy_debuglogs_for_relay() local
523 struct intel_guc *guc = log_to_guc(log); in guc_log_relay_create() local
584 guc_action_flush_log_complete(guc); in guc_log_copy_debuglogs_for_relay()
685 ret = guc_action_control_log(guc, in intel_guc_log_set_level()
783 guc_action_flush_log(guc); in intel_guc_log_relay_flush()
883 struct intel_uc *uc = container_of(guc, struct intel_uc, guc); in intel_guc_log_dump()
889 if (!intel_guc_is_supported(guc)) in intel_guc_log_dump()
894 else if (guc->log.vma) in intel_guc_log_dump()
895 obj = guc->log.vma->obj; in intel_guc_log_dump()
[all …]
intel_guc_hwconfig.c
34 static int __guc_action_get_hwconfig(struct intel_guc *guc, in __guc_action_get_hwconfig() argument
45 ret = intel_guc_send_mmio(guc, action, ARRAY_SIZE(action), NULL, 0); in __guc_action_get_hwconfig()
52 static int guc_hwconfig_discover_size(struct intel_guc *guc, struct intel_hwconfig *hwconfig) in guc_hwconfig_discover_size() argument
60 ret = __guc_action_get_hwconfig(guc, 0, 0); in guc_hwconfig_discover_size()
71 static int guc_hwconfig_fill_buffer(struct intel_guc *guc, struct intel_hwconfig *hwconfig) in guc_hwconfig_fill_buffer() argument
80 ret = intel_guc_allocate_and_map_vma(guc, hwconfig->size, &vma, &vaddr); in guc_hwconfig_fill_buffer()
84 ggtt_offset = intel_guc_ggtt_offset(guc, vma); in guc_hwconfig_fill_buffer()
86 ret = __guc_action_get_hwconfig(guc, ggtt_offset, hwconfig->size); in guc_hwconfig_fill_buffer()
114 struct intel_guc *guc = &gt->uc.guc; in guc_hwconfig_init() local
120 ret = guc_hwconfig_discover_size(guc, hwconfig); in guc_hwconfig_init()
[all …]
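
Note: the intel_guc_hwconfig.c hits describe a two-phase MMIO query: the action is first sent with a zero offset and size to discover how large the hwconfig blob is, then a GuC-accessible buffer is allocated and the action is repeated with the real GGTT offset and size. A condensed sketch of that flow (error handling and cleanup elided; treating the phase-1 return value as the size is an assumption based on the discover_size helper):

    /* Phase 1: size discovery (hit at line 60) */
    ret = __guc_action_get_hwconfig(guc, 0, 0);

    /* Phase 2: fetch into a GGTT-mapped buffer (hits at lines 80-86) */
    ret = intel_guc_allocate_and_map_vma(guc, hwconfig->size, &vma, &vaddr);
    ggtt_offset = intel_guc_ggtt_offset(guc, vma);
    ret = __guc_action_get_hwconfig(guc, ggtt_offset, hwconfig->size);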
intel_guc_slpc.c
35 return guc->submission_supported && in __detect_slpc_supported()
36 GRAPHICS_VER(guc_to_gt(guc)->i915) >= 12; in __detect_slpc_supported()
41 if (!intel_guc_slpc_is_supported(guc)) in __guc_slpc_selected()
44 return guc->submission_selected; in __guc_slpc_selected()
49 struct intel_guc *guc = slpc_to_guc(slpc); in intel_guc_slpc_init_early() local
52 slpc->selected = __guc_slpc_selected(guc); in intel_guc_slpc_init_early()
118 struct intel_guc *guc = slpc_to_guc(slpc); in slpc_set_param_nb() local
173 struct intel_guc *guc = slpc_to_guc(slpc); in slpc_query_task_state() local
178 ret = guc_action_slpc_query(guc, offset); in slpc_query_task_state()
190 struct intel_guc *guc = slpc_to_guc(slpc); in slpc_set_param() local
[all …]
intel_guc_ads.h
16 int intel_guc_ads_create(struct intel_guc *guc);
17 void intel_guc_ads_destroy(struct intel_guc *guc);
18 void intel_guc_ads_init_late(struct intel_guc *guc);
19 void intel_guc_ads_reset(struct intel_guc *guc);
20 void intel_guc_ads_print_policy_info(struct intel_guc *guc,
23 u32 intel_guc_engine_usage_offset(struct intel_guc *guc);
intel_guc_slpc.h
17 static inline bool intel_guc_slpc_is_supported(struct intel_guc *guc) in intel_guc_slpc_is_supported() argument
19 return guc->slpc.supported; in intel_guc_slpc_is_supported()
22 static inline bool intel_guc_slpc_is_wanted(struct intel_guc *guc) in intel_guc_slpc_is_wanted() argument
24 return guc->slpc.selected; in intel_guc_slpc_is_wanted()
27 static inline bool intel_guc_slpc_is_used(struct intel_guc *guc) in intel_guc_slpc_is_used() argument
29 return intel_guc_submission_is_used(guc) && intel_guc_slpc_is_wanted(guc); in intel_guc_slpc_is_used()
intel_guc_capture.h
23 void intel_guc_capture_process(struct intel_guc *guc);
24 int intel_guc_capture_getlist(struct intel_guc *guc, u32 owner, u32 type, u32 classid,
26 int intel_guc_capture_getlistsize(struct intel_guc *guc, u32 owner, u32 type, u32 classid,
28 int intel_guc_capture_getnullheader(struct intel_guc *guc, void **outptr, size_t *size);
29 void intel_guc_capture_destroy(struct intel_guc *guc);
30 int intel_guc_capture_init(struct intel_guc *guc);
intel_uc.h
32 struct intel_guc guc; member
87 uc_state_checkers(guc, guc);
89 uc_state_checkers(guc, guc_submission);
90 uc_state_checkers(guc, guc_slpc);
91 uc_state_checkers(guc, guc_rc);
99 return intel_guc_wait_for_idle(&uc->guc, timeout); in intel_uc_wait_for_idle()
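
Note: the intel_uc.h hits instantiate a uc_state_checkers() macro for guc, guc_submission, guc_slpc and guc_rc, all backed by the single struct intel_guc member. The macro body is not part of the hits; a guess at its shape, based on the per-component *_is_supported/_is_wanted/_is_used helpers elsewhere in this listing (names and expansion are assumptions):

    /* Guessed shape only -- the real definition is not shown in the hits. */
    #define uc_state_checkers(x, func) \
    static inline bool intel_uc_uses_##func(struct intel_uc *uc) \
    { \
            return intel_##func##_is_used(&uc->x); \
    }
    /* presumably with matching supports_/wants_ wrappers */

    uc_state_checkers(guc, guc_submission);  /* -> intel_uc_uses_guc_submission() */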
intel_guc_log_debugfs.c
33 struct intel_guc *guc = log_to_guc(log); in guc_log_dump_size() local
35 if (!intel_guc_is_supported(guc)) in guc_log_dump_size()
61 struct intel_guc *guc = log_to_guc(log); in guc_load_err_dump_size() local
62 struct intel_uc *uc = container_of(guc, struct intel_uc, guc); in guc_load_err_dump_size()
64 if (!intel_guc_is_supported(guc)) in guc_load_err_dump_size()
selftest_guc.c
145 struct intel_guc *guc = &gt->uc.guc; in intel_guc_steal_guc_ids() local
152 int number_guc_id_stolen = guc->number_guc_id_stolen; in intel_guc_steal_guc_ids()
162 sv = guc->submission_state.num_guc_ids; in intel_guc_steal_guc_ids()
163 guc->submission_state.num_guc_ids = 512; in intel_guc_steal_guc_ids()
261 if (guc->number_guc_id_stolen == number_guc_id_stolen) { in intel_guc_steal_guc_ids()
282 guc->submission_state.num_guc_ids = sv; in intel_guc_steal_guc_ids()
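
Note: the selftest_guc.c hits show the guc_id-stealing selftest saving the configured ID count, shrinking it to force stealing, and restoring it afterwards while checking that number_guc_id_stolen advanced. The save/override/restore skeleton as visible in the hits (the workload in between is elided):

    int stolen_before = guc->number_guc_id_stolen;
    u32 sv = guc->submission_state.num_guc_ids;

    guc->submission_state.num_guc_ids = 512;        /* shrink the ID space */

    /* ... submit enough contexts to exhaust the shrunk ID space ... */

    if (guc->number_guc_id_stolen == stolen_before)
            ;                                       /* failure path in the selftest */

    guc->submission_state.num_guc_ids = sv;         /* restore */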
intel_guc_ct.c
205 struct intel_guc *guc = ct_to_guc(ct); in intel_guc_ct_init() local
214 err = i915_inject_probe_error(guc_to_gt(guc)->i915, -ENXIO); in intel_guc_ct_init()
278 struct intel_guc *guc = ct_to_guc(ct); in intel_guc_ct_enable() local
288 base = intel_guc_ggtt_offset(guc, ct->vma); in intel_guc_ct_enable()
336 struct intel_guc *guc = ct_to_guc(ct); in intel_guc_ct_disable() local
342 if (intel_guc_is_fw_running(guc)) { in intel_guc_ct_disable()
756 struct intel_guc *guc = ct_to_guc(ct); in intel_guc_ct_send() local
757 struct intel_uc *uc = container_of(guc, struct intel_uc, guc); in intel_guc_ct_send()
974 struct intel_guc *guc = ct_to_guc(ct); in ct_process_request() local
993 ret = intel_guc_deregister_done_process_msg(guc, payload, in ct_process_request()
[all …]
intel_uc_fw.c
537 guc->submission_version.major = 1; in guc_read_css_info()
538 guc->submission_version.minor = 1; in guc_read_css_info()
539 guc->submission_version.patch = 0; in guc_read_css_info()
548 guc->submission_version.major = 0; in guc_read_css_info()
550 guc->submission_version.patch = 0; in guc_read_css_info()
553 guc->submission_version.major = 0; in guc_read_css_info()
554 guc->submission_version.minor = 1; in guc_read_css_info()
555 guc->submission_version.patch = 0; in guc_read_css_info()
649 guc->submission_version.major, in guc_check_version_range()
650 guc->submission_version.minor, in guc_check_version_range()
[all …]
/linux-6.3-rc2/drivers/gpu/drm/i915/gt/
intel_wopcm.h
24 } guc; member
39 return wopcm->guc.base; in intel_wopcm_guc_base()
54 return wopcm->guc.size; in intel_wopcm_guc_size()
intel_wopcm.c
237 u32 guc_fw_size = intel_uc_fw_get_upload_size(&gt->uc.guc.fw); in intel_wopcm_init()
248 GEM_BUG_ON(wopcm->guc.base); in intel_wopcm_init()
249 GEM_BUG_ON(wopcm->guc.size); in intel_wopcm_init()
317 wopcm->guc.base = guc_wopcm_base; in intel_wopcm_init()
318 wopcm->guc.size = guc_wopcm_size; in intel_wopcm_init()
319 GEM_BUG_ON(!wopcm->guc.base); in intel_wopcm_init()
320 GEM_BUG_ON(!wopcm->guc.size); in intel_wopcm_init()
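
Note: the intel_wopcm.c hits show the GuC WOPCM partition being computed once during intel_wopcm_init(): the required size is derived from the firmware upload size, the base/size pair is asserted to be unset beforehand and non-zero afterwards, and intel_wopcm.h exposes the pair through the trivial accessors above. A condensed sketch of that sequence (the partitioning math between the asserts is not part of the hits and is elided):

    u32 guc_fw_size = intel_uc_fw_get_upload_size(&gt->uc.guc.fw);

    GEM_BUG_ON(wopcm->guc.base);            /* must not be initialised twice */
    GEM_BUG_ON(wopcm->guc.size);

    /* ... base/size calculation elided ... */

    wopcm->guc.base = guc_wopcm_base;
    wopcm->guc.size = guc_wopcm_size;
    GEM_BUG_ON(!wopcm->guc.base);
    GEM_BUG_ON(!wopcm->guc.size);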
/linux-6.3-rc2/drivers/gpu/drm/i915/
i915_gpu_error.c
1030 i915_vma_coredump_free(uc->guc.vma_log); in cleanup_uc()
1031 i915_vma_coredump_free(uc->guc.vma_ctb); in cleanup_uc()
1713 memcpy(&error_uc->guc_fw, &uc->guc.fw, sizeof(uc->guc.fw)); in gt_record_uc()
1727 error_uc->guc.vma_log = create_vma_coredump(gt->_gt, uc->guc.log.vma, in gt_record_uc()
1729 error_uc->guc.vma_ctb = create_vma_coredump(gt->_gt, uc->guc.ct.vma, in gt_record_uc()
1731 error_uc->guc.last_fence = uc->guc.ct.requests.last_fence; in gt_record_uc()
1732 gt_record_guc_ctb(error_uc->guc.ctb + 0, &uc->guc.ct.ctbs.send, in gt_record_uc()
1733 uc->guc.ct.ctbs.send.desc, (struct intel_guc *)&uc->guc); in gt_record_uc()
1734 gt_record_guc_ctb(error_uc->guc.ctb + 1, &uc->guc.ct.ctbs.recv, in gt_record_uc()
1735 uc->guc.ct.ctbs.send.desc, (struct intel_guc *)&uc->guc); in gt_record_uc()
[all …]

Completed in 70 milliseconds
