Searched refs:xe_gt_assert (Results 1 – 25 of 36) sorted by relevance

/linux/drivers/gpu/drm/xe/
xe_gt_sriov_vf.c
127 xe_gt_assert(gt, IS_SRIOV_VF(gt_to_xe(gt))); in vf_handshake_with_guc()
312 xe_gt_assert(gt, IS_SRIOV_VF(gt_to_xe(gt))); in xe_gt_sriov_vf_gmdid()
334 xe_gt_assert(gt, IS_SRIOV_VF(gt_to_xe(gt))); in vf_get_ggtt_info()
367 xe_gt_assert(gt, IS_SRIOV_VF(gt_to_xe(gt))); in vf_get_lmem_info()
425 xe_gt_assert(gt, has_gmdid(gt_to_xe(gt))); in vf_cache_gmdid()
525 xe_gt_assert(gt, IS_SRIOV_VF(xe)); in vf_balloon_ggtt()
526 xe_gt_assert(gt, !xe_gt_is_media_type(gt)); in vf_balloon_ggtt()
709 xe_gt_assert(gt, IS_SRIOV_VF(xe)); in vf_prepare_runtime_info()
744 xe_gt_assert(gt, limit); in vf_query_runtime_info()
889 xe_gt_assert(gt, !reg.vf); in xe_gt_sriov_vf_read32()
[all …]
xe_gt_sriov_pf_monitor.c
28 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in xe_gt_sriov_pf_monitor_flr()
38 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_update_event_counter()
39 xe_gt_assert(gt, e < XE_GUC_KLV_NUM_THRESHOLDS); in pf_update_event_counter()
84 xe_gt_assert(gt, len >= GUC_HXG_MSG_MIN_LEN); in xe_gt_sriov_pf_monitor_process_guc2pf()
85 xe_gt_assert(gt, FIELD_GET(GUC_HXG_MSG_0_ORIGIN, msg[0]) == GUC_HXG_ORIGIN_GUC); in xe_gt_sriov_pf_monitor_process_guc2pf()
86 xe_gt_assert(gt, FIELD_GET(GUC_HXG_MSG_0_TYPE, msg[0]) == GUC_HXG_TYPE_EVENT); in xe_gt_sriov_pf_monitor_process_guc2pf()
87 xe_gt_assert(gt, FIELD_GET(GUC_HXG_EVENT_MSG_0_ACTION, msg[0]) == in xe_gt_sriov_pf_monitor_process_guc2pf()
124 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in xe_gt_sriov_pf_monitor_print_events()
xe_gt_sriov_pf_config.c
218 xe_gt_assert(gt, key); in pf_push_vf_cfg_threshold()
410 xe_gt_assert(gt, vfid); in pf_provision_vf_ggtt()
544 xe_gt_assert(gt, first); in pf_config_bulk_set_u64_done()
585 xe_gt_assert(gt, vfid); in xe_gt_sriov_pf_config_bulk_set_ggtt()
655 xe_gt_assert(gt, vfid); in xe_gt_sriov_pf_config_set_fair_ggtt()
736 xe_gt_assert(gt, vfid); in pf_provision_vf_ctxs()
903 xe_gt_assert(gt, vfid); in xe_gt_sriov_pf_config_bulk_set_ctxs()
955 xe_gt_assert(gt, vfid); in xe_gt_sriov_pf_config_set_fair_ctxs()
1033 xe_gt_assert(gt, vfid); in pf_provision_vf_dbs()
1147 xe_gt_assert(gt, vfid); in xe_gt_sriov_pf_config_bulk_set_dbs()
[all …]
xe_gt_sriov_pf_policy.c
140 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_provision_sched_if_idle()
150 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_reprovision_sched_if_idle()
158 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_sanitize_sched_if_idle()
196 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in xe_gt_sriov_pf_policy_get_sched_if_idle()
207 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_provision_reset_engine()
216 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_reprovision_reset_engine()
224 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_sanitize_reset_engine()
262 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in xe_gt_sriov_pf_policy_get_reset_engine()
273 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_provision_sample_period()
282 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_reprovision_sample_period()
[all …]
xe_bb.c
71 xe_gt_assert(q->gt, bb->len * 4 + bb_prefetch(q->gt) <= size); in __xe_bb_create_job()
89 xe_gt_assert(q->gt, second_idx <= bb->len); in xe_bb_create_migration_job()
90 xe_gt_assert(q->gt, xe_sched_job_is_migration(q)); in xe_bb_create_migration_job()
91 xe_gt_assert(q->gt, q->width == 1); in xe_bb_create_migration_job()
101 xe_gt_assert(q->gt, !xe_sched_job_is_migration(q)); in xe_bb_create_job()
102 xe_gt_assert(q->gt, q->width == 1); in xe_bb_create_job()
xe_gt_sriov_pf_service.c
45 xe_gt_assert(gt, base.major); in pf_negotiate_version()
46 xe_gt_assert(gt, base.major <= latest.major); in pf_negotiate_version()
73 xe_gt_assert(gt, base.major == latest.major); in pf_negotiate_version()
86 xe_gt_assert(gt, major || minor); in pf_connect()
213 xe_gt_assert(gt, IS_SRIOV_PF(xe)); in pf_alloc_runtime_info()
214 xe_gt_assert(gt, !gt->sriov.pf.service.runtime.size); in pf_alloc_runtime_info()
368 xe_gt_assert(gt, major || minor); in pf_process_handshake_msg()
394 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_service_runtime_query()
472 xe_gt_assert(gt, msg_len >= GUC_HXG_MSG_MIN_LEN); in xe_gt_sriov_pf_service_process_request()
508 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in xe_gt_sriov_pf_service_print_runtime()
[all …]
xe_force_wake.h
27 xe_gt_assert(fw->gt, domain != XE_FORCEWAKE_ALL); in xe_force_wake_ref()
45 xe_gt_assert(fw->gt, domain != XE_FORCEWAKE_ALL); in xe_force_wake_assert_held()
46 xe_gt_assert(fw->gt, fw->awake_domains & domain); in xe_force_wake_assert_held()
xe_gt_sriov_pf_debugfs.c
143 xe_gt_assert(gt, gt == extract_gt(parent)); in pf_add_policy_attrs()
144 xe_gt_assert(gt, PFID == extract_vfid(parent)); in pf_add_policy_attrs()
274 xe_gt_assert(gt, gt == extract_gt(parent)); in MAKE_XE_GUC_KLV_THRESHOLDS_SET()
275 xe_gt_assert(gt, vfid == extract_vfid(parent)); in MAKE_XE_GUC_KLV_THRESHOLDS_SET()
328 xe_gt_assert(gt, vfid); in control_write()
343 xe_gt_assert(gt, sizeof(cmd) > strlen(control_cmds[n].cmd)); in control_write()
394 xe_gt_assert(gt, IS_SRIOV_PF(xe)); in xe_gt_sriov_pf_debugfs_register()
395 xe_gt_assert(gt, root->d_inode->i_private == gt); in xe_gt_sriov_pf_debugfs_register()
xe_gt_tlb_invalidation.c
182 xe_gt_assert(gt, fence); in send_tlb_invalidation()
331 xe_gt_assert(gt, fence); in xe_gt_tlb_invalidation_range()
375 xe_gt_assert(gt, length >= SZ_4K); in xe_gt_tlb_invalidation_range()
376 xe_gt_assert(gt, is_power_of_2(length)); in xe_gt_tlb_invalidation_range()
377 xe_gt_assert(gt, !(length & GENMASK(ilog2(SZ_16M) - 1, in xe_gt_tlb_invalidation_range()
379 xe_gt_assert(gt, IS_ALIGNED(start, length)); in xe_gt_tlb_invalidation_range()
388 xe_gt_assert(gt, len <= MAX_TLB_INVALIDATION_LEN); in xe_gt_tlb_invalidation_range()
410 xe_gt_assert(gt, vma); in xe_gt_tlb_invalidation_vma()
xe_guc_ct.c
176 xe_gt_assert(gt, !(guc_ct_size() % PAGE_SIZE)); in xe_guc_ct_init()
396 xe_gt_assert(gt, !xe_guc_ct_enabled(ct)); in xe_guc_ct_enable()
559 xe_gt_assert(gt, tail <= h2g->info.size); in h2g_write()
642 xe_gt_assert(gt, !g2h_len || !g2h_fence); in __guc_ct_send_locked()
643 xe_gt_assert(gt, !num_g2h || !g2h_fence); in __guc_ct_send_locked()
644 xe_gt_assert(gt, !g2h_len || num_g2h); in __guc_ct_send_locked()
645 xe_gt_assert(gt, g2h_len || !num_g2h); in __guc_ct_send_locked()
663 xe_gt_assert(gt, xe_guc_ct_enabled(ct)); in __guc_ct_send_locked()
723 xe_gt_assert(gt, !g2h_len || !g2h_fence); in guc_ct_send_locked()
1043 xe_gt_assert(gt, fence == g2h_fence->seqno); in parse_g2h_response()
[all …]
xe_gt_sriov_pf_control.c
58 xe_gt_assert(gt, vfid != PFID); in pf_send_vf_control_cmd()
212 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_pick_vf_control()
213 xe_gt_assert(gt, vfid <= xe_gt_sriov_pf_get_totalvfs(gt)); in pf_pick_vf_control()
330 xe_gt_assert(gt, IS_SRIOV_PF(xe)); in pf_queue_control_worker()
339 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_queue_vf()
1279 xe_gt_assert(gt, len); in xe_gt_sriov_pf_control_process_guc2pf()
1373 xe_gt_assert(gt, vfid <= xe_gt_sriov_pf_get_totalvfs(gt)); in pf_worker_find_work()
1386 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in control_worker_func()
1392 xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt))); in pf_stop_worker()
1416 xe_gt_assert(gt, IS_SRIOV_PF(xe)); in xe_gt_sriov_pf_control_init()
[all …]
xe_gt_mcr.c
349 xe_gt_assert(gt, dss < XE_MAX_DSS_FUSE_BITS); in xe_gt_mcr_get_dss_steering()
676 xe_gt_assert(gt, !IS_SRIOV_VF(gt_to_xe(gt))); in xe_gt_mcr_unicast_read_any()
709 xe_gt_assert(gt, !IS_SRIOV_VF(gt_to_xe(gt))); in xe_gt_mcr_unicast_read()
732 xe_gt_assert(gt, !IS_SRIOV_VF(gt_to_xe(gt))); in xe_gt_mcr_unicast_write()
752 xe_gt_assert(gt, !IS_SRIOV_VF(gt_to_xe(gt))); in xe_gt_mcr_multicast_write()
xe_guc_submit.c
827 xe_gt_assert(guc_to_gt(guc), guc_to_xe(guc)->wedged.mode); in xe_guc_submit_wedge()
937 xe_gt_assert(gt, timeout_ms < 100 * MSEC_PER_SEC); in check_timeout()
964 xe_gt_assert(guc_to_gt(guc), !exec_queue_destroyed(q)); in enable_scheduling()
965 xe_gt_assert(guc_to_gt(guc), exec_queue_registered(q)); in enable_scheduling()
992 xe_gt_assert(guc_to_gt(guc), !exec_queue_destroyed(q)); in disable_scheduling()
993 xe_gt_assert(guc_to_gt(guc), exec_queue_registered(q)); in disable_scheduling()
1013 xe_gt_assert(guc_to_gt(guc), !exec_queue_destroyed(q)); in __deregister_exec_queue()
1014 xe_gt_assert(guc_to_gt(guc), exec_queue_registered(q)); in __deregister_exec_queue()
1821 xe_gt_assert(guc_to_gt(guc), exec_queue_destroyed(q)); in deregister_exec_queue()
1822 xe_gt_assert(guc_to_gt(guc), exec_queue_registered(q)); in deregister_exec_queue()
[all …]
xe_ring_ops.c
273 xe_gt_assert(gt, i <= MAX_JOB_SIZE_DW); in __emit_job_gen12_simple()
336 xe_gt_assert(gt, i <= MAX_JOB_SIZE_DW); in __emit_job_gen12_video()
387 xe_gt_assert(gt, i <= MAX_JOB_SIZE_DW); in __emit_job_gen12_render_compute()
423 xe_gt_assert(job->q->gt, i <= MAX_JOB_SIZE_DW); in emit_migration_job_gen12()
432 xe_gt_assert(gt, job->q->width <= 1); /* no parallel submission for GSCCS */ in emit_job_gen12_gsc()
xe_guc_ads.c
437 xe_gt_assert(gt, ads->bo); in xe_guc_ads_init_post_hwconfig()
442 xe_gt_assert(gt, ads->golden_lrc_size + in xe_guc_ads_init_post_hwconfig()
654 xe_gt_assert(gt, regset_used <= ads->regset_size); in guc_mmio_reg_state_init()
709 xe_gt_assert(gt, ads->bo); in xe_guc_ads_populate_minimal()
733 xe_gt_assert(gt, ads->bo); in xe_guc_ads_populate()
781 xe_gt_assert(gt, gt->default_lrc[class]); in guc_populate_golden_lrc()
810 xe_gt_assert(gt, total_size == ads->golden_lrc_size); in guc_populate_golden_lrc()
xe_wa.h
31 xe_gt_assert(gt__, (gt__)->wa_active.oob_initialized); \
xe_uc.c
215 xe_gt_assert(uc_to_gt(uc), !ret); in xe_uc_init_hw()
313 xe_gt_assert(uc_to_gt(uc), uc_to_xe(uc)->wedged.mode); in xe_uc_declare_wedged()
xe_force_wake.c
43 xe_gt_assert(gt, GRAPHICS_VER(gt_to_xe(gt)) >= 11); in xe_force_wake_init_gt()
63 xe_gt_assert(gt, GRAPHICS_VER(gt_to_xe(gt)) >= 11); in xe_force_wake_init_engines()
xe_gt_clock.c
64 xe_gt_assert(gt, GRAPHICS_VER(gt_to_xe(gt)) >= 11); in xe_gt_clock_init()
xe_assert.h (definition of the searched symbol; see the usage sketch after the results)
167 #define xe_gt_assert(gt, condition) xe_gt_assert_msg((gt), condition, "") macro
xe_migrate.c
637 xe_gt_assert(gt, FIELD_FIT(XE2_CCS_SIZE_MASK, num_pages - 1)); in emit_copy_ccs()
645 xe_gt_assert(gt, FIELD_FIT(CCS_SIZE_MASK, num_ccs_blks - 1)); in emit_copy_ccs()
672 xe_gt_assert(gt, size / pitch <= S16_MAX); in emit_copy()
673 xe_gt_assert(gt, pitch / 4 <= S16_MAX); in emit_copy()
674 xe_gt_assert(gt, pitch <= U16_MAX); in emit_copy()
730 xe_gt_assert(gt, src_is_indirect || dst_is_indirect); in xe_migrate_ccs_copy()
957 xe_gt_assert(gt, cs - bb->cs == len + bb->len); in emit_clear_link_copy()
998 xe_gt_assert(gt, cs - bb->cs == len + bb->len); in emit_clear_main_copy()
xe_hw_engine.c
293 xe_gt_assert(hwe->gt, !(reg.addr & hwe->mmio_base)); in xe_hw_engine_mmio_write32()
313 xe_gt_assert(hwe->gt, !(reg.addr & hwe->mmio_base)); in xe_hw_engine_mmio_read32()
476 xe_gt_assert(gt, !hwe->gt); in hw_engine_init_early()
545 xe_gt_assert(gt, id < ARRAY_SIZE(engine_infos) && engine_infos[id].name); in hw_engine_init()
546 xe_gt_assert(gt, gt->info.engine_mask & BIT(id)); in hw_engine_init()
xe_execlist.c
53 xe_gt_assert(hwe->gt, FIELD_FIT(XEHP_SW_CTX_ID, ctx_id)); in __start_lrc()
56 xe_gt_assert(hwe->gt, FIELD_FIT(SW_CTX_ID, ctx_id)); in __start_lrc()
xe_guc_db_mgr.c
46 #define dbm_assert(_dbm, _cond) xe_gt_assert(dbm_to_gt(_dbm), _cond)
xe_guc_id_mgr.c
32 #define idm_assert(idm, cond) xe_gt_assert(idm_to_gt(idm), cond)
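Usage sketch. Every hit above follows the same pattern: xe_gt_assert(gt, condition) states a precondition of a GT-scoped function, and per the xe_assert.h entry it simply expands to xe_gt_assert_msg((gt), condition, "") with an empty message. The sketch below is not taken from the kernel tree: pf_example_op() and its body are hypothetical, the include set is indicative only, and the claim that the assert compiles away outside debug builds is an assumption about the xe_assert.h configuration gate. Only xe_gt_assert(), IS_SRIOV_PF(), gt_to_xe() and struct xe_gt are taken from the listing.

    /* Hypothetical PF-only helper, mirroring the call sites listed above.
     * Headers are indicative; the exact include set may differ. */
    #include "xe_assert.h"
    #include "xe_gt.h"
    #include "xe_sriov.h"

    static void pf_example_op(struct xe_gt *gt, unsigned int vfid)
    {
            /*
             * State the preconditions up front; with asserts disabled
             * (assumed non-debug kernel configuration) these lines vanish.
             */
            xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));
            xe_gt_assert(gt, vfid); /* vfid 0 is the PF, cf. vfid != PFID above */

            /* ... real provisioning or control work would go here ... */
    }

The last two entries (xe_guc_db_mgr.c, xe_guc_id_mgr.c) show a related convention: a component-local wrapper such as dbm_assert() or idm_assert() resolves the GT once via dbm_to_gt()/idm_to_gt(), so individual call sites inside that component do not have to name the GT explicitly.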
