xe_gt_assert() in /drivers/gpu/drm/xe/ (references and definition)

xe_gt_sriov_pf_migration.c
    55  xe_gt_assert(gt, size % sizeof(u32) == 0);  in pf_send_guc_save_vf_state()
    91  xe_gt_assert(gt, size % sizeof(u32) == 0);  in pf_send_guc_restore_vf_state()
   118  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_migration_supported()
   124  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_migration_mutex()
   131  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_pick_vf_snapshot()
   211  xe_gt_assert(gt, size);  in pf_save_vf_guc_state()
   238  xe_gt_assert(gt, vfid != PFID);  in xe_gt_sriov_pf_migration_save_guc_state()
   287  xe_gt_assert(gt, vfid != PFID);  in xe_gt_sriov_pf_migration_restore_guc_state()
   323  xe_gt_assert(gt, vfid != PFID);  in xe_gt_sriov_pf_migration_read_guc_state()
   363  xe_gt_assert(gt, vfid != PFID);  in xe_gt_sriov_pf_migration_write_guc_state()
   [all …]

xe_gt_sriov_vf.c
   167  xe_gt_assert(gt, IS_SRIOV_VF(gt_to_xe(gt)));  in vf_handshake_with_guc()
   284  xe_gt_assert(gt, IS_SRIOV_VF(gt_to_xe(gt)));  in xe_gt_sriov_vf_guc_versions()
   320  xe_gt_assert(gt, IS_SRIOV_VF(gt_to_xe(gt)));  in xe_gt_sriov_vf_notify_resfix_done()
   532  xe_gt_assert(gt, has_gmdid(gt_to_xe(gt)));  in vf_cache_gmdid()
   652  xe_gt_assert(gt, xe_gt_is_main_type(gt));  in xe_gt_sriov_vf_ggtt_shift()
   764  xe_gt_assert(gt, IS_SRIOV_VF(xe));  in xe_gt_sriov_vf_migrated_event_handler()
   781  xe_gt_assert(gt, IS_SRIOV_VF(xe));  in vf_is_negotiated()
   793  xe_gt_assert(gt, IS_SRIOV_VF(xe));  in vf_prepare_runtime_info()
   828  xe_gt_assert(gt, limit);  in vf_query_runtime_info()
   972  xe_gt_assert(gt, !reg.vf);  in xe_gt_sriov_vf_read32()
   [all …]

xe_gt_sriov_pf_config.c
   227  xe_gt_assert(gt, key);  in pf_push_vf_cfg_threshold()
   365  xe_gt_assert(gt, vfid);  in pf_push_vf_cfg()
   465  xe_gt_assert(gt, vfid);  in pf_provision_vf_ggtt()
   644  xe_gt_assert(gt, vfid);  in xe_gt_sriov_pf_config_bulk_set_ggtt()
   714  xe_gt_assert(gt, vfid);  in xe_gt_sriov_pf_config_set_fair_ggtt()
   795  xe_gt_assert(gt, vfid);  in pf_provision_vf_ctxs()
   966  xe_gt_assert(gt, vfid);  in xe_gt_sriov_pf_config_bulk_set_ctxs()
  1018  xe_gt_assert(gt, vfid);  in xe_gt_sriov_pf_config_set_fair_ctxs()
  1096  xe_gt_assert(gt, vfid);  in pf_provision_vf_dbs()
  1214  xe_gt_assert(gt, vfid);  in xe_gt_sriov_pf_config_bulk_set_dbs()
   [all …]

xe_gt_sriov_pf_policy.c
   141  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_bulk_reset_sched_priority()
   152  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_provision_sched_if_idle()
   167  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_reprovision_sched_if_idle()
   175  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_sanitize_sched_if_idle()
   213  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in xe_gt_sriov_pf_policy_get_sched_if_idle()
   224  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_provision_reset_engine()
   233  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_reprovision_reset_engine()
   241  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_sanitize_reset_engine()
   279  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in xe_gt_sriov_pf_policy_get_reset_engine()
   290  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_provision_sample_period()
   [all …]

xe_gt_sriov_pf_monitor.c
    28  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in xe_gt_sriov_pf_monitor_flr()
    38  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_update_event_counter()
    39  xe_gt_assert(gt, e < XE_GUC_KLV_NUM_THRESHOLDS);  in pf_update_event_counter()
    84  xe_gt_assert(gt, len >= GUC_HXG_MSG_MIN_LEN);  in xe_gt_sriov_pf_monitor_process_guc2pf()
    85  xe_gt_assert(gt, FIELD_GET(GUC_HXG_MSG_0_ORIGIN, msg[0]) == GUC_HXG_ORIGIN_GUC);  in xe_gt_sriov_pf_monitor_process_guc2pf()
    86  xe_gt_assert(gt, FIELD_GET(GUC_HXG_MSG_0_TYPE, msg[0]) == GUC_HXG_TYPE_EVENT);  in xe_gt_sriov_pf_monitor_process_guc2pf()
    87  xe_gt_assert(gt, FIELD_GET(GUC_HXG_EVENT_MSG_0_ACTION, msg[0]) ==  in xe_gt_sriov_pf_monitor_process_guc2pf()
   124  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in xe_gt_sriov_pf_monitor_print_events()

xe_bb.c
    71  xe_gt_assert(q->gt, bb->len * 4 + bb_prefetch(q->gt) <= size);  in __xe_bb_create_job()
    89  xe_gt_assert(q->gt, second_idx <= bb->len);  in xe_bb_create_migration_job()
    90  xe_gt_assert(q->gt, xe_sched_job_is_migration(q));  in xe_bb_create_migration_job()
    91  xe_gt_assert(q->gt, q->width == 1);  in xe_bb_create_migration_job()
   101  xe_gt_assert(q->gt, !xe_sched_job_is_migration(q));  in xe_bb_create_job()
   102  xe_gt_assert(q->gt, q->width == 1);  in xe_bb_create_job()

xe_guc_submit.c
   242  xe_gt_assert(gt, ret);  in guc_submit_fini()
   502  xe_gt_assert(guc_to_gt(guc), info->wq_desc_lo ==  in __register_mlrc_exec_queue()
   504  xe_gt_assert(guc_to_gt(guc), info->wq_base_lo ==  in __register_mlrc_exec_queue()
   506  xe_gt_assert(guc_to_gt(guc), q->width ==  in __register_mlrc_exec_queue()
   508  xe_gt_assert(guc_to_gt(guc), info->hwlrca_lo ==  in __register_mlrc_exec_queue()
   535  xe_gt_assert(guc_to_gt(guc), info->wq_desc_lo ==  in __register_exec_queue()
   537  xe_gt_assert(guc_to_gt(guc), info->wq_base_lo ==  in __register_exec_queue()
   539  xe_gt_assert(guc_to_gt(guc), info->hwlrca_lo ==  in __register_exec_queue()
   989  xe_gt_assert(gt, timeout_ms < 100 * MSEC_PER_SEC);  in check_timeout()
  1886  xe_gt_assert(guc_to_gt(guc), guc_id >= q->guc->id);  in g2h_exec_queue_lookup()
   [all …]

xe_gt_sriov_pf.c
    51  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_init_workers()
    57  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_fini_workers()
   101  xe_gt_assert(gt, IS_SRIOV_PF(xe));  in pf_init_late()
   201  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in xe_gt_sriov_pf_sanitize_hw()
   208  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_cancel_restart()
   248  xe_gt_assert(gt, IS_SRIOV_PF(xe));  in pf_queue_restart()
   267  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_flush_restart()

xe_gt_sriov_pf_service.c
   139  xe_gt_assert(gt, IS_SRIOV_PF(xe));  in pf_alloc_runtime_info()
   140  xe_gt_assert(gt, !gt->sriov.pf.service.runtime.size);  in pf_alloc_runtime_info()
   141  xe_gt_assert(gt, !gt->sriov.pf.service.runtime.regs);  in pf_alloc_runtime_info()
   142  xe_gt_assert(gt, !gt->sriov.pf.service.runtime.values);  in pf_alloc_runtime_info()
   252  xe_gt_assert(gt, major || minor);  in pf_process_handshake_msg()
   253  xe_gt_assert(gt, size >= VF2PF_HANDSHAKE_RESPONSE_MSG_LEN);  in pf_process_handshake_msg()
   278  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_service_runtime_query()
   358  xe_gt_assert(gt, msg_len >= GUC_HXG_MSG_MIN_LEN);  in xe_gt_sriov_pf_service_process_request()
   359  xe_gt_assert(gt, FIELD_GET(GUC_HXG_MSG_0_TYPE, msg[0]) == GUC_HXG_TYPE_REQUEST);  in xe_gt_sriov_pf_service_process_request()
   394  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in xe_gt_sriov_pf_service_print_runtime()

xe_force_wake.h
    26  xe_gt_assert(fw->gt, domain != XE_FORCEWAKE_ALL);  in xe_force_wake_ref()
    44  xe_gt_assert(fw->gt, domain != XE_FORCEWAKE_ALL);  in xe_force_wake_assert_held()
    45  xe_gt_assert(fw->gt, fw->awake_domains & domain);  in xe_force_wake_assert_held()

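The two asserts listed at source lines 44 and 45 of xe_force_wake.h sit in the same inline helper, xe_force_wake_assert_held(). A minimal sketch of such a helper, reconstructed from nothing more than the listed assert lines; the function signature is an assumption for illustration, not text copied from the header:

    /* Sketch only: the asserts come from the listing above, the rest is assumed. */
    static inline void xe_force_wake_assert_held(struct xe_force_wake *fw,
                                                 enum xe_force_wake_domains domain)
    {
            /* callers pass a single domain, never the XE_FORCEWAKE_ALL aggregate */
            xe_gt_assert(fw->gt, domain != XE_FORCEWAKE_ALL);
            /* and must already hold a wakeref for that domain */
            xe_gt_assert(fw->gt, fw->awake_domains & domain);
    }
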
xe_gt_tlb_invalidation.c
   198  xe_gt_assert(gt, fence);  in send_tlb_invalidation()
   358  xe_gt_assert(gt, gt == fence->gt);  in xe_gt_tlb_invalidation_all()
   401  xe_gt_assert(gt, fence);  in xe_gt_tlb_invalidation_range()
   445  xe_gt_assert(gt, length >= SZ_4K);  in xe_gt_tlb_invalidation_range()
   446  xe_gt_assert(gt, is_power_of_2(length));  in xe_gt_tlb_invalidation_range()
   447  xe_gt_assert(gt, !(length & GENMASK(ilog2(SZ_16M) - 1,  in xe_gt_tlb_invalidation_range()
   449  xe_gt_assert(gt, IS_ALIGNED(start, length));  in xe_gt_tlb_invalidation_range()
   458  xe_gt_assert(gt, len <= MAX_TLB_INVALIDATION_LEN);  in xe_gt_tlb_invalidation_range()

xe_gt_sriov_pf_debugfs.c
   167  xe_gt_assert(gt, gt == extract_gt(parent));  in pf_add_policy_attrs()
   168  xe_gt_assert(gt, PFID == extract_vfid(parent));  in pf_add_policy_attrs()
   302  xe_gt_assert(gt, gt == extract_gt(parent));  in MAKE_XE_GUC_KLV_THRESHOLDS_SET()
   303  xe_gt_assert(gt, vfid == extract_vfid(parent));  in MAKE_XE_GUC_KLV_THRESHOLDS_SET()
   361  xe_gt_assert(gt, vfid);  in control_write()
   376  xe_gt_assert(gt, sizeof(cmd) > strlen(control_cmds[n].cmd));  in control_write()
   540  xe_gt_assert(gt, IS_SRIOV_PF(xe));  in xe_gt_sriov_pf_debugfs_register()
   541  xe_gt_assert(gt, root->d_inode->i_private == gt);  in xe_gt_sriov_pf_debugfs_register()

xe_gt_sriov_pf_control.c
    61  xe_gt_assert(gt, vfid != PFID);  in pf_send_vf_control_cmd()
   216  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_pick_vf_control()
   217  xe_gt_assert(gt, vfid <= xe_gt_sriov_pf_get_totalvfs(gt));  in pf_pick_vf_control()
   334  xe_gt_assert(gt, IS_SRIOV_PF(xe));  in pf_queue_control_worker()
   343  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_queue_vf()
  1319  xe_gt_assert(gt, len);  in xe_gt_sriov_pf_control_process_guc2pf()
  1416  xe_gt_assert(gt, vfid <= xe_gt_sriov_pf_get_totalvfs(gt));  in pf_worker_find_work()
  1429  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in control_worker_func()
  1435  xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));  in pf_stop_worker()
  1459  xe_gt_assert(gt, IS_SRIOV_PF(xe));  in xe_gt_sriov_pf_control_init()
   [all …]

xe_force_wake.c
   181  xe_gt_assert(gt, is_power_of_2(domains));  in xe_force_wake_get()
   182  xe_gt_assert(gt, domains <= XE_FORCEWAKE_ALL);  in xe_force_wake_get()
   183  xe_gt_assert(gt, domains == XE_FORCEWAKE_ALL || fw->initialized_domains & domains);  in xe_force_wake_get()
   244  xe_gt_assert(gt, domain->ref);  in xe_force_wake_put()

xe_guc_ct.c
   474  xe_gt_assert(gt, !xe_guc_ct_enabled(ct));  in xe_guc_ct_enable()
   823  xe_gt_assert(gt, xe_guc_ct_initialized(ct));  in __guc_ct_send_locked()
   824  xe_gt_assert(gt, !g2h_len || !g2h_fence);  in __guc_ct_send_locked()
   825  xe_gt_assert(gt, !num_g2h || !g2h_fence);  in __guc_ct_send_locked()
   826  xe_gt_assert(gt, !g2h_len || num_g2h);  in __guc_ct_send_locked()
   827  xe_gt_assert(gt, g2h_len || !num_g2h);  in __guc_ct_send_locked()
   845  xe_gt_assert(gt, xe_guc_ct_enabled(ct));  in __guc_ct_send_locked()
   904  xe_gt_assert(gt, !g2h_len || !g2h_fence);  in guc_ct_send_locked()
  1306  xe_gt_assert(gt, fence == g2h_fence->seqno);  in parse_g2h_response()
  1477  xe_gt_assert(gt, xe_guc_ct_enabled(ct));  in g2h_read()
   [all …]

xe_gt_mcr.c
   369  xe_gt_assert(gt, dss < XE_MAX_DSS_FUSE_BITS);  in xe_gt_mcr_get_dss_steering()
   720  xe_gt_assert(gt, !IS_SRIOV_VF(gt_to_xe(gt)));  in xe_gt_mcr_unicast_read_any()
   753  xe_gt_assert(gt, !IS_SRIOV_VF(gt_to_xe(gt)));  in xe_gt_mcr_unicast_read()
   776  xe_gt_assert(gt, !IS_SRIOV_VF(gt_to_xe(gt)));  in xe_gt_mcr_unicast_write()
   796  xe_gt_assert(gt, !IS_SRIOV_VF(gt_to_xe(gt)));  in xe_gt_mcr_multicast_write()

xe_ring_ops.c
   279  xe_gt_assert(gt, i <= MAX_JOB_SIZE_DW);  in __emit_job_gen12_simple()
   342  xe_gt_assert(gt, i <= MAX_JOB_SIZE_DW);  in __emit_job_gen12_video()
   393  xe_gt_assert(gt, i <= MAX_JOB_SIZE_DW);  in __emit_job_gen12_render_compute()
   426  xe_gt_assert(job->q->gt, i <= MAX_JOB_SIZE_DW);  in emit_migration_job_gen12()
   435  xe_gt_assert(gt, job->q->width <= 1); /* no parallel submission for GSCCS */  in emit_job_gen12_gsc()

xe_gt_topology.c
    25  xe_gt_assert(gt, numregs <= ARRAY_SIZE(fuse_val));  in load_dss_mask()
   238  xe_gt_assert(gt, num_geometry_regs <= ARRAY_SIZE(geometry_regs));  in xe_gt_topology_init()
   239  xe_gt_assert(gt, num_compute_regs <= ARRAY_SIZE(compute_regs));  in xe_gt_topology_init()

xe_guc_ads.c
   449  xe_gt_assert(gt, ads->bo);  in xe_guc_ads_init_post_hwconfig()
   456  xe_gt_assert(gt, ads->golden_lrc_size +  in xe_guc_ads_init_post_hwconfig()
   836  xe_gt_assert(gt, regset_used <= ads->regset_size);  in guc_mmio_reg_state_init()
   891  xe_gt_assert(gt, ads->bo);  in xe_guc_ads_populate_minimal()
   915  xe_gt_assert(gt, ads->bo);  in xe_guc_ads_populate()
   967  xe_gt_assert(gt, gt->default_lrc[class]);  in guc_golden_lrc_populate()
   979  xe_gt_assert(gt, total_size == ads->golden_lrc_size);  in guc_golden_lrc_populate()

xe_hw_engine.c
   299  xe_gt_assert(hwe->gt, !(reg.addr & hwe->mmio_base));  in xe_hw_engine_mmio_write32()
   319  xe_gt_assert(hwe->gt, !(reg.addr & hwe->mmio_base));  in xe_hw_engine_mmio_read32()
   489  xe_gt_assert(gt, info);  in get_msix_irq_offset()
   507  xe_gt_assert(gt, !hwe->gt);  in hw_engine_init_early()
   605  xe_gt_assert(gt, id < ARRAY_SIZE(engine_infos) && engine_infos[id].name);  in hw_engine_init()
   606  xe_gt_assert(gt, gt->info.engine_mask & BIT(id));  in hw_engine_init()

xe_uc.c
   223  xe_gt_assert(uc_to_gt(uc), !ret);  in xe_uc_load_hw()
   313  xe_gt_assert(uc_to_gt(uc), uc_to_xe(uc)->wedged.mode);  in xe_uc_declare_wedged()

xe_wa.h
    33  xe_gt_assert(gt__, (gt__)->wa_active.oob_initialized); \

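The trailing backslash on line 33 of xe_wa.h shows that this assert is embedded in a multi-line macro; the asserted condition suggests it guards a lookup in the out-of-band (OOB) workaround table, checking that the table was initialized first. A hedged sketch of how such a macro could wrap the assert; only the xe_gt_assert() line comes from the listing, while the macro name, the test_bit() lookup and the XE_WA_OOB_ token pasting are assumptions:

    /* Sketch only: everything except the xe_gt_assert() line is assumed. */
    #define XE_WA(gt__, id__) ({                                            \
            xe_gt_assert(gt__, (gt__)->wa_active.oob_initialized);          \
            test_bit(XE_WA_OOB_ ## id__, (gt__)->wa_active.oob);            \
    })
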
xe_uc_fw.c
   242  xe_gt_assert(gt, uc_fw->type < ARRAY_SIZE(blobs_all));  in uc_fw_auto_select()
   243  xe_gt_assert(gt, gt->info.type != XE_GT_TYPE_UNINITIALIZED);  in uc_fw_auto_select()
   337  xe_gt_assert(gt, uc_fw->type == XE_UC_FW_TYPE_GUC);  in guc_read_css_info()
   709  xe_gt_assert(gt, !uc_fw->status);  in uc_fw_request()
   710  xe_gt_assert(gt, !uc_fw->path);  in uc_fw_request()

xe_migrate.c
   637  xe_gt_assert(gt, FIELD_FIT(XE2_CCS_SIZE_MASK, num_pages - 1));  in emit_copy_ccs()
   645  xe_gt_assert(gt, FIELD_FIT(CCS_SIZE_MASK, num_ccs_blks - 1));  in emit_copy_ccs()
   672  xe_gt_assert(gt, !(pitch & 3));  in emit_copy()
   673  xe_gt_assert(gt, size / pitch <= S16_MAX);  in emit_copy()
   674  xe_gt_assert(gt, pitch / 4 <= S16_MAX);  in emit_copy()
   675  xe_gt_assert(gt, pitch <= U16_MAX);  in emit_copy()
   731  xe_gt_assert(gt, src_is_indirect || dst_is_indirect);  in xe_migrate_ccs_copy()
   961  xe_gt_assert(gt, cs - bb->cs == len + bb->len);  in emit_clear_link_copy()
  1002  xe_gt_assert(gt, cs - bb->cs == len + bb->len);  in emit_clear_main_copy()

xe_assert.h
   167  #define xe_gt_assert(gt, condition) xe_gt_assert_msg((gt), condition, "")  (macro definition)

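The last entry is the definition itself: xe_gt_assert(gt, condition) is a thin wrapper that forwards to xe_gt_assert_msg() with an empty message string. A short, hypothetical usage sketch modeled on the SR-IOV PF patterns that dominate the listing above; the helper name is made up, only the asserted conditions are taken from the entries, and the asserts are assumed to compile away on non-debug builds:

    #include "xe_assert.h"          /* xe_gt_assert() */
    /* Other driver-internal headers (struct xe_gt, IS_SRIOV_PF(), PFID) assumed. */

    /* Hypothetical helper, not driver code, showing the usual precondition style. */
    static void pf_example_operation(struct xe_gt *gt, unsigned int vfid)
    {
            xe_gt_assert(gt, IS_SRIOV_PF(gt_to_xe(gt)));    /* PF-only code path */
            xe_gt_assert(gt, vfid != PFID);                 /* must target a VF, not the PF */

            /* ... actual work ... */
    }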