Lines matching refs: adev
87 int (*req_full_gpu)(struct amdgpu_device *adev, bool init);
88 int (*rel_full_gpu)(struct amdgpu_device *adev, bool init);
89 int (*req_init_data)(struct amdgpu_device *adev);
90 int (*reset_gpu)(struct amdgpu_device *adev);
91 void (*ready_to_reset)(struct amdgpu_device *adev);
92 int (*wait_reset)(struct amdgpu_device *adev);
93 void (*trans_msg)(struct amdgpu_device *adev, enum idh_request req,
95 void (*ras_poison_handler)(struct amdgpu_device *adev,
97 bool (*rcvd_ras_intr)(struct amdgpu_device *adev);
98 int (*req_ras_err_count)(struct amdgpu_device *adev);
99 int (*req_ras_cper_dump)(struct amdgpu_device *adev, u64 vf_rptr);
100 int (*req_bad_pages)(struct amdgpu_device *adev);
313 #define amdgpu_sriov_enabled(adev) \ argument
314 ((adev)->virt.caps & AMDGPU_SRIOV_CAPS_ENABLE_IOV)
316 #define amdgpu_sriov_vf(adev) \ argument
317 ((adev)->virt.caps & AMDGPU_SRIOV_CAPS_IS_VF)
319 #define amdgpu_sriov_bios(adev) \ argument
320 ((adev)->virt.caps & AMDGPU_SRIOV_CAPS_SRIOV_VBIOS)
322 #define amdgpu_sriov_runtime(adev) \ argument
323 ((adev)->virt.caps & AMDGPU_SRIOV_CAPS_RUNTIME)
325 #define amdgpu_sriov_fullaccess(adev) \ argument
326 (amdgpu_sriov_vf((adev)) && !amdgpu_sriov_runtime((adev)))
328 #define amdgpu_sriov_reg_indirect_en(adev) \ argument
329 (amdgpu_sriov_vf((adev)) && \
330 ((adev)->virt.gim_feature & (AMDGIM_FEATURE_INDIRECT_REG_ACCESS)))
332 #define amdgpu_sriov_reg_indirect_ih(adev) \ argument
333 (amdgpu_sriov_vf((adev)) && \
334 ((adev)->virt.reg_access & (AMDGIM_FEATURE_IH_REG_PSP_EN)))
336 #define amdgpu_sriov_reg_indirect_mmhub(adev) \ argument
337 (amdgpu_sriov_vf((adev)) && \
338 ((adev)->virt.reg_access & (AMDGIM_FEATURE_MMHUB_REG_RLC_EN)))
340 #define amdgpu_sriov_reg_indirect_gc(adev) \ argument
341 (amdgpu_sriov_vf((adev)) && \
342 ((adev)->virt.reg_access & (AMDGIM_FEATURE_GC_REG_RLC_EN)))
344 #define amdgpu_sriov_reg_indirect_l1_tlb_cntl(adev) \ argument
345 (amdgpu_sriov_vf((adev)) && \
346 ((adev)->virt.reg_access & (AMDGIM_FEATURE_L1_TLB_CNTL_PSP_EN)))
348 #define amdgpu_sriov_rlcg_error_report_enabled(adev) \ argument
349 (amdgpu_sriov_reg_indirect_mmhub(adev) || amdgpu_sriov_reg_indirect_gc(adev))
351 #define amdgpu_sriov_reg_access_sq_config(adev) \ argument
352 (amdgpu_sriov_vf((adev)) && \
353 ((adev)->virt.reg_access & (AMDGIM_FEATURE_REG_ACCESS_SQ_CONFIG)))
355 #define amdgpu_passthrough(adev) \ argument
356 ((adev)->virt.caps & AMDGPU_PASSTHROUGH_MODE)
358 #define amdgpu_sriov_vf_mmio_access_protection(adev) \ argument
359 ((adev)->virt.caps & AMDGPU_VF_MMIO_ACCESS_PROTECT)
361 #define amdgpu_sriov_ras_caps_en(adev) \ argument
362 ((adev)->virt.gim_feature & AMDGIM_FEATURE_RAS_CAPS)
364 #define amdgpu_sriov_ras_telemetry_en(adev) \ argument
365 (((adev)->virt.gim_feature & AMDGIM_FEATURE_RAS_TELEMETRY) && (adev)->virt.fw_reserve.ras_telemetry)
367 #define amdgpu_sriov_ras_telemetry_block_en(adev, sriov_blk) \ argument
368 (amdgpu_sriov_ras_telemetry_en((adev)) && (adev)->virt.ras_telemetry_en_caps.all & BIT(sriov_blk))
370 #define amdgpu_sriov_ras_cper_en(adev) \ argument
371 ((adev)->virt.gim_feature & AMDGIM_FEATURE_RAS_CPER)
384 #define amdgpu_sriov_is_pp_one_vf(adev) \ argument
385 ((adev)->virt.gim_feature & AMDGIM_FEATURE_PP_ONE_VF)
386 #define amdgpu_sriov_multi_vf_mode(adev) \ argument
387 (amdgpu_sriov_vf(adev) && !amdgpu_sriov_is_pp_one_vf(adev))
388 #define amdgpu_sriov_is_debug(adev) \ argument
389 ((!amdgpu_in_reset(adev)) && adev->virt.tdr_debug)
390 #define amdgpu_sriov_is_normal(adev) \ argument
391 ((!amdgpu_in_reset(adev)) && (!adev->virt.tdr_debug))
392 #define amdgpu_sriov_is_av1_support(adev) \ argument
393 ((adev)->virt.gim_feature & AMDGIM_FEATURE_AV1_SUPPORT)
394 #define amdgpu_sriov_is_vcn_rb_decouple(adev) \ argument
395 ((adev)->virt.gim_feature & AMDGIM_FEATURE_VCN_RB_DECOUPLE)
396 #define amdgpu_sriov_is_mes_info_enable(adev) \ argument
397 ((adev)->virt.gim_feature & AMDGIM_FEATURE_MES_INFO_ENABLE)
399 #define amdgpu_virt_xgmi_migrate_enabled(adev) \ argument
400 ((adev)->virt.is_xgmi_node_migrate_enabled && (adev)->gmc.xgmi.node_segment_size != 0)
402 bool amdgpu_virt_mmio_blocked(struct amdgpu_device *adev);
403 void amdgpu_virt_init_setting(struct amdgpu_device *adev);
404 int amdgpu_virt_request_full_gpu(struct amdgpu_device *adev, bool init);
405 int amdgpu_virt_release_full_gpu(struct amdgpu_device *adev, bool init);
406 int amdgpu_virt_reset_gpu(struct amdgpu_device *adev);
407 void amdgpu_virt_request_init_data(struct amdgpu_device *adev);
408 void amdgpu_virt_ready_to_reset(struct amdgpu_device *adev);
409 int amdgpu_virt_wait_reset(struct amdgpu_device *adev);
410 int amdgpu_virt_alloc_mm_table(struct amdgpu_device *adev);
411 void amdgpu_virt_free_mm_table(struct amdgpu_device *adev);
412 bool amdgpu_virt_rcvd_ras_interrupt(struct amdgpu_device *adev);
413 void amdgpu_virt_release_ras_err_handler_data(struct amdgpu_device *adev);
414 void amdgpu_virt_init_data_exchange(struct amdgpu_device *adev);
415 void amdgpu_virt_exchange_data(struct amdgpu_device *adev);
416 void amdgpu_virt_fini_data_exchange(struct amdgpu_device *adev);
417 void amdgpu_virt_init(struct amdgpu_device *adev);
419 bool amdgpu_virt_can_access_debugfs(struct amdgpu_device *adev);
420 int amdgpu_virt_enable_access_debugfs(struct amdgpu_device *adev);
421 void amdgpu_virt_disable_access_debugfs(struct amdgpu_device *adev);
423 enum amdgpu_sriov_vf_mode amdgpu_virt_get_sriov_vf_mode(struct amdgpu_device *adev);
425 void amdgpu_virt_update_sriov_video_codec(struct amdgpu_device *adev,
428 void amdgpu_sriov_wreg(struct amdgpu_device *adev,
431 u32 amdgpu_sriov_rreg(struct amdgpu_device *adev,
433 bool amdgpu_virt_fw_load_skip_check(struct amdgpu_device *adev,
435 void amdgpu_virt_pre_reset(struct amdgpu_device *adev);
436 void amdgpu_virt_post_reset(struct amdgpu_device *adev);
437 bool amdgpu_sriov_xnack_support(struct amdgpu_device *adev);
438 bool amdgpu_virt_get_rlcg_reg_access_flag(struct amdgpu_device *adev,
441 u32 amdgpu_virt_rlcg_reg_rw(struct amdgpu_device *adev, u32 offset, u32 v, u32 flag, u32 xcc_id);
442 bool amdgpu_virt_get_ras_capability(struct amdgpu_device *adev);
443 int amdgpu_virt_req_ras_err_count(struct amdgpu_device *adev, enum amdgpu_ras_block block,
445 int amdgpu_virt_req_ras_cper_dump(struct amdgpu_device *adev, bool force_update);
446 int amdgpu_virt_ras_telemetry_post_reset(struct amdgpu_device *adev);
447 bool amdgpu_virt_ras_telemetry_block_en(struct amdgpu_device *adev,
449 void amdgpu_virt_request_bad_pages(struct amdgpu_device *adev);