Lines matching refs:adev in drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c
47 bool amdgpu_virt_mmio_blocked(struct amdgpu_device *adev) in amdgpu_virt_mmio_blocked() argument
55 void amdgpu_virt_init_setting(struct amdgpu_device *adev) in amdgpu_virt_init_setting() argument
57 struct drm_device *ddev = adev_to_drm(adev); in amdgpu_virt_init_setting()
60 if (adev->asic_type != CHIP_ALDEBARAN && in amdgpu_virt_init_setting()
61 adev->asic_type != CHIP_ARCTURUS && in amdgpu_virt_init_setting()
62 ((adev->pdev->class >> 8) != PCI_CLASS_ACCELERATOR_PROCESSING)) { in amdgpu_virt_init_setting()
63 if (adev->mode_info.num_crtc == 0) in amdgpu_virt_init_setting()
64 adev->mode_info.num_crtc = 1; in amdgpu_virt_init_setting()
65 adev->enable_virtual_display = true; in amdgpu_virt_init_setting()
68 adev->cg_flags = 0; in amdgpu_virt_init_setting()
69 adev->pg_flags = 0; in amdgpu_virt_init_setting()
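
The amdgpu_virt_init_setting() matches above show the VF-side display policy: unless the part is an Aldebaran/Arcturus or a PCI accelerator-class device, a virtual display is forced on, and clock/power gating is disabled because the host owns those controls. A sketch assembling the fragments; the DRIVER_ATOMIC line and the exact control flow between matches are assumptions, since the listing only shows lines that mention adev:

void amdgpu_virt_init_setting(struct amdgpu_device *adev)
{
    struct drm_device *ddev = adev_to_drm(adev);

    /* No real scanout hardware on these VFs: force a virtual display. */
    if (adev->asic_type != CHIP_ALDEBARAN &&
        adev->asic_type != CHIP_ARCTURUS &&
        ((adev->pdev->class >> 8) != PCI_CLASS_ACCELERATOR_PROCESSING)) {
        if (adev->mode_info.num_crtc == 0)
            adev->mode_info.num_crtc = 1;
        adev->enable_virtual_display = true;
    }

    ddev->driver_features &= ~DRIVER_ATOMIC;    /* assumption: elided from the matches */

    /* Gating is host-controlled under SR-IOV, so the guest reports none. */
    adev->cg_flags = 0;
    adev->pg_flags = 0;
}
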
83 int amdgpu_virt_request_full_gpu(struct amdgpu_device *adev, bool init) in amdgpu_virt_request_full_gpu() argument
85 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_request_full_gpu()
89 r = virt->ops->req_full_gpu(adev, init); in amdgpu_virt_request_full_gpu()
91 adev->no_hw_access = true; in amdgpu_virt_request_full_gpu()
95 adev->virt.caps &= ~AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_virt_request_full_gpu()
108 int amdgpu_virt_release_full_gpu(struct amdgpu_device *adev, bool init) in amdgpu_virt_release_full_gpu() argument
110 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_release_full_gpu()
114 r = virt->ops->rel_full_gpu(adev, init); in amdgpu_virt_release_full_gpu()
118 adev->virt.caps |= AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_virt_release_full_gpu()
129 int amdgpu_virt_reset_gpu(struct amdgpu_device *adev) in amdgpu_virt_reset_gpu() argument
131 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_reset_gpu()
135 r = virt->ops->reset_gpu(adev); in amdgpu_virt_reset_gpu()
139 adev->virt.caps &= ~AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_virt_reset_gpu()
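
amdgpu_virt_request_full_gpu(), amdgpu_virt_release_full_gpu() and amdgpu_virt_reset_gpu() all follow the same pattern: dispatch to the host through virt->ops, then toggle AMDGPU_SRIOV_CAPS_RUNTIME to track whether the VF currently holds exclusive GPU access. A sketch of the request side; the error path is reconstructed from the no_hw_access match at line 91 and is an assumption:

int amdgpu_virt_request_full_gpu(struct amdgpu_device *adev, bool init)
{
    struct amdgpu_virt *virt = &adev->virt;
    int r;

    if (virt->ops && virt->ops->req_full_gpu) {
        r = virt->ops->req_full_gpu(adev, init);
        if (r) {
            /* Host refused exclusive access: stop touching hardware. */
            adev->no_hw_access = true;
            return r;
        }
        /* Exclusive access granted: clear runtime mode. */
        adev->virt.caps &= ~AMDGPU_SRIOV_CAPS_RUNTIME;
    }
    return 0;
}
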
145 void amdgpu_virt_request_init_data(struct amdgpu_device *adev) in amdgpu_virt_request_init_data() argument
147 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_request_init_data()
150 virt->ops->req_init_data(adev); in amdgpu_virt_request_init_data()
152 if (adev->virt.req_init_data_ver > 0) in amdgpu_virt_request_init_data()
164 void amdgpu_virt_ready_to_reset(struct amdgpu_device *adev) in amdgpu_virt_ready_to_reset() argument
166 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_ready_to_reset()
169 virt->ops->ready_to_reset(adev); in amdgpu_virt_ready_to_reset()
178 int amdgpu_virt_wait_reset(struct amdgpu_device *adev) in amdgpu_virt_wait_reset() argument
180 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_wait_reset()
185 return virt->ops->wait_reset(adev); in amdgpu_virt_wait_reset()
194 int amdgpu_virt_alloc_mm_table(struct amdgpu_device *adev) in amdgpu_virt_alloc_mm_table() argument
198 if (!amdgpu_sriov_vf(adev) || adev->virt.mm_table.gpu_addr) in amdgpu_virt_alloc_mm_table()
201 r = amdgpu_bo_create_kernel(adev, PAGE_SIZE, PAGE_SIZE, in amdgpu_virt_alloc_mm_table()
204 &adev->virt.mm_table.bo, in amdgpu_virt_alloc_mm_table()
205 &adev->virt.mm_table.gpu_addr, in amdgpu_virt_alloc_mm_table()
206 (void *)&adev->virt.mm_table.cpu_addr); in amdgpu_virt_alloc_mm_table()
212 memset((void *)adev->virt.mm_table.cpu_addr, 0, PAGE_SIZE); in amdgpu_virt_alloc_mm_table()
214 adev->virt.mm_table.gpu_addr, in amdgpu_virt_alloc_mm_table()
215 adev->virt.mm_table.cpu_addr); in amdgpu_virt_alloc_mm_table()
224 void amdgpu_virt_free_mm_table(struct amdgpu_device *adev) in amdgpu_virt_free_mm_table() argument
226 if (!amdgpu_sriov_vf(adev) || !adev->virt.mm_table.gpu_addr) in amdgpu_virt_free_mm_table()
229 amdgpu_bo_free_kernel(&adev->virt.mm_table.bo, in amdgpu_virt_free_mm_table()
230 &adev->virt.mm_table.gpu_addr, in amdgpu_virt_free_mm_table()
231 (void *)&adev->virt.mm_table.cpu_addr); in amdgpu_virt_free_mm_table()
232 adev->virt.mm_table.gpu_addr = 0; in amdgpu_virt_free_mm_table()
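
The mm_table matches are the usual amdgpu kernel-BO idiom: amdgpu_bo_create_kernel() hands back the BO plus its GPU and CPU addresses in one call, and amdgpu_bo_free_kernel() takes the same three pointers at teardown. A sketch of the allocation path; the memory-domain argument is an assumption because the listing elides that line:

r = amdgpu_bo_create_kernel(adev, PAGE_SIZE, PAGE_SIZE,
                            AMDGPU_GEM_DOMAIN_VRAM,    /* assumed domain */
                            &adev->virt.mm_table.bo,
                            &adev->virt.mm_table.gpu_addr,
                            (void *)&adev->virt.mm_table.cpu_addr);
if (r)
    return r;

/* One zeroed page is the whole multimedia scheduling table. */
memset((void *)adev->virt.mm_table.cpu_addr, 0, PAGE_SIZE);
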
241 bool amdgpu_virt_rcvd_ras_interrupt(struct amdgpu_device *adev) in amdgpu_virt_rcvd_ras_interrupt() argument
243 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_rcvd_ras_interrupt()
248 return virt->ops->rcvd_ras_intr(adev); in amdgpu_virt_rcvd_ras_interrupt()
272 static int amdgpu_virt_init_ras_err_handler_data(struct amdgpu_device *adev) in amdgpu_virt_init_ras_err_handler_data() argument
274 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_init_ras_err_handler_data()
312 static void amdgpu_virt_ras_release_bp(struct amdgpu_device *adev) in amdgpu_virt_ras_release_bp() argument
314 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_ras_release_bp()
332 void amdgpu_virt_release_ras_err_handler_data(struct amdgpu_device *adev) in amdgpu_virt_release_ras_err_handler_data() argument
334 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_release_ras_err_handler_data()
342 amdgpu_virt_ras_release_bp(adev); in amdgpu_virt_release_ras_err_handler_data()
350 static void amdgpu_virt_ras_add_bps(struct amdgpu_device *adev, in amdgpu_virt_ras_add_bps() argument
353 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_ras_add_bps()
363 static void amdgpu_virt_ras_reserve_bps(struct amdgpu_device *adev) in amdgpu_virt_ras_reserve_bps() argument
365 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_ras_reserve_bps()
367 struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr; in amdgpu_virt_ras_reserve_bps()
385 amdgpu_vram_mgr_reserve_range(&adev->mman.vram_mgr, in amdgpu_virt_ras_reserve_bps()
390 if (amdgpu_bo_create_kernel_at(adev, bp << AMDGPU_GPU_PAGE_SHIFT, in amdgpu_virt_ras_reserve_bps()
401 static bool amdgpu_virt_ras_check_bad_page(struct amdgpu_device *adev, in amdgpu_virt_ras_check_bad_page() argument
404 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_ras_check_bad_page()
418 static void amdgpu_virt_add_bad_page(struct amdgpu_device *adev, in amdgpu_virt_add_bad_page() argument
426 if (adev->mman.fw_vram_usage_va) in amdgpu_virt_add_bad_page()
427 vram_usage_va = adev->mman.fw_vram_usage_va; in amdgpu_virt_add_bad_page()
429 vram_usage_va = adev->mman.drv_vram_usage_va; in amdgpu_virt_add_bad_page()
440 if (amdgpu_virt_ras_check_bad_page(adev, retired_page)) in amdgpu_virt_add_bad_page()
443 amdgpu_virt_ras_add_bps(adev, &bp, 1); in amdgpu_virt_add_bad_page()
445 amdgpu_virt_ras_reserve_bps(adev); in amdgpu_virt_add_bad_page()
450 static int amdgpu_virt_read_pf2vf_data(struct amdgpu_device *adev) in amdgpu_virt_read_pf2vf_data() argument
452 struct amd_sriov_msg_pf2vf_info_header *pf2vf_info = adev->virt.fw_reserve.p_pf2vf; in amdgpu_virt_read_pf2vf_data()
459 if (adev->virt.fw_reserve.p_pf2vf == NULL) in amdgpu_virt_read_pf2vf_data()
463 dev_err(adev->dev, "invalid pf2vf message size: 0x%x\n", pf2vf_info->size); in amdgpu_virt_read_pf2vf_data()
471 adev->virt.fw_reserve.p_pf2vf, pf2vf_info->size, in amdgpu_virt_read_pf2vf_data()
472 adev->virt.fw_reserve.checksum_key, checksum); in amdgpu_virt_read_pf2vf_data()
474 dev_err(adev->dev, in amdgpu_virt_read_pf2vf_data()
480 adev->virt.gim_feature = in amdgpu_virt_read_pf2vf_data()
487 adev->virt.fw_reserve.p_pf2vf, pf2vf_info->size, in amdgpu_virt_read_pf2vf_data()
490 dev_err(adev->dev, in amdgpu_virt_read_pf2vf_data()
496 adev->virt.vf2pf_update_interval_ms = in amdgpu_virt_read_pf2vf_data()
498 adev->virt.gim_feature = in amdgpu_virt_read_pf2vf_data()
500 adev->virt.reg_access = in amdgpu_virt_read_pf2vf_data()
503 adev->virt.decode_max_dimension_pixels = 0; in amdgpu_virt_read_pf2vf_data()
504 adev->virt.decode_max_frame_pixels = 0; in amdgpu_virt_read_pf2vf_data()
505 adev->virt.encode_max_dimension_pixels = 0; in amdgpu_virt_read_pf2vf_data()
506 adev->virt.encode_max_frame_pixels = 0; in amdgpu_virt_read_pf2vf_data()
507 adev->virt.is_mm_bw_enabled = false; in amdgpu_virt_read_pf2vf_data()
510 adev->virt.decode_max_dimension_pixels = max(tmp, adev->virt.decode_max_dimension_pixels); in amdgpu_virt_read_pf2vf_data()
513 adev->virt.decode_max_frame_pixels = max(tmp, adev->virt.decode_max_frame_pixels); in amdgpu_virt_read_pf2vf_data()
516 adev->virt.encode_max_dimension_pixels = max(tmp, adev->virt.encode_max_dimension_pixels); in amdgpu_virt_read_pf2vf_data()
519 adev->virt.encode_max_frame_pixels = max(tmp, adev->virt.encode_max_frame_pixels); in amdgpu_virt_read_pf2vf_data()
521 if ((adev->virt.decode_max_dimension_pixels > 0) || (adev->virt.encode_max_dimension_pixels > 0)) in amdgpu_virt_read_pf2vf_data()
522 adev->virt.is_mm_bw_enabled = true; in amdgpu_virt_read_pf2vf_data()
524 adev->unique_id = in amdgpu_virt_read_pf2vf_data()
526 adev->virt.ras_en_caps.all = ((struct amd_sriov_msg_pf2vf_info *)pf2vf_info)->ras_en_caps.all; in amdgpu_virt_read_pf2vf_data()
527 adev->virt.ras_telemetry_en_caps.all = in amdgpu_virt_read_pf2vf_data()
531 dev_err(adev->dev, "invalid pf2vf version: 0x%x\n", pf2vf_info->version); in amdgpu_virt_read_pf2vf_data()
536 if (adev->virt.vf2pf_update_interval_ms < 200 || adev->virt.vf2pf_update_interval_ms > 10000) in amdgpu_virt_read_pf2vf_data()
537 adev->virt.vf2pf_update_interval_ms = 2000; in amdgpu_virt_read_pf2vf_data()
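
Lines 471-472 and 487 feed the whole pf2vf region, its advertised size, a per-VF key and the stored checksum into a verification helper that is itself outside the match list. The standalone sketch below shows the byte-sum style of check such code plausibly performs; it is an assumption, not the kernel's implementation:

#include <stddef.h>

/* Hypothetical checker: sum every byte of the message seeded with the
 * key, then subtract the bytes of the stored checksum field, since
 * that field lives inside the summed region. A mismatch against the
 * stored value would trigger the dev_err() at lines 474 and 490. */
static unsigned int pf2vf_checksum(const void *obj, size_t obj_size,
                                   unsigned int key, unsigned int checksum)
{
    const unsigned char *pos = obj;
    unsigned int ret = key;
    size_t i;

    for (i = 0; i < obj_size; ++i)
        ret += pos[i];

    pos = (const unsigned char *)&checksum;
    for (i = 0; i < sizeof(checksum); ++i)
        ret -= pos[i];

    return ret;
}
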
542 static void amdgpu_virt_populate_vf2pf_ucode_info(struct amdgpu_device *adev) in amdgpu_virt_populate_vf2pf_ucode_info() argument
545 vf2pf_info = (struct amd_sriov_msg_vf2pf_info *) adev->virt.fw_reserve.p_vf2pf; in amdgpu_virt_populate_vf2pf_ucode_info()
547 if (adev->virt.fw_reserve.p_vf2pf == NULL) in amdgpu_virt_populate_vf2pf_ucode_info()
550 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_VCE, adev->vce.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
551 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_UVD, adev->uvd.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
552 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_MC, adev->gmc.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
553 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_ME, adev->gfx.me_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
554 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_PFP, adev->gfx.pfp_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
555 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_CE, adev->gfx.ce_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
556 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_RLC, adev->gfx.rlc_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
557 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_RLC_SRLC, adev->gfx.rlc_srlc_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
558 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_RLC_SRLG, adev->gfx.rlc_srlg_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
559 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_RLC_SRLS, adev->gfx.rlc_srls_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
560 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_MEC, adev->gfx.mec_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
561 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_MEC2, adev->gfx.mec2_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
562 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_SOS, adev->psp.sos.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
564 adev->psp.asd_context.bin_desc.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
566 adev->psp.ras_context.context.bin_desc.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
568 adev->psp.xgmi_context.context.bin_desc.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
569 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_SMC, adev->pm.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
570 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_SDMA, adev->sdma.instance[0].fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
571 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_SDMA2, adev->sdma.instance[1].fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
572 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_VCN, adev->vcn.fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
573 POPULATE_UCODE_INFO(vf2pf_info, AMD_SRIOV_UCODE_ID_DMCU, adev->dm.dmcu_fw_version); in amdgpu_virt_populate_vf2pf_ucode_info()
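
Every line of amdgpu_virt_populate_vf2pf_ucode_info() invokes POPULATE_UCODE_INFO() with a firmware id and the version cached in adev. The macro definition is not part of the match list; an assumed shape that fits every call site above:

/* Assumed definition; the field names are a guess. */
#define POPULATE_UCODE_INFO(vf2pf_info, ucode, ver)          \
    do {                                                     \
        (vf2pf_info)->ucode_info[ucode].id = (ucode);        \
        (vf2pf_info)->ucode_info[ucode].version = (ver);     \
    } while (0)
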
576 static int amdgpu_virt_write_vf2pf_data(struct amdgpu_device *adev) in amdgpu_virt_write_vf2pf_data() argument
580 vf2pf_info = (struct amd_sriov_msg_vf2pf_info *) adev->virt.fw_reserve.p_vf2pf; in amdgpu_virt_write_vf2pf_data()
582 if (adev->virt.fw_reserve.p_vf2pf == NULL) in amdgpu_virt_write_vf2pf_data()
602 ttm_resource_manager_usage(&adev->mman.vram_mgr.manager) >> 20; in amdgpu_virt_write_vf2pf_data()
604 amdgpu_vram_mgr_vis_usage(&adev->mman.vram_mgr) >> 20; in amdgpu_virt_write_vf2pf_data()
605 vf2pf_info->fb_size = adev->gmc.real_vram_size >> 20; in amdgpu_virt_write_vf2pf_data()
606 vf2pf_info->fb_vis_size = adev->gmc.visible_vram_size >> 20; in amdgpu_virt_write_vf2pf_data()
608 amdgpu_virt_populate_vf2pf_ucode_info(adev); in amdgpu_virt_write_vf2pf_data()
616 vf2pf_info->dummy_page_addr = (uint64_t)adev->dummy_page_addr; in amdgpu_virt_write_vf2pf_data()
617 if (amdgpu_sriov_is_mes_info_enable(adev)) { in amdgpu_virt_write_vf2pf_data()
619 (uint64_t)(adev->mes.resource_1_gpu_addr[0] + AMDGPU_GPU_PAGE_SIZE); in amdgpu_virt_write_vf2pf_data()
621 adev->mes.resource_1[0]->tbo.base.size - AMDGPU_GPU_PAGE_SIZE; in amdgpu_virt_write_vf2pf_data()
632 struct amdgpu_device *adev = container_of(work, struct amdgpu_device, virt.vf2pf_work.work); in amdgpu_virt_update_vf2pf_work_item() local
635 ret = amdgpu_virt_read_pf2vf_data(adev); in amdgpu_virt_update_vf2pf_work_item()
637 adev->virt.vf2pf_update_retry_cnt++; in amdgpu_virt_update_vf2pf_work_item()
639 if ((amdgpu_virt_rcvd_ras_interrupt(adev) || in amdgpu_virt_update_vf2pf_work_item()
640 adev->virt.vf2pf_update_retry_cnt >= AMDGPU_VF2PF_UPDATE_MAX_RETRY_LIMIT) && in amdgpu_virt_update_vf2pf_work_item()
641 amdgpu_sriov_runtime(adev)) { in amdgpu_virt_update_vf2pf_work_item()
643 amdgpu_ras_set_fed(adev, true); in amdgpu_virt_update_vf2pf_work_item()
644 if (amdgpu_reset_domain_schedule(adev->reset_domain, in amdgpu_virt_update_vf2pf_work_item()
645 &adev->kfd.reset_work)) in amdgpu_virt_update_vf2pf_work_item()
648 dev_err(adev->dev, "Failed to queue work! at %s", __func__); in amdgpu_virt_update_vf2pf_work_item()
654 adev->virt.vf2pf_update_retry_cnt = 0; in amdgpu_virt_update_vf2pf_work_item()
655 amdgpu_virt_write_vf2pf_data(adev); in amdgpu_virt_update_vf2pf_work_item()
658 schedule_delayed_work(&(adev->virt.vf2pf_work), adev->virt.vf2pf_update_interval_ms); in amdgpu_virt_update_vf2pf_work_item()
661 void amdgpu_virt_fini_data_exchange(struct amdgpu_device *adev) in amdgpu_virt_fini_data_exchange() argument
663 if (adev->virt.vf2pf_update_interval_ms != 0) { in amdgpu_virt_fini_data_exchange()
665 cancel_delayed_work_sync(&adev->virt.vf2pf_work); in amdgpu_virt_fini_data_exchange()
666 adev->virt.vf2pf_update_interval_ms = 0; in amdgpu_virt_fini_data_exchange()
670 void amdgpu_virt_init_data_exchange(struct amdgpu_device *adev) in amdgpu_virt_init_data_exchange() argument
672 adev->virt.fw_reserve.p_pf2vf = NULL; in amdgpu_virt_init_data_exchange()
673 adev->virt.fw_reserve.p_vf2pf = NULL; in amdgpu_virt_init_data_exchange()
674 adev->virt.vf2pf_update_interval_ms = 0; in amdgpu_virt_init_data_exchange()
675 adev->virt.vf2pf_update_retry_cnt = 0; in amdgpu_virt_init_data_exchange()
677 if (adev->mman.fw_vram_usage_va && adev->mman.drv_vram_usage_va) { in amdgpu_virt_init_data_exchange()
679 } else if (adev->mman.fw_vram_usage_va || adev->mman.drv_vram_usage_va) { in amdgpu_virt_init_data_exchange()
681 amdgpu_virt_exchange_data(adev); in amdgpu_virt_init_data_exchange()
683 INIT_DELAYED_WORK(&adev->virt.vf2pf_work, amdgpu_virt_update_vf2pf_work_item); in amdgpu_virt_init_data_exchange()
684 schedule_delayed_work(&(adev->virt.vf2pf_work), msecs_to_jiffies(adev->virt.vf2pf_update_interval_ms)); in amdgpu_virt_init_data_exchange()
685 } else if (adev->bios != NULL) { in amdgpu_virt_init_data_exchange()
687 adev->virt.fw_reserve.p_pf2vf = in amdgpu_virt_init_data_exchange()
689 (adev->bios + (AMD_SRIOV_MSG_PF2VF_OFFSET_KB << 10)); in amdgpu_virt_init_data_exchange()
691 amdgpu_virt_read_pf2vf_data(adev); in amdgpu_virt_init_data_exchange()
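
Taken together, the init/fini matches describe a self-rearming heartbeat: amdgpu_virt_init_data_exchange() arms a delayed work, the work item re-queues itself (line 658), and amdgpu_virt_fini_data_exchange() cancels it synchronously, with vf2pf_update_interval_ms doubling as the running flag (non-zero means armed; lines 536-537 clamp it to 200-10000 ms). A lifecycle sketch from those fragments:

/* setup (amdgpu_virt_init_data_exchange) */
INIT_DELAYED_WORK(&adev->virt.vf2pf_work, amdgpu_virt_update_vf2pf_work_item);
schedule_delayed_work(&adev->virt.vf2pf_work,
                      msecs_to_jiffies(adev->virt.vf2pf_update_interval_ms));

/* teardown (amdgpu_virt_fini_data_exchange) */
if (adev->virt.vf2pf_update_interval_ms != 0) {
    cancel_delayed_work_sync(&adev->virt.vf2pf_work);
    adev->virt.vf2pf_update_interval_ms = 0;    /* mark stopped */
}
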
696 void amdgpu_virt_exchange_data(struct amdgpu_device *adev) in amdgpu_virt_exchange_data() argument
702 if (adev->mman.fw_vram_usage_va || adev->mman.drv_vram_usage_va) { in amdgpu_virt_exchange_data()
703 if (adev->mman.fw_vram_usage_va) { in amdgpu_virt_exchange_data()
704 adev->virt.fw_reserve.p_pf2vf = in amdgpu_virt_exchange_data()
706 (adev->mman.fw_vram_usage_va + (AMD_SRIOV_MSG_PF2VF_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
707 adev->virt.fw_reserve.p_vf2pf = in amdgpu_virt_exchange_data()
709 (adev->mman.fw_vram_usage_va + (AMD_SRIOV_MSG_VF2PF_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
710 adev->virt.fw_reserve.ras_telemetry = in amdgpu_virt_exchange_data()
711 (adev->mman.fw_vram_usage_va + (AMD_SRIOV_MSG_RAS_TELEMETRY_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
712 } else if (adev->mman.drv_vram_usage_va) { in amdgpu_virt_exchange_data()
713 adev->virt.fw_reserve.p_pf2vf = in amdgpu_virt_exchange_data()
715 (adev->mman.drv_vram_usage_va + (AMD_SRIOV_MSG_PF2VF_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
716 adev->virt.fw_reserve.p_vf2pf = in amdgpu_virt_exchange_data()
718 (adev->mman.drv_vram_usage_va + (AMD_SRIOV_MSG_VF2PF_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
719 adev->virt.fw_reserve.ras_telemetry = in amdgpu_virt_exchange_data()
720 (adev->mman.drv_vram_usage_va + (AMD_SRIOV_MSG_RAS_TELEMETRY_OFFSET_KB << 10)); in amdgpu_virt_exchange_data()
723 amdgpu_virt_read_pf2vf_data(adev); in amdgpu_virt_exchange_data()
724 amdgpu_virt_write_vf2pf_data(adev); in amdgpu_virt_exchange_data()
727 if (adev->virt.fw_reserve.p_pf2vf->version == 2) { in amdgpu_virt_exchange_data()
728 pf2vf_v2 = (struct amd_sriov_msg_pf2vf_info *)adev->virt.fw_reserve.p_pf2vf; in amdgpu_virt_exchange_data()
734 if (bp_block_size && !adev->virt.ras_init_done) in amdgpu_virt_exchange_data()
735 amdgpu_virt_init_ras_err_handler_data(adev); in amdgpu_virt_exchange_data()
737 if (adev->virt.ras_init_done) in amdgpu_virt_exchange_data()
738 amdgpu_virt_add_bad_page(adev, bp_block_offset, bp_block_size); in amdgpu_virt_exchange_data()
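
amdgpu_virt_exchange_data() carves one reserved VRAM window (fw_vram_usage_va, or drv_vram_usage_va as the fallback) into pf2vf, vf2pf and RAS-telemetry regions by fixed KB offsets. The listing shows only the macro names; the values below are illustrative placeholders, not the kernel's definitions:

#define AMD_SRIOV_MSG_PF2VF_OFFSET_KB          0    /* placeholder */
#define AMD_SRIOV_MSG_VF2PF_OFFSET_KB          4    /* placeholder */
#define AMD_SRIOV_MSG_RAS_TELEMETRY_OFFSET_KB  8    /* placeholder */

/* << 10 turns a KB offset into a byte offset into the window. */
void *base = adev->mman.fw_vram_usage_va;

adev->virt.fw_reserve.p_pf2vf =
    base + (AMD_SRIOV_MSG_PF2VF_OFFSET_KB << 10);
adev->virt.fw_reserve.p_vf2pf =
    base + (AMD_SRIOV_MSG_VF2PF_OFFSET_KB << 10);
adev->virt.fw_reserve.ras_telemetry =
    base + (AMD_SRIOV_MSG_RAS_TELEMETRY_OFFSET_KB << 10);
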
743 static u32 amdgpu_virt_init_detect_asic(struct amdgpu_device *adev) in amdgpu_virt_init_detect_asic() argument
747 switch (adev->asic_type) { in amdgpu_virt_init_detect_asic()
768 adev->virt.caps |= AMDGPU_SRIOV_CAPS_IS_VF; in amdgpu_virt_init_detect_asic()
771 adev->virt.caps |= AMDGPU_SRIOV_CAPS_ENABLE_IOV; in amdgpu_virt_init_detect_asic()
776 adev->virt.caps |= AMDGPU_PASSTHROUGH_MODE; in amdgpu_virt_init_detect_asic()
782 static bool amdgpu_virt_init_req_data(struct amdgpu_device *adev, u32 reg) in amdgpu_virt_init_req_data() argument
787 if (amdgpu_sriov_vf(adev)) { in amdgpu_virt_init_req_data()
790 switch (adev->asic_type) { in amdgpu_virt_init_req_data()
793 vi_set_virt_ops(adev); in amdgpu_virt_init_req_data()
796 soc15_set_virt_ops(adev); in amdgpu_virt_init_req_data()
802 amdgpu_virt_request_init_data(adev); in amdgpu_virt_init_req_data()
807 soc15_set_virt_ops(adev); in amdgpu_virt_init_req_data()
813 nv_set_virt_ops(adev); in amdgpu_virt_init_req_data()
815 amdgpu_virt_request_init_data(adev); in amdgpu_virt_init_req_data()
819 DRM_ERROR("Unknown asic type: %d!\n", adev->asic_type); in amdgpu_virt_init_req_data()
827 static void amdgpu_virt_init_ras(struct amdgpu_device *adev) in amdgpu_virt_init_ras() argument
829 ratelimit_state_init(&adev->virt.ras.ras_error_cnt_rs, 5 * HZ, 1); in amdgpu_virt_init_ras()
830 ratelimit_state_init(&adev->virt.ras.ras_cper_dump_rs, 5 * HZ, 1); in amdgpu_virt_init_ras()
832 ratelimit_set_flags(&adev->virt.ras.ras_error_cnt_rs, in amdgpu_virt_init_ras()
834 ratelimit_set_flags(&adev->virt.ras.ras_cper_dump_rs, in amdgpu_virt_init_ras()
837 mutex_init(&adev->virt.ras.ras_telemetry_mutex); in amdgpu_virt_init_ras()
839 adev->virt.ras.cper_rptr = 0; in amdgpu_virt_init_ras()
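
amdgpu_virt_init_ras() arms two ratelimit states at one event per 5 * HZ (five seconds), one for error-count requests and one for CPER dumps, so a VF cannot flood the host with telemetry mailbox traffic. A sketch of how such a state is typically consumed, mirroring the force_update parameters visible at lines 1322 and 1447 (the helper name is hypothetical):

/* __ratelimit() succeeds at most once per 5-second window here
 * (burst = 1), so only forced callers bypass the limit. */
static bool ras_telemetry_allowed(struct amdgpu_device *adev, bool force_update)
{
    return force_update || __ratelimit(&adev->virt.ras.ras_error_cnt_rs);
}
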
842 void amdgpu_virt_init(struct amdgpu_device *adev) in amdgpu_virt_init() argument
845 uint32_t reg = amdgpu_virt_init_detect_asic(adev); in amdgpu_virt_init()
847 is_sriov = amdgpu_virt_init_req_data(adev, reg); in amdgpu_virt_init()
850 amdgpu_virt_init_ras(adev); in amdgpu_virt_init()
853 static bool amdgpu_virt_access_debugfs_is_mmio(struct amdgpu_device *adev) in amdgpu_virt_access_debugfs_is_mmio() argument
855 return amdgpu_sriov_is_debug(adev) ? true : false; in amdgpu_virt_access_debugfs_is_mmio()
858 static bool amdgpu_virt_access_debugfs_is_kiq(struct amdgpu_device *adev) in amdgpu_virt_access_debugfs_is_kiq() argument
860 return amdgpu_sriov_is_normal(adev) ? true : false; in amdgpu_virt_access_debugfs_is_kiq()
863 int amdgpu_virt_enable_access_debugfs(struct amdgpu_device *adev) in amdgpu_virt_enable_access_debugfs() argument
865 if (!amdgpu_sriov_vf(adev) || in amdgpu_virt_enable_access_debugfs()
866 amdgpu_virt_access_debugfs_is_kiq(adev)) in amdgpu_virt_enable_access_debugfs()
869 if (amdgpu_virt_access_debugfs_is_mmio(adev)) in amdgpu_virt_enable_access_debugfs()
870 adev->virt.caps &= ~AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_virt_enable_access_debugfs()
877 void amdgpu_virt_disable_access_debugfs(struct amdgpu_device *adev) in amdgpu_virt_disable_access_debugfs() argument
879 if (amdgpu_sriov_vf(adev)) in amdgpu_virt_disable_access_debugfs()
880 adev->virt.caps |= AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_virt_disable_access_debugfs()
883 enum amdgpu_sriov_vf_mode amdgpu_virt_get_sriov_vf_mode(struct amdgpu_device *adev) in amdgpu_virt_get_sriov_vf_mode() argument
887 if (amdgpu_sriov_vf(adev)) { in amdgpu_virt_get_sriov_vf_mode()
888 if (amdgpu_sriov_is_pp_one_vf(adev)) in amdgpu_virt_get_sriov_vf_mode()
899 void amdgpu_virt_pre_reset(struct amdgpu_device *adev) in amdgpu_virt_pre_reset() argument
902 amdgpu_virt_fini_data_exchange(adev); in amdgpu_virt_pre_reset()
903 amdgpu_dpm_set_mp1_state(adev, PP_MP1_STATE_FLR); in amdgpu_virt_pre_reset()
906 void amdgpu_virt_post_reset(struct amdgpu_device *adev) in amdgpu_virt_post_reset() argument
908 if (amdgpu_ip_version(adev, GC_HWIP, 0) == IP_VERSION(11, 0, 3)) { in amdgpu_virt_post_reset()
912 adev->gfx.is_poweron = false; in amdgpu_virt_post_reset()
915 adev->mes.ring[0].sched.ready = false; in amdgpu_virt_post_reset()
918 bool amdgpu_virt_fw_load_skip_check(struct amdgpu_device *adev, uint32_t ucode_id) in amdgpu_virt_fw_load_skip_check() argument
920 switch (amdgpu_ip_version(adev, MP0_HWIP, 0)) { in amdgpu_virt_fw_load_skip_check()
983 void amdgpu_virt_update_sriov_video_codec(struct amdgpu_device *adev, in amdgpu_virt_update_sriov_video_codec() argument
989 if (!adev->virt.is_mm_bw_enabled) in amdgpu_virt_update_sriov_video_codec()
994 encode[i].max_width = adev->virt.encode_max_dimension_pixels; in amdgpu_virt_update_sriov_video_codec()
995 encode[i].max_pixels_per_frame = adev->virt.encode_max_frame_pixels; in amdgpu_virt_update_sriov_video_codec()
1005 decode[i].max_width = adev->virt.decode_max_dimension_pixels; in amdgpu_virt_update_sriov_video_codec()
1006 decode[i].max_pixels_per_frame = adev->virt.decode_max_frame_pixels; in amdgpu_virt_update_sriov_video_codec()
1015 bool amdgpu_virt_get_rlcg_reg_access_flag(struct amdgpu_device *adev, in amdgpu_virt_get_rlcg_reg_access_flag() argument
1023 if (amdgpu_sriov_reg_indirect_gc(adev)) { in amdgpu_virt_get_rlcg_reg_access_flag()
1036 if (amdgpu_sriov_reg_indirect_mmhub(adev) && in amdgpu_virt_get_rlcg_reg_access_flag()
1048 u32 amdgpu_virt_rlcg_reg_rw(struct amdgpu_device *adev, u32 offset, u32 v, u32 flag, u32 xcc_id) in amdgpu_virt_rlcg_reg_rw() argument
1061 if (!adev->gfx.rlc.rlcg_reg_access_supported) { in amdgpu_virt_rlcg_reg_rw()
1062 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1067 if (adev->gfx.xcc_mask && (((1 << xcc_id) & adev->gfx.xcc_mask) == 0)) { in amdgpu_virt_rlcg_reg_rw()
1068 dev_err(adev->dev, "invalid xcc\n"); in amdgpu_virt_rlcg_reg_rw()
1072 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_virt_rlcg_reg_rw()
1075 reg_access_ctrl = &adev->gfx.rlc.reg_access_ctrl[xcc_id]; in amdgpu_virt_rlcg_reg_rw()
1076 scratch_reg0 = (void __iomem *)adev->rmmio + 4 * reg_access_ctrl->scratch_reg0; in amdgpu_virt_rlcg_reg_rw()
1077 scratch_reg1 = (void __iomem *)adev->rmmio + 4 * reg_access_ctrl->scratch_reg1; in amdgpu_virt_rlcg_reg_rw()
1078 scratch_reg2 = (void __iomem *)adev->rmmio + 4 * reg_access_ctrl->scratch_reg2; in amdgpu_virt_rlcg_reg_rw()
1079 scratch_reg3 = (void __iomem *)adev->rmmio + 4 * reg_access_ctrl->scratch_reg3; in amdgpu_virt_rlcg_reg_rw()
1081 spin_lock_irqsave(&adev->virt.rlcg_reg_lock, flags); in amdgpu_virt_rlcg_reg_rw()
1084 spare_int = (void __iomem *)adev->rmmio + 4 * reg_access_ctrl->spare_int; in amdgpu_virt_rlcg_reg_rw()
1090 writel(v, ((void __iomem *)adev->rmmio) + (offset * 4)); in amdgpu_virt_rlcg_reg_rw()
1095 writel(v, ((void __iomem *)adev->rmmio) + (offset * 4)); in amdgpu_virt_rlcg_reg_rw()
1117 if (amdgpu_sriov_rlcg_error_report_enabled(adev)) { in amdgpu_virt_rlcg_reg_rw()
1119 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1122 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1125 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1128 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1132 dev_err(adev->dev, in amdgpu_virt_rlcg_reg_rw()
1140 spin_unlock_irqrestore(&adev->virt.rlcg_reg_lock, flags); in amdgpu_virt_rlcg_reg_rw()
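
The amdgpu_virt_rlcg_reg_rw() matches outline the VF's indirect register protocol: the payload goes into scratch_reg0, the register offset plus a command flag into scratch_reg1, a spare interrupt kicks the RLC firmware, and the VF polls until the flag clears, all under rlcg_reg_lock. A condensed sketch of the middle of the function; the flag encoding, timeout bound and poll delay are assumptions:

writel(v, scratch_reg0);                /* payload            */
writel(offset | flag, scratch_reg1);    /* address + command  */
if (reg_access_ctrl->spare_int)
    writel(1, spare_int);               /* ring the RLC       */

for (i = 0; i < timeout; i++) {         /* wait for the ack   */
    tmp = readl(scratch_reg1);
    if (!(tmp & flag))
        break;
    udelay(10);
}

ret = readl(scratch_reg0);              /* result for a read  */
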
1145 void amdgpu_sriov_wreg(struct amdgpu_device *adev, in amdgpu_sriov_wreg() argument
1151 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_sriov_wreg()
1154 if (!amdgpu_sriov_runtime(adev) && in amdgpu_sriov_wreg()
1155 amdgpu_virt_get_rlcg_reg_access_flag(adev, acc_flags, hwip, true, &rlcg_flag)) { in amdgpu_sriov_wreg()
1156 amdgpu_virt_rlcg_reg_rw(adev, offset, value, rlcg_flag, xcc_id); in amdgpu_sriov_wreg()
1166 u32 amdgpu_sriov_rreg(struct amdgpu_device *adev, in amdgpu_sriov_rreg() argument
1171 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_sriov_rreg()
1174 if (!amdgpu_sriov_runtime(adev) && in amdgpu_sriov_rreg()
1175 amdgpu_virt_get_rlcg_reg_access_flag(adev, acc_flags, hwip, false, &rlcg_flag)) in amdgpu_sriov_rreg()
1176 return amdgpu_virt_rlcg_reg_rw(adev, offset, 0, rlcg_flag, xcc_id); in amdgpu_sriov_rreg()
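
amdgpu_sriov_wreg() and amdgpu_sriov_rreg() are thin front-ends: when the VF is not in runtime mode and the target block requires RLCG mediation they route through amdgpu_virt_rlcg_reg_rw(); otherwise they fall back to direct MMIO. The read side, with the plain-MMIO fallback reconstructed as an assumption:

u32 amdgpu_sriov_rreg(struct amdgpu_device *adev,
                      u32 offset, u32 acc_flags, u32 hwip, u32 xcc_id)
{
    u32 rlcg_flag;

    if (amdgpu_device_skip_hw_access(adev))
        return 0;

    if (!amdgpu_sriov_runtime(adev) &&
        amdgpu_virt_get_rlcg_reg_access_flag(adev, acc_flags, hwip,
                                             false, &rlcg_flag))
        return amdgpu_virt_rlcg_reg_rw(adev, offset, 0, rlcg_flag, xcc_id);

    return RREG32(offset);    /* assumption: direct MMIO fallback */
}
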
1184 bool amdgpu_sriov_xnack_support(struct amdgpu_device *adev) in amdgpu_sriov_xnack_support() argument
1188 if (amdgpu_sriov_vf(adev) && in amdgpu_sriov_xnack_support()
1189 amdgpu_ip_version(adev, GC_HWIP, 0) == IP_VERSION(9, 4, 2)) in amdgpu_sriov_xnack_support()
1195 bool amdgpu_virt_get_ras_capability(struct amdgpu_device *adev) in amdgpu_virt_get_ras_capability() argument
1197 struct amdgpu_ras *con = amdgpu_ras_get_context(adev); in amdgpu_virt_get_ras_capability()
1199 if (!amdgpu_sriov_ras_caps_en(adev)) in amdgpu_virt_get_ras_capability()
1202 if (adev->virt.ras_en_caps.bits.block_umc) in amdgpu_virt_get_ras_capability()
1203 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__UMC); in amdgpu_virt_get_ras_capability()
1204 if (adev->virt.ras_en_caps.bits.block_sdma) in amdgpu_virt_get_ras_capability()
1205 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__SDMA); in amdgpu_virt_get_ras_capability()
1206 if (adev->virt.ras_en_caps.bits.block_gfx) in amdgpu_virt_get_ras_capability()
1207 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__GFX); in amdgpu_virt_get_ras_capability()
1208 if (adev->virt.ras_en_caps.bits.block_mmhub) in amdgpu_virt_get_ras_capability()
1209 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__MMHUB); in amdgpu_virt_get_ras_capability()
1210 if (adev->virt.ras_en_caps.bits.block_athub) in amdgpu_virt_get_ras_capability()
1211 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__ATHUB); in amdgpu_virt_get_ras_capability()
1212 if (adev->virt.ras_en_caps.bits.block_pcie_bif) in amdgpu_virt_get_ras_capability()
1213 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__PCIE_BIF); in amdgpu_virt_get_ras_capability()
1214 if (adev->virt.ras_en_caps.bits.block_hdp) in amdgpu_virt_get_ras_capability()
1215 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__HDP); in amdgpu_virt_get_ras_capability()
1216 if (adev->virt.ras_en_caps.bits.block_xgmi_wafl) in amdgpu_virt_get_ras_capability()
1217 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__XGMI_WAFL); in amdgpu_virt_get_ras_capability()
1218 if (adev->virt.ras_en_caps.bits.block_df) in amdgpu_virt_get_ras_capability()
1219 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__DF); in amdgpu_virt_get_ras_capability()
1220 if (adev->virt.ras_en_caps.bits.block_smn) in amdgpu_virt_get_ras_capability()
1221 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__SMN); in amdgpu_virt_get_ras_capability()
1222 if (adev->virt.ras_en_caps.bits.block_sem) in amdgpu_virt_get_ras_capability()
1223 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__SEM); in amdgpu_virt_get_ras_capability()
1224 if (adev->virt.ras_en_caps.bits.block_mp0) in amdgpu_virt_get_ras_capability()
1225 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__MP0); in amdgpu_virt_get_ras_capability()
1226 if (adev->virt.ras_en_caps.bits.block_mp1) in amdgpu_virt_get_ras_capability()
1227 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__MP1); in amdgpu_virt_get_ras_capability()
1228 if (adev->virt.ras_en_caps.bits.block_fuse) in amdgpu_virt_get_ras_capability()
1229 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__FUSE); in amdgpu_virt_get_ras_capability()
1230 if (adev->virt.ras_en_caps.bits.block_mca) in amdgpu_virt_get_ras_capability()
1231 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__MCA); in amdgpu_virt_get_ras_capability()
1232 if (adev->virt.ras_en_caps.bits.block_vcn) in amdgpu_virt_get_ras_capability()
1233 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__VCN); in amdgpu_virt_get_ras_capability()
1234 if (adev->virt.ras_en_caps.bits.block_jpeg) in amdgpu_virt_get_ras_capability()
1235 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__JPEG); in amdgpu_virt_get_ras_capability()
1236 if (adev->virt.ras_en_caps.bits.block_ih) in amdgpu_virt_get_ras_capability()
1237 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__IH); in amdgpu_virt_get_ras_capability()
1238 if (adev->virt.ras_en_caps.bits.block_mpio) in amdgpu_virt_get_ras_capability()
1239 adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__MPIO); in amdgpu_virt_get_ras_capability()
1241 if (adev->virt.ras_en_caps.bits.poison_propogation_mode) in amdgpu_virt_get_ras_capability()
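
amdgpu_virt_get_ras_capability() copies each host-advertised ras_en_caps bit into the matching AMDGPU_RAS_BLOCK bit of adev->ras_hw_enabled, one if per block. For illustration only, the same mapping compressed into a helper macro; this is a rewrite, not the kernel's code:

#define VIRT_RAS_CAP_TO_BLOCK(cap, blk)                               \
    do {                                                              \
        if (adev->virt.ras_en_caps.bits.cap)                          \
            adev->ras_hw_enabled |= BIT(AMDGPU_RAS_BLOCK__##blk);     \
    } while (0)

VIRT_RAS_CAP_TO_BLOCK(block_umc, UMC);
VIRT_RAS_CAP_TO_BLOCK(block_sdma, SDMA);
VIRT_RAS_CAP_TO_BLOCK(block_gfx, GFX);
/* ...and so on for each remaining block listed above. */
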
1248 amdgpu_ras_block_to_sriov(struct amdgpu_device *adev, enum amdgpu_ras_block block) { in amdgpu_ras_block_to_sriov() argument
1295 static int amdgpu_virt_cache_host_error_counts(struct amdgpu_device *adev, in amdgpu_virt_cache_host_error_counts() argument
1314 memcpy(&adev->virt.count_cache, tmp, in amdgpu_virt_cache_host_error_counts()
1315 min(used_size, sizeof(adev->virt.count_cache))); in amdgpu_virt_cache_host_error_counts()
1322 static int amdgpu_virt_req_ras_err_count_internal(struct amdgpu_device *adev, bool force_update) in amdgpu_virt_req_ras_err_count_internal() argument
1324 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_req_ras_err_count_internal()
1335 if (!virt->ops->req_ras_err_count(adev)) in amdgpu_virt_req_ras_err_count_internal()
1336 amdgpu_virt_cache_host_error_counts(adev, in amdgpu_virt_req_ras_err_count_internal()
1345 int amdgpu_virt_req_ras_err_count(struct amdgpu_device *adev, enum amdgpu_ras_block block, in amdgpu_virt_req_ras_err_count() argument
1350 sriov_block = amdgpu_ras_block_to_sriov(adev, block); in amdgpu_virt_req_ras_err_count()
1353 !amdgpu_sriov_ras_telemetry_block_en(adev, sriov_block)) in amdgpu_virt_req_ras_err_count()
1357 if (down_read_trylock(&adev->reset_domain->sem)) { in amdgpu_virt_req_ras_err_count()
1358 amdgpu_virt_req_ras_err_count_internal(adev, false); in amdgpu_virt_req_ras_err_count()
1359 up_read(&adev->reset_domain->sem); in amdgpu_virt_req_ras_err_count()
1362 err_data->ue_count = adev->virt.count_cache.block[sriov_block].ue_count; in amdgpu_virt_req_ras_err_count()
1363 err_data->ce_count = adev->virt.count_cache.block[sriov_block].ce_count; in amdgpu_virt_req_ras_err_count()
1364 err_data->de_count = adev->virt.count_cache.block[sriov_block].de_count; in amdgpu_virt_req_ras_err_count()
1370 amdgpu_virt_write_cpers_to_ring(struct amdgpu_device *adev, in amdgpu_virt_write_cpers_to_ring() argument
1376 struct amdgpu_ring *ring = &adev->cper.ring_buf; in amdgpu_virt_write_cpers_to_ring()
1397 if (cper_dump->wptr < adev->virt.ras.cper_rptr) { in amdgpu_virt_write_cpers_to_ring()
1399 adev->dev, in amdgpu_virt_write_cpers_to_ring()
1401 adev->virt.ras.cper_rptr, cper_dump->wptr); in amdgpu_virt_write_cpers_to_ring()
1403 adev->virt.ras.cper_rptr = cper_dump->wptr; in amdgpu_virt_write_cpers_to_ring()
1416 dev_warn(adev->dev, in amdgpu_virt_write_cpers_to_ring()
1420 adev->virt.ras.cper_rptr = cper_dump->wptr; in amdgpu_virt_write_cpers_to_ring()
1427 static int amdgpu_virt_req_ras_cper_dump_internal(struct amdgpu_device *adev) in amdgpu_virt_req_ras_cper_dump_internal() argument
1429 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_req_ras_cper_dump_internal()
1437 if (!virt->ops->req_ras_cper_dump(adev, virt->ras.cper_rptr)) in amdgpu_virt_req_ras_cper_dump_internal()
1439 adev, virt->fw_reserve.ras_telemetry, &more); in amdgpu_virt_req_ras_cper_dump_internal()
1447 int amdgpu_virt_req_ras_cper_dump(struct amdgpu_device *adev, bool force_update) in amdgpu_virt_req_ras_cper_dump() argument
1449 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_req_ras_cper_dump()
1452 if (!amdgpu_sriov_ras_cper_en(adev)) in amdgpu_virt_req_ras_cper_dump()
1456 down_read_trylock(&adev->reset_domain->sem)) { in amdgpu_virt_req_ras_cper_dump()
1458 ret = amdgpu_virt_req_ras_cper_dump_internal(adev); in amdgpu_virt_req_ras_cper_dump()
1460 up_read(&adev->reset_domain->sem); in amdgpu_virt_req_ras_cper_dump()
1466 int amdgpu_virt_ras_telemetry_post_reset(struct amdgpu_device *adev) in amdgpu_virt_ras_telemetry_post_reset() argument
1470 if (amdgpu_sriov_ras_telemetry_en(adev)) { in amdgpu_virt_ras_telemetry_post_reset()
1471 amdgpu_virt_req_ras_err_count_internal(adev, true); in amdgpu_virt_ras_telemetry_post_reset()
1472 amdgpu_ras_query_error_count(adev, &ce_count, &ue_count, NULL); in amdgpu_virt_ras_telemetry_post_reset()
1478 bool amdgpu_virt_ras_telemetry_block_en(struct amdgpu_device *adev, in amdgpu_virt_ras_telemetry_block_en() argument
1483 sriov_block = amdgpu_ras_block_to_sriov(adev, block); in amdgpu_virt_ras_telemetry_block_en()
1486 !amdgpu_sriov_ras_telemetry_block_en(adev, sriov_block)) in amdgpu_virt_ras_telemetry_block_en()
1497 void amdgpu_virt_request_bad_pages(struct amdgpu_device *adev) in amdgpu_virt_request_bad_pages() argument
1499 struct amdgpu_virt *virt = &adev->virt; in amdgpu_virt_request_bad_pages()
1502 virt->ops->req_bad_pages(adev); in amdgpu_virt_request_bad_pages()