Lines Matching refs:gmc

45 return adev->gmc.xgmi.connected_to_cpu || amdgpu_virt_xgmi_migrate_enabled(adev); in amdgpu_gmc_is_pdb0_enabled()
60 u64 vram_size = adev->gmc.xgmi.node_segment_size * adev->gmc.xgmi.num_physical_nodes; in amdgpu_gmc_pdb0_alloc()
61 uint32_t pde0_page_shift = adev->gmc.vmid0_page_table_block_size + 21; in amdgpu_gmc_pdb0_alloc()
74 r = amdgpu_bo_create(adev, &bp, &adev->gmc.pdb0_bo); in amdgpu_gmc_pdb0_alloc()
78 r = amdgpu_bo_reserve(adev->gmc.pdb0_bo, false); in amdgpu_gmc_pdb0_alloc()
82 r = amdgpu_bo_pin(adev->gmc.pdb0_bo, AMDGPU_GEM_DOMAIN_VRAM); in amdgpu_gmc_pdb0_alloc()
85 r = amdgpu_bo_kmap(adev->gmc.pdb0_bo, &adev->gmc.ptr_pdb0); in amdgpu_gmc_pdb0_alloc()
89 amdgpu_bo_unreserve(adev->gmc.pdb0_bo); in amdgpu_gmc_pdb0_alloc()
93 amdgpu_bo_unpin(adev->gmc.pdb0_bo); in amdgpu_gmc_pdb0_alloc()
95 amdgpu_bo_unreserve(adev->gmc.pdb0_bo); in amdgpu_gmc_pdb0_alloc()
97 amdgpu_bo_unref(&adev->gmc.pdb0_bo); in amdgpu_gmc_pdb0_alloc()
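
The pdb0 allocation above sizes a page-directory block covering the whole XGMI hive's VRAM, then runs the usual create/reserve/pin/kmap sequence with unwind on failure. A minimal host-side sketch of the sizing arithmetic, using hypothetical names (pdb0_num_entries() is not a driver function):

    #include <stdint.h>

    /* Illustrative only: each PDE0 covers 2^(vmid0_page_table_block_size + 21)
     * bytes, i.e. a power-of-two number of 2 MiB pages, so PDB0 needs enough
     * entries to span node_segment_size * num_physical_nodes of VRAM. */
    uint64_t pdb0_num_entries(uint64_t node_segment_size,
                              uint32_t num_physical_nodes,
                              uint32_t vmid0_page_table_block_size)
    {
            uint64_t vram_size = node_segment_size * num_physical_nodes;
            uint32_t pde0_page_shift = vmid0_page_table_block_size + 21;

            /* round up so a trailing partial block still gets an entry */
            return (vram_size + (1ULL << pde0_page_shift) - 1) >> pde0_page_shift;
    }
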
197 if (bo->ttm->dma_address[0] + PAGE_SIZE >= adev->gmc.agp_size) in amdgpu_gmc_agp_addr()
200 return adev->gmc.agp_start + bo->ttm->dma_address[0]; in amdgpu_gmc_agp_addr()
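
These two lines imply the AGP translation rule: a page is reachable through the AGP aperture only when its DMA address (plus one page) still fits inside agp_size, and the GPU-visible address is then agp_start plus the DMA address. A hedged stand-alone illustration with assumed names and a placeholder error value:

    #include <stdint.h>

    #define SYS_PAGE_SIZE   4096ULL        /* assumed host page size */
    #define BAD_GPU_ADDR    (~0ULL)        /* placeholder failure value */

    /* Sketch of the check above: a single system page can only be reached
     * through the AGP aperture if its DMA address still fits inside the
     * aperture; the GPU address is then just an offset into the window. */
    uint64_t agp_gpu_addr(uint64_t dma_address, uint64_t agp_start,
                          uint64_t agp_size)
    {
            if (dma_address + SYS_PAGE_SIZE >= agp_size)
                    return BAD_GPU_ADDR;

            return agp_start + dma_address;
    }
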
298 u64 max_mc_address = min(adev->gmc.mc_mask, AMDGPU_GMC_HOLE_START - 1); in amdgpu_gmc_gart_location()
421 struct amdgpu_gmc *gmc = &adev->gmc; in amdgpu_gmc_filter_faults() local
433 if (gmc->fault_ring[gmc->last_fault].timestamp >= stamp) in amdgpu_gmc_filter_faults()
438 fault = &gmc->fault_ring[gmc->fault_hash[hash].idx]; in amdgpu_gmc_filter_faults()
459 fault = &gmc->fault_ring[fault->next]; in amdgpu_gmc_filter_faults()
467 fault = &gmc->fault_ring[gmc->last_fault]; in amdgpu_gmc_filter_faults()
472 fault->next = gmc->fault_hash[hash].idx; in amdgpu_gmc_filter_faults()
473 gmc->fault_hash[hash].idx = gmc->last_fault++; in amdgpu_gmc_filter_faults()
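
The filter above deduplicates retry faults with a small hash table whose buckets index into a ring of recent faults chained through ->next; a new fault reuses the slot at last_fault and is pushed onto the front of its bucket. A compact sketch of that shape under assumed ring and hash sizes (not the driver's constants or exact expiry logic):

    #include <stdint.h>
    #include <stdbool.h>

    #define FAULT_RING_SIZE 64U            /* assumed size */
    #define FAULT_HASH_SIZE 32U            /* assumed size */

    struct fault_entry {
            uint64_t key;                  /* e.g. faulting address + pasid */
            uint64_t timestamp;
            uint8_t  next;                 /* ring index of next chain entry */
    };

    struct fault_filter {
            struct fault_entry ring[FAULT_RING_SIZE];
            uint8_t hash_head[FAULT_HASH_SIZE];   /* bucket -> ring index */
            uint8_t last_fault;                   /* next ring slot to reuse */
    };

    /* Return true if 'key' was seen within 'expiry' ticks (filter it);
     * otherwise record it by reusing the oldest ring slot and linking it
     * at the head of its hash bucket. */
    bool fault_recently_seen(struct fault_filter *f, uint64_t key,
                             uint64_t now, uint64_t expiry)
    {
            unsigned int hash = key % FAULT_HASH_SIZE;
            struct fault_entry *e = &f->ring[f->hash_head[hash]];
            unsigned int depth;

            /* bounded walk of the chain looking for a fresh duplicate */
            for (depth = 0; depth < FAULT_RING_SIZE; depth++) {
                    if (e->key == key && now - e->timestamp < expiry)
                            return true;
                    e = &f->ring[e->next];
            }

            e = &f->ring[f->last_fault];
            e->key = key;
            e->timestamp = now;
            e->next = f->hash_head[hash];
            f->hash_head[hash] = f->last_fault;
            f->last_fault = (f->last_fault + 1) % FAULT_RING_SIZE;
            return false;
    }
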
490 struct amdgpu_gmc *gmc = &adev->gmc; in amdgpu_gmc_filter_faults_remove() local
511 fault = &gmc->fault_ring[gmc->fault_hash[hash].idx]; in amdgpu_gmc_filter_faults_remove()
523 fault = &gmc->fault_ring[fault->next]; in amdgpu_gmc_filter_faults_remove()
670 if (adev->gmc.flush_tlb_needs_extra_type_2) in amdgpu_gmc_flush_gpu_tlb()
671 adev->gmc.gmc_funcs->flush_gpu_tlb(adev, vmid, in amdgpu_gmc_flush_gpu_tlb()
674 if (adev->gmc.flush_tlb_needs_extra_type_0 && flush_type == 2) in amdgpu_gmc_flush_gpu_tlb()
675 adev->gmc.gmc_funcs->flush_gpu_tlb(adev, vmid, in amdgpu_gmc_flush_gpu_tlb()
678 adev->gmc.gmc_funcs->flush_gpu_tlb(adev, vmid, vmhub, in amdgpu_gmc_flush_gpu_tlb()
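
The flush_tlb_needs_extra_type_* flags look like workarounds that require an additional flush before the one the caller requested. A hedged sketch of the ordering these lines suggest, with a stand-in flush callback rather than the real gmc_funcs hook:

    #include <stdint.h>
    #include <stdbool.h>

    /* Stand-in for the per-ASIC flush callback; hypothetical signature. */
    typedef void (*flush_fn)(uint32_t vmid, uint32_t vmhub, uint32_t flush_type);

    struct tlb_quirks {
            bool needs_extra_type_0;   /* extra type-0 flush before a type-2 one */
            bool needs_extra_type_2;   /* always emit a type-2 flush first */
    };

    /* The workaround flushes are issued before the flush the caller asked for. */
    void flush_gpu_tlb(const struct tlb_quirks *q, flush_fn flush,
                       uint32_t vmid, uint32_t vmhub, uint32_t flush_type)
    {
            if (q->needs_extra_type_2)
                    flush(vmid, vmhub, 2);

            if (q->needs_extra_type_0 && flush_type == 2)
                    flush(vmid, vmhub, 0);

            flush(vmid, vmhub, flush_type);
    }
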
731 if (!adev->gmc.flush_pasid_uses_kiq || !ring->sched.ready) { in amdgpu_gmc_flush_gpu_tlb_pasid()
732 if (adev->gmc.flush_tlb_needs_extra_type_2) in amdgpu_gmc_flush_gpu_tlb_pasid()
733 adev->gmc.gmc_funcs->flush_gpu_tlb_pasid(adev, pasid, in amdgpu_gmc_flush_gpu_tlb_pasid()
737 if (adev->gmc.flush_tlb_needs_extra_type_0 && flush_type == 2) in amdgpu_gmc_flush_gpu_tlb_pasid()
738 adev->gmc.gmc_funcs->flush_gpu_tlb_pasid(adev, pasid, in amdgpu_gmc_flush_gpu_tlb_pasid()
742 adev->gmc.gmc_funcs->flush_gpu_tlb_pasid(adev, pasid, in amdgpu_gmc_flush_gpu_tlb_pasid()
750 if (adev->gmc.flush_tlb_needs_extra_type_2) in amdgpu_gmc_flush_gpu_tlb_pasid()
753 if (adev->gmc.flush_tlb_needs_extra_type_0) in amdgpu_gmc_flush_gpu_tlb_pasid()
762 if (adev->gmc.flush_tlb_needs_extra_type_2) in amdgpu_gmc_flush_gpu_tlb_pasid()
765 if (flush_type == 2 && adev->gmc.flush_tlb_needs_extra_type_0) in amdgpu_gmc_flush_gpu_tlb_pasid()
874 adev->gmc.tmz_enabled = false; in amdgpu_gmc_tmz_set()
878 adev->gmc.tmz_enabled = true; in amdgpu_gmc_tmz_set()
904 adev->gmc.tmz_enabled = false; in amdgpu_gmc_tmz_set()
908 adev->gmc.tmz_enabled = true; in amdgpu_gmc_tmz_set()
914 adev->gmc.tmz_enabled = false; in amdgpu_gmc_tmz_set()
930 struct amdgpu_gmc *gmc = &adev->gmc; in amdgpu_gmc_noretry_set() local
942 gmc->noretry = 1; in amdgpu_gmc_noretry_set()
944 gmc->noretry = (amdgpu_noretry == -1) ? noretry_default : amdgpu_noretry; in amdgpu_gmc_noretry_set()
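
The noretry lines suggest that the amdgpu_noretry module parameter overrides a per-ASIC default unless it is -1 ("auto"), and that some ASICs force no-retry on. A minimal sketch of that selection:

    /* Sketch: -1 means "auto", i.e. take the per-ASIC default; any other
     * module parameter value wins, except on ASICs that force no-retry. */
    int pick_noretry(int module_param, int asic_default, int force_noretry)
    {
            if (force_noretry)
                    return 1;

            return (module_param == -1) ? asic_default : module_param;
    }
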
1024 if ((adev->gmc.real_vram_size - size) < (8 * 1024 * 1024)) in amdgpu_gmc_get_vbios_allocations()
1058 u64 vram_size = adev->gmc.xgmi.node_segment_size * adev->gmc.xgmi.num_physical_nodes; in amdgpu_gmc_init_pdb0()
1059 u64 pde0_page_size = (1ULL<<adev->gmc.vmid0_page_table_block_size)<<21; in amdgpu_gmc_init_pdb0()
1070 flags |= AMDGPU_PTE_FRAG((adev->gmc.vmid0_page_table_block_size + 9*1)); in amdgpu_gmc_init_pdb0()
1075 vram_addr -= adev->gmc.xgmi.physical_node_id * adev->gmc.xgmi.node_segment_size; in amdgpu_gmc_init_pdb0()
1082 amdgpu_gmc_set_pte_pde(adev, adev->gmc.ptr_pdb0, i, vram_addr, flags); in amdgpu_gmc_init_pdb0()
1091 amdgpu_gmc_set_pte_pde(adev, adev->gmc.ptr_pdb0, i, gart_ptb_gpu_pa, flags); in amdgpu_gmc_init_pdb0()
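
init_pdb0 appears to fill PDB0 with one PDE0 per pde0_page_size chunk of hive VRAM (offset so this node's segment lands at its physical address), plus a final entry pointing at the GART page table. A hedged sketch of that fill, with a hypothetical set_pte_pde() writer instead of amdgpu_gmc_set_pte_pde():

    #include <stdint.h>

    /* Hypothetical PDE writer: stores "addr | flags" at entry i. */
    void set_pte_pde(uint64_t *pdb0, unsigned int i, uint64_t addr,
                     uint64_t flags)
    {
            pdb0[i] = addr | flags;
    }

    /* One PDE0 per pde0_page_size chunk of hive VRAM, then a trailing
     * entry pointing at the GART page table block. */
    void fill_pdb0(uint64_t *pdb0, uint64_t vram_size, uint64_t pde0_page_size,
                   uint64_t vram_addr, uint64_t gart_ptb_gpu_pa,
                   uint64_t vram_flags, uint64_t gart_flags)
    {
            unsigned int i = 0;
            uint64_t offset;

            for (offset = 0; offset < vram_size; offset += pde0_page_size, i++)
                    set_pte_pde(pdb0, i, vram_addr + offset, vram_flags);

            set_pte_pde(pdb0, i, gart_ptb_gpu_pa, gart_flags);
    }
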
1104 return mc_addr - adev->gmc.vram_start + adev->vm_manager.vram_base_offset; in amdgpu_gmc_vram_mc2pa()
1190 for_each_inst(mode, adev->gmc.supported_nps_modes) { in available_memory_partition_show()
1210 for_each_inst(i, adev->gmc.supported_nps_modes) { in current_memory_partition_store()
1220 if (mode == adev->gmc.gmc_funcs->query_mem_partition_mode(adev)) { in current_memory_partition_store()
1235 adev->gmc.requested_nps_mode = mode; in current_memory_partition_store()
1256 mode = adev->gmc.gmc_funcs->query_mem_partition_mode(adev); in current_memory_partition_show()
1272 if (!adev->gmc.gmc_funcs->query_mem_partition_mode) in amdgpu_gmc_sysfs_init()
1275 nps_switch_support = (hweight32(adev->gmc.supported_nps_modes & in amdgpu_gmc_sysfs_init()
1293 if (!adev->gmc.gmc_funcs->query_mem_partition_mode) in amdgpu_gmc_sysfs_fini()
1313 (adev->gmc.reset_flags & AMDGPU_GMC_INIT_RESET_NPS); in amdgpu_gmc_get_nps_memranges()
1401 return (((BIT(req_nps_mode) & adev->gmc.supported_nps_modes) == in amdgpu_gmc_need_nps_switch_req()
1411 if (amdgpu_sriov_vf(adev) || !adev->gmc.supported_nps_modes || in amdgpu_gmc_prepare_nps_mode_change()
1412 !adev->gmc.gmc_funcs->request_mem_partition_mode) in amdgpu_gmc_prepare_nps_mode_change()
1415 cur_nps_mode = adev->gmc.gmc_funcs->query_mem_partition_mode(adev); in amdgpu_gmc_prepare_nps_mode_change()
1429 req_nps_mode = adev->gmc.requested_nps_mode; in amdgpu_gmc_prepare_nps_mode_change()
1434 r = adev->gmc.gmc_funcs->request_mem_partition_mode(adev, req_nps_mode); in amdgpu_gmc_prepare_nps_mode_change()
1446 if (adev->gmc.gmc_funcs->need_reset_on_init) in amdgpu_gmc_need_reset_on_init()
1447 return adev->gmc.gmc_funcs->need_reset_on_init(adev); in amdgpu_gmc_need_reset_on_init()
1455 switch (adev->gmc.num_mem_partitions) { in amdgpu_gmc_get_vf_memory_partition()
1511 valid = (adev->gmc.num_mem_partitions == 1); in amdgpu_gmc_validate_partition_info()
1514 valid = (adev->gmc.num_mem_partitions == 2); in amdgpu_gmc_validate_partition_info()
1517 valid = (adev->gmc.num_mem_partitions == 3 || in amdgpu_gmc_validate_partition_info()
1518 adev->gmc.num_mem_partitions == 4); in amdgpu_gmc_validate_partition_info()
1521 valid = (adev->gmc.num_mem_partitions == 8); in amdgpu_gmc_validate_partition_info()
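
The validation above ties each NPS mode to an allowed number of memory partitions (with one mode accepting either 3 or 4). A small sketch of that check, with assumed mode identifiers rather than the driver's enum:

    #include <stdbool.h>

    /* Assumed NPS mode identifiers, for illustration only. */
    enum nps_mode { NPS1, NPS2, NPS4, NPS8 };

    /* Each NPS mode admits a fixed set of memory partition counts. */
    bool partition_count_valid(enum nps_mode mode, unsigned int parts)
    {
            switch (mode) {
            case NPS1: return parts == 1;
            case NPS2: return parts == 2;
            case NPS4: return parts == 3 || parts == 4;
            case NPS8: return parts == 8;
            default:   return false;
            }
    }
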
1577 adev->gmc.num_mem_partitions = num_ranges; in amdgpu_gmc_init_acpi_mem_ranges()
1591 adev->gmc.num_mem_partitions = 0; in amdgpu_gmc_init_sw_mem_ranges()
1594 adev->gmc.num_mem_partitions = 1; in amdgpu_gmc_init_sw_mem_ranges()
1597 adev->gmc.num_mem_partitions = 2; in amdgpu_gmc_init_sw_mem_ranges()
1601 adev->gmc.num_mem_partitions = 3; in amdgpu_gmc_init_sw_mem_ranges()
1603 adev->gmc.num_mem_partitions = 4; in amdgpu_gmc_init_sw_mem_ranges()
1606 adev->gmc.num_mem_partitions = 8; in amdgpu_gmc_init_sw_mem_ranges()
1609 adev->gmc.num_mem_partitions = 1; in amdgpu_gmc_init_sw_mem_ranges()
1615 &adev->gmc.num_mem_partitions); in amdgpu_gmc_init_sw_mem_ranges()
1618 for (i = 1; i < adev->gmc.num_mem_partitions; ++i) { in amdgpu_gmc_init_sw_mem_ranges()
1625 if (!adev->gmc.num_mem_partitions) { in amdgpu_gmc_init_sw_mem_ranges()
1628 adev->gmc.num_mem_partitions = 1; in amdgpu_gmc_init_sw_mem_ranges()
1631 size = (adev->gmc.real_vram_size + SZ_16M) >> AMDGPU_GPU_PAGE_SHIFT; in amdgpu_gmc_init_sw_mem_ranges()
1632 size /= adev->gmc.num_mem_partitions; in amdgpu_gmc_init_sw_mem_ranges()
1634 for (i = 0; i < adev->gmc.num_mem_partitions; ++i) { in amdgpu_gmc_init_sw_mem_ranges()
1642 l = adev->gmc.num_mem_partitions - 1; in amdgpu_gmc_init_sw_mem_ranges()
1647 (adev->gmc.real_vram_size >> AMDGPU_GPU_PAGE_SHIFT) - 1; in amdgpu_gmc_init_sw_mem_ranges()
1649 adev->gmc.real_vram_size - in amdgpu_gmc_init_sw_mem_ranges()
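
init_sw_mem_ranges splits VRAM into num_mem_partitions roughly equal page ranges, letting the last range run to the true end of real_vram_size so rounding never drops pages. A stand-alone sketch of that split, with an assumed 4 KiB GPU page size:

    #include <stdint.h>
    #include <stddef.h>

    #define GPU_PAGE_SHIFT 12              /* assumed 4 KiB GPU pages */

    struct mem_range {
            uint64_t fpfn;                 /* first page frame number */
            uint64_t lpfn;                 /* last page frame number  */
            uint64_t size;                 /* bytes                   */
    };

    /* Split vram_size into n roughly equal page ranges; the last range
     * absorbs the rounding remainder up to the end of VRAM. */
    void split_vram(struct mem_range *ranges, size_t n, uint64_t vram_size)
    {
            uint64_t pages_per_range = (vram_size >> GPU_PAGE_SHIFT) / n;
            size_t i;

            for (i = 0; i < n; i++) {
                    ranges[i].fpfn = i * pages_per_range;
                    ranges[i].lpfn = (i + 1) * pages_per_range - 1;
                    ranges[i].size = pages_per_range << GPU_PAGE_SHIFT;
            }

            ranges[n - 1].lpfn = (vram_size >> GPU_PAGE_SHIFT) - 1;
            ranges[n - 1].size = vram_size - (ranges[n - 1].fpfn << GPU_PAGE_SHIFT);
    }
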
1657 adev->gmc.mem_partitions = kcalloc(AMDGPU_MAX_MEM_RANGES, in amdgpu_gmc_init_mem_ranges()
1660 if (!adev->gmc.mem_partitions) in amdgpu_gmc_init_mem_ranges()
1663 if (adev->gmc.is_app_apu) in amdgpu_gmc_init_mem_ranges()
1664 amdgpu_gmc_init_acpi_mem_ranges(adev, adev->gmc.mem_partitions); in amdgpu_gmc_init_mem_ranges()
1666 amdgpu_gmc_init_sw_mem_ranges(adev, adev->gmc.mem_partitions); in amdgpu_gmc_init_mem_ranges()