Lines Matching refs:gmc

134 		} else if (!amdgpu_gmc_vram_full_visible(&adev->gmc) &&  in amdgpu_evict_flags()
146 abo->placements[0].fpfn = adev->gmc.visible_vram_size >> PAGE_SHIFT; in amdgpu_evict_flags()
218 *addr = adev->gmc.gart_start; in amdgpu_ttm_map_buffer()
458 if ((cursor.start + cursor.size) > adev->gmc.visible_vram_size) in amdgpu_res_cpu_visible()
623 mem->bus.offset += adev->gmc.aper_base; in amdgpu_ttm_io_mem_reserve()
650 return (adev->gmc.aper_base + cursor.start) >> PAGE_SHIFT; in amdgpu_ttm_io_mem_pfn()
666 return adev->gmc.gart_start; in amdgpu_ttm_domain_start()
668 return adev->gmc.vram_start; in amdgpu_ttm_domain_start()
1010 placements.lpfn = adev->gmc.gart_size >> PAGE_SHIFT; in amdgpu_ttm_alloc_gart()
1116 if (adev->gmc.mem_partitions && abo->xcp_id >= 0) in amdgpu_ttm_tt_create()
1656 uint64_t vram_size = adev->gmc.visible_vram_size; in amdgpu_ttm_fw_reserve_vram_init()
1681 u64 vram_size = adev->gmc.visible_vram_size; in amdgpu_ttm_drv_reserve_vram_init()
1727 ALIGN((adev->gmc.mc_vram_size - reserve_size - SZ_1M), SZ_1M); in amdgpu_ttm_training_data_block_init()
1729 (adev->gmc.mc_vram_size - GDDR6_MEM_TRAINING_OFFSET); in amdgpu_ttm_training_data_block_init()
1792 if (!adev->gmc.is_app_apu) { in amdgpu_ttm_reserve_tmr()
1794 adev, adev->gmc.real_vram_size - reserve_size, in amdgpu_ttm_reserve_tmr()
1813 if (!adev->gmc.is_app_apu || !adev->gmc.num_mem_partitions) in amdgpu_ttm_pools_init()
1816 adev->mman.ttm_pools = kcalloc(adev->gmc.num_mem_partitions, in amdgpu_ttm_pools_init()
1822 for (i = 0; i < adev->gmc.num_mem_partitions; i++) { in amdgpu_ttm_pools_init()
1824 adev->gmc.mem_partitions[i].numa.node, in amdgpu_ttm_pools_init()
1834 if (!adev->gmc.is_app_apu || !adev->mman.ttm_pools) in amdgpu_ttm_pools_fini()
1837 for (i = 0; i < adev->gmc.num_mem_partitions; i++) in amdgpu_ttm_pools_fini()
1891 if (adev->gmc.xgmi.connected_to_cpu) in amdgpu_ttm_init()
1892 adev->mman.aper_base_kaddr = ioremap_cache(adev->gmc.aper_base, in amdgpu_ttm_init()
1893 adev->gmc.visible_vram_size); in amdgpu_ttm_init()
1895 else if (adev->gmc.is_app_apu) in amdgpu_ttm_init()
1900 adev->mman.aper_base_kaddr = ioremap_wc(adev->gmc.aper_base, in amdgpu_ttm_init()
1901 adev->gmc.visible_vram_size); in amdgpu_ttm_init()
1936 if (!adev->gmc.is_app_apu) { in amdgpu_ttm_init()
1964 (unsigned int)(adev->gmc.real_vram_size / (1024 * 1024))); in amdgpu_ttm_init()
1993 if (adev->gmc.real_vram_size < gtt_size) in amdgpu_ttm_init()
2059 if (!adev->gmc.is_app_apu) { in amdgpu_ttm_fini()
2115 adev->mman.buffer_funcs_enabled == enable || adev->gmc.is_app_apu) in amdgpu_ttm_set_buffer_funcs_status()
2152 size = adev->gmc.real_vram_size; in amdgpu_ttm_set_buffer_funcs_status()
2154 size = adev->gmc.visible_vram_size; in amdgpu_ttm_set_buffer_funcs_status()
2185 (*job)->vm_pd_addr = amdgpu_gmc_pd_addr(adev->gmc.pdb0_bo ? in amdgpu_ttm_prepare_job()
2186 adev->gmc.pdb0_bo : in amdgpu_ttm_prepare_job()
2459 if (*pos >= adev->gmc.mc_vram_size) in amdgpu_ttm_vram_read()
2462 size = min(size, (size_t)(adev->gmc.mc_vram_size - *pos)); in amdgpu_ttm_vram_read()
2495 if (*pos >= adev->gmc.mc_vram_size) in amdgpu_ttm_vram_write()
2501 if (*pos >= adev->gmc.mc_vram_size) in amdgpu_ttm_vram_write()
2649 &amdgpu_ttm_vram_fops, adev->gmc.mc_vram_size); in amdgpu_ttm_debugfs_init()
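
The matches in amdgpu_res_cpu_visible(), amdgpu_ttm_io_mem_reserve() and amdgpu_ttm_io_mem_pfn() all revolve around the same two gmc fields: visible_vram_size bounds the CPU-visible VRAM window, and aper_base turns a VRAM offset into a bus address. The fragment below is a minimal stand-alone sketch of that pattern, not the amdgpu code itself; the sketch_* names, the example constants and the fixed 4 KiB page shift are assumptions made purely for illustration.

/*
 * Hedged sketch of the visible-VRAM pattern suggested by the matches
 * above: a span is CPU-accessible only while it lies entirely below
 * visible_vram_size, and its bus address is aper_base + offset.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define SKETCH_PAGE_SHIFT 12	/* assume 4 KiB pages for the sketch */

struct sketch_gmc {
	uint64_t aper_base;		/* bus address of the CPU-visible BAR  */
	uint64_t visible_vram_size;	/* size of the CPU-visible VRAM window */
};

/* Whole span [start, start + size) must fit inside the visible window. */
static bool sketch_cpu_visible(const struct sketch_gmc *gmc,
			       uint64_t start, uint64_t size)
{
	return (start + size) <= gmc->visible_vram_size;
}

/* Page frame number the CPU would use for a visible VRAM offset. */
static uint64_t sketch_io_mem_pfn(const struct sketch_gmc *gmc, uint64_t start)
{
	return (gmc->aper_base + start) >> SKETCH_PAGE_SHIFT;
}

int main(void)
{
	struct sketch_gmc gmc = {
		.aper_base = 0xe0000000ULL,		/* example BAR base */
		.visible_vram_size = 256ULL << 20,	/* 256 MiB window   */
	};

	printf("visible: %d\n", sketch_cpu_visible(&gmc, 16 << 20, 4096));
	printf("pfn:     0x%llx\n",
	       (unsigned long long)sketch_io_mem_pfn(&gmc, 16 << 20));
	return 0;
}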
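
In the same spirit, the amdgpu_ttm_domain_start() and amdgpu_ttm_alloc_gart() matches suggest a second recurring pattern: the GPU-visible start of a buffer is gart_start for GTT and vram_start for VRAM, and a GART-backed placement is capped at gart_size >> PAGE_SHIFT pages. The sketch below reuses the illustrative sketch_* naming and invented constants; only the roles of the three fields come from the listing.

/*
 * Hedged sketch of the domain-start / GART-limit pattern; not the
 * amdgpu implementation, just the field roles visible in the matches.
 */
#include <stdint.h>
#include <stdio.h>

#define SKETCH_PAGE_SHIFT 12	/* assume 4 KiB pages for the sketch */

enum sketch_domain { SKETCH_DOMAIN_GTT, SKETCH_DOMAIN_VRAM };

struct sketch_gmc {
	uint64_t gart_start;	/* base of the GART range in the GPU VA space */
	uint64_t vram_start;	/* base of VRAM in the GPU address space      */
	uint64_t gart_size;	/* bytes addressable through the GART         */
};

/* GPU address where the requested memory domain begins. */
static uint64_t sketch_domain_start(const struct sketch_gmc *gmc,
				    enum sketch_domain domain)
{
	return domain == SKETCH_DOMAIN_GTT ? gmc->gart_start : gmc->vram_start;
}

/* Upper page-frame limit for a placement that must stay inside the GART. */
static uint64_t sketch_gart_lpfn(const struct sketch_gmc *gmc)
{
	return gmc->gart_size >> SKETCH_PAGE_SHIFT;
}

int main(void)
{
	struct sketch_gmc gmc = {
		.gart_start = 0x0000800000000000ULL,	/* example values only */
		.vram_start = 0,
		.gart_size  = 512ULL << 20,
	};

	printf("GTT starts at 0x%llx, GART lpfn %llu\n",
	       (unsigned long long)sketch_domain_start(&gmc, SKETCH_DOMAIN_GTT),
	       (unsigned long long)sketch_gart_lpfn(&gmc));
	return 0;
}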