/linux/drivers/gpu/drm/radeon/

radeon_gart.c
     87  rdev->gart.ptr = ptr;  in radeon_gart_table_ram_alloc()
    102  if (!rdev->gart.ptr)  in radeon_gart_table_ram_free()
    113  (void *)rdev->gart.ptr, rdev->gart.table_addr);  in radeon_gart_table_ram_free()
    114  rdev->gart.ptr = NULL;  in radeon_gart_table_ram_free()
    166  r = radeon_bo_kmap(rdev->gart.robj, &rdev->gart.ptr);  in radeon_gart_table_vram_pin()
    199  if (!rdev->gart.robj)  in radeon_gart_table_vram_unpin()
    222  if (!rdev->gart.robj)  in radeon_gart_table_vram_free()
    265  if (rdev->gart.ptr) {  in radeon_gart_unbind()
    307  if (rdev->gart.ptr)  in radeon_gart_bind()
    313  if (rdev->gart.ptr) {  in radeon_gart_bind()
    [all …]
|
rs400.c
     85  if (rdev->gart.ptr) {  in rs400_gart_init()
    107  rdev->gart.table_size = rdev->gart.num_gpu_pages * 4;  in rs400_gart_init()
    165  tmp = (u32)rdev->gart.table_addr & 0xfffff000;  in rs400_gart_enable()
    166  tmp |= (upper_32_bits(rdev->gart.table_addr) & 0xff) << 4;  in rs400_gart_enable()
    193  (unsigned long long)rdev->gart.table_addr);  in rs400_gart_enable()
    194  rdev->gart.ready = true;  in rs400_gart_enable()
    237  u32 *gtt = rdev->gart.ptr;  in rs400_gart_set_page()
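The rs400 hits show the two details that distinguish this IGP GART: entries are 32 bits wide (`num_gpu_pages * 4`), and the table base register packs a 40-bit address by masking the low word to a 4 KiB boundary and folding bits 32..39 into bits 4..11. Below is a minimal user-space sketch of that register encoding, mirroring lines 107 and 165-166 above; the example values are invented and the `upper_32_bits()` helper is re-declared locally rather than taken from the kernel headers.

```c
#include <stdint.h>
#include <stdio.h>

/* Local stand-in for the kernel's upper_32_bits() helper. */
static inline uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }

int main(void)
{
	uint64_t num_gpu_pages = 1 << 18;             /* example: 1 GiB of GART at 4 KiB pages */
	uint64_t table_size = num_gpu_pages * 4;      /* rs400: one 32-bit entry per GPU page */

	uint64_t table_addr = 0x00000002fedcb000ULL;  /* example 40-bit bus address of the table */
	uint32_t tmp;

	tmp  = (uint32_t)table_addr & 0xfffff000;        /* low 32 bits, 4 KiB aligned */
	tmp |= (upper_32_bits(table_addr) & 0xff) << 4;  /* bits 32..39 land in bits 4..11 */

	printf("table_size = %llu bytes, base reg = 0x%08x\n",
	       (unsigned long long)table_size, tmp);
	return 0;
}
```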
|
radeon_asic.c
    208  .gart = {
    276  .gart = {
    372  .gart = {
    440  .gart = {
    508  .gart = {
    576  .gart = {
    644  .gart = {
    712  .gart = {
    780  .gart = {
    848  .gart = {
    [all …]
|
r300.c
    122  void __iomem *ptr = rdev->gart.ptr;  in rv370_pcie_gart_set_page()
    134  if (rdev->gart.robj) {  in rv370_pcie_gart_init()
    144  rdev->gart.table_size = rdev->gart.num_gpu_pages * 4;  in rv370_pcie_gart_init()
    145  rdev->asic->gart.tlb_flush = &rv370_pcie_gart_tlb_flush;  in rv370_pcie_gart_init()
    146  rdev->asic->gart.get_page_entry = &rv370_pcie_gart_get_page_entry;  in rv370_pcie_gart_init()
    147  rdev->asic->gart.set_page = &rv370_pcie_gart_set_page;  in rv370_pcie_gart_init()
    157  if (rdev->gart.robj == NULL) {  in rv370_pcie_gart_enable()
    172  table_addr = rdev->gart.table_addr;  in rv370_pcie_gart_enable()
    187  rdev->gart.ready = true;  in rv370_pcie_gart_enable()
|
rs600.c
    549  if (rdev->gart.robj) {  in rs600_gart_init()
    558  rdev->gart.table_size = rdev->gart.num_gpu_pages * 8;  in rs600_gart_init()
    567  if (rdev->gart.robj == NULL) {  in rs600_gart_enable()
    604  rdev->gart.table_addr);  in rs600_gart_enable()
    621  (unsigned long long)rdev->gart.table_addr);  in rs600_gart_enable()
    622  rdev->gart.ready = true;  in rs600_gart_enable()
    662  void __iomem *ptr = (void *)rdev->gart.ptr;  in rs600_gart_set_page()
|
r100.c
    656  if (rdev->gart.ptr) {  in r100_pci_gart_init()
    664  rdev->gart.table_size = rdev->gart.num_gpu_pages * 4;  in r100_pci_gart_init()
    665  rdev->asic->gart.tlb_flush = &r100_pci_gart_tlb_flush;  in r100_pci_gart_init()
    666  rdev->asic->gart.get_page_entry = &r100_pci_gart_get_page_entry;  in r100_pci_gart_init()
    667  rdev->asic->gart.set_page = &r100_pci_gart_set_page;  in r100_pci_gart_init()
    682  WREG32(RADEON_AIC_PT_BASE, rdev->gart.table_addr);  in r100_pci_gart_enable()
    688  (unsigned long long)rdev->gart.table_addr);  in r100_pci_gart_enable()
    689  rdev->gart.ready = true;  in r100_pci_gart_enable()
    712  u32 *gtt = rdev->gart.ptr;  in r100_pci_gart_set_page()
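Both r100_pci_gart_init() and rv370_pcie_gart_init() above overwrite the gart callbacks in the per-ASIC function table at init time, so the common radeon_gart code can stay backend-agnostic. The following is a toy sketch of that function-pointer dispatch pattern; the struct, field, and function names are invented for illustration, and only the shape of the wiring matches what the listing shows.

```c
#include <stdint.h>
#include <stdio.h>

/* Invented stand-in for the per-ASIC callback table the listing shows being filled in. */
struct gart_ops {
	void (*tlb_flush)(void);
	uint32_t (*get_page_entry)(uint64_t addr, uint32_t flags);
	void (*set_page)(uint32_t *table, unsigned int idx, uint32_t entry);
};

static void pci_gart_tlb_flush(void) { puts("flush"); }

static uint32_t pci_gart_get_page_entry(uint64_t addr, uint32_t flags)
{
	return (uint32_t)(addr & 0xfffff000) | flags;   /* 32-bit entry, like the r100 path */
}

static void pci_gart_set_page(uint32_t *table, unsigned int idx, uint32_t entry)
{
	table[idx] = entry;
}

int main(void)
{
	/* Init-time wiring, analogous to lines 665-667 above. */
	struct gart_ops ops = {
		.tlb_flush      = pci_gart_tlb_flush,
		.get_page_entry = pci_gart_get_page_entry,
		.set_page       = pci_gart_set_page,
	};

	uint32_t table[8] = { 0 };

	ops.set_page(table, 3, ops.get_page_entry(0x12345000ULL, 0x1));
	ops.tlb_flush();
	printf("entry[3] = 0x%08x\n", table[3]);
	return 0;
}
```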
|
rv770.c
    899  if (rdev->gart.robj == NULL) {  in rv770_pcie_gart_enable()
    928  WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);  in rv770_pcie_gart_enable()
    939  (unsigned long long)rdev->gart.table_addr);  in rv770_pcie_gart_enable()
    940  rdev->gart.ready = true;  in rv770_pcie_gart_enable()
|
radeon_ttm.c
    856  if (p >= rdev->gart.num_cpu_pages)  in radeon_ttm_gtt_read()
    859  page = rdev->gart.pages[p];  in radeon_ttm_gtt_read()
|
ni.c
   1252  if (rdev->gart.robj == NULL) {  in cayman_pcie_gart_enable()
   1281  WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);  in cayman_pcie_gart_enable()
   1327  (unsigned long long)rdev->gart.table_addr);  in cayman_pcie_gart_enable()
   1328  rdev->gart.ready = true;  in cayman_pcie_gart_enable()
|
r600.c
   1082  void __iomem *ptr = (void *)rdev->gart.ptr;  in r600_pcie_gart_tlb_flush()
   1116  if (rdev->gart.robj) {  in r600_pcie_gart_init()
   1124  rdev->gart.table_size = rdev->gart.num_gpu_pages * 8;  in r600_pcie_gart_init()
   1133  if (rdev->gart.robj == NULL) {  in r600_pcie_gart_enable()
   1170  WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);  in r600_pcie_gart_enable()
   1181  (unsigned long long)rdev->gart.table_addr);  in r600_pcie_gart_enable()
   1182  rdev->gart.ready = true;  in r600_pcie_gart_enable()
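The r600, rv770 and cayman enable paths above all write rdev->gart.table_addr >> 12 into VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, so the register takes a 4 KiB frame number rather than a byte address, which in turn requires the table itself to be page-aligned. A small stand-alone illustration of that conversion follows; the address is an arbitrary example, not a real register value.

```c
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t table_addr = 0x00000000f4000000ULL;  /* example byte address of the GART table */

	/* Must be 4 KiB aligned, otherwise low bits would be silently dropped by the shift. */
	assert((table_addr & 0xfff) == 0);

	uint32_t frame = (uint32_t)(table_addr >> 12); /* what the enable path programs */

	printf("0x%llx -> frame 0x%x\n", (unsigned long long)table_addr, frame);
	return 0;
}
```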
|
radeon_vm.c
    369  uint64_t src = rdev->gart.table_addr + (addr >> 12) * 8;  in radeon_vm_set_pages()
    601  result = rdev->gart.pages_entry[addr >> RADEON_GPU_PAGE_SHIFT];  in radeon_vm_map_gart()
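The radeon_vm.c hits show how a GPU address is turned into the location of its GART entry: each 4 KiB GPU page (addr >> 12) corresponds to one 8-byte entry past table_addr. A self-contained sketch of that arithmetic is below; the addresses are example values, and the local RADEON_GPU_PAGE_SHIFT of 12 is an assumption consistent with the literal >> 12 on line 369.

```c
#include <stdint.h>
#include <stdio.h>

#define RADEON_GPU_PAGE_SHIFT 12   /* 4 KiB GPU pages; value assumed for this sketch */

int main(void)
{
	uint64_t table_addr = 0x800000000ULL;         /* example bus address of the GART table */
	uint64_t addr       = 0x0000000000a3c000ULL;  /* example GPU address to look up */

	/* One 8-byte entry per 4 KiB page, as in radeon_vm_set_pages() line 369. */
	uint64_t src = table_addr + (addr >> RADEON_GPU_PAGE_SHIFT) * 8;

	printf("entry for 0x%llx lives at 0x%llx (index %llu)\n",
	       (unsigned long long)addr, (unsigned long long)src,
	       (unsigned long long)(addr >> RADEON_GPU_PAGE_SHIFT));
	return 0;
}
```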
|
/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_gart.c
    128  if (adev->gart.bo != NULL)  in amdgpu_gart_table_ram_alloc()
    170  bp.size = adev->gart.table_size;  in amdgpu_gart_table_ram_alloc()
    197  adev->gart.bo = bo;  in amdgpu_gart_table_ram_alloc()
    248  adev->gart.ptr = NULL;  in amdgpu_gart_table_ram_free()
    263  if (adev->gart.bo != NULL)  in amdgpu_gart_table_vram_alloc()
    282  amdgpu_bo_free_kernel(&adev->gart.bo, NULL, (void *)&adev->gart.ptr);  in amdgpu_gart_table_vram_free()
    310  if (!adev->gart.ptr)  in amdgpu_gart_unbind()
    320  if (!adev->gart.ptr)  in amdgpu_gart_unbind()
    387  if (!adev->gart.ptr)  in amdgpu_gart_bind()
    405  if (!adev->gart.ptr)  in amdgpu_gart_invalidate_tlb()
    [all …]
|
gmc_v11_0.c
    715  if (adev->gart.bo) {  in gmc_v11_0_gart_init()
    725  adev->gart.table_size = adev->gart.num_gpu_pages * 8;  in gmc_v11_0_gart_init()
    726  adev->gart.gart_pte_flags = AMDGPU_PTE_MTYPE_NV10(0ULL, MTYPE_UC) |  in gmc_v11_0_gart_init()
    884  if (adev->gart.bo == NULL) {  in gmc_v11_0_gart_enable()
    906  (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo));  in gmc_v11_0_gart_enable()
|
gmc_v12_0.c
    716  if (adev->gart.bo) {  in gmc_v12_0_gart_init()
    726  adev->gart.table_size = adev->gart.num_gpu_pages * 8;  in gmc_v12_0_gart_init()
    727  adev->gart.gart_pte_flags = AMDGPU_PTE_MTYPE_GFX12(0ULL, MTYPE_UC) |  in gmc_v12_0_gart_init()
    870  if (adev->gart.bo == NULL) {  in gmc_v12_0_gart_enable()
    892  (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo));  in gmc_v12_0_gart_enable()
|
gmc_v6_0.c
    463  if (adev->gart.bo == NULL) {  in gmc_v6_0_gart_enable()
    469  table_addr = amdgpu_bo_gpu_offset(adev->gart.bo);  in gmc_v6_0_gart_enable()
    554  if (adev->gart.bo) {  in gmc_v6_0_gart_init()
    561  adev->gart.table_size = adev->gart.num_gpu_pages * 8;  in gmc_v6_0_gart_init()
    562  adev->gart.gart_pte_flags = 0;  in gmc_v6_0_gart_init()
|
gmc_v10_0.c
    755  if (adev->gart.bo) {  in gmc_v10_0_gart_init()
    765  adev->gart.table_size = adev->gart.num_gpu_pages * 8;  in gmc_v10_0_gart_init()
    766  adev->gart.gart_pte_flags = AMDGPU_PTE_MTYPE_NV10(0ULL, MTYPE_UC) |  in gmc_v10_0_gart_init()
    949  if (adev->gart.bo == NULL) {  in gmc_v10_0_gart_enable()
    983  (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo));  in gmc_v10_0_gart_enable()
|
gmc_v7_0.c
    605  if (adev->gart.bo == NULL) {  in gmc_v7_0_gart_enable()
    610  table_addr = amdgpu_bo_gpu_offset(adev->gart.bo);  in gmc_v7_0_gart_enable()
    705  if (adev->gart.bo) {  in gmc_v7_0_gart_init()
    713  adev->gart.table_size = adev->gart.num_gpu_pages * 8;  in gmc_v7_0_gart_init()
    714  adev->gart.gart_pte_flags = 0;  in gmc_v7_0_gart_init()
|
gmc_v8_0.c
    820  if (adev->gart.bo == NULL) {  in gmc_v8_0_gart_enable()
    825  table_addr = amdgpu_bo_gpu_offset(adev->gart.bo);  in gmc_v8_0_gart_enable()
    937  if (adev->gart.bo) {  in gmc_v8_0_gart_init()
    945  adev->gart.table_size = adev->gart.num_gpu_pages * 8;  in gmc_v8_0_gart_init()
    946  adev->gart.gart_pte_flags = AMDGPU_PTE_EXECUTABLE;  in gmc_v8_0_gart_init()
|
gmc_v9_0.c
   1759  if (adev->gart.bo) {  in gmc_v9_0_gart_init()
   1776  adev->gart.table_size = adev->gart.num_gpu_pages * 8;  in gmc_v9_0_gart_init()
   1777  adev->gart.gart_pte_flags = AMDGPU_PTE_MTYPE_VG10(0ULL, MTYPE_UC) |  in gmc_v9_0_gart_init()
   2286  if (adev->gart.bo == NULL) {  in gmc_v9_0_gart_enable()
   2309  (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo));  in gmc_v9_0_gart_enable()
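Every gmc_v*_gart_init() listed above sizes the table the same way, one 8-byte PTE per GPU page, and then seeds gart_pte_flags with ASIC-specific memory-type bits. A back-of-the-envelope sketch of the sizing is below; the aperture and page size are assumed example values, not numbers taken from any of these files.

```c
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* Assumed example values: a 512 MiB GART aperture with 4 KiB GPU pages. */
	uint64_t gart_size     = 512ULL << 20;
	uint64_t gpu_page_size = 4096;

	uint64_t num_gpu_pages = gart_size / gpu_page_size;
	uint64_t table_size    = num_gpu_pages * 8;   /* 8-byte PTEs, as in each gmc_v*_gart_init() */

	printf("%llu pages -> %llu KiB of page table\n",
	       (unsigned long long)num_gpu_pages,
	       (unsigned long long)(table_size >> 10));
	return 0;
}
```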
|
mmhub_v3_3.c
    150  uint64_t pt_base = amdgpu_gmc_pd_addr(adev->gart.bo);  in mmhub_v3_3_init_gart_aperture_regs()
    368  uint64_t pt_base = amdgpu_gmc_pd_addr(adev->gart.bo);  in mmhub_v3_3_init_saw_regs()
|
amdgpu_gmc.c
    660  job->vm_pd_addr = amdgpu_gmc_pd_addr(adev->gart.bo);  in amdgpu_gmc_flush_gpu_tlb()
   1007  uint64_t flags = adev->gart.gart_pte_flags; //TODO it is UC. explore NC/RW?  in amdgpu_gmc_init_pdb0()
   1015  u64 gart_ptb_gpu_pa = amdgpu_gmc_vram_pa(adev, adev->gart.bo);  in amdgpu_gmc_init_pdb0()
|
/linux/drivers/gpu/drm/nouveau/

nouveau_chan.h
     26  struct nvif_object gart;  member
     64  u32 vram, u32 gart, struct nouveau_channel **);
|
nouveau_chan.c
    101  nvif_object_dtor(&chan->gart);  in nouveau_channel_del()
    359  nouveau_channel_init(struct nouveau_channel *chan, u32 vram, u32 gart)  in nouveau_channel_init() argument
    430  ret = nvif_object_ctor(&chan->user, "abi16ChanGartCtxDma", gart,  in nouveau_channel_init()
    432  &chan->gart);  in nouveau_channel_init()
    492  bool priv, u64 runm, u32 vram, u32 gart, struct nouveau_channel **pchan)  in nouveau_channel_new() argument
    502  ret = nouveau_channel_init(*pchan, vram, gart);  in nouveau_channel_new()
|
/linux/Documentation/devicetree/bindings/memory-controllers/

nvidia,tegra20-mc.yaml
     27  const: nvidia,tegra20-mc-gart
     68  compatible = "nvidia,tegra20-mc-gart";
|
/linux/drivers/gpu/drm/amd/amdkfd/

kfd_migrate.c
     78  dst_addr = amdgpu_bo_gpu_offset(adev->gart.bo);  in svm_migrate_gart_map()
     89  pte_flags |= adev->gart.gart_pte_flags;  in svm_migrate_gart_map()
|