| /linux/drivers/gpu/drm/amd/amdgpu/ |
| A D | amdgpu_gmc.c |
      73  r = amdgpu_bo_kmap(adev->gmc.pdb0_bo, &adev->gmc.ptr_pdb0);  in amdgpu_gmc_pdb0_alloc()
      364  struct amdgpu_gmc *gmc = &adev->gmc;  in amdgpu_gmc_filter_faults()  local
      372  if (gmc->fault_ring[gmc->last_fault].timestamp >= stamp)  in amdgpu_gmc_filter_faults()
      377  fault = &gmc->fault_ring[gmc->fault_hash[hash].idx];  in amdgpu_gmc_filter_faults()
      393  fault = &gmc->fault_ring[gmc->last_fault];  in amdgpu_gmc_filter_faults()
      399  gmc->fault_hash[hash].idx = gmc->last_fault++;  in amdgpu_gmc_filter_faults()
      416  struct amdgpu_gmc *gmc = &adev->gmc;  in amdgpu_gmc_filter_faults_remove()  local
      423  fault = &gmc->fault_ring[gmc->fault_hash[hash].idx];  in amdgpu_gmc_filter_faults_remove()
      610  struct amdgpu_gmc *gmc = &adev->gmc;  in amdgpu_gmc_noretry_set()  local
      622  gmc->noretry = 1;  in amdgpu_gmc_noretry_set()
      [all …]
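The amdgpu_gmc.c matches above come from the retry-fault filter, which drops duplicate GPU page faults using a small ring of recent faults reached through a hash table (fault_ring, fault_hash, last_fault). Below is a minimal user-space sketch of that pattern only; the sizes, hash function, key packing, and surrounding logic are illustrative assumptions, not the driver's actual values.

```c
/* Sketch of the fault-filter pattern suggested by the matches above.
 * Field names follow the matched lines; everything else is assumed. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define RING_SIZE 256              /* assumed power-of-two ring size */
#define HASH_SIZE 256              /* assumed hash-table size */

struct fault_entry {
	uint64_t key;              /* faulting address/pasid, packed */
	uint64_t timestamp;        /* when the fault was last recorded */
};

struct fault_filter {
	struct fault_entry ring[RING_SIZE];
	struct { uint8_t idx; } hash[HASH_SIZE];
	uint8_t last_fault;        /* next slot to overwrite in the ring */
};

/* Returns true if the fault should be dropped as a duplicate. */
static bool filter_fault(struct fault_filter *f, uint64_t key, uint64_t stamp)
{
	uint32_t h = (uint32_t)(key % HASH_SIZE);
	struct fault_entry *e;

	/* Stored entries are already at least as new as this fault. */
	if (f->ring[f->last_fault].timestamp >= stamp)
		return true;

	/* Look up the hashed slot; a matching key means a duplicate. */
	e = &f->ring[f->hash[h].idx];
	if (e->key == key)
		return true;

	/* Record the new fault and remember where it lives. */
	e = &f->ring[f->last_fault];
	e->key = key;
	e->timestamp = stamp;
	f->hash[h].idx = f->last_fault++;
	return false;
}

int main(void)
{
	struct fault_filter f = { 0 };

	printf("first:  %d\n", filter_fault(&f, 0x1000, 10)); /* 0: new fault */
	printf("repeat: %d\n", filter_fault(&f, 0x1000, 11)); /* 1: filtered */
	return 0;
}
```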
|
| A D | gmc_v9_0.c |
      1325  base += adev->gmc.xgmi.physical_node_id * adev->gmc.xgmi.node_segment_size;  in gmc_v9_0_vram_gtt_location()
      1338  adev->gmc.xgmi.physical_node_id * adev->gmc.xgmi.node_segment_size;  in gmc_v9_0_vram_gtt_location()
      1355  adev->gmc.mc_vram_size =  in gmc_v9_0_mc_init()
      1357  adev->gmc.real_vram_size = adev->gmc.mc_vram_size;  in gmc_v9_0_mc_init()
      1384  adev->gmc.aper_base =  in gmc_v9_0_mc_init()
      1388  adev->gmc.aper_size = adev->gmc.real_vram_size;  in gmc_v9_0_mc_init()
      1393  adev->gmc.visible_vram_size = adev->gmc.aper_size;  in gmc_v9_0_mc_init()
      1394  if (adev->gmc.visible_vram_size > adev->gmc.real_vram_size)  in gmc_v9_0_mc_init()
      1395  adev->gmc.visible_vram_size = adev->gmc.real_vram_size;  in gmc_v9_0_mc_init()
      1560  &adev->gmc.vm_fault);  in gmc_v9_0_sw_init()
      [all …]
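The gmc_v*_mc_init() matches here (and in the gmc_v10/v8/v7/v6 entries below) show the same sizing sequence: the CPU-visible VRAM size starts as the BAR aperture size and is then clamped so it is never reported as larger than the VRAM physically present. A standalone sketch of that clamp; the field names follow the matches, while the helper and the example numbers are illustrative.

```c
/* Visible-VRAM clamp, as seen in the gmc_v*_0_mc_init() matches. */
#include <stdint.h>
#include <stdio.h>

struct gmc_sizes {
	uint64_t real_vram_size;     /* VRAM physically present */
	uint64_t aper_size;          /* CPU-visible BAR aperture */
	uint64_t visible_vram_size;  /* min of the two, computed below */
};

static void clamp_visible_vram(struct gmc_sizes *gmc)
{
	gmc->visible_vram_size = gmc->aper_size;
	if (gmc->visible_vram_size > gmc->real_vram_size)
		gmc->visible_vram_size = gmc->real_vram_size;
}

int main(void)
{
	/* Hypothetical 8 GiB card behind a 256 MiB BAR. */
	struct gmc_sizes gmc = {
		.real_vram_size = 8ULL << 30,
		.aper_size = 256ULL << 20,
	};

	clamp_visible_vram(&gmc);
	printf("visible VRAM: %llu MiB\n",
	       (unsigned long long)(gmc.visible_vram_size >> 20));
	return 0;
}
```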
|
| A D | amdgpu_xgmi.c |
      360  if (!adev->gmc.xgmi.hive_id)  in amdgpu_get_xgmi_hive()
      495  adev->gmc.xgmi.node_id,  in amdgpu_xgmi_update_topology()
      594  adev->gmc.xgmi.hive_id = 16;  in amdgpu_xgmi_add_device()
      595  adev->gmc.xgmi.node_id = adev->gmc.xgmi.physical_node_id + 16;  in amdgpu_xgmi_add_device()
      603  adev->gmc.xgmi.node_id, adev->gmc.xgmi.hive_id);  in amdgpu_xgmi_add_device()
      625  adev->gmc.xgmi.node_id;  in amdgpu_xgmi_add_device()
      685  adev->gmc.xgmi.physical_node_id, adev->gmc.xgmi.hive_id);  in amdgpu_xgmi_add_device()
      689  adev->gmc.xgmi.physical_node_id, adev->gmc.xgmi.hive_id,  in amdgpu_xgmi_add_device()
      745  if (!adev->gmc.xgmi.ras_if) {  in amdgpu_xgmi_ras_late_init()
      747  if (!adev->gmc.xgmi.ras_if)  in amdgpu_xgmi_ras_late_init()
      [all …]
|
| A D | gmc_v10_0.c |
      750  base += adev->gmc.xgmi.physical_node_id * adev->gmc.xgmi.node_segment_size;  in gmc_v10_0_vram_gtt_location()
      761  adev->gmc.xgmi.physical_node_id * adev->gmc.xgmi.node_segment_size;  in gmc_v10_0_vram_gtt_location()
      778  adev->gmc.mc_vram_size =  in gmc_v10_0_mc_init()
      780  adev->gmc.real_vram_size = adev->gmc.mc_vram_size;  in gmc_v10_0_mc_init()
      793  adev->gmc.aper_size = adev->gmc.real_vram_size;  in gmc_v10_0_mc_init()
      798  adev->gmc.visible_vram_size = adev->gmc.aper_size;  in gmc_v10_0_mc_init()
      799  if (adev->gmc.visible_vram_size > adev->gmc.real_vram_size)  in gmc_v10_0_mc_init()
      800  adev->gmc.visible_vram_size = adev->gmc.real_vram_size;  in gmc_v10_0_mc_init()
      847  adev->gmc.vram_width = 64;  in gmc_v10_0_sw_init()
      886  &adev->gmc.vm_fault);  in gmc_v10_0_sw_init()
      [all …]
|
| A D | gmc_v7_0.c |
      167  release_firmware(adev->gmc.fw);  in gmc_v7_0_init_microcode()
      168  adev->gmc.fw = NULL;  in gmc_v7_0_init_microcode()
      189  if (!adev->gmc.fw)  in gmc_v7_0_mc_load_microcode()
      327  if (!adev->gmc.vram_width) {  in gmc_v7_0_mc_init()
      385  adev->gmc.real_vram_size > adev->gmc.aper_size) {  in gmc_v7_0_mc_init()
      387  adev->gmc.aper_size = adev->gmc.real_vram_size;  in gmc_v7_0_mc_init()
      392  adev->gmc.visible_vram_size = adev->gmc.aper_size;  in gmc_v7_0_mc_init()
      393  if (adev->gmc.visible_vram_size > adev->gmc.real_vram_size)  in gmc_v7_0_mc_init()
      394  adev->gmc.visible_vram_size = adev->gmc.real_vram_size;  in gmc_v7_0_mc_init()
      557  adev->gmc.prt_warning = true;  in gmc_v7_0_set_prt()
      [all …]
|
| A D | gmc_v8_0.c |
      276  adev->gmc.fw = NULL;  in gmc_v8_0_init_microcode()
      305  if (!adev->gmc.fw)  in gmc_v8_0_tonga_mc_load_microcode()
      374  if (!adev->gmc.fw)  in gmc_v8_0_polaris_mc_load_microcode()
      520  if (!adev->gmc.vram_width) {  in gmc_v8_0_mc_init()
      579  adev->gmc.aper_size = adev->gmc.real_vram_size;  in gmc_v8_0_mc_init()
      584  adev->gmc.visible_vram_size = adev->gmc.aper_size;  in gmc_v8_0_mc_init()
      585  if (adev->gmc.visible_vram_size > adev->gmc.real_vram_size)  in gmc_v8_0_mc_init()
      586  adev->gmc.visible_vram_size = adev->gmc.real_vram_size;  in gmc_v8_0_mc_init()
      774  adev->gmc.prt_warning = true;  in gmc_v8_0_set_prt()
      1188  if (!adev->gmc.vm_fault_info)  in gmc_v8_0_sw_init()
      [all …]
|
| A D | gmc_v6_0.c |
      145  release_firmware(adev->gmc.fw);  in gmc_v6_0_init_microcode()
      146  adev->gmc.fw = NULL;  in gmc_v6_0_init_microcode()
      159  if (!adev->gmc.fw)  in gmc_v6_0_mc_load_microcode()
      257  adev->gmc.vram_start >> 12);  in gmc_v6_0_mc_program()
      259  adev->gmc.vram_end >> 12);  in gmc_v6_0_mc_program()
      329  adev->gmc.visible_vram_size = adev->gmc.aper_size;  in gmc_v6_0_mc_init()
      336  adev->gmc.gart_size = 256ULL << 20;  in gmc_v6_0_mc_init()
      426  adev->gmc.prt_warning = true;  in gmc_v6_0_set_prt()
      901  release_firmware(adev->gmc.fw);  in gmc_v6_0_sw_fini()
      902  adev->gmc.fw = NULL;  in gmc_v6_0_sw_fini()
      [all …]
|
| A D | gfxhub_v1_0.c |
      58  if (adev->gmc.pdb0_bo)  in gfxhub_v1_0_init_gart_aperture_regs()
      68  if (adev->gmc.pdb0_bo) {  in gfxhub_v1_0_init_gart_aperture_regs()
      70  (u32)(adev->gmc.fb_start >> 12));  in gfxhub_v1_0_init_gart_aperture_regs()
      72  (u32)(adev->gmc.fb_start >> 44));  in gfxhub_v1_0_init_gart_aperture_regs()
      75  (u32)(adev->gmc.gart_end >> 12));  in gfxhub_v1_0_init_gart_aperture_regs()
      103  min(adev->gmc.fb_start, adev->gmc.agp_start) >> 18);  in gfxhub_v1_0_init_system_aperture_regs()
      116  adev->gmc.agp_end >> 18));  in gfxhub_v1_0_init_system_aperture_regs()
      120  max(adev->gmc.fb_end, adev->gmc.agp_end) >> 18);  in gfxhub_v1_0_init_system_aperture_regs()
      142  if (adev->gmc.pdb0_bo) {  in gfxhub_v1_0_init_system_aperture_regs()
      257  if (adev->gmc.translate_further)  in gfxhub_v1_0_setup_vmid_config()
      [all …]
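The gfxhub/mmhub entries in this listing program GART and aperture registers from adev->gmc addresses with fixed shifts: >> 12 and >> 44 split a 4 KiB-aligned address into the low and high 32-bit halves of a page-frame number (44 = 12 + 32), while >> 18 and >> 24 express the same addresses in 256 KiB and 16 MiB units. A small sketch of just that arithmetic, using a hypothetical address and no real register names.

```c
/* Address-to-register arithmetic implied by the shifts in the matches. */
#include <stdint.h>
#include <stdio.h>

static void split_page_address(uint64_t addr, uint32_t *lo32, uint32_t *hi32)
{
	*lo32 = (uint32_t)(addr >> 12);  /* low 32 bits of the 4 KiB PFN */
	*hi32 = (uint32_t)(addr >> 44);  /* remaining high bits */
}

int main(void)
{
	uint64_t gart_start = 0x0000008000000000ULL; /* hypothetical */
	uint32_t lo, hi;

	split_page_address(gart_start, &lo, &hi);
	printf("page-table start: lo=0x%08x hi=0x%08x\n",
	       (unsigned)lo, (unsigned)hi);
	printf("256KiB units: 0x%llx  16MiB units: 0x%llx\n",
	       (unsigned long long)(gart_start >> 18),
	       (unsigned long long)(gart_start >> 24));
	return 0;
}
```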
|
| A D | amdgpu_gmc.h |
      265  #define amdgpu_gmc_flush_gpu_tlb(adev, vmid, vmhub, type) ((adev)->gmc.gmc_funcs->flush_gpu_tlb((ad…
      267  ((adev)->gmc.gmc_funcs->flush_gpu_tlb_pasid \
      269  #define amdgpu_gmc_emit_flush_gpu_tlb(r, vmid, addr) (r)->adev->gmc.gmc_funcs->emit_flush_gpu_tlb((…
      270  #define amdgpu_gmc_emit_pasid_mapping(r, vmid, pasid) (r)->adev->gmc.gmc_funcs->emit_pasid_mapping(…
      271  #define amdgpu_gmc_map_mtype(adev, flags) (adev)->gmc.gmc_funcs->map_mtype((adev),(flags))
      272  #define amdgpu_gmc_get_vm_pde(adev, level, dst, flags) (adev)->gmc.gmc_funcs->get_vm_pde((adev), (l…
      273  #define amdgpu_gmc_get_vm_pte(adev, mapping, flags) (adev)->gmc.gmc_funcs->get_vm_pte((adev), (mapp…
      274  #define amdgpu_gmc_get_vbios_fb_size(adev) (adev)->gmc.gmc_funcs->get_vbios_fb_size((adev))
      284  static inline bool amdgpu_gmc_vram_full_visible(struct amdgpu_gmc *gmc)  in amdgpu_gmc_vram_full_visible()  argument
      286  WARN_ON(gmc->real_vram_size < gmc->visible_vram_size);  in amdgpu_gmc_vram_full_visible()
      [all …]
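The amdgpu_gmc.h matches are the dispatch macros that forward to per-ASIC callbacks in gmc_funcs, plus amdgpu_gmc_vram_full_visible() (only its WARN_ON line shows up in the match). A simplified sketch of that dispatch pattern follows; the struct layout, callback signature, and the "v9" stand-in are reduced assumptions, not the kernel's real struct amdgpu_gmc_funcs.

```c
/* Sketch of the gmc_funcs dispatch pattern behind the macros above. */
#include <stdint.h>
#include <stdio.h>

struct gmc_dev;

struct gmc_funcs {
	void (*flush_gpu_tlb)(struct gmc_dev *dev, uint32_t vmid,
			      uint32_t vmhub, uint32_t flush_type);
};

struct gmc_dev {
	const struct gmc_funcs *gmc_funcs;
	uint64_t real_vram_size;
	uint64_t visible_vram_size;
};

/* Same shape as the kernel macros: hide the indirection from callers. */
#define gmc_flush_gpu_tlb(dev, vmid, vmhub, type) \
	((dev)->gmc_funcs->flush_gpu_tlb((dev), (vmid), (vmhub), (type)))

/* Mirrors amdgpu_gmc_vram_full_visible(): after the WARN_ON, the kernel
 * helper reduces to comparing real_vram_size with visible_vram_size. */
static int gmc_vram_full_visible(const struct gmc_dev *dev)
{
	return dev->real_vram_size == dev->visible_vram_size;
}

/* Hypothetical per-ASIC backend standing in for a gmc_v9_0 callback. */
static void v9_flush_gpu_tlb(struct gmc_dev *dev, uint32_t vmid,
			     uint32_t vmhub, uint32_t flush_type)
{
	(void)dev;
	printf("flush: vmid=%u vmhub=%u type=%u\n", vmid, vmhub, flush_type);
}

static const struct gmc_funcs v9_funcs = { .flush_gpu_tlb = v9_flush_gpu_tlb };

int main(void)
{
	struct gmc_dev dev = {
		.gmc_funcs = &v9_funcs,
		.real_vram_size = 8ULL << 30,
		.visible_vram_size = 256ULL << 20,
	};

	gmc_flush_gpu_tlb(&dev, 0, 0, 0);
	printf("full visible: %d\n", gmc_vram_full_visible(&dev));
	return 0;
}
```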
|
| A D | gfxhub_v1_1.c |
      88  if (max_region || adev->gmc.xgmi.connected_to_cpu) {  in gfxhub_v1_1_get_xgmi_info()
      89  adev->gmc.xgmi.num_physical_nodes = max_region + 1;  in gfxhub_v1_1_get_xgmi_info()
      91  if (adev->gmc.xgmi.num_physical_nodes > max_num_physical_nodes)  in gfxhub_v1_1_get_xgmi_info()
      95  adev->gmc.xgmi.physical_node_id =  in gfxhub_v1_1_get_xgmi_info()
      99  adev->gmc.xgmi.physical_node_id =  in gfxhub_v1_1_get_xgmi_info()
      104  if (adev->gmc.xgmi.physical_node_id > max_physical_node_id)  in gfxhub_v1_1_get_xgmi_info()
      107  adev->gmc.xgmi.node_segment_size = seg_size;  in gfxhub_v1_1_get_xgmi_info()
|
| A D | gfxhub_v2_1.c |
      144  (u32)(adev->gmc.gart_start >> 12));  in gfxhub_v2_1_init_gart_aperture_regs()
      146  (u32)(adev->gmc.gart_start >> 44));  in gfxhub_v2_1_init_gart_aperture_regs()
      149  (u32)(adev->gmc.gart_end >> 12));  in gfxhub_v2_1_init_gart_aperture_regs()
      151  (u32)(adev->gmc.gart_end >> 44));  in gfxhub_v2_1_init_gart_aperture_regs()
      165  min(adev->gmc.fb_start, adev->gmc.agp_start) >> 18);  in gfxhub_v2_1_init_system_aperture_regs()
      167  max(adev->gmc.fb_end, adev->gmc.agp_end) >> 18);  in gfxhub_v2_1_init_system_aperture_regs()
      236  if (adev->gmc.translate_further) {  in gfxhub_v2_1_init_cache_regs()
      323  !adev->gmc.noretry);  in gfxhub_v2_1_setup_vmid_config()
      361  adev->gmc.vram_start >> 24);  in gfxhub_v2_1_gart_enable()
      363  adev->gmc.vram_end >> 24);  in gfxhub_v2_1_gart_enable()
      [all …]
|
| A D | mmhub_v1_0.c |
      48  adev->gmc.fb_start = base;  in mmhub_v1_0_get_fb_location()
      49  adev->gmc.fb_end = top;  in mmhub_v1_0_get_fb_location()
      80  (u32)(adev->gmc.gart_end >> 12));  in mmhub_v1_0_init_gart_aperture_regs()
      97  min(adev->gmc.fb_start, adev->gmc.agp_start) >> 18);  in mmhub_v1_0_init_system_aperture_regs()
      108  adev->gmc.agp_end >> 18));  in mmhub_v1_0_init_system_aperture_regs()
      111  max(adev->gmc.fb_end, adev->gmc.agp_end) >> 18);  in mmhub_v1_0_init_system_aperture_regs()
      179  if (adev->gmc.translate_further) {  in mmhub_v1_0_init_cache_regs()
      238  if (adev->gmc.translate_further)  in mmhub_v1_0_setup_vmid_config()
      269  !adev->gmc.noretry);  in mmhub_v1_0_setup_vmid_config()
      319  adev->gmc.vram_start >> 24);  in mmhub_v1_0_gart_enable()
      [all …]
|
| A D | gfxhub_v2_0.c |
      141  (u32)(adev->gmc.gart_start >> 12));  in gfxhub_v2_0_init_gart_aperture_regs()
      143  (u32)(adev->gmc.gart_start >> 44));  in gfxhub_v2_0_init_gart_aperture_regs()
      146  (u32)(adev->gmc.gart_end >> 12));  in gfxhub_v2_0_init_gart_aperture_regs()
      148  (u32)(adev->gmc.gart_end >> 44));  in gfxhub_v2_0_init_gart_aperture_regs()
      158  WREG32_SOC15(GC, 0, mmGCMC_VM_AGP_BOT, adev->gmc.agp_start >> 24);  in gfxhub_v2_0_init_system_aperture_regs()
      159  WREG32_SOC15(GC, 0, mmGCMC_VM_AGP_TOP, adev->gmc.agp_end >> 24);  in gfxhub_v2_0_init_system_aperture_regs()
      163  min(adev->gmc.fb_start, adev->gmc.agp_start) >> 18);  in gfxhub_v2_0_init_system_aperture_regs()
      165  max(adev->gmc.fb_end, adev->gmc.agp_end) >> 18);  in gfxhub_v2_0_init_system_aperture_regs()
      233  if (adev->gmc.translate_further) {  in gfxhub_v2_0_init_cache_regs()
      314  !adev->gmc.noretry);  in gfxhub_v2_0_setup_vmid_config()
|
| A D | mmhub_v2_3.c |
      139  (u32)(adev->gmc.gart_start >> 12));  in mmhub_v2_3_init_gart_aperture_regs()
      141  (u32)(adev->gmc.gart_start >> 44));  in mmhub_v2_3_init_gart_aperture_regs()
      144  (u32)(adev->gmc.gart_end >> 12));  in mmhub_v2_3_init_gart_aperture_regs()
      146  (u32)(adev->gmc.gart_end >> 44));  in mmhub_v2_3_init_gart_aperture_regs()
      157  WREG32_SOC15(MMHUB, 0, mmMMMC_VM_AGP_TOP, adev->gmc.agp_end >> 24);  in mmhub_v2_3_init_system_aperture_regs()
      161  min(adev->gmc.fb_start, adev->gmc.agp_start) >> 18);  in mmhub_v2_3_init_system_aperture_regs()
      163  max(adev->gmc.fb_end, adev->gmc.agp_end) >> 18);  in mmhub_v2_3_init_system_aperture_regs()
      227  if (adev->gmc.translate_further) {  in mmhub_v2_3_init_cache_regs()
      312  !adev->gmc.noretry);  in mmhub_v2_3_setup_vmid_config()
      352  adev->gmc.vram_start >> 24);  in mmhub_v2_3_gart_enable()
      [all …]
|
| A D | aldebaran.c |
      51  if (adev->gmc.xgmi.connected_to_cpu) {  in aldebaran_get_reset_handler()
      150  gmc.xgmi.head) {  in aldebaran_mode2_perform_reset()
      159  gmc.xgmi.head) {  in aldebaran_mode2_perform_reset()
      161  if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) {  in aldebaran_mode2_perform_reset()
      179  gmc.xgmi.head) {  in aldebaran_mode2_perform_reset()
      180  if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) {  in aldebaran_mode2_perform_reset()
      190  gmc.xgmi.head) {  in aldebaran_mode2_perform_reset()
      331  gmc.xgmi.head) {  in aldebaran_mode2_restore_hwcontext()
      349  tmp_adev->gmc.xgmi.num_physical_nodes > 1)  in aldebaran_mode2_restore_hwcontext()
|
| A D | mmhub_v1_7.c |
      48  adev->gmc.fb_start = base;  in mmhub_v1_7_get_fb_location()
      49  adev->gmc.fb_end = top;  in mmhub_v1_7_get_fb_location()
      70  if (adev->gmc.pdb0_bo)  in mmhub_v1_7_init_gart_aperture_regs()
      80  if (adev->gmc.pdb0_bo) {  in mmhub_v1_7_init_gart_aperture_regs()
      82  (u32)(adev->gmc.fb_start >> 12));  in mmhub_v1_7_init_gart_aperture_regs()
      84  (u32)(adev->gmc.fb_start >> 44));  in mmhub_v1_7_init_gart_aperture_regs()
      119  min(adev->gmc.fb_start, adev->gmc.agp_start) >> 18);  in mmhub_v1_7_init_system_aperture_regs()
      122  max(adev->gmc.fb_end, adev->gmc.agp_end) >> 18);  in mmhub_v1_7_init_system_aperture_regs()
      127  if (adev->gmc.pdb0_bo) {  in mmhub_v1_7_init_system_aperture_regs()
      200  if (adev->gmc.translate_further) {  in mmhub_v1_7_init_cache_regs()
      [all …]
|
| A D | amdgpu_test.c |
      47  n = adev->gmc.gart_size - atomic64_read(&adev->gart_pin_size);  in amdgpu_do_test_moves()
      155  (gart_addr - adev->gmc.gart_start +  in amdgpu_do_test_moves()
      158  (vram_addr - adev->gmc.vram_start +  in amdgpu_do_test_moves()
      201  (vram_addr - adev->gmc.vram_start +  in amdgpu_do_test_moves()
      204  (gart_addr - adev->gmc.gart_start +  in amdgpu_do_test_moves()
      214  gart_addr - adev->gmc.gart_start);  in amdgpu_do_test_moves()
|
| A D | mmhub_v2_0.c |
      210  (u32)(adev->gmc.gart_start >> 12));  in mmhub_v2_0_init_gart_aperture_regs()
      212  (u32)(adev->gmc.gart_start >> 44));  in mmhub_v2_0_init_gart_aperture_regs()
      215  (u32)(adev->gmc.gart_end >> 12));  in mmhub_v2_0_init_gart_aperture_regs()
      217  (u32)(adev->gmc.gart_end >> 44));  in mmhub_v2_0_init_gart_aperture_regs()
      228  WREG32_SOC15_RLC(MMHUB, 0, mmMMMC_VM_AGP_BOT, adev->gmc.agp_start >> 24);  in mmhub_v2_0_init_system_aperture_regs()
      229  WREG32_SOC15_RLC(MMHUB, 0, mmMMMC_VM_AGP_TOP, adev->gmc.agp_end >> 24);  in mmhub_v2_0_init_system_aperture_regs()
      233  min(adev->gmc.fb_start, adev->gmc.agp_start) >> 18);  in mmhub_v2_0_init_system_aperture_regs()
      235  max(adev->gmc.fb_end, adev->gmc.agp_end) >> 18);  in mmhub_v2_0_init_system_aperture_regs()
      306  if (adev->gmc.translate_further) {  in mmhub_v2_0_init_cache_regs()
      397  !adev->gmc.noretry);  in mmhub_v2_0_setup_vmid_config()
|
| A D | amdgpu_vram_mgr.c |
      65  return sysfs_emit(buf, "%llu\n", adev->gmc.real_vram_size);  in amdgpu_mem_info_vram_total_show()
      82  return sysfs_emit(buf, "%llu\n", adev->gmc.visible_vram_size);  in amdgpu_mem_info_vis_vram_total_show()
      140  switch (adev->gmc.vram_vendor) {  in amdgpu_mem_info_vram_vendor()
      204  if (start >= adev->gmc.visible_vram_size)  in amdgpu_vram_mgr_vis_size()
      207  return (end > adev->gmc.visible_vram_size ?  in amdgpu_vram_mgr_vis_size()
      208  adev->gmc.visible_vram_size : end) - start;  in amdgpu_vram_mgr_vis_size()
      227  if (amdgpu_gmc_vram_full_visible(&adev->gmc))  in amdgpu_vram_mgr_bo_visible_size()
      230  if (res->start >= adev->gmc.visible_vram_size >> PAGE_SHIFT)  in amdgpu_vram_mgr_bo_visible_size()
      387  max_bytes = adev->gmc.mc_vram_size;  in amdgpu_vram_mgr_new()
      466  if (adev->gmc.xgmi.connected_to_cpu)  in amdgpu_vram_mgr_new()
      [all …]
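The amdgpu_vram_mgr_vis_size() matches (lines 204–208 of that file) compute how many bytes of a [start, end) VRAM block fall inside the CPU-visible window. A standalone sketch of that intersection; the helper name and example numbers are illustrative.

```c
/* Overlap of a VRAM block with the CPU-visible window, as in the matches. */
#include <stdint.h>
#include <stdio.h>

static uint64_t vis_bytes(uint64_t start, uint64_t end, uint64_t visible)
{
	if (start >= visible)
		return 0;                                 /* entirely above the BAR */
	return (end > visible ? visible : end) - start;   /* clip to the window */
}

int main(void)
{
	uint64_t visible = 256ULL << 20;                  /* hypothetical 256 MiB BAR */

	/* Block straddling the boundary: only 1 MiB of it is CPU-visible. */
	printf("%llu\n", (unsigned long long)
	       vis_bytes(255ULL << 20, 257ULL << 20, visible));
	return 0;
}
```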
|
| A D | amdgpu_amdkfd.c |
      78  amdgpu_amdkfd_total_mem_size += adev->gmc.real_vram_size;  in amdgpu_amdkfd_device_probe()
      411  adev->gmc.visible_vram_size;  in amdgpu_amdkfd_get_local_mem_info()
      413  mem_info->vram_width = adev->gmc.vram_width;  in amdgpu_amdkfd_get_local_mem_info()
      416  &adev->gmc.aper_base,  in amdgpu_amdkfd_get_local_mem_info()
      543  return adev->gmc.xgmi.hive_id;  in amdgpu_amdkfd_get_hive_id()
      561  adev->gmc.xgmi.physical_node_id,  in amdgpu_amdkfd_get_xgmi_hops_count()
      562  peer_adev->gmc.xgmi.physical_node_id, ret);  in amdgpu_amdkfd_get_xgmi_hops_count()
      583  adev->gmc.xgmi.physical_node_id,  in amdgpu_amdkfd_get_xgmi_bandwidth_mbytes()
      584  peer_adev->gmc.xgmi.physical_node_id, num_links);  in amdgpu_amdkfd_get_xgmi_bandwidth_mbytes()
      675  return adev->gmc.noretry;  in amdgpu_amdkfd_get_noretry()
      [all …]
|
| A D | amdgpu_ttm.c |
      218  *addr = adev->gmc.gart_start;  in amdgpu_ttm_map_buffer()
      629  return adev->gmc.gart_start;  in amdgpu_ttm_domain_start()
      631  return adev->gmc.vram_start;  in amdgpu_ttm_domain_start()
      1713  if (adev->gmc.xgmi.connected_to_cpu)  in amdgpu_ttm_init()
      1715  adev->gmc.visible_vram_size);  in amdgpu_ttm_init()
      1720  adev->gmc.visible_vram_size);  in amdgpu_ttm_init()
      1778  adev->gmc.mc_vram_size),  in amdgpu_ttm_init()
      1893  size = adev->gmc.real_vram_size;  in amdgpu_ttm_set_buffer_funcs_status()
      1895  size = adev->gmc.visible_vram_size;  in amdgpu_ttm_set_buffer_funcs_status()
      2175  if (*pos >= adev->gmc.mc_vram_size)  in amdgpu_ttm_vram_read()
      [all …]
|
| A D | amdgpu_xgmi.h |
      70  adev->gmc.xgmi.hive_id &&  in amdgpu_xgmi_same_hive()
      71  adev->gmc.xgmi.hive_id == bo_adev->gmc.xgmi.hive_id);  in amdgpu_xgmi_same_hive()
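The amdgpu_xgmi_same_hive() matches reduce to a simple test: both devices must have a non-zero hive_id and the ids must match. A minimal sketch with the structs trimmed to that one field; names and values are illustrative.

```c
/* Same-hive check implied by the amdgpu_xgmi_same_hive() matches. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct xgmi_info { uint64_t hive_id; };

static bool same_hive(const struct xgmi_info *a, const struct xgmi_info *b)
{
	return a->hive_id && a->hive_id == b->hive_id;
}

int main(void)
{
	struct xgmi_info gpu0 = { .hive_id = 16 }, gpu1 = { .hive_id = 16 };
	struct xgmi_info gpu2 = { .hive_id = 0 };       /* not in any hive */

	printf("%d %d\n", same_hive(&gpu0, &gpu1), same_hive(&gpu0, &gpu2));
	return 0;
}
```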
|
| A D | amdgpu_object.c |
      595  if (!amdgpu_gmc_vram_full_visible(&adev->gmc) &&  in amdgpu_bo_create()
      1033  if (!adev->gmc.xgmi.connected_to_cpu) {  in amdgpu_bo_init()
      1035  arch_io_reserve_memtype_wc(adev->gmc.aper_base,  in amdgpu_bo_init()
      1036  adev->gmc.aper_size);  in amdgpu_bo_init()
      1039  adev->gmc.vram_mtrr = arch_phys_wc_add(adev->gmc.aper_base,  in amdgpu_bo_init()
      1040  adev->gmc.aper_size);  in amdgpu_bo_init()
      1044  adev->gmc.mc_vram_size >> 20,  in amdgpu_bo_init()
      1045  (unsigned long long)adev->gmc.aper_size >> 20);  in amdgpu_bo_init()
      1047  adev->gmc.vram_width, amdgpu_vram_names[adev->gmc.vram_type]);  in amdgpu_bo_init()
      1351  (offset + bo->base.size) > adev->gmc.visible_vram_size)  in amdgpu_bo_fault_reserve_notify()
      [all …]
|
| A D | amdgpu_device.c |
      1196  if (adev->gmc.real_vram_size &&  in amdgpu_device_resize_fb_bar()
      1291  if (adev->gmc.xgmi.pending_reset)  in amdgpu_device_need_post()
      2398  if (!adev->gmc.xgmi.pending_reset)  in amdgpu_device_ip_init()
      2879  if (adev->gmc.xgmi.pending_reset &&  in amdgpu_device_ip_suspend_phase2()
      3421  adev->gmc.gmc_funcs = NULL;  in amdgpu_device_init()
      3583  adev->gmc.xgmi.pending_reset = true;  in amdgpu_device_init()
      3735  if (!adev->gmc.xgmi.pending_reset) {  in amdgpu_device_init()
      3777  if (adev->gmc.xgmi.pending_reset)  in amdgpu_device_init()
      3809  arch_io_free_memtype_wc(adev->gmc.aper_base, adev->gmc.aper_size);  in amdgpu_device_unmap_mmio()
      5618  if (adev->gmc.xgmi.connected_to_cpu)  in amdgpu_device_flush_hdp()
      [all …]
|
| /linux/drivers/video/fbdev/ |
| A D | w100fb.c |
      322  gmc.val = 0;  in w100_init_graphic_engine()
      325  gmc.f.gmc_src_clipping = 1;  in w100_init_graphic_engine()
      326  gmc.f.gmc_dst_clipping = 1;  in w100_init_graphic_engine()
      330  gmc.f.gmc_byte_pix_order = 1;  in w100_init_graphic_engine()
      331  gmc.f.gmc_default_sel = 0;  in w100_init_graphic_engine()
      332  gmc.f.gmc_rop3 = ROP3_SRCCOPY;  in w100_init_graphic_engine()
      334  gmc.f.gmc_clr_cmp_fcn_dis = 1;  in w100_init_graphic_engine()
      335  gmc.f.gmc_wr_msk_dis = 1;  in w100_init_graphic_engine()
      336  gmc.f.gmc_dp_op = DP_OP_ROP;  in w100_init_graphic_engine()
      369  gmc.f.gmc_rop3 = ROP3_PATCOPY;  in w100fb_fillrect()
      [all …]
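In w100fb.c the identifier gmc is unrelated to amdgpu's memory controller: it appears to be a local register variable whose .val member is the raw 32-bit word and whose .f bitfields are the individual graphics-engine control bits written to hardware. A generic sketch of that union-of-bitfields idiom follows; the field names, widths, and the ROP value are illustrative, not the real w100 register layout.

```c
/* Register-as-union idiom used by the w100fb matches above. */
#include <stdint.h>
#include <stdio.h>

union gmc_reg {
	uint32_t val;                         /* raw register value */
	struct {
		unsigned int gmc_src_clipping : 1;
		unsigned int gmc_dst_clipping : 1;
		unsigned int gmc_rop3         : 8;
		unsigned int reserved         : 22;
	} f;
};

int main(void)
{
	union gmc_reg gmc;

	gmc.val = 0;                          /* start from a clean register */
	gmc.f.gmc_src_clipping = 1;
	gmc.f.gmc_dst_clipping = 1;
	gmc.f.gmc_rop3 = 0xCC;                /* SRCCOPY-style raster op */

	printf("register value: 0x%08x\n", gmc.val);
	return 0;
}
```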
|