/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_nbio.c
       30  if (!adev->nbio.ras)  in amdgpu_nbio_ras_sw_init()
       33  ras = adev->nbio.ras;  in amdgpu_nbio_ras_sw_init()
       43  adev->nbio.ras_if = &ras->ras_block.ras_comm;  in amdgpu_nbio_ras_sw_init()
       50  if (adev->nbio.funcs && adev->nbio.funcs->get_pcie_replay_count)  in amdgpu_nbio_get_pcie_replay_count()
       51  return adev->nbio.funcs->get_pcie_replay_count(adev);  in amdgpu_nbio_get_pcie_replay_count()
       64  r = amdgpu_irq_get(adev, &adev->nbio.ras_controller_irq, 0);  in amdgpu_nbio_ras_late_init()
       67  r = amdgpu_irq_get(adev, &adev->nbio.ras_err_event_athub_irq, 0);  in amdgpu_nbio_ras_late_init()
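
The amdgpu matches above (and in the soc15/soc21/soc24/nv entries below) all follow one pattern: adev->nbio.funcs is a per-ASIC callback table, and optional entries are NULL-checked before being called. A minimal standalone sketch of that guard, using simplified stand-in types rather than the real struct amdgpu_nbio:

```c
/* Simplified stand-ins for the driver types; the layout is illustrative only. */
struct amdgpu_device;

struct nbio_funcs {
	/* Optional per-ASIC callback; may legitimately be NULL. */
	unsigned long (*get_pcie_replay_count)(struct amdgpu_device *adev);
};

struct nbio {
	const struct nbio_funcs *funcs;
};

/* Mirrors the guard at amdgpu_nbio.c:50-51: dereference the callback only
 * when both the table and the specific entry exist for this ASIC. */
static unsigned long get_pcie_replay_count(const struct nbio *nbio,
					    struct amdgpu_device *adev)
{
	if (nbio->funcs && nbio->funcs->get_pcie_replay_count)
		return nbio->funcs->get_pcie_replay_count(adev);
	return 0;
}
```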
|
soc24.c
       94  return adev->nbio.funcs->get_memsize(adev);  in soc24_get_config_memsize()
      249  (adev->nbio.funcs->program_aspm))  in soc24_program_aspm()
      250  adev->nbio.funcs->program_aspm(adev);  in soc24_program_aspm()
      370  adev->nbio.funcs->set_reg_remap(adev);  in soc24_common_early_init()
      480  adev->nbio.funcs->init_registers(adev);  in soc24_common_hw_init()
      485  if (adev->nbio.funcs->remap_hdp_registers)  in soc24_common_hw_init()
      486  adev->nbio.funcs->remap_hdp_registers(adev);  in soc24_common_hw_init()
      492  adev->nbio.funcs->enable_doorbell_aperture(adev, true);  in soc24_common_hw_init()
      506  adev->nbio.funcs->enable_doorbell_aperture(adev, false);  in soc24_common_hw_fini()
      553  adev->nbio.funcs->update_medium_grain_light_sleep(adev,  in soc24_common_set_clockgating_state()
      [all …]
|
soc21.c
      229  return adev->nbio.funcs->get_memsize(adev);  in soc21_get_config_memsize()
      358  u32 memsize = adev->nbio.funcs->get_memsize(adev);
      445  if (adev->nbio.funcs->program_aspm)  in soc21_program_aspm()
      446  adev->nbio.funcs->program_aspm(adev);  in soc21_program_aspm()
      563  adev->nbio.funcs->set_reg_remap(adev);  in soc21_common_early_init()
      818  if (adev->nbio.ras &&  in soc21_common_late_init()
      819  adev->nbio.ras_err_event_athub_irq.funcs)  in soc21_common_late_init()
      857  adev->nbio.funcs->init_registers(adev);  in soc21_common_hw_init()
      863  adev->nbio.funcs->remap_hdp_registers(adev);  in soc21_common_hw_init()
      885  if (adev->nbio.ras &&  in soc21_common_hw_fini()
      [all …]
|
soc15.c
      320  return adev->nbio.funcs->get_memsize(adev);  in soc15_get_config_memsize()
      674  if (adev->nbio.funcs->program_aspm)  in soc15_program_aspm()
      675  adev->nbio.funcs->program_aspm(adev);  in soc15_program_aspm()
      936  adev->nbio.funcs->set_reg_remap(adev);  in soc15_common_early_init()
     1247  adev->nbio.funcs->sdma_doorbell_range(adev, i,  in soc15_sdma_doorbell_range_init()
     1261  adev->nbio.funcs->init_registers(adev);  in soc15_common_hw_init()
     1267  adev->nbio.funcs->remap_hdp_registers(adev);  in soc15_common_hw_init()
     1299  adev->nbio.ras_if &&  in soc15_common_hw_fini()
     1301  if (adev->nbio.ras &&  in soc15_common_hw_fini()
     1304  if (adev->nbio.ras &&  in soc15_common_hw_fini()
      [all …]
|
nv.c
      308  return adev->nbio.funcs->get_memsize(adev);  in nv_get_config_memsize()
      430  u32 memsize = adev->nbio.funcs->get_memsize(adev);  in nv_asic_mode2_reset()
      518  if (adev->nbio.funcs->program_aspm)  in nv_program_aspm()
      519  adev->nbio.funcs->program_aspm(adev);  in nv_program_aspm()
      610  if (adev->nbio.funcs->enable_aspm &&  in nv_update_umd_stable_pstate()
      612  adev->nbio.funcs->enable_aspm(adev, !enter);  in nv_update_umd_stable_pstate()
      641  adev->nbio.funcs->set_reg_remap(adev);  in nv_common_early_init()
      995  if (adev->nbio.funcs->apply_lc_spc_mode_wa)  in nv_common_hw_init()
      996  adev->nbio.funcs->apply_lc_spc_mode_wa(adev);  in nv_common_hw_init()
     1004  adev->nbio.funcs->init_registers(adev);  in nv_common_hw_init()
      [all …]
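
The soc15/soc21/soc24 and nv entries show the same common-IP bring-up order: program the ASIC's NBIO registers, remap the HDP register window if that callback exists, then open the doorbell aperture (and close it again in hw_fini). A compressed, self-contained sketch of that ordering; the struct and names here are placeholders, not the real amdgpu definitions:

```c
struct soc_dev;

struct soc_nbio_funcs {
	void (*init_registers)(struct soc_dev *dev);
	void (*remap_hdp_registers)(struct soc_dev *dev);	/* optional */
	void (*enable_doorbell_aperture)(struct soc_dev *dev, int enable);
};

/* Order shared by the soc15/soc21/soc24/nv *_common_hw_init() functions;
 * the matching *_common_hw_fini() re-calls enable_doorbell_aperture(dev, 0). */
static void common_hw_init_sketch(struct soc_dev *dev,
				  const struct soc_nbio_funcs *nbio)
{
	nbio->init_registers(dev);
	if (nbio->remap_hdp_registers)
		nbio->remap_hdp_registers(dev);
	nbio->enable_doorbell_aperture(dev, 1);
}
```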
|
df_v3_6.c
       51  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in df_v3_6_get_fica()
       52  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in df_v3_6_get_fica()
       74  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in df_v3_6_set_fica()
       75  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in df_v3_6_set_fica()
      102  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in df_v3_6_perfmon_rreg()
      103  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in df_v3_6_perfmon_rreg()
      124  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in df_v3_6_perfmon_wreg()
      125  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in df_v3_6_perfmon_wreg()
      143  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in df_v3_6_perfmon_arm_with_status()
      144  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in df_v3_6_perfmon_arm_with_status()
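
df_v3_6 never maps its registers directly: every accessor first asks the NBIO callbacks for the PCIE index/data register pair and then goes through that pair. The indirect rreg/wreg helpers in the amdgpu_device.c entry further down use the same pair. A rough standalone model of the index/data handshake, with hypothetical MMIO accessors standing in for RREG32()/WREG32():

```c
#include <stdint.h>

/* Hypothetical MMIO accessors; offsets are dword register offsets. */
extern uint32_t mmio_read32(uint32_t offset);
extern void mmio_write32(uint32_t offset, uint32_t value);

/*
 * Index/data access as used by df_v3_6_get_fica() and the perfmon helpers:
 * write the target address into the index register, then move the payload
 * through the data register.  In the driver the two offsets come from
 * adev->nbio.funcs->get_pcie_index_offset()/get_pcie_data_offset() and the
 * sequence runs under a spinlock so the pair is not interleaved.
 */
static uint32_t indirect_read32(uint32_t index_reg, uint32_t data_reg,
				uint32_t target)
{
	mmio_write32(index_reg, target);
	(void)mmio_read32(index_reg);	/* read back to post the index write */
	return mmio_read32(data_reg);
}
```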
|
nbio_v7_9.c
      524  struct ras_manager *obj = amdgpu_ras_find_obj(adev, adev->nbio.ras_if);  in nbio_v7_9_handle_ras_controller_intr_no_bifring()
      557  get_ras_block_str(adev->nbio.ras_if));  in nbio_v7_9_handle_ras_controller_intr_no_bifring()
      563  get_ras_block_str(adev->nbio.ras_if));  in nbio_v7_9_handle_ras_controller_intr_no_bifring()
      651  adev->nbio.ras_controller_irq.funcs =  in nbio_v7_9_init_ras_controller_interrupt()
      653  adev->nbio.ras_controller_irq.num_types = 1;  in nbio_v7_9_init_ras_controller_interrupt()
      658  &adev->nbio.ras_controller_irq);  in nbio_v7_9_init_ras_controller_interrupt()
      669  adev->nbio.ras_err_event_athub_irq.funcs =  in nbio_v7_9_init_ras_err_event_athub_interrupt()
      671  adev->nbio.ras_err_event_athub_irq.num_types = 1;  in nbio_v7_9_init_ras_err_event_athub_interrupt()
      676  &adev->nbio.ras_err_event_athub_irq);  in nbio_v7_9_init_ras_err_event_athub_interrupt()
|
nbio_v7_4.c
      363  struct ras_manager *obj = amdgpu_ras_find_obj(adev, adev->nbio.ras_if);  in nbio_v7_4_handle_ras_controller_intr_no_bifring()
      402  get_ras_block_str(adev->nbio.ras_if));  in nbio_v7_4_handle_ras_controller_intr_no_bifring()
      408  get_ras_block_str(adev->nbio.ras_if));  in nbio_v7_4_handle_ras_controller_intr_no_bifring()
      554  adev->nbio.ras_controller_irq.funcs =  in nbio_v7_4_init_ras_controller_interrupt()
      556  adev->nbio.ras_controller_irq.num_types = 1;  in nbio_v7_4_init_ras_controller_interrupt()
      561  &adev->nbio.ras_controller_irq);  in nbio_v7_4_init_ras_controller_interrupt()
      572  adev->nbio.ras_err_event_athub_irq.funcs =  in nbio_v7_4_init_ras_err_event_athub_interrupt()
      574  adev->nbio.ras_err_event_athub_irq.num_types = 1;  in nbio_v7_4_init_ras_err_event_athub_interrupt()
      579  &adev->nbio.ras_err_event_athub_irq);  in nbio_v7_4_init_ras_err_event_athub_interrupt()
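
nbio_v7_9 and nbio_v7_4 register their RAS interrupts the same way: fill in the funcs and num_types members of the interrupt source embedded in adev->nbio, then hand the source to amdgpu_irq_add_id(). A condensed sketch of that sequence; the stand-in types and the client/source IDs here are placeholders, only the ordering reflects the lines listed above:

```c
/* Minimal stand-ins; the real types live in amdgpu_irq.h. */
struct amdgpu_device;
struct irq_src_funcs;

struct irq_src {
	unsigned int num_types;
	const struct irq_src_funcs *funcs;
};

/* Placeholder handler table and registration hook for the sketch. */
extern const struct irq_src_funcs ras_controller_irq_funcs;
extern int irq_add_id(struct amdgpu_device *adev, unsigned int client_id,
		      unsigned int src_id, struct irq_src *src);

/* Shape of nbio_v7_4/v7_9_init_ras_controller_interrupt(): describe the
 * source first, then register it; the athub error source is set up the
 * same way with its own funcs table and source ID. */
static int init_ras_controller_interrupt(struct amdgpu_device *adev,
					 struct irq_src *irq,
					 unsigned int client_id,
					 unsigned int src_id)
{
	irq->funcs = &ras_controller_irq_funcs;
	irq->num_types = 1;
	return irq_add_id(adev, client_id, src_id, irq);
}
```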
|
amdgpu_device.c
      844  pcie_data = adev->nbio.funcs->get_pcie_data_offset(adev);  in amdgpu_device_indirect_rreg()
      867  if (unlikely(!adev->nbio.funcs)) {  in amdgpu_device_indirect_rreg_ext()
      876  if (unlikely(!adev->nbio.funcs))  in amdgpu_device_indirect_rreg_ext()
      927  pcie_data = adev->nbio.funcs->get_pcie_data_offset(adev);  in amdgpu_device_indirect_rreg64()
     1158  return adev->nbio.funcs->get_rev_id(adev);  in amdgpu_device_get_rev_id()
     5223  u32 memsize = adev->nbio.funcs->get_memsize(adev);  in amdgpu_device_mode1_reset()
     6124  adev->nbio.funcs->enable_doorbell_interrupt)  in amdgpu_device_baco_enter()
     6144  adev->nbio.funcs->enable_doorbell_interrupt)  in amdgpu_device_baco_exit()
     6147  if (amdgpu_passthrough(adev) && adev->nbio.funcs &&  in amdgpu_device_baco_exit()
     6148  adev->nbio.funcs->clear_doorbell_interrupt)  in amdgpu_device_baco_exit()
      [all …]
|
amdgpu_discovery.c
     2698  adev->nbio.funcs = &nbio_v6_1_funcs;  in amdgpu_discovery_set_ip_blocks()
     2704  adev->nbio.funcs = &nbio_v7_0_funcs;  in amdgpu_discovery_set_ip_blocks()
     2710  adev->nbio.funcs = &nbio_v7_4_funcs;  in amdgpu_discovery_set_ip_blocks()
     2714  adev->nbio.funcs = &nbio_v7_9_funcs;  in amdgpu_discovery_set_ip_blocks()
     2720  adev->nbio.funcs = &nbio_v7_11_funcs;  in amdgpu_discovery_set_ip_blocks()
     2728  adev->nbio.funcs = &nbio_v7_2_funcs;  in amdgpu_discovery_set_ip_blocks()
     2739  adev->nbio.funcs = &nbio_v2_3_funcs;  in amdgpu_discovery_set_ip_blocks()
     2745  adev->nbio.funcs = &nbio_v4_3_sriov_funcs;  in amdgpu_discovery_set_ip_blocks()
     2747  adev->nbio.funcs = &nbio_v4_3_funcs;  in amdgpu_discovery_set_ip_blocks()
     2752  adev->nbio.funcs = &nbio_v7_7_funcs;  in amdgpu_discovery_set_ip_blocks()
      [all …]
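
amdgpu_discovery_set_ip_blocks() is where adev->nbio.funcs gets populated in the first place: the NBIO IP version reported by IP discovery selects one of the per-generation callback tables, and every call site listed elsewhere then dispatches through that choice. A toy model of the selection; the enum and the fallback are invented for the sketch, while the real code switches on the NBIO IP version from the discovery table:

```c
#include <stddef.h>

/* Invented version tags for the sketch; the driver keys off the NBIO IP
 * version that IP discovery reported for this ASIC. */
enum nbio_ip_version { NBIO_V6_1, NBIO_V7_0, NBIO_V7_4, NBIO_V7_9 };

struct nbio_callbacks;	/* per-generation callback table (opaque here) */
extern const struct nbio_callbacks nbio_v6_1_funcs, nbio_v7_0_funcs,
				   nbio_v7_4_funcs, nbio_v7_9_funcs;

static const struct nbio_callbacks *pick_nbio_funcs(enum nbio_ip_version v)
{
	switch (v) {
	case NBIO_V6_1: return &nbio_v6_1_funcs;
	case NBIO_V7_0: return &nbio_v7_0_funcs;
	case NBIO_V7_4: return &nbio_v7_4_funcs;
	case NBIO_V7_9: return &nbio_v7_9_funcs;
	}
	return NULL;	/* unknown version: leave the funcs pointer unset */
}
```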
|
amdgpu_bios.c
      530  if (adev->nbio.funcs &&  in amdgpu_soc15_read_bios_from_rom()
      531  adev->nbio.funcs->get_rom_offset) {  in amdgpu_soc15_read_bios_from_rom()
      532  rom_offset = adev->nbio.funcs->get_rom_offset(adev);  in amdgpu_soc15_read_bios_from_rom()
|
nbio_v4_3.c
      619  adev->nbio.ras_err_event_athub_irq.funcs =  in nbio_v4_3_init_ras_err_event_athub_interrupt()
      621  adev->nbio.ras_err_event_athub_irq.num_types = 1;  in nbio_v4_3_init_ras_err_event_athub_interrupt()
      627  &adev->nbio.ras_err_event_athub_irq);  in nbio_v4_3_init_ras_err_event_athub_interrupt()
|
amdgpu_ras.c
     2105  if (adev->nbio.ras &&  in amdgpu_ras_interrupt_fatal_error_handler()
     2106  adev->nbio.ras->handle_ras_controller_intr_no_bifring)  in amdgpu_ras_interrupt_fatal_error_handler()
     2109  if (adev->nbio.ras &&  in amdgpu_ras_interrupt_fatal_error_handler()
     3620  adev->nbio.ras = &nbio_v7_4_ras;  in amdgpu_ras_init()
     3630  adev->nbio.ras = &nbio_v4_3_ras;  in amdgpu_ras_init()
     3634  adev->nbio.ras = &nbio_v7_9_ras;  in amdgpu_ras_init()
     3647  if (adev->nbio.ras &&  in amdgpu_ras_init()
     3648  adev->nbio.ras->init_ras_controller_interrupt) {  in amdgpu_ras_init()
     3649  r = adev->nbio.ras->init_ras_controller_interrupt(adev);  in amdgpu_ras_init()
     3654  if (adev->nbio.ras &&  in amdgpu_ras_init()
      [all …]
|
vega10_ih.c
      273  adev->nbio.funcs->ih_control(adev);  in vega10_ih_irq_init()
      293  adev->nbio.funcs->ih_doorbell_range(adev, adev->irq.ih.use_doorbell,  in vega10_ih_irq_init()
|
vega20_ih.c
      292  adev->nbio.funcs->ih_control(adev);  in vega20_ih_irq_init()
      329  adev->nbio.funcs->ih_doorbell_range(adev, adev->irq.ih.use_doorbell,  in vega20_ih_irq_init()
|
navi10_ih.c
      329  adev->nbio.funcs->ih_control(adev);  in navi10_ih_irq_init()
      361  adev->nbio.funcs->ih_doorbell_range(adev, ih[0]->use_doorbell,  in navi10_ih_irq_init()
|
ih_v6_0.c
      334  adev->nbio.funcs->ih_control(adev);  in ih_v6_0_irq_init()
      355  adev->nbio.funcs->ih_doorbell_range(adev, ih[0]->use_doorbell,  in ih_v6_0_irq_init()
|
ih_v6_1.c
      306  adev->nbio.funcs->ih_control(adev);  in ih_v6_1_irq_init()
      327  adev->nbio.funcs->ih_doorbell_range(adev, ih[0]->use_doorbell,  in ih_v6_1_irq_init()
|
ih_v7_0.c
      306  adev->nbio.funcs->ih_control(adev);  in ih_v7_0_irq_init()
      327  adev->nbio.funcs->ih_doorbell_range(adev, ih[0]->use_doorbell,  in ih_v7_0_irq_init()
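
All six interrupt-handler entries above (vega10 through ih_v7_0) make the same two NBIO calls during IRQ init: ih_control() to program the IH control registers, then ih_doorbell_range() to route the ring's doorbell. A short sketch with placeholder types, just to make that ordering explicit:

```c
struct ih_dev;

struct ih_nbio_funcs {
	void (*ih_control)(struct ih_dev *dev);
	void (*ih_doorbell_range)(struct ih_dev *dev, int use_doorbell,
				  unsigned int doorbell_index);
};

/* The *_ih_irq_init() functions listed above follow this order: control
 * registers first, doorbell routing second. */
static void ih_irq_init_sketch(struct ih_dev *dev,
			       const struct ih_nbio_funcs *nbio,
			       int use_doorbell, unsigned int doorbell_index)
{
	nbio->ih_control(dev);
	nbio->ih_doorbell_range(dev, use_doorbell, doorbell_index);
}
```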
|
sdma_v7_0.c
      357  const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg;  in sdma_v7_0_ring_emit_hdp_flush()
      364  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2);  in sdma_v7_0_ring_emit_hdp_flush()
      365  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2);  in sdma_v7_0_ring_emit_hdp_flush()
      582  adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell,  in sdma_v7_0_gfx_resume()
|
sdma_v6_0.c
      325  const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg;  in sdma_v6_0_ring_emit_hdp_flush()
      332  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2);  in sdma_v6_0_ring_emit_hdp_flush()
      333  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2);  in sdma_v6_0_ring_emit_hdp_flush()
      558  adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell,  in sdma_v6_0_gfx_resume()
|
sdma_v5_2.c
      340  const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg;  in sdma_v5_2_ring_emit_hdp_flush()
      350  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2);  in sdma_v5_2_ring_emit_hdp_flush()
      351  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2);  in sdma_v5_2_ring_emit_hdp_flush()
      615  adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell,  in sdma_v5_2_gfx_resume()
|
jpeg_v3_0.c
      150  adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,  in jpeg_v3_0_hw_init()
|
sdma_v5_0.c
      522  const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg;  in sdma_v5_0_ring_emit_hdp_flush()
      532  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2);  in sdma_v5_0_ring_emit_hdp_flush()
      533  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2);  in sdma_v5_0_ring_emit_hdp_flush()
      804  adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell,  in sdma_v5_0_gfx_resume()
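
Every SDMA generation above emits its HDP flush the same way: the "request" and "done" register offsets come from the NBIO callbacks, shifted left by 2 to turn a dword register offset into the byte address the ring packet expects, and a per-engine reference mask comes from adev->nbio.hdp_flush_reg. A condensed standalone model of just that address/mask computation, with hypothetical stub callbacks:

```c
#include <stdint.h>

/* Hypothetical stand-ins for the nbio callbacks, returning dword offsets. */
extern uint32_t get_hdp_flush_req_offset(void);
extern uint32_t get_hdp_flush_done_offset(void);

struct hdp_flush_cmd {
	uint32_t req_addr;	/* byte address the engine writes */
	uint32_t done_addr;	/* byte address polled until ref_mask is set */
	uint32_t ref_mask;	/* per-engine bit from nbio.hdp_flush_reg */
};

/* Mirrors the "<< 2" in sdma_v*_ring_emit_hdp_flush(): dword offset in,
 * byte address out, paired with the engine's reference mask. */
static struct hdp_flush_cmd build_hdp_flush(uint32_t engine_ref_mask)
{
	struct hdp_flush_cmd cmd = {
		.req_addr  = get_hdp_flush_req_offset() << 2,
		.done_addr = get_hdp_flush_done_offset() << 2,
		.ref_mask  = engine_ref_mask,
	};
	return cmd;
}
```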
|
/linux/drivers/block/xen-blkback/
blkback.c
     1299  int i, nbio = 0;  in dispatch_rw_block_io()  local
     1437  biolist[nbio++] = bio;  in dispatch_rw_block_io()
     1452  biolist[nbio++] = bio;  in dispatch_rw_block_io()
     1457  atomic_set(&pending_req->pendcnt, nbio);  in dispatch_rw_block_io()
     1460  for (i = 0; i < nbio; i++)  in dispatch_rw_block_io()
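
This nbio has nothing to do with the AMD NBIO block above: it is a local in xen-blkback's dispatch_rw_block_io() counting how many struct bio objects were built for one ring request. The count is written into the request's pendcnt before any bio is submitted, so the completion handler knows when the last bio has finished. A stripped-down model of that bookkeeping; the types and the submit call are placeholders, not the real block-layer API:

```c
#include <stddef.h>

struct bio;			/* opaque stand-in for struct bio */

struct pending_req {
	int pendcnt;		/* atomic_t in the real driver */
};

extern void submit_bio_stub(struct bio *bio);	/* placeholder for submit_bio() */

/* Shape of blkback.c:1299-1460: collect the bios, record the count, then
 * submit them all; completions decrement pendcnt until it reaches zero. */
static void dispatch_sketch(struct pending_req *req,
			    struct bio **segments, size_t nseg)
{
	struct bio *biolist[32];	/* bounded, like the driver's biolist[] */
	size_t i, nbio = 0;

	for (i = 0; i < nseg && nbio < 32; i++)
		biolist[nbio++] = segments[i];

	req->pendcnt = (int)nbio;	/* must be set before the first submit */

	for (i = 0; i < nbio; i++)
		submit_bio_stub(biolist[i]);
}
```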
|