/drivers/accel/habanalabs/common/
security.c
    327   for (j = 0 ; j < num_instances ; j++) {    in hl_init_pb_with_mask()
    328   int seq = i * num_instances + j;    in hl_init_pb_with_mask()
    360   u32 num_instances, u32 instance_offset,    in hl_init_pb() argument
    411   for (j = 0 ; j < num_instances ; j++) {    in hl_init_pb_ranges_with_mask()
    412   int seq = i * num_instances + j;    in hl_init_pb_ranges_with_mask()
    472   u32 num_instances, u32 instance_offset,    in hl_init_pb_single_dcore() argument
    492   for (i = 0 ; i < num_instances ; i++)    in hl_init_pb_single_dcore()
    520   u32 num_instances, u32 instance_offset,    in hl_init_pb_ranges_single_dcore() argument
    539   for (i = 0 ; i < num_instances ; i++)    in hl_init_pb_ranges_single_dcore()
    572   int seq = i * num_instances + j;    in hl_ack_pb_with_mask()
    [all …]
|
habanalabs.h
    4203  u32 dcore_offset, u32 num_instances, u32 instance_offset,
    4207  u32 num_instances, u32 instance_offset,
    4211  u32 dcore_offset, u32 num_instances, u32 instance_offset,
    4216  u32 dcore_offset, u32 num_instances, u32 instance_offset,
    4221  u32 num_instances, u32 instance_offset,
    4225  u32 num_instances, u32 instance_offset,
    4230  u32 num_instances, u32 instance_offset,
    4233  u32 dcore_offset, u32 num_instances, u32 instance_offset,
    4236  u32 num_instances, u32 instance_offset,
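
A pattern worth noting in the security.c hits above: the habanalabs protection-bit helpers walk every dcore and every per-dcore block instance and derive a flat sequence index, seq = i * num_instances + j (security.c:328, 412, 572). A minimal sketch of that indexing, assuming dcore_offset/instance_offset are byte strides added to a hypothetical block_base (the base address and stride values below are illustrative, not taken from the driver):

```c
#include <stdint.h>
#include <stdio.h>

/* Sketch only: mirrors the seq = i * num_instances + j indexing visible in
 * hl_init_pb_with_mask(); block_base and the stride interpretation of
 * dcore_offset/instance_offset are assumptions for illustration. */
static void init_pb_sketch(uint64_t block_base,
			   uint32_t num_dcores, uint64_t dcore_offset,
			   uint32_t num_instances, uint64_t instance_offset)
{
	for (uint32_t i = 0; i < num_dcores; i++) {
		for (uint32_t j = 0; j < num_instances; j++) {
			uint32_t seq = i * num_instances + j; /* flat index */
			uint64_t addr = block_base + i * dcore_offset +
					j * instance_offset;

			printf("seq %u -> block at 0x%llx\n",
			       seq, (unsigned long long)addr);
		}
	}
}

int main(void)
{
	init_pb_sketch(0x1000000, 2, 0x200000, 4, 0x10000);
	return 0;
}
```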
|
/drivers/iommu/arm/arm-smmu/
arm-smmu-nvidia.c
    37    unsigned int num_instances;    member
    69    for (i = 0; i < nvidia->num_instances; i++) {    in nvidia_smmu_write_reg()
    90    for (i = 0; i < nvidia->num_instances; i++) {    in nvidia_smmu_write_reg64()
    112   for (i = 0; i < nvidia->num_instances; i++) {    in nvidia_smmu_tlb_sync()
    137   for (i = 0; i < nvidia->num_instances; i++) {    in nvidia_smmu_reset()
    182   for (inst = 0; inst < nvidia->num_instances; inst++) {    in nvidia_smmu_global_fault()
    230   for (inst = 0; inst < nvidia->num_instances; inst++) {    in nvidia_smmu_context_fault()
    323   nvidia_smmu->num_instances++;    in nvidia_smmu_impl_init()
    334   nvidia_smmu->num_instances++;    in nvidia_smmu_impl_init()
    337   if (nvidia_smmu->num_instances == 1)    in nvidia_smmu_impl_init()
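
The loops above (arm-smmu-nvidia.c:69, 90, 112, 137) all follow the same mirroring idea: each programming step is replayed on every discovered SMMU instance. A stripped-down sketch of that pattern with stubbed MMIO; writel_sketch(), MAX_SMMU_INSTANCES and the backing arrays are illustrative, not the driver's API:

```c
#include <stddef.h>
#include <stdint.h>

#define MAX_SMMU_INSTANCES 3	/* illustrative cap for the sketch */

/* Sketch of the per-instance mirroring seen in nvidia_smmu_write_reg():
 * the same register write is issued against every instance's MMIO base. */
struct nvidia_smmu_sketch {
	unsigned int num_instances;
	volatile uint32_t *bases[MAX_SMMU_INSTANCES]; /* per-instance MMIO base */
};

static void writel_sketch(uint32_t val, volatile uint32_t *addr)
{
	*addr = val; /* stand-in for the kernel's writel() */
}

static void smmu_write_reg_sketch(struct nvidia_smmu_sketch *smmu,
				  size_t reg_offset, uint32_t val)
{
	for (unsigned int i = 0; i < smmu->num_instances; i++)
		writel_sketch(val, smmu->bases[i] + reg_offset / sizeof(uint32_t));
}

int main(void)
{
	static uint32_t inst0[64], inst1[64];
	struct nvidia_smmu_sketch smmu = {
		.num_instances = 2,
		.bases = { inst0, inst1 },
	};

	smmu_write_reg_sketch(&smmu, 0x10, 0xdeadbeef);
	return 0;
}
```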
|
/drivers/gpu/drm/amd/amdgpu/
amdgpu_sdma.c
    45    for (i = 0; i < adev->sdma.num_instances; i++)    in amdgpu_sdma_get_instance_from_ring()
    58    for (i = 0; i < adev->sdma.num_instances; i++) {    in amdgpu_sdma_get_index_from_ring()
    103   for (i = 0; i < adev->sdma.num_instances; i++) {    in amdgpu_sdma_ras_late_init()
    193   for (i = 0; i < adev->sdma.num_instances; i++) {    in amdgpu_sdma_destroy_inst_ctx()
    240   for (i = 1; i < adev->sdma.num_instances; i++)    in amdgpu_sdma_init_microcode()
    376   for (i = 0; i < adev->sdma.num_instances; ++i) {    in amdgpu_debugfs_sdma_sched_mask_set()
    415   for (i = 0; i < adev->sdma.num_instances; ++i) {    in amdgpu_debugfs_sdma_sched_mask_get()
    450   if (!(adev->sdma.num_instances > 1))    in amdgpu_debugfs_sdma_sched_mask_init()
    481   if (adev->sdma.num_instances) {    in amdgpu_sdma_sysfs_reset_mask_init()
    496   if (adev->sdma.num_instances)    in amdgpu_sdma_sysfs_reset_mask_fini()
    [all …]
|
sdma_v4_0.c
    602   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v4_0_setup_ulv()
    627   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v4_0_init_microcode()
    927   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v4_0_gfx_enable()
    961   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v4_0_page_stop()
    1010  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v4_0_ctx_switch_enable()
    1056  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v4_0_enable()
    1349  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v4_0_load_microcode()
    1405  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v4_0_start()
    2045  if (j == adev->sdma.num_instances)    in sdma_v4_0_wait_for_idle()
    2527  switch (adev->sdma.num_instances) {    in sdma_v4_0_set_irq_funcs()
    [all …]
|
sdma_v3_0.c
    254   for (i = 0; i < adev->sdma.num_instances; i++)    in sdma_v3_0_free_microcode()
    305   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v3_0_init_microcode()
    334   for (i = 0; i < adev->sdma.num_instances; i++)    in sdma_v3_0_init_microcode()
    519   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v3_0_gfx_stop()
    578   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v3_0_ctx_switch_enable()
    620   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v3_0_enable()
    647   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v3_0_gfx_resume()
    745   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v3_0_gfx_resume()
    1092  adev->sdma.num_instances = 1;    in sdma_v3_0_early_init()
    1095  adev->sdma.num_instances = SDMA_MAX_INSTANCE;    in sdma_v3_0_early_init()
    [all …]
|
sdma_v4_4_2.c
    168   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v4_4_2_inst_init_golden_registers()
    197   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v4_4_2_init_microcode()
    1457  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v4_4_2_sw_init()
    1534  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v4_4_2_sw_fini()
    1614  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v4_4_2_is_idle()
    1636  if (j == adev->sdma.num_instances)    in sdma_v4_4_2_wait_for_idle()
    1773  for (i = instance; i < adev->sdma.num_instances;    in sdma_v4_4_2_process_trap_irq()
    1780  if (i >= adev->sdma.num_instances) {    in sdma_v4_4_2_process_trap_irq()
    2061  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v4_4_2_print_ip_state()
    2081  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v4_4_2_dump_ip_state()
    [all …]
|
cik_sdma.c
    76    for (i = 0; i < adev->sdma.num_instances; i++)    in cik_sdma_free_microcode()
    132   for (i = 0; i < adev->sdma.num_instances; i++) {    in cik_sdma_init_microcode()
    148   for (i = 0; i < adev->sdma.num_instances; i++)    in cik_sdma_init_microcode()
    313   for (i = 0; i < adev->sdma.num_instances; i++) {    in cik_sdma_gfx_stop()
    370   for (i = 0; i < adev->sdma.num_instances; i++) {    in cik_ctx_switch_enable()
    408   for (i = 0; i < adev->sdma.num_instances; i++) {    in cik_sdma_enable()
    433   for (i = 0; i < adev->sdma.num_instances; i++) {    in cik_sdma_gfx_resume()
    496   for (i = 0; i < adev->sdma.num_instances; i++) {    in cik_sdma_gfx_resume()
    538   for (i = 0; i < adev->sdma.num_instances; i++) {    in cik_sdma_load_microcode()
    927   adev->sdma.num_instances = SDMA_MAX_INSTANCE;    in cik_sdma_early_init()
    [all …]
|
si_dma.c
    134   for (i = 0; i < adev->sdma.num_instances; i++) {    in si_dma_stop()
    149   for (i = 0; i < adev->sdma.num_instances; i++) {    in si_dma_start()
    480   adev->sdma.num_instances = SDMA_MAX_INSTANCE;    in si_dma_early_init()
    508   for (i = 0; i < adev->sdma.num_instances; i++) {    in si_dma_sw_init()
    530   for (i = 0; i < adev->sdma.num_instances; i++)    in si_dma_sw_fini()
    659   for (i = 0; i < adev->sdma.num_instances; i++) {    in si_dma_set_clockgating_state()
    671   for (i = 0; i < adev->sdma.num_instances; i++) {    in si_dma_set_clockgating_state()
    752   for (i = 0; i < adev->sdma.num_instances; i++)    in si_dma_set_ring_funcs()
    846   for (i = 0; i < adev->sdma.num_instances; i++) {    in si_dma_set_vm_pte_funcs()
    850   adev->vm_manager.vm_pte_num_scheds = adev->sdma.num_instances;    in si_dma_set_vm_pte_funcs()
|
sdma_v6_0.c
    399   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v6_0_gfx_stop()
    435   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v6_0_ctxempty_int_enable()
    465   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v6_0_enable()
    635   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v6_0_gfx_resume()
    767   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v6_0_soft_reset()
    802   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v6_0_check_soft_reset()
    1329  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v6_0_sw_init()
    1427  for (i = 0; i < adev->sdma.num_instances; i++)    in sdma_v6_0_sw_fini()
    1502  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v6_0_is_idle()
    1581  if (ring->me >= adev->sdma.num_instances) {    in sdma_v6_0_reset_queue()
    [all …]
|
sdma_v7_0.c
    403   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v7_0_gfx_stop()
    458   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v7_0_enable()
    637   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v7_0_gfx_resume()
    664   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v12_0_free_ucode_buffer()
    701   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v7_0_load_microcode()
    760   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v7_0_soft_reset()
    795   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v7_0_check_soft_reset()
    812   if (ring->me >= adev->sdma.num_instances) {    in sdma_v7_0_reset_queue()
    1315  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v7_0_sw_init()
    1371  for (i = 0; i < adev->sdma.num_instances; i++)    in sdma_v7_0_sw_fini()
    [all …]
|
sdma_v2_4.c
    114   for (i = 0; i < adev->sdma.num_instances; i++)    in sdma_v2_4_free_microcode()
    145   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v2_4_init_microcode()
    176   for (i = 0; i < adev->sdma.num_instances; i++)    in sdma_v2_4_init_microcode()
    343   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v2_4_gfx_stop()
    383   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v2_4_enable()
    408   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v2_4_gfx_resume()
    471   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v2_4_gfx_resume()
    817   adev->sdma.num_instances = SDMA_MAX_INSTANCE;    in sdma_v2_4_early_init()
    855   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v2_4_sw_init()
    876   for (i = 0; i < adev->sdma.num_instances; i++)    in sdma_v2_4_sw_fini()
    [all …]
|
sdma_v5_2.c
    477   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v5_2_ctx_switch_enable()
    518   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v5_2_enable()
    700   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v5_2_gfx_resume()
    740   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v5_2_load_microcode()
    795   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v5_2_soft_reset()
    1312  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v5_2_sw_init()
    1320  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v5_2_sw_init()
    1382  for (i = 0; i < adev->sdma.num_instances; i++)    in sdma_v5_2_sw_fini()
    1428  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v5_2_is_idle()
    1464  if (ring->me >= adev->sdma.num_instances) {    in sdma_v5_2_reset_queue()
    [all …]
|
sdma_v5_0.c
    294   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v5_0_init_microcode()
    627   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v5_0_ctx_switch_enable()
    671   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v5_0_enable()
    853   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v5_0_gfx_resume()
    893   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v5_0_load_microcode()
    1401  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v5_0_sw_init()
    1458  for (i = 0; i < adev->sdma.num_instances; i++)    in sdma_v5_0_sw_fini()
    1509  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v5_0_is_idle()
    1550  if (ring->me >= adev->sdma.num_instances) {    in sdma_v5_0_reset_queue()
    1775  for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v5_0_update_medium_grain_clock_gating()
    [all …]
|
vpe_v6_1.c
    78    for (i = 0; i < vpe->num_instances; i++) {    in vpe_v6_1_halt()
    108   for (i = 0; i < vpe->num_instances; i++) {    in vpe_v6_1_set_collaborate_mode()
    133   for (j = 0; j < vpe->num_instances; j++) {    in vpe_v6_1_load_microcode()
    183   for (j = 0; j < vpe->num_instances; j++) {    in vpe_v6_1_load_microcode()
    215   for (i = 0; i < vpe->num_instances; i++) {    in vpe_v6_1_ring_start()
    282   for (i = 0; i < vpe->num_instances; i++) {    in vpe_v_6_1_ring_stop()
|
aqua_vanjaram.c
    50    for (i = 0; i < adev->sdma.num_instances; i++)    in aqua_vanjaram_doorbell_index_init()
    175   num_sdma = adev->sdma.num_instances;    in __aqua_vanjaram_get_xcp_ip_info()
    285   max_res[AMDGPU_XCP_RES_DMA] = adev->sdma.num_instances;    in aqua_vanjaram_get_xcp_res_info()
    542   adev->sdma.num_instances = NUM_SDMA(adev->sdma.sdma_mask);    in aqua_vanjaram_init_soc_config()
    675   pcie_reg_state->common_header.num_instances = 1;    in aqua_vanjaram_read_pcie_state()
    759   xgmi_reg_state->common_header.num_instances = max_xgmi_instances;    in aqua_vanjaram_read_xgmi_state()
    832   wafl_reg_state->common_header.num_instances = max_wafl_instances;    in aqua_vanjaram_read_wafl_state()
    951   usr_reg_state->common_header.num_instances = max_usr_instances;    in aqua_vanjaram_read_usr_state()
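
One detail at aqua_vanjaram.c:542 stands out: here num_instances is neither hard-coded nor incremented but derived from a per-instance enable mask via NUM_SDMA(adev->sdma.sdma_mask). Assuming NUM_SDMA() reduces the mask to a count of set bits (an assumption; the macro's definition is not shown above), the derivation looks like:

```c
#include <stdint.h>
#include <stdio.h>

/* Sketch only: treats the SDMA enable mask as one bit per instance and
 * assumes NUM_SDMA() is effectively a population count of that mask. */
static uint32_t num_sdma_from_mask(uint32_t sdma_mask)
{
	return (uint32_t)__builtin_popcount(sdma_mask); /* GCC/Clang builtin */
}

int main(void)
{
	printf("mask 0xff -> %u instances\n", num_sdma_from_mask(0xffu));
	printf("mask 0x0f -> %u instances\n", num_sdma_from_mask(0x0fu));
	return 0;
}
```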
|
sdma_v4_4.c
    243   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v4_4_reset_ras_error_count()
    256   for (i = 0; i < adev->sdma.num_instances; i++) {    in sdma_v4_4_query_ras_error_count()
|
amdgpu_discovery.c
    1413  if (adev->sdma.num_instances <    in amdgpu_discovery_reg_base_init()
    1415  adev->sdma.num_instances++;    in amdgpu_discovery_reg_base_init()
    1420  adev->sdma.num_instances + 1,    in amdgpu_discovery_reg_base_init()
    1427  adev->vpe.num_instances++;    in amdgpu_discovery_reg_base_init()
    1430  adev->vpe.num_instances + 1,    in amdgpu_discovery_reg_base_init()
    2564  adev->sdma.num_instances = 2;    in amdgpu_discovery_set_ip_blocks()
    2591  adev->sdma.num_instances = 2;    in amdgpu_discovery_set_ip_blocks()
    2618  adev->sdma.num_instances = 1;    in amdgpu_discovery_set_ip_blocks()
    2664  adev->sdma.num_instances = 2;    in amdgpu_discovery_set_ip_blocks()
    2692  adev->sdma.num_instances = 8;    in amdgpu_discovery_set_ip_blocks()
    [all …]
|
amdgpu_vpe.h
    80    uint32_t num_instances;    member
|
amdgpu_sdma.h
    124   int num_instances;    member
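
Taken together, the amdgpu entries above all hang off this one member: each SDMA generation fixes adev->sdma.num_instances once in its early_init hook (either hard-coded, e.g. sdma_v3_0.c:1092/1095 and cik_sdma.c:927, or discovered, amdgpu_discovery.c:1415), and every later stage (microcode load, ring bring-up, stop, idle checks) simply loops over it. A compact sketch of that lifecycle; the struct and function names below are illustrative, not the driver's:

```c
#include <stdio.h>

#define SDMA_MAX_INSTANCE 2	/* illustrative; the real macro lives in amdgpu_sdma.h */

/* Minimal sketch of the shared pattern: num_instances is decided once at
 * early-init time and every subsequent stage iterates 0..num_instances-1. */
struct sdma_sketch {
	int num_instances;
};

static void sdma_early_init_sketch(struct sdma_sketch *sdma, int single_engine)
{
	sdma->num_instances = single_engine ? 1 : SDMA_MAX_INSTANCE;
}

static void sdma_start_sketch(const struct sdma_sketch *sdma)
{
	for (int i = 0; i < sdma->num_instances; i++)
		printf("bring up SDMA instance %d\n", i);
}

int main(void)
{
	struct sdma_sketch sdma;

	sdma_early_init_sketch(&sdma, 0);
	sdma_start_sketch(&sdma);
	return 0;
}
```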
|
amdgpu_vpe.c
    735   for (i = 0; i < vpe->num_instances; i++) {    in vpe_ring_set_wptr()
    897   if (adev->vpe.num_instances) {    in amdgpu_vpe_sysfs_reset_mask_init()
    909   if (adev->vpe.num_instances)    in amdgpu_vpe_sysfs_reset_mask_fini()
|
/drivers/gpu/drm/amd/include/
amdgpu_reg_state.h
    51    uint8_t num_instances;    member
|
/drivers/hwmon/
ibmaem.c
    191   u8 num_instances;    member
    203   u8 num_instances;    member
    510   return ff_resp.num_instances;    in aem_find_aem1_count()
    646   fi_resp->num_instances <= instance_num)    in aem_find_aem2()
|
/drivers/media/platform/samsung/exynos4-is/
fimc-lite.h
    69    unsigned short num_instances;    member
|
/drivers/gpu/drm/amd/amdkfd/
kfd_device.c
    133   kfd->adev->sdma.num_instances *    in kfd_device_info_set_sdma_info()
    1469  return node->adev->sdma.num_instances/(int)node->kfd->num_nodes;    in kfd_get_num_sdma_engines()
    1471  return min(node->adev->sdma.num_instances/(int)node->kfd->num_nodes, 2);    in kfd_get_num_sdma_engines()
    1477  return node->adev->sdma.num_instances/(int)node->kfd->num_nodes -    in kfd_get_num_xgmi_sdma_engines()
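
The kfd_device.c hits show how a partitioned GPU divides its SDMA instances among KFD nodes: one branch returns the raw per-node share (line 1469), another caps the general-purpose engines at 2 (line 1471), and the XGMI engine count is the per-node share minus that (line 1477, whose subtraction is truncated above). A small worked sketch of the capped branch, under the assumption that line 1477 subtracts kfd_get_num_sdma_engines(); the function names here are illustrative:

```c
#include <stdio.h>

/* Sketch: per-node SDMA split as suggested by kfd_device.c:1471/1477.
 * The cap of 2 comes from the snippet above; everything else is illustrative. */
static int num_sdma_engines_sketch(int sdma_instances, int kfd_nodes)
{
	int per_node = sdma_instances / kfd_nodes;

	return per_node < 2 ? per_node : 2; /* min(per_node, 2) */
}

static int num_xgmi_sdma_engines_sketch(int sdma_instances, int kfd_nodes)
{
	return sdma_instances / kfd_nodes -
	       num_sdma_engines_sketch(sdma_instances, kfd_nodes);
}

int main(void)
{
	/* e.g. 16 SDMA instances split across 4 nodes -> 2 general + 2 XGMI each */
	printf("%d general, %d XGMI per node\n",
	       num_sdma_engines_sketch(16, 4),
	       num_xgmi_sdma_engines_sketch(16, 4));
	return 0;
}
```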
|