Searched refs:NUM_XCC (Results 1 – 18 of 18) sorted by relevance
89 num_xcc = NUM_XCC(xcp_mgr->adev->gfx.xcc_mask); in __aqua_vanjaram_calc_xcp_mode()
144 num_xcc = NUM_XCC(xcp_mgr->adev->gfx.xcc_mask); in __aqua_vanjaram_get_xcc_per_xcp()
180 num_xcp = NUM_XCC(adev->gfx.xcc_mask) / num_xcc_xcp; in __aqua_vanjaram_get_xcp_ip_info()
258 *num_xcp = NUM_XCC(adev->gfx.xcc_mask); in __aqua_vanjaram_get_px_mode_info()
284 max_res[AMDGPU_XCP_RES_XCC] = NUM_XCC(adev->gfx.xcc_mask); in aqua_vanjaram_get_xcp_res_info()
319 num_xcc = NUM_XCC(xcp_mgr->adev->gfx.xcc_mask); in __aqua_vanjaram_get_auto_mode()
353 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in __aqua_vanjaram_is_valid_mode()
393 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in aqua_vanjaram_switch_partition_mode()
70 xcc_mask = GENMASK(NUM_XCC(adev->gfx.xcc_mask) - 1, 0); in gfxhub_v1_2_setup_vm_pt_regs()
441 xcc_mask = GENMASK(NUM_XCC(adev->gfx.xcc_mask) - 1, 0); in gfxhub_v1_2_gart_enable()
482 xcc_mask = GENMASK(NUM_XCC(adev->gfx.xcc_mask) - 1, 0); in gfxhub_v1_2_gart_disable()
540 xcc_mask = GENMASK(NUM_XCC(adev->gfx.xcc_mask) - 1, 0); in gfxhub_v1_2_set_fault_enable_default()
589 xcc_mask = GENMASK(NUM_XCC(adev->gfx.xcc_mask) - 1, 0); in gfxhub_v1_2_init()
342 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_set_kiq_pm4_funcs()
351 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_init_golden_registers()
627 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_mec_init()
805 NUM_XCC(adev->gfx.xcc_mask) / in gfx_v9_4_3_switch_compute_partition()
1014 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_alloc_ip_dump()
1066 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_sw_init()
1189 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_sw_fini()
1345 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_constants_init()
1438 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_init_rlcg_reg_access_ctrl()
1531 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_rlc_stop()
[all …]
210 int num_xcc = adev->gfx.xcc_mask ? NUM_XCC(adev->gfx.xcc_mask) : 1; in amdgpu_gfx_compute_queue_acquire()
1044 int num_xcc = adev->gfx.xcc_mask ? NUM_XCC(adev->gfx.xcc_mask) : 1; in amdgpu_gfx_ras_error_func()
1376 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in amdgpu_gfx_set_compute_partition()
1508 int num_xcc = NUM_XCC(adev->gfx.xcc_mask); in amdgpu_gfx_run_cleaner_shader()
74 #define NUM_XCC(x) hweight16(x) macro
618 switch (NUM_XCC(adev->gfx.xcc_mask)) { in amdgpu_xcp_update_supported_modes()
1552 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in amdgpu_gmc_init_acpi_mem_ranges()
1945 NUM_XCC(adev->gfx.xcc_mask)); in gmc_v9_0_sw_init()
361 int num_xcc = adev->gfx.xcc_mask ? NUM_XCC(adev->gfx.xcc_mask) : 1; in amdgpu_ras_instance_mask_check()
141 NUM_XCC(node->xcc_mask), in allocate_mqd()
376 *ctl_stack_size = m->cp_hqd_cntl_stack_size * NUM_XCC(mm->dev->xcc_mask); in get_checkpoint_info()
400 for (xcc = 0; xcc < NUM_XCC(mm->dev->xcc_mask); xcc++) { in checkpoint_mqd_v9_4_3()
571 for (xcc = 0; xcc < NUM_XCC(mm->dev->xcc_mask); xcc++) { in init_mqd_hiq_v9_4_3()
688 for (xcc = 0; xcc < NUM_XCC(mm->dev->xcc_mask); xcc++) { in init_mqd_v9_4_3()
717 NUM_XCC(mm->dev->xcc_mask); in init_mqd_v9_4_3()
750 for (xcc = 0; xcc < NUM_XCC(mm->dev->xcc_mask); xcc++) { in update_mqd_v9_4_3()
795 num_xcc = NUM_XCC(mm->dev->xcc_mask); in restore_mqd_v9_4_3()
883 for (xcc = 0; xcc < NUM_XCC(mm->dev->xcc_mask); xcc++) { in get_wave_state_v9_4_3()
309 * NUM_XCC(pdd->dev->xcc_mask); in kfd_queue_acquire_buffers()
356 * NUM_XCC(pdd->dev->xcc_mask); in kfd_queue_release_buffers()
441 cu_num = props->simd_count / props->simd_per_cu / NUM_XCC(dev->gpu->xcc_mask); in kfd_queue_ctx_save_restore_size()
80 NUM_XCC(dev->xcc_mask); in allocate_sdma_mqd()
109 int inc = cu_inc * NUM_XCC(mm->dev->xcc_mask); in mqd_symmetrically_map_cu_mask()
463 NUM_XCC(dev->gpu->xcc_mask)) : 0); in node_show()
535 NUM_XCC(dev->gpu->xcc_mask)); in node_show()
1105 buf[7] = (ffs(gpu->xcc_mask) - 1) | (NUM_XCC(gpu->xcc_mask) << 16); in kfd_generate_gpu_id()
1683 int num_xcc = NUM_XCC(knode->xcc_mask); in fill_in_l2_l3_pcache()
1815 end = start + NUM_XCC(kdev->xcc_mask); in kfd_fill_cache_non_crat_info()
1054 set_queue_properties_from_criu(&qp, q_data, NUM_XCC(pdd->dev->adev->gfx.xcc_mask)); in kfd_criu_restore_queue()
1130 num_xccs = NUM_XCC(q->device->xcc_mask); in pqm_debugfs_mqds()
1098 device_info.num_xcc = NUM_XCC(pdd->dev->xcc_mask); in kfd_dbg_trap_device_snapshot()
1861 NUM_XCC(dqm->dev->xcc_mask); in start_cpsch()
2728 *mqd_size = mqd_mgr->mqd_size * NUM_XCC(mqd_mgr->dev->xcc_mask); in get_queue_checkpoint_info()
2896 NUM_XCC(dqm->dev->xcc_mask)); in allocate_hiq_sdma_mqd()
867 (1U << NUM_XCC(kfd->adev->gfx.xcc_mask)) - 1; in kgd2kfd_device_init()
317 wave_cnt += (NUM_XCC(dev->xcc_mask) * in kfd_get_cu_occupancy()
Completed in 68 milliseconds
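
For context on what the hits above are doing: NUM_XCC() is a population count over the 16-bit XCC bitmask (hweight16() returns the number of set bits), so it yields how many XCC instances the device exposes. Two idioms recur in the results: looping for (xcc = 0; xcc < NUM_XCC(mask); xcc++) to program each XCC, and rebuilding a contiguous mask of the same width with GENMASK(NUM_XCC(mask) - 1, 0) or (1U << NUM_XCC(mask)) - 1. The snippet below is a minimal userspace sketch of those idioms, not driver code: __builtin_popcount() stands in for the kernel's hweight16(), and the mask value is a made-up example.

#include <stdint.h>
#include <stdio.h>

/* Userspace stand-in for the kernel's hweight16(): count set bits. */
#define NUM_XCC(x) __builtin_popcount((uint16_t)(x))

int main(void)
{
	/* Hypothetical mask: bit i set means XCC instance i is present. */
	uint16_t xcc_mask = 0x00ff;

	int num_xcc = NUM_XCC(xcc_mask);
	printf("num_xcc = %d\n", num_xcc);		/* 8 */

	/* Contiguous mask of the same width, equivalent to
	 * GENMASK(num_xcc - 1, 0) or (1U << num_xcc) - 1 in the hits above. */
	uint16_t contig = (uint16_t)((1U << num_xcc) - 1);
	printf("contig mask = 0x%04x\n", contig);	/* 0x00ff */

	/* Per-XCC loop, as in init_mqd_v9_4_3() and similar functions. */
	for (int xcc = 0; xcc < num_xcc; xcc++)
		printf("program XCC instance %d\n", xcc);

	return 0;
}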