/drivers/media/tuners/tuner-types.c
     73  .ranges = tuner_temic_pal_ranges,
    156  .ranges = tuner_temic_ntsc_ranges,
    204  .ranges = tuner_alps_tsb_1_ranges,
    267  .ranges = tuner_lg_pal_ranges,
    394  .ranges = tuner_lg_pal_ranges,
    404  .ranges = tuner_lg_pal_ranges,
    418  .ranges = tuner_lg_pal_ranges,
    428  .ranges = tuner_lg_pal_ranges,
    454  .ranges = tuner_lg_pal_ranges,
    464  .ranges = tuner_lg_pal_ranges,
    [all …]
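The repeated `.ranges =` initializers populate static per-tuner parameter tables: each tuner type carries an array of band ranges plus a count, and the driver scans that array to find the band covering a requested frequency. A minimal userspace sketch of the table-and-lookup pattern (struct and field names here are illustrative, not the kernel's tuner definitions):

```c
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-ins for a tuner's band table; not the kernel types. */
struct tuner_range {
	uint16_t limit;   /* upper edge of the band, in divider units */
	uint8_t  config;  /* band/divider byte programmed for this range */
};

struct tuner_params {
	const struct tuner_range *ranges;
	size_t count;
};

static const struct tuner_range demo_pal_ranges[] = {
	{ 2344, 0x02 },   /* low band  */
	{ 7072, 0x04 },   /* mid band  */
	{ 9999, 0x01 },   /* high band */
};

static const struct tuner_params demo_params = {
	.ranges = demo_pal_ranges,
	.count  = sizeof(demo_pal_ranges) / sizeof(demo_pal_ranges[0]),
};

/* Pick the first band whose limit is at or above the requested frequency. */
static const struct tuner_range *pick_band(const struct tuner_params *p,
					   uint16_t freq)
{
	for (size_t i = 0; i < p->count; i++)
		if (freq <= p->ranges[i].limit)
			return &p->ranges[i];

	return &p->ranges[p->count - 1];  /* clamp to the top band */
}

int main(void)
{
	printf("config for 5000: 0x%02x\n", pick_band(&demo_params, 5000)->config);
	return 0;
}
```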
|
/drivers/of/unittest-data/tests-address.dtsi
     10  ranges;
     15  /* ranges here is to make sure we don't use it for
     16   * dma-ranges translation */
     17  ranges = <0x70000000 0x70000000 0x50000000>,
     19  dma-ranges = <0x0 0x20000000 0x40000000>;
     28  ranges = <0x0 0x0 0x80000000 0x0 0x100000>;
     29  dma-ranges = <0x1 0x0 0x0 0x20 0x0>;
     49  ranges = <0xf00baa 0x0 0x0 0xa0000000 0x0 0x100000>,
     94  ranges = <0xf1000000 0x1 0x100000 0x10000 0x10000>,
    125  ranges = <0x82000000 0 0xe8000000
    [all …]
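Each `ranges = <child parent size>` triplet in this test DTS maps a child-bus window onto the parent address space, and `dma-ranges` does the same for inbound DMA; translating an address through an entry is just an offset rebase within the window. A standalone sketch of that arithmetic for the single-cell case (not the kernel's `of_translate_address()` implementation):

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stddef.h>

/* One "ranges" entry with single-cell addresses: <child parent size>. */
struct dt_range {
	uint64_t child_base;
	uint64_t parent_base;
	uint64_t size;
};

/* Rebase addr from the child bus into the parent bus if it hits a window. */
static bool dt_translate(const struct dt_range *r, size_t n,
			 uint64_t addr, uint64_t *out)
{
	for (size_t i = 0; i < n; i++) {
		if (addr >= r[i].child_base &&
		    addr - r[i].child_base < r[i].size) {
			*out = addr - r[i].child_base + r[i].parent_base;
			return true;
		}
	}
	return false;
}

int main(void)
{
	/* Mirrors the shape of "ranges = <0x70000000 0x70000000 0x50000000>". */
	const struct dt_range ranges[] = {
		{ 0x70000000, 0x70000000, 0x50000000 },
	};
	uint64_t parent;

	if (dt_translate(ranges, 1, 0x70001000, &parent))
		printf("child 0x70001000 -> parent 0x%llx\n",
		       (unsigned long long)parent);
	return 0;
}
```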
|
/drivers/gpu/drm/xe/xe_gt_mcr.c
    472  gt->steering[DSS].ranges = xe2lpg_dss_steering_table; in xe_gt_mcr_init_early()
    478  gt->steering[DSS].ranges = xelpg_dss_steering_table; in xe_gt_mcr_init_early()
    481  gt->steering[DSS].ranges = xehpc_dss_steering_table; in xe_gt_mcr_init_early()
    485  gt->steering[LNCF].ranges = xehp_lncf_steering_table; in xe_gt_mcr_init_early()
    486  gt->steering[DSS].ranges = xehp_dss_steering_table; in xe_gt_mcr_init_early()
    490  gt->steering[DSS].ranges = xelp_dss_steering_table; in xe_gt_mcr_init_early()
    571  if (!gt->steering[type].ranges) in xe_gt_mcr_get_nonterminated_steering()
    811  if (gt->steering[i].ranges) { in xe_gt_mcr_steering_dump()
    816  for (int j = 0; gt->steering[i].ranges[j].end; j++) in xe_gt_mcr_steering_dump()
    818  gt->steering[i].ranges[j].start, in xe_gt_mcr_steering_dump()
    [all …]
|
/drivers/gpu/drm/i915/selftests/intel_uncore.c
     29  static int intel_fw_table_check(const struct intel_forcewake_range *ranges, in intel_fw_table_check() argument
     36  for (i = 0, prev = -1; i < num_ranges; i++, ranges++) { in intel_fw_table_check()
     38  if (is_watertight && (prev + 1) != (s32)ranges->start) { in intel_fw_table_check()
     40  __func__, i, ranges->start, ranges->end, prev); in intel_fw_table_check()
     45  if (prev >= (s32)ranges->start) { in intel_fw_table_check()
     47  __func__, i, ranges->start, ranges->end, prev); in intel_fw_table_check()
     52  if (ranges->start >= ranges->end) { in intel_fw_table_check()
     54  __func__, i, ranges->start, ranges->end); in intel_fw_table_check()
     58  prev = ranges->end; in intel_fw_table_check()
    112  const struct intel_forcewake_range *ranges; in intel_uncore_mock_selftests() member
    [all …]
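The selftest walks a forcewake range table and rejects entries that are unsorted, overlapping, or inverted, and in "watertight" mode also rejects gaps between consecutive entries. A compact userspace version of the same checks, with simplified types standing in for `struct intel_forcewake_range`:

```c
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct fw_range { uint32_t start, end; };

/* Reject unsorted, overlapping, or inverted entries; a "watertight" table
 * must also have no gaps between consecutive entries. */
static int check_table(const struct fw_range *r, size_t n, bool watertight)
{
	int64_t prev = -1;

	for (size_t i = 0; i < n; i++) {
		if (watertight && prev + 1 != (int64_t)r[i].start)
			return -1;              /* gap before entry i */
		if (prev >= (int64_t)r[i].start)
			return -1;              /* overlap / not sorted */
		if (r[i].start >= r[i].end)
			return -1;              /* inverted or empty range */
		prev = r[i].end;
	}
	return 0;
}

int main(void)
{
	const struct fw_range ok[]  = { { 0x0000, 0x0fff }, { 0x1000, 0x1fff } };
	const struct fw_range gap[] = { { 0x0000, 0x0fff }, { 0x2000, 0x2fff } };

	printf("ok:  %d\n", check_table(ok, 2, true));   /* 0  */
	printf("gap: %d\n", check_table(gap, 2, true));  /* -1 */
	return 0;
}
```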
|
/drivers/tty/vt/ucs.c
     47  static bool cp_in_range16(u16 cp, const struct ucs_interval16 *ranges, size_t size) in cp_in_range16() argument
     49  if (cp < ranges[0].first || cp > ranges[size - 1].last) in cp_in_range16()
     52  return __inline_bsearch(&cp, ranges, size, sizeof(*ranges), in cp_in_range16()
     56  static bool cp_in_range32(u32 cp, const struct ucs_interval32 *ranges, size_t size) in cp_in_range32() argument
     58  if (cp < ranges[0].first || cp > ranges[size - 1].last) in cp_in_range32()
     61  return __inline_bsearch(&cp, ranges, size, sizeof(*ranges), in cp_in_range32()
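`cp_in_range16()`/`cp_in_range32()` answer "does this code point fall in any interval?" with a cheap bounds check followed by a binary search over a sorted, non-overlapping interval table. The same idea using the C library's `bsearch()` (the kernel uses `__inline_bsearch()` and its own interval types; the intervals below are only examples):

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct interval16 { uint16_t first, last; };

/* bsearch comparator: match when the key falls inside the interval. */
static int interval16_cmp(const void *key, const void *elt)
{
	uint16_t cp = *(const uint16_t *)key;
	const struct interval16 *iv = elt;

	if (cp < iv->first)
		return -1;
	if (cp > iv->last)
		return 1;
	return 0;
}

static bool cp_in_ranges(uint16_t cp, const struct interval16 *ranges, size_t n)
{
	/* Cheap reject before the O(log n) search. */
	if (cp < ranges[0].first || cp > ranges[n - 1].last)
		return false;

	return bsearch(&cp, ranges, n, sizeof(*ranges), interval16_cmp) != NULL;
}

int main(void)
{
	/* A few double-width intervals, purely for illustration. */
	static const struct interval16 wide[] = {
		{ 0x1100, 0x115F }, { 0x2E80, 0x303E }, { 0xAC00, 0xD7A3 },
	};

	printf("%d %d\n",
	       cp_in_ranges(0xAC01, wide, 3),   /* 1 */
	       cp_in_ranges(0x0041, wide, 3));  /* 0 */
	return 0;
}
```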
|
/drivers/tty/vt/gen_ucs_width_table.py
    169  ranges = []
    175  ranges.append((start, prev))
    180  ranges.append((start, prev))
    181  return ranges
    200  def split_ranges_by_size(ranges): argument
    204  for start, end in ranges:
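The generator script collapses a sorted list of code points into `(start, end)` runs: it extends the current run while each value is the previous one plus one and emits the run when the sequence breaks. The script itself is Python; the following C sketch mirrors only that range-building step:

```c
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct run { uint32_t start, end; };

/* Collapse a sorted array of code points into contiguous runs.
 * Returns the number of runs written to out (out must be large enough). */
static size_t build_ranges(const uint32_t *cps, size_t n, struct run *out)
{
	size_t nruns = 0;

	if (n == 0)
		return 0;

	uint32_t start = cps[0], prev = cps[0];

	for (size_t i = 1; i < n; i++) {
		if (cps[i] == prev + 1) {
			prev = cps[i];          /* still contiguous: extend */
			continue;
		}
		out[nruns++] = (struct run){ start, prev };
		start = prev = cps[i];          /* begin a new run */
	}
	out[nruns++] = (struct run){ start, prev };
	return nruns;
}

int main(void)
{
	const uint32_t cps[] = { 0x300, 0x301, 0x302, 0x310, 0x311 };
	struct run runs[8];
	size_t n = build_ranges(cps, 5, runs);

	for (size_t i = 0; i < n; i++)
		printf("0x%X-0x%X\n", runs[i].start, runs[i].end);
	return 0;
}
```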
|
/drivers/mmc/host/dw_mmc-rockchip.c
    294  struct range_t *ranges; in dw_mci_rk3288_execute_tuning() local
    307  sizeof(*ranges), GFP_KERNEL); in dw_mci_rk3288_execute_tuning()
    308  if (!ranges) in dw_mci_rk3288_execute_tuning()
    325  ranges[range_count-1].start = i; in dw_mci_rk3288_execute_tuning()
    328  ranges[range_count-1].end = i; in dw_mci_rk3288_execute_tuning()
    357  ranges[0].start = ranges[range_count-1].start; in dw_mci_rk3288_execute_tuning()
    361  if (ranges[0].start == 0 && ranges[0].end == priv->num_phases - 1) { in dw_mci_rk3288_execute_tuning()
    371  int len = (ranges[i].end - ranges[i].start + 1); in dw_mci_rk3288_execute_tuning()
    382  TUNING_ITERATION_TO_PHASE(ranges[i].start, in dw_mci_rk3288_execute_tuning()
    384  TUNING_ITERATION_TO_PHASE(ranges[i].end, in dw_mci_rk3288_execute_tuning()
    [all …]
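The tuning loop samples every clock phase, records contiguous ranges of passing phases, merges a range that wraps past the last phase back to phase 0, and then centers on the middle of the longest range (the driver additionally rejects the degenerate case where every phase passes). A self-contained sketch of that selection logic, driven by made-up pass/fail data instead of real tuning commands:

```c
#include <stdbool.h>
#include <stdio.h>

#define NUM_PHASES 36

struct range { int start, end; };

/* Given pass[i] for each sampled phase, pick the middle of the longest
 * contiguous passing range, treating the phase space as circular. */
static int pick_phase(const bool *pass, int num_phases)
{
	struct range ranges[NUM_PHASES];
	int range_count = 0;
	int i;

	for (i = 0; i < num_phases; i++) {
		if (pass[i]) {
			if (i == 0 || !pass[i - 1]) {
				range_count++;
				ranges[range_count - 1].start = i;
			}
			ranges[range_count - 1].end = i;
		}
	}

	if (range_count == 0)
		return -1;                       /* nothing passed */

	/* Merge a range that wraps from the end back around to phase 0. */
	if (range_count > 1 && ranges[0].start == 0 &&
	    ranges[range_count - 1].end == num_phases - 1) {
		ranges[0].start = ranges[range_count - 1].start;
		range_count--;
	}

	int best = 0, best_len = 0;
	for (i = 0; i < range_count; i++) {
		int len = ranges[i].end - ranges[i].start + 1;

		if (len < 0)
			len += num_phases;       /* wrapped range */
		if (len > best_len) {
			best_len = len;
			best = i;
		}
	}

	/* Middle of the winning range, modulo the wrap. */
	return (ranges[best].start + best_len / 2) % num_phases;
}

int main(void)
{
	bool pass[NUM_PHASES] = { false };

	for (int i = 10; i <= 20; i++)
		pass[i] = true;

	printf("chosen phase: %d\n", pick_phase(pass, NUM_PHASES)); /* 15 */
	return 0;
}
```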
|
/drivers/net/wireless/intel/iwlegacy/4965-calib.c
    271  min((u32) ranges->auto_corr_max_cck, val); in il4965_sens_energy_cck()
    275  min((u32) ranges->auto_corr_max_cck_mrc, val); in il4965_sens_energy_cck()
    285  max((u32) ranges->auto_corr_min_cck_mrc, val); in il4965_sens_energy_cck()
    311  min((u32) ranges->auto_corr_max_ofdm, val); in il4965_sens_auto_corr_ofdm()
    319  min((u32) ranges->auto_corr_max_ofdm_x1, val); in il4965_sens_auto_corr_ofdm()
    334  max((u32) ranges->auto_corr_min_ofdm, val); in il4965_sens_auto_corr_ofdm()
    444  if (ranges == NULL) in il4965_init_sensitivity()
    462  data->auto_corr_ofdm = ranges->auto_corr_min_ofdm; in il4965_init_sensitivity()
    468  data->nrg_th_cck = ranges->nrg_th_cck; in il4965_init_sensitivity()
    469  data->nrg_th_ofdm = ranges->nrg_th_ofdm; in il4965_init_sensitivity()
    [all …]
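Both this file and the iwlwifi dvm calib.c entry further down clamp adaptive auto-correlation thresholds into per-hardware `[min, max]` windows taken from a sensitivity-ranges structure. The pattern reduces to an ordinary clamp; a minimal illustration (the two field names are borrowed from the excerpt, the limit values are invented):

```c
#include <stdint.h>
#include <stdio.h>

/* Per-hardware sensitivity limits, loosely modelled on the driver's
 * ranges structure; the numbers used below are invented. */
struct sens_ranges {
	uint32_t auto_corr_min_ofdm;
	uint32_t auto_corr_max_ofdm;
};

static uint32_t min_u32(uint32_t a, uint32_t b) { return a < b ? a : b; }
static uint32_t max_u32(uint32_t a, uint32_t b) { return a > b ? a : b; }

/* Nudge the threshold up or down, but never past the hardware limits. */
static uint32_t adjust_ofdm(uint32_t cur, int delta, const struct sens_ranges *r)
{
	uint32_t val = (uint32_t)((int64_t)cur + delta);

	val = min_u32(r->auto_corr_max_ofdm, val);
	val = max_u32(r->auto_corr_min_ofdm, val);
	return val;
}

int main(void)
{
	const struct sens_ranges r = { .auto_corr_min_ofdm = 85,
				       .auto_corr_max_ofdm = 120 };

	printf("%u\n", adjust_ofdm(118, +5, &r));  /* clamped to 120 */
	printf("%u\n", adjust_ofdm(87, -10, &r));  /* clamped to 85  */
	return 0;
}
```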
|
/drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
    472  if (ranges->reader_wm_sets[num_valid_sets].wm_type == WM_TYPE_PSTATE_CHG) { in build_watermark_ranges()
    474  ranges->reader_wm_sets[num_valid_sets].min_drain_clk_mhz = 0; in build_watermark_ranges()
    494  ranges->num_reader_wm_sets = num_valid_sets; in build_watermark_ranges()
    498  ranges->reader_wm_sets[0].min_fill_clk_mhz = PP_SMU_WM_SET_RANGE_CLK_UNCONSTRAINED_MIN; in build_watermark_ranges()
    499  …ranges->reader_wm_sets[ranges->num_reader_wm_sets - 1].max_drain_clk_mhz = PP_SMU_WM_SET_RANGE_CLK… in build_watermark_ranges()
    500  …ranges->reader_wm_sets[ranges->num_reader_wm_sets - 1].max_fill_clk_mhz = PP_SMU_WM_SET_RANGE_CLK_… in build_watermark_ranges()
    503  ranges->num_writer_wm_sets = 1; in build_watermark_ranges()
    504  ranges->writer_wm_sets[0].wm_inst = WM_A; in build_watermark_ranges()
    505  ranges->writer_wm_sets[0].min_fill_clk_mhz = PP_SMU_WM_SET_RANGE_CLK_UNCONSTRAINED_MIN; in build_watermark_ranges()
    519  build_watermark_ranges(clk_mgr_base->bw_params, &clk_mgr_base->ranges); in rn_notify_wm_ranges()
    [all …]
|
/drivers/net/wireless/intel/iwlwifi/dvm/calib.c
    288  min((u32)ranges->auto_corr_max_cck, val); in iwl_sens_energy_cck()
    292  min((u32)ranges->auto_corr_max_cck_mrc, val); in iwl_sens_energy_cck()
    300  max((u32)ranges->auto_corr_min_cck, val); in iwl_sens_energy_cck()
    303  max((u32)ranges->auto_corr_min_cck_mrc, val); in iwl_sens_energy_cck()
    331  min((u32)ranges->auto_corr_max_ofdm, val); in iwl_sens_auto_corr_ofdm()
    339  min((u32)ranges->auto_corr_max_ofdm_x1, val); in iwl_sens_auto_corr_ofdm()
    354  max((u32)ranges->auto_corr_min_ofdm, val); in iwl_sens_auto_corr_ofdm()
    553  if (ranges == NULL) in iwl_init_sensitivity()
    577  data->nrg_th_cck = ranges->nrg_th_cck; in iwl_init_sensitivity()
    578  data->nrg_th_ofdm = ranges->nrg_th_ofdm; in iwl_init_sensitivity()
    [all …]
|
/drivers/gpu/drm/i915/display/intel_dmc_wl.c
    229  const struct intel_dmc_wl_range ranges[]) in intel_dmc_wl_reg_in_range() argument
    233  for (int i = 0; ranges[i].start; i++) { in intel_dmc_wl_reg_in_range()
    234  u32 end = ranges[i].end ?: ranges[i].start; in intel_dmc_wl_reg_in_range()
    236  if (ranges[i].start <= offset && offset <= end) in intel_dmc_wl_reg_in_range()
    247  const struct intel_dmc_wl_range *ranges; in intel_dmc_wl_check_range() local
    265  ranges = xe3lpd_dc3co_dmc_ranges; in intel_dmc_wl_check_range()
    269  ranges = xe3lpd_dc5_dc6_dmc_ranges; in intel_dmc_wl_check_range()
    272  ranges = NULL; in intel_dmc_wl_check_range()
    275  if (ranges && intel_dmc_wl_reg_in_range(reg, ranges)) in intel_dmc_wl_check_range()
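`intel_dmc_wl_reg_in_range()` walks a table terminated by a zero `start`, where a zero `end` marks a single-register entry (`end ?: start` is the GCC conditional-omission extension). A portable sketch of the same lookup, with invented register offsets:

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Table entry: end == 0 means "single register at start";
 * start == 0 terminates the table. */
struct wl_range { uint32_t start, end; };

static bool reg_in_ranges(uint32_t offset, const struct wl_range *ranges)
{
	for (int i = 0; ranges[i].start; i++) {
		uint32_t end = ranges[i].end ? ranges[i].end : ranges[i].start;

		if (ranges[i].start <= offset && offset <= end)
			return true;
	}
	return false;
}

int main(void)
{
	/* Offsets are invented, purely to exercise the lookup. */
	static const struct wl_range demo_ranges[] = {
		{ 0x45500, 0x45570 },   /* a register block   */
		{ 0x60000, 0 },         /* a single register  */
		{ 0, 0 },               /* table terminator   */
	};

	printf("%d %d %d\n",
	       reg_in_ranges(0x45510, demo_ranges),   /* 1 */
	       reg_in_ranges(0x60000, demo_ranges),   /* 1 */
	       reg_in_ranges(0x70000, demo_ranges));  /* 0 */
	return 0;
}
```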
|
/drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c
    465  struct pp_smu_wm_range_sets *ranges) in pp_rv_set_wm_ranges() argument
    478  if (ranges->reader_wm_sets[i].wm_inst > 3) in pp_rv_set_wm_ranges()
    482  ranges->reader_wm_sets[i].wm_inst; in pp_rv_set_wm_ranges()
    488  ranges->reader_wm_sets[i].max_fill_clk_mhz * 1000; in pp_rv_set_wm_ranges()
    490  ranges->reader_wm_sets[i].min_fill_clk_mhz * 1000; in pp_rv_set_wm_ranges()
    494  if (ranges->writer_wm_sets[i].wm_inst > 3) in pp_rv_set_wm_ranges()
    498  ranges->writer_wm_sets[i].wm_inst; in pp_rv_set_wm_ranges()
    500  ranges->writer_wm_sets[i].max_fill_clk_mhz * 1000; in pp_rv_set_wm_ranges()
    502  ranges->writer_wm_sets[i].min_fill_clk_mhz * 1000; in pp_rv_set_wm_ranges()
    554  struct pp_smu_wm_range_sets *ranges) in pp_nv_set_wm_ranges() argument
    [all …]
|
/drivers/power/supply/bd99954-charger.c
    806  int ranges; member
    813  int ranges; member
    830  .ranges = 2, in bd9995x_fw_probe()
    835  .ranges = 2, in bd9995x_fw_probe()
    840  .ranges = 2, in bd9995x_fw_probe()
    845  .ranges = 2, in bd9995x_fw_probe()
    850  .ranges = 2, in bd9995x_fw_probe()
    855  .ranges = 2, in bd9995x_fw_probe()
    860  .ranges = 1, in bd9995x_fw_probe()
    865  .ranges = 2, in bd9995x_fw_probe()
    [all …]
|
/drivers/pci/of_property.c
    419  u32 *ranges; in of_pci_host_bridge_prop_ranges() local
    438  ranges = kcalloc(n_range, in of_pci_host_bridge_prop_ranges()
    440  n_addr_cells) * sizeof(*ranges), in of_pci_host_bridge_prop_ranges()
    442  if (!ranges) in of_pci_host_bridge_prop_ranges()
    452  of_pci_set_address(NULL, &ranges[ranges_sz], in of_pci_host_bridge_prop_ranges()
    458  ranges[ranges_sz++] = upper_32_bits(val64); in of_pci_host_bridge_prop_ranges()
    459  ranges[ranges_sz++] = lower_32_bits(val64); in of_pci_host_bridge_prop_ranges()
    463  ranges[ranges_sz] = upper_32_bits(val64); in of_pci_host_bridge_prop_ranges()
    464  ranges[ranges_sz + 1] = lower_32_bits(val64); in of_pci_host_bridge_prop_ranges()
    468  ret = of_changeset_add_prop_u32_array(ocs, np, "ranges", ranges, in of_pci_host_bridge_prop_ranges()
    [all …]
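When the PCI core synthesizes a host-bridge `ranges` property, each 64-bit address and size is emitted as two 32-bit cells, high word first, via `upper_32_bits()`/`lower_32_bits()`. A small standalone illustration of that packing (the real code builds full PCI address/flags cells around these words; this only shows the 64-bit split):

```c
#include <stdint.h>
#include <stdio.h>

static uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }
static uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }

int main(void)
{
	/* One window: pack a 64-bit CPU address and size as <hi lo> cell
	 * pairs, the way a two-address-cell "ranges" entry is laid out.
	 * The addresses are invented. */
	uint64_t cpu_addr = 0x00000080fe000000ULL;
	uint64_t size     = 0x0000000001000000ULL;
	uint32_t cells[4];
	unsigned int n = 0;

	cells[n++] = upper_32_bits(cpu_addr);
	cells[n++] = lower_32_bits(cpu_addr);
	cells[n++] = upper_32_bits(size);
	cells[n++] = lower_32_bits(size);

	for (unsigned int i = 0; i < n; i++)
		printf("cell[%u] = 0x%08x\n", i, cells[i]);
	return 0;
}
```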
|
/drivers/edac/amd64_edac.h
    135  #define dram_rw(pvt, i) ((u8)(pvt->ranges[i].base.lo & 0x3))
    136  #define dram_intlv_sel(pvt, i) ((u8)((pvt->ranges[i].lim.lo >> 8) & 0x7))
    137  #define dram_dst_node(pvt, i) ((u8)(pvt->ranges[i].lim.lo & 0x7))
    352  struct dram_range ranges[DRAM_RANGES]; member
    412  u64 addr = ((u64)pvt->ranges[i].base.lo & 0xffff0000) << 8; in get_dram_base()
    417  return (((u64)pvt->ranges[i].base.hi & 0x000000ff) << 40) | addr; in get_dram_base()
    422  u64 lim = (((u64)pvt->ranges[i].lim.lo & 0xffff0000) << 8) | 0x00ffffff; in get_dram_limit()
    427  return (((u64)pvt->ranges[i].lim.hi & 0x000000ff) << 40) | lim; in get_dram_limit()
    504  return (u8) (pvt->ranges[i].base.lo >> 8) & 0x7; in dram_intlv_en()
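`get_dram_base()`/`get_dram_limit()` reassemble a 48-bit DRAM address from a hi/lo register pair: bits [39:24] come from the low register, bits [47:40] from the high one, and the limit gets its low 24 bits filled with ones because it is inclusive. The same bit manipulation outside the kernel (the register values below are invented):

```c
#include <stdint.h>
#include <stdio.h>

/* Shapes mirror the hi/lo register pair used in the excerpt above. */
struct reg_pair { uint32_t lo, hi; };

static uint64_t dram_base(const struct reg_pair *base)
{
	uint64_t addr = ((uint64_t)base->lo & 0xffff0000) << 8; /* bits 39:24 */

	return (((uint64_t)base->hi & 0xff) << 40) | addr;      /* bits 47:40 */
}

static uint64_t dram_limit(const struct reg_pair *lim)
{
	/* Low 24 bits are implicit ones: the limit is inclusive. */
	uint64_t l = (((uint64_t)lim->lo & 0xffff0000) << 8) | 0x00ffffff;

	return (((uint64_t)lim->hi & 0xff) << 40) | l;
}

int main(void)
{
	const struct reg_pair base = { .lo = 0x00100000, .hi = 0x00000001 };
	const struct reg_pair lim  = { .lo = 0x7fff0000, .hi = 0x00000001 };

	printf("base  = 0x%012llx\n", (unsigned long long)dram_base(&base));
	printf("limit = 0x%012llx\n", (unsigned long long)dram_limit(&lim));
	return 0;
}
```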
|
/drivers/accel/ivpu/ivpu_hw.c
    153  ivpu_hw_range_init(&vdev->hw->ranges.global, 0x80000000, SZ_512M); in memory_ranges_init()
    154  ivpu_hw_range_init(&vdev->hw->ranges.user, 0x88000000, 511 * SZ_1M); in memory_ranges_init()
    155  ivpu_hw_range_init(&vdev->hw->ranges.shave, 0x180000000, SZ_2G); in memory_ranges_init()
    156  ivpu_hw_range_init(&vdev->hw->ranges.dma, 0x200000000, SZ_128G); in memory_ranges_init()
    158  ivpu_hw_range_init(&vdev->hw->ranges.global, 0x80000000, SZ_512M); in memory_ranges_init()
    159  ivpu_hw_range_init(&vdev->hw->ranges.shave, 0x80000000, SZ_2G); in memory_ranges_init()
    160  ivpu_hw_range_init(&vdev->hw->ranges.user, 0x100000000, SZ_256G); in memory_ranges_init()
    161  vdev->hw->ranges.dma = vdev->hw->ranges.user; in memory_ranges_init()
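`ivpu_hw_range_init()` records an address window from a base and a size, and later code (see the ivpu_fw.c entry below) reads bases and sizes back out of `ranges.global`, `ranges.user`, and friends. A tiny start/size range helper in the same spirit (types and helpers here are illustrative, not the ivpu definitions):

```c
#include <stdint.h>
#include <stdio.h>

/* An address window kept as [start, end); illustrative, not the ivpu types. */
struct addr_range { uint64_t start, end; };

static void range_init(struct addr_range *r, uint64_t start, uint64_t size)
{
	r->start = start;
	r->end = start + size;
}

static uint64_t range_size(const struct addr_range *r)
{
	return r->end - r->start;
}

int main(void)
{
	struct addr_range global, user;

	/* Same shape as the init code above: carve fixed windows out of
	 * the device address space. */
	range_init(&global, 0x80000000ULL, 512ULL << 20);  /* 512 MiB */
	range_init(&user, 0x100000000ULL, 256ULL << 30);   /* 256 GiB */

	printf("global: 0x%llx + 0x%llx\n",
	       (unsigned long long)global.start,
	       (unsigned long long)range_size(&global));
	printf("user:   0x%llx + 0x%llx\n",
	       (unsigned long long)user.start,
	       (unsigned long long)range_size(&user));
	return 0;
}
```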
|
/drivers/accel/ivpu/ivpu_fw.c
    305  ivpu_hw_range_init(&vdev->hw->ranges.global, start, size); in ivpu_fw_update_global_range()
    358  fw->mem_shave_nn = ivpu_bo_create(vdev, &vdev->gctx, &vdev->hw->ranges.shave, in ivpu_fw_mem_init()
    591  boot_params->shared_region_base = vdev->hw->ranges.global.start; in ivpu_fw_boot_params_setup()
    592  boot_params->shared_region_size = vdev->hw->ranges.global.end - in ivpu_fw_boot_params_setup()
    593  vdev->hw->ranges.global.start; in ivpu_fw_boot_params_setup()
    602  boot_params->global_aliased_pio_base = vdev->hw->ranges.user.start; in ivpu_fw_boot_params_setup()
    603  boot_params->global_aliased_pio_size = ivpu_hw_range_size(&vdev->hw->ranges.user); in ivpu_fw_boot_params_setup()
    612  ADDR_TO_L2_CACHE_CFG(vdev->hw->ranges.shave.start); in ivpu_fw_boot_params_setup()
|
/drivers/dpll/zl3073x/prop.c
    194  struct dpll_pin_frequency *ranges; in zl3073x_pin_props_get() local
    274  ranges = kcalloc(num_freqs, sizeof(*ranges), GFP_KERNEL); in zl3073x_pin_props_get()
    275  if (!ranges) { in zl3073x_pin_props_get()
    287  ranges[j] = freq; in zl3073x_pin_props_get()
    293  props->dpll_props.freq_supported = ranges; in zl3073x_pin_props_get()
|
/drivers/of/fdt_address.c
    101  const __be32 *ranges; in fdt_translate_one() local
    106  ranges = fdt_getprop(blob, parent, rprop, &rlen); in fdt_translate_one()
    107  if (!ranges) in fdt_translate_one()
    121  for (; rlen >= rone; rlen -= rone, ranges += rone) { in fdt_translate_one()
    122  offset = bus->map(addr, ranges, na, ns, pna); in fdt_translate_one()
    130  memcpy(addr, ranges + na, 4 * pna); in fdt_translate_one()
|
/drivers/of/address.c
    424  const __be32 *ranges; in of_translate_one() local
    448  ranges = of_get_property(parent, rprop, &rlen); in of_translate_one()
    454  if (ranges == NULL || rlen == 0) { in of_translate_one()
    476  memcpy(addr, ranges + na, 4 * pna); in of_translate_one()
    896  const __be32 *ranges = NULL; in of_dma_get_range() local
    907  if (ranges && len > 0) in of_dma_get_range()
    911  if (found_dma_ranges && !ranges) in of_dma_get_range()
    919  if (!node || !ranges) { in of_dma_get_range()
    975  const __be32 *ranges; in of_dma_get_max_cpu_address() local
    982  ranges = of_get_property(np, "dma-ranges", &len); in of_dma_get_max_cpu_address()
    [all …]
|
/drivers/dax/bus.c
    195  size += range_len(&dev_dax->ranges[i].range); in dev_dax_size()
    444  struct range *range = &dev_dax->ranges[i].range; in trim_dev_dax_range()
    454  kfree(dev_dax->ranges); in trim_dev_dax_range()
    455  dev_dax->ranges = NULL; in trim_dev_dax_range()
    720  return &dev_dax->ranges[mapping->range_id]; in get_dax_range()
    819  dev_dax->ranges[range_id].mapping = mapping; in devm_register_dax_mapping()
    845  struct dev_dax_range *ranges; in alloc_dev_dax_range() local
    865  ranges = krealloc(dev_dax->ranges, sizeof(*ranges) in alloc_dev_dax_range()
    867  if (!ranges) { in alloc_dev_dax_range()
    874  dev_dax->ranges = ranges; in alloc_dev_dax_range()
    [all …]
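`alloc_dev_dax_range()` grows the per-device `ranges` array with `krealloc()` and only commits the new pointer once the allocation has succeeded, so the existing array stays valid on failure. The same discipline with `realloc()`:

```c
#include <stdio.h>
#include <stdlib.h>

struct range { unsigned long long start, end; };

struct dev_ranges {
	struct range *ranges;
	size_t nr_ranges;
};

/* Append a range, growing the array by one element.  On allocation
 * failure the existing array is left untouched. */
static int add_range(struct dev_ranges *d, unsigned long long start,
		     unsigned long long end)
{
	struct range *ranges;

	ranges = realloc(d->ranges, sizeof(*ranges) * (d->nr_ranges + 1));
	if (!ranges)
		return -1;               /* d->ranges is still valid here */

	ranges[d->nr_ranges] = (struct range){ start, end };
	d->ranges = ranges;
	d->nr_ranges++;
	return 0;
}

int main(void)
{
	struct dev_ranges d = { 0 };

	add_range(&d, 0x100000000ULL, 0x13fffffffULL);
	add_range(&d, 0x140000000ULL, 0x17fffffffULL);

	for (size_t i = 0; i < d.nr_ranges; i++)
		printf("range %zu: %#llx-%#llx\n", i,
		       d.ranges[i].start, d.ranges[i].end);

	free(d.ranges);
	return 0;
}
```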
|
/drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
   1326  struct pp_smu_wm_range_sets ranges = {0}; in set_wm_ranges() local
   1329  ranges.num_reader_wm_sets = 0; in set_wm_ranges()
   1332  ranges.reader_wm_sets[0].wm_inst = 0; in set_wm_ranges()
   1338  ranges.num_reader_wm_sets = 1; in set_wm_ranges()
   1341  ranges.reader_wm_sets[i].wm_inst = i; in set_wm_ranges()
   1345  dcn301_fpu_set_wm_ranges(i, &ranges, loaded_bb); in set_wm_ranges()
   1347  ranges.num_reader_wm_sets = i + 1; in set_wm_ranges()
   1351  …ranges.reader_wm_sets[ranges.num_reader_wm_sets - 1].max_fill_clk_mhz = PP_SMU_WM_SET_RANGE_CLK_UN… in set_wm_ranges()
   1354  ranges.num_writer_wm_sets = 1; in set_wm_ranges()
   1356  ranges.writer_wm_sets[0].wm_inst = 0; in set_wm_ranges()
   [all …]
|
/drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
   1374  struct pp_smu_wm_range_sets ranges = {0}; in dcn_bw_notify_pplib_of_wm_ranges() local
   1390  ranges.num_reader_wm_sets = WM_SET_COUNT; in dcn_bw_notify_pplib_of_wm_ranges()
   1392  ranges.reader_wm_sets[0].wm_inst = WM_A; in dcn_bw_notify_pplib_of_wm_ranges()
   1397  ranges.writer_wm_sets[0].wm_inst = WM_A; in dcn_bw_notify_pplib_of_wm_ranges()
   1416  ranges.reader_wm_sets[1] = ranges.writer_wm_sets[0]; in dcn_bw_notify_pplib_of_wm_ranges()
   1417  ranges.reader_wm_sets[1].wm_inst = WM_B; in dcn_bw_notify_pplib_of_wm_ranges()
   1419  ranges.reader_wm_sets[2] = ranges.writer_wm_sets[0]; in dcn_bw_notify_pplib_of_wm_ranges()
   1420  ranges.reader_wm_sets[2].wm_inst = WM_C; in dcn_bw_notify_pplib_of_wm_ranges()
   1422  ranges.reader_wm_sets[3] = ranges.writer_wm_sets[0]; in dcn_bw_notify_pplib_of_wm_ranges()
   1423  ranges.reader_wm_sets[3].wm_inst = WM_D; in dcn_bw_notify_pplib_of_wm_ranges()
   [all …]
|
/drivers/gpu/drm/amd/display/dc/dm_pp_smu.h
    114  struct pp_smu_wm_range_sets *ranges);
    219  struct pp_smu_wm_range_sets *ranges);
    283  struct pp_smu_wm_range_sets *ranges);
    302  struct pp_smu_wm_range_sets *ranges);
|
/drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
   1333  if (ranges[i].base_address >= ranges[i].limit_address) { in amdgpu_gmc_get_nps_memranges()
   1338  ranges[i].limit_address); in amdgpu_gmc_get_nps_memranges()
   1346  ranges[i].base_address) <= in amdgpu_gmc_get_nps_memranges()
   1352  ranges[j].base_address, in amdgpu_gmc_get_nps_memranges()
   1353  ranges[j].limit_address, in amdgpu_gmc_get_nps_memranges()
   1354  ranges[i].base_address, in amdgpu_gmc_get_nps_memranges()
   1355  ranges[i].limit_address); in amdgpu_gmc_get_nps_memranges()
   1362  (ranges[i].base_address - in amdgpu_gmc_get_nps_memranges()
   1366  (ranges[i].limit_address - in amdgpu_gmc_get_nps_memranges()
   1370  ranges[i].limit_address - ranges[i].base_address + 1; in amdgpu_gmc_get_nps_memranges()
   [all …]
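`amdgpu_gmc_get_nps_memranges()` sanity-checks the firmware-reported ranges: each must have `base < limit`, and no range may overlap an earlier one, before sizes are derived as `limit - base + 1`. A compact version of those two checks (limits treated as inclusive, as in the excerpt; the ranges below are invented):

```c
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* One firmware-reported memory range with an inclusive limit. */
struct mem_range { uint64_t base_address, limit_address; };

static bool ranges_valid(const struct mem_range *r, size_t n)
{
	for (size_t i = 0; i < n; i++) {
		if (r[i].base_address >= r[i].limit_address) {
			printf("range %zu is inverted\n", i);
			return false;
		}
		for (size_t j = 0; j < i; j++) {
			/* Two inclusive ranges overlap unless one ends
			 * strictly before the other begins. */
			if (!(r[i].limit_address < r[j].base_address ||
			      r[j].limit_address < r[i].base_address)) {
				printf("range %zu overlaps range %zu\n", i, j);
				return false;
			}
		}
	}
	return true;
}

int main(void)
{
	const struct mem_range ok[] = {
		{ 0x000000000, 0x0ffffffff },
		{ 0x100000000, 0x1ffffffff },
	};
	const struct mem_range bad[] = {
		{ 0x000000000, 0x17fffffff },
		{ 0x100000000, 0x1ffffffff },
	};

	printf("ok:  %d\n", ranges_valid(ok, 2));   /* 1 */
	printf("bad: %d\n", ranges_valid(bad, 2));  /* 0 */
	return 0;
}
```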
|