
Searched refs:range (Results 1 – 25 of 606) sorted by relevance


/drivers/soc/ti/
knav_qmss_acc.c
90 range = _instdata; in knav_acc_int_handler()
92 kdev = range->kdev; in knav_acc_int_handler()
94 acc = range->acc; in knav_acc_int_handler()
205 acc = range->acc; in knav_range_setup_acc_irq()
233 range); in knav_range_setup_acc_irq()
311 acc = range->acc; in knav_acc_setup_cmd()
449 devm_kfree(range->kdev->dev, range->acc); in knav_acc_free_range()
530 range->name); in knav_init_acc_range()
536 range->name); in knav_init_acc_range()
547 range->acc = devm_kcalloc(kdev->dev, channels, sizeof(*range->acc), in knav_init_acc_range()
[all …]
knav_qmss_queue.c
140 struct knav_range_info *range = inst->range; in knav_queue_free_irq() local
242 struct knav_range_info *range = inst->range; in __knav_queue_open() local
245 if (range->ops && range->ops->open_queue) in __knav_queue_open()
317 struct knav_range_info *range = inst->range; in knav_queue_set_notify() local
319 if (range->ops && range->ops->set_notify) in knav_queue_set_notify()
566 range->ops->close_queue(range, inst); in knav_queue_close()
1262 range->num_irqs = min(range->num_irqs, range->num_queues); in knav_setup_queue_range()
1295 range->name, range->queue_base, in knav_setup_queue_range()
1334 range->ops->free_range(range); in knav_free_queue_range()
1723 inst->range = range; in knav_queue_init_queue()
[all …]
/drivers/gpu/drm/
drm_gpusvm.c
694 range = kzalloc(sizeof(*range), GFP_KERNEL); in drm_gpusvm_range_alloc()
696 if (!range) in drm_gpusvm_range_alloc()
708 return range; in drm_gpusvm_range_alloc()
828 if (range) { in drm_gpusvm_range_chunk_size()
977 if (range) in drm_gpusvm_range_find_or_insert()
998 if (IS_ERR(range)) { in drm_gpusvm_range_find_or_insert()
1011 return range; in drm_gpusvm_range_find_or_insert()
1144 return range; in drm_gpusvm_range_get()
1165 kfree(range); in drm_gpusvm_range_destroy()
1200 return range->flags.has_devmem_pages || range->flags.has_dma_mapping; in drm_gpusvm_range_pages_valid()
[all …]
drm_gpuvm.c
903 u64 end = addr + range; in drm_gpuvm_in_mm_range()
913 u64 end = addr + range; in drm_gpuvm_in_kernel_node()
1027 gpuvm->mm_range = range; in drm_gpuvm_init()
1217 u64 end = addr + range; in drm_gpuvm_prepare_range()
1763 u64 range = va->va.range; in drm_gpuva_insert() local
1920 va->va.range != range) in drm_gpuva_find()
2057 u64 addr, u64 range, in op_map_cb() argument
2064 op.map.va.range = range; in op_map_cb()
2119 u64 range = va->va.range; in __drm_gpuvm_sm_map() local
2144 .va.range = range - req_range, in __drm_gpuvm_sm_map()
[all …]
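
In drm_gpuvm, a VA span is carried as a base address plus a size (va.addr, va.range), so the exclusive end is addr + range. Below is a minimal standalone sketch of the containment arithmetic visible in the drm_gpuvm.c hits; the helper name and parameters are illustrative, not the driver's API.

```c
#include <stdbool.h>
#include <stdint.h>

/* Illustrative only: does [addr, addr + range) fall inside the managed
 * VA window [mm_start, mm_start + mm_range)?  Mirrors the
 * "end = addr + range" expression seen above.
 */
static bool va_span_in_window(uint64_t addr, uint64_t range,
			      uint64_t mm_start, uint64_t mm_range)
{
	uint64_t end = addr + range;
	uint64_t mm_end = mm_start + mm_range;

	return addr >= mm_start && end <= mm_end;
}
```
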
/drivers/dax/
kmem.c
34 struct range *range = &dax_range->range; in dax_kmem_range() local
41 r->end = range->end; in dax_kmem_range()
98 struct range range; in dev_dax_kmem_probe() local
104 i, range.start, range.end); in dev_dax_kmem_probe()
139 struct range range; in dev_dax_kmem_probe() local
146 res = request_mem_region(range.start, range_len(&range), data->res_name); in dev_dax_kmem_probe()
149 i, range.start, range.end); in dev_dax_kmem_probe()
182 i, range.start, range.end); in dev_dax_kmem_probe()
223 struct range range; in dev_dax_kmem_remove() local
230 rc = remove_memory(range.start, range_len(&range)); in dev_dax_kmem_remove()
[all …]
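
The kmem.c hits use the generic struct range from include/linux/range.h, whose bounds are inclusive, so range_len() is end - start + 1. A minimal sketch of the reservation step, assuming that definition; the helper name and resource name are invented for illustration.

```c
#include <linux/range.h>
#include <linux/ioport.h>
#include <linux/printk.h>
#include <linux/errno.h>

/* Hypothetical helper mirroring the request_mem_region() call seen in
 * dev_dax_kmem_probe(): reserve an inclusive [start, end] span.
 */
static int reserve_span(const struct range *range, const char *res_name)
{
	/* range_len() == range->end - range->start + 1 (inclusive bounds) */
	if (!request_mem_region(range->start, range_len(range), res_name))
		return -EBUSY;

	pr_info("reserved %#llx-%#llx\n", range->start, range->end);
	return 0;
}
```
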
/drivers/gpu/drm/xe/
xe_svm.h
50 static inline bool xe_svm_range_pages_valid(struct xe_svm_range *range) in xe_svm_range_pages_valid() argument
52 return drm_gpusvm_range_pages_valid(range->base.gpusvm, &range->base); in xe_svm_range_pages_valid()
73 int xe_svm_alloc_vram(struct xe_tile *tile, struct xe_svm_range *range,
88 struct xe_svm_range *range,
101 lockdep_assert_held(&range->base.gpusvm->notifier_lock); in xe_svm_range_has_dma_mapping()
102 return range->base.flags.has_dma_mapping; in xe_svm_range_has_dma_mapping()
127 return drm_gpusvm_range_start(&range->base); in xe_svm_range_start()
138 return drm_gpusvm_range_end(&range->base); in xe_svm_range_end()
149 return drm_gpusvm_range_size(&range->base); in xe_svm_range_size()
240 xe_svm_alloc_vram(struct xe_tile *tile, struct xe_svm_range *range, in xe_svm_alloc_vram() argument
[all …]
xe_svm.c
39 return xe_svm_range_in_vram(range) && range->tile_present; in xe_svm_range_has_vram_binding()
77 struct xe_svm_range *range; in xe_svm_range_alloc() local
79 range = kzalloc(sizeof(*range), GFP_KERNEL); in xe_svm_range_alloc()
80 if (!range) in xe_svm_range_alloc()
86 return &range->base; in xe_svm_range_alloc()
92 kfree(range); in xe_svm_range_free()
131 if (range->base.flags.unmapped || !range->tile_present) in xe_svm_range_notifier_event_begin()
268 typeof(*range), in xe_svm_garbage_collector()
270 if (!range) in xe_svm_garbage_collector()
842 if (IS_ERR(range)) in xe_svm_handle_pagefault()
[all …]
xe_pt.c
710 .va_curs_start = range ? range->base.itree.start : in xe_pt_stage_bind()
719 if (range) { in xe_pt_stage_bind()
729 range->base.itree.last + 1 - range->base.itree.start, in xe_pt_stage_bind()
754 if (!range) in xe_pt_stage_bind()
773 range ? range->base.itree.start : xe_vma_start(vma), in xe_pt_stage_bind()
1484 xa_for_each(&op->prefetch_range.range, i, range) { in xe_pt_svm_pre_commit()
1496 range = op->map_range.range; in xe_pt_svm_pre_commit()
1926 range->base.itree.start, range->base.itree.last); in bind_range_prepare()
2037 range->base.itree.start, range->base.itree.last); in unbind_range_prepare()
2122 xa_for_each(&op->prefetch_range.range, i, range) { in op_prepare()
[all …]
/drivers/pci/hotplug/
ibmphp_res.c
521 range = range->next; in fix_me()
938 range = range->next; in find_range()
1203 len_tmp = range->end - range->start; in ibmphp_check_resource()
1238 range = range->next; in ibmphp_check_resource()
1269 len_tmp = range->end - range->start; in ibmphp_check_resource()
1304 range = range->next; in ibmphp_check_resource()
1777 debug_pci("[%x - %x]\n", range->start, range->end); in ibmphp_print_test()
1778 range = range->next; in ibmphp_print_test()
1787 debug_pci("[%x - %x]\n", range->start, range->end); in ibmphp_print_test()
1788 range = range->next; in ibmphp_print_test()
[all …]
/drivers/of/
address.c
95 if (*addr != *range) in of_bus_default_flags_map()
231 range->size); in of_pci_range_to_resource()
801 if (!range) in of_pci_range_parser_one()
804 if (!parser->range || parser->range + np > parser->end) in of_pci_range_parser_one()
807 range->flags = parser->bus->get_flags(parser->range); in of_pci_range_parser_one()
840 if (bus_addr != range->bus_addr + range->size || in of_pci_range_parser_one()
841 cpu_addr != range->cpu_addr + range->size) in of_pci_range_parser_one()
848 return range; in of_pci_range_parser_one()
948 range.bus_addr, range.cpu_addr, range.size); in of_dma_get_range()
986 if (range.cpu_addr + range.size > cpu_end) in of_dma_get_max_cpu_address()
[all …]
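
of_pci_range_parser_one() above is normally driven through the for_each_of_pci_range() iterator; each struct of_pci_range it fills in carries flags, bus and CPU addresses, and a size. A rough sketch of that loop, assuming the of_address.h helpers; error handling is trimmed and the function name is made up.

```c
#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/printk.h>
#include <linux/errno.h>

/* Illustrative walk over a node's "ranges" property. */
static int dump_pci_ranges(struct device_node *np)
{
	struct of_pci_range_parser parser;
	struct of_pci_range range;

	if (of_pci_range_parser_init(&parser, np))
		return -ENOENT;

	for_each_of_pci_range(&parser, &range)
		pr_info("bus %#llx -> cpu %#llx, size %#llx, flags %#x\n",
			range.bus_addr, range.cpu_addr, range.size,
			range.flags);

	return 0;
}
```
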
/drivers/gpu/drm/sprd/
megacores_pll.c
236 range[L] = 50 * scale; in dphy_timing_config()
237 range[H] = INFINITY; in dphy_timing_config()
243 range[L] = 38 * scale; in dphy_timing_config()
245 tmp = AVERAGE(range[L], range[H]); in dphy_timing_config()
246 val[CLK] = DIV_ROUND_UP(AVERAGE(range[L], range[H]), t_half_byteck) - 1; in dphy_timing_config()
249 tmp |= AVERAGE(range[L], range[H]) << 16; in dphy_timing_config()
250 val[DATA] = DIV_ROUND_UP(AVERAGE(range[L], range[H]), t_half_byteck) - 1; in dphy_timing_config()
255 range[H] = INFINITY; in dphy_timing_config()
266 range[H] = INFINITY; in dphy_timing_config()
274 range[H] = INFINITY; in dphy_timing_config()
[all …]
/drivers/net/ethernet/mellanox/mlxsw/
spectrum_port_range.c
13 struct mlxsw_sp_port_range range; member
36 mlxsw_reg_pprr_src_set(pprr_pl, prr->range.source); in mlxsw_sp_port_range_reg_configure()
37 mlxsw_reg_pprr_dst_set(pprr_pl, !prr->range.source); in mlxsw_sp_port_range_reg_configure()
48 const struct mlxsw_sp_port_range *range, in mlxsw_sp_port_range_reg_create() argument
59 prr->range = *range; in mlxsw_sp_port_range_reg_create()
99 const struct mlxsw_sp_port_range *range) in mlxsw_sp_port_range_reg_find() argument
106 if (prr->range.min == range->min && in mlxsw_sp_port_range_reg_find()
107 prr->range.max == range->max && in mlxsw_sp_port_range_reg_find()
108 prr->range.source == range->source) in mlxsw_sp_port_range_reg_find()
116 const struct mlxsw_sp_port_range *range, in mlxsw_sp_port_range_reg_get() argument
[all …]
/drivers/pinctrl/
core.c
314 (gc->base + offset) < range->base + range->npins) { in pinctrl_match_gpio_range()
352 if (range->base + range->npins - 1 < gc->base || in pinctrl_ready_for_gpio_range()
400 if (range) { in pinctrl_get_device_gpio_range()
499 pin < range->pin_base + range->npins) in pinctrl_find_gpio_range_from_pin_nolock()
1726 (pin < (range->pin_base + range->npins))) { in pinctrl_pins_show()
1728 range->base + (pin - range->pin_base); in pinctrl_pins_show()
1821 range->id, range->name, in pinctrl_gpioranges_show()
1822 range->base, (range->base + range->npins - 1)); in pinctrl_gpioranges_show()
1829 range->id, range->name, in pinctrl_gpioranges_show()
1830 range->base, (range->base + range->npins - 1), in pinctrl_gpioranges_show()
[all …]
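
The pinctrl hits translate between GPIO numbers and pin numbers through struct pinctrl_gpio_range, where base is the first GPIO, pin_base the first pin, and npins the width of the window. A small sketch of that arithmetic, assuming a linear range (no pins[] array); the helper name is invented.

```c
#include <linux/pinctrl/pinctrl.h>
#include <linux/errno.h>

/* Illustrative GPIO -> pin translation for a linear range, i.e. the
 * inverse of the "range->base + (pin - range->pin_base)" expression
 * seen in pinctrl_pins_show() above.
 */
static int gpio_to_pin(const struct pinctrl_gpio_range *range,
		       unsigned int gpio)
{
	if (gpio < range->base || gpio >= range->base + range->npins)
		return -EINVAL;

	return range->pin_base + (gpio - range->base);
}
```
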
/drivers/regulator/
qcom_spmi-regulator.c
700 range = &vreg->set_points->range[range_id]; in spmi_regulator_select_voltage()
732 range = vreg->set_points->range; in spmi_sw_selector_to_hw()
735 for (; range < end; range++) { in spmi_sw_selector_to_hw()
763 if (r == range && range->n_voltages) { in spmi_hw_selector_to_sw()
794 range = vreg->set_points->range; in spmi_regulator_find_range()
799 for (; range < end; range++) in spmi_regulator_find_range()
820 if (uV < range->min_uV || uV > range->max_uV) { in spmi_regulator_select_voltage_same_range()
830 uV = uV * range->step_uV + range->min_uV; in spmi_regulator_select_voltage_same_range()
945 range = vreg->set_points->range; in spmi_regulator_ftsmps426_get_voltage()
1684 for (; range < points->range + points->count; range++) in spmi_calculate_num_voltages()
[all …]
helpers.c
134 int range; in regulator_get_voltage_sel_pickable_regmap() local
155 if (range < 0) in regulator_get_voltage_sel_pickable_regmap()
204 unsigned int range; in regulator_set_voltage_sel_pickable_regmap() local
430 const struct linear_range *range; in regulator_map_voltage_linear_range() local
442 range = &rdev->desc->linear_ranges[i]; in regulator_map_voltage_linear_range()
479 const struct linear_range *range; in regulator_map_voltage_pickable_linear_range() local
494 range = &rdev->desc->linear_ranges[i]; in regulator_map_voltage_pickable_linear_range()
509 ret = selector + sel - range->min_sel; in regulator_map_voltage_pickable_linear_range()
587 const struct linear_range *range; in regulator_list_voltage_pickable_linear_range() local
599 range = &rdev->desc->linear_ranges[i]; in regulator_list_voltage_pickable_linear_range()
[all …]
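
Both regulator files map selectors to voltages through struct linear_range (min, min_sel, max_sel, step), and the list/map helpers boil down to one multiply-add per range. A sketch under the assumption of the linear_range.h layout, with an invented function name.

```c
#include <linux/linear_range.h>
#include <linux/errno.h>

/* Illustrative selector -> value mapping for one linear range:
 * value = min + (sel - min_sel) * step, valid for min_sel <= sel <= max_sel.
 */
static int linear_sel_to_value(const struct linear_range *range,
			       unsigned int sel, unsigned int *val)
{
	if (sel < range->min_sel || sel > range->max_sel)
		return -EINVAL;

	*val = range->min + (sel - range->min_sel) * range->step;
	return 0;
}
```
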
/drivers/gpu/drm/nouveau/
nouveau_uvmm.c
175 u64 range = reg->va.range; in nouveau_uvma_region_sparse_unref() local
184 u64 range = uvma->va.va.range; in nouveau_uvma_vmm_put() local
195 u64 range = uvma->va.va.range; in nouveau_uvma_map() local
205 u64 range = uvma->va.va.range; in nouveau_uvma_unmap() local
280 u64 range = reg->va.range; in __nouveau_uvma_region_insert() local
309 reg->va.range = range; in nouveau_uvma_region_insert()
378 reg->va.range != range) in nouveau_uvma_region_find()
399 u64 range = reg->va.range; in __nouveau_uvma_region_destroy() local
564 .range = range, in nouveau_uvmm_sm_map_prepare_unwind()
743 .range = range, in nouveau_uvmm_sm_map_prepare()
[all …]
/drivers/base/
map.c
23 unsigned long range; member
32 int kobj_map(struct kobj_map *domain, dev_t dev, unsigned long range, in kobj_map() argument
36 unsigned int n = MAJOR(dev + range - 1) - MAJOR(dev) + 1; in kobj_map()
53 p->range = range; in kobj_map()
59 while (*s && (*s)->range < range) in kobj_map()
70 unsigned int n = MAJOR(dev + range - 1) - MAJOR(dev) + 1; in kobj_unmap()
83 if (p->dev == dev && p->range == range) { in kobj_unmap()
108 if (p->dev > dev || p->dev + p->range - 1 < dev) in kobj_lookup()
110 if (p->range - 1 >= best) in kobj_lookup()
117 best = p->range - 1; in kobj_lookup()
[all …]
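
kobj_map() keeps probes sorted by the width of their dev_t window, and kobj_lookup() prefers the narrowest window covering the requested device (the "p->range - 1 >= best" test above skips anything at least as wide as the best match so far). A standalone illustration of that "narrowest covering range wins" rule, with an invented probe table rather than the kernel's internal types.

```c
#include <stdint.h>
#include <stddef.h>

/* Purely illustrative: pick the entry with the smallest window that
 * still covers dev, mimicking the kobj_lookup() selection logic.
 */
struct span { uint32_t first; uint32_t count; int id; };

static int narrowest_match(const struct span *tab, size_t n, uint32_t dev)
{
	uint32_t best = UINT32_MAX;
	int id = -1;

	for (size_t i = 0; i < n; i++) {
		if (dev < tab[i].first || dev > tab[i].first + tab[i].count - 1)
			continue;		/* does not cover dev */
		if (tab[i].count - 1 >= best)
			continue;		/* not narrower than current best */
		best = tab[i].count - 1;
		id = tab[i].id;
	}
	return id;
}
```
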
/drivers/virt/acrn/
ioreq.c
126 range = kzalloc(sizeof(*range), GFP_KERNEL); in acrn_ioreq_range_add()
127 if (!range) in acrn_ioreq_range_add()
130 range->type = type; in acrn_ioreq_range_add()
131 range->start = start; in acrn_ioreq_range_add()
132 range->end = end; in acrn_ioreq_range_add()
155 if (type == range->type && in acrn_ioreq_range_del()
157 end == range->end) { in acrn_ioreq_range_del()
158 list_del(&range->list); in acrn_ioreq_range_del()
159 kfree(range); in acrn_ioreq_range_del()
488 list_del(&range->list); in acrn_ioreq_client_destroy()
[all …]
/drivers/nvdimm/
badrange.c
214 struct badblocks *bb, const struct range *range) in badblocks_populate() argument
225 if (bre_end < range->start) in badblocks_populate()
227 if (bre->start > range->end) in badblocks_populate()
230 if (bre->start >= range->start) { in badblocks_populate()
234 if (bre_end <= range->end) in badblocks_populate()
237 len = range->start + range_len(range) in badblocks_populate()
246 if (bre->start < range->start) { in badblocks_populate()
249 if (bre_end < range->end) in badblocks_populate()
250 len = bre->start + bre->length - range->start; in badblocks_populate()
252 len = range_len(range); in badblocks_populate()
[all …]
/drivers/comedi/drivers/
jr3_pci.c
112 union jr3_pci_single_range range[9]; member
508 union jr3_pci_single_range *r = spriv->range; in jr3_pci_poll_subdevice()
518 r[3].l.range[0].max = get_s16(&fs->mx) * 100; in jr3_pci_poll_subdevice()
528 r[8].l.range[0].min = 0; in jr3_pci_poll_subdevice()
529 r[8].l.range[0].max = 65535; in jr3_pci_poll_subdevice()
621 spriv->range[j].l.length = 1; in jr3_pci_alloc_spriv()
622 spriv->range[j].l.range[0].min = -1000000; in jr3_pci_alloc_spriv()
623 spriv->range[j].l.range[0].max = 1000000; in jr3_pci_alloc_spriv()
630 spriv->range[8].l.length = 1; in jr3_pci_alloc_spriv()
631 spriv->range[8].l.range[0].min = 0; in jr3_pci_alloc_spriv()
[all …]
/drivers/gpu/drm/logicvc/
logicvc_of.c
42 .range = {
50 .range = {
57 .range = { 8, 24 },
79 .range = { 8, 24 },
84 .range = {
92 .range = {
158 if (property->range[0] || property->range[1]) in logicvc_of_property_parse_u32()
159 if (value < property->range[0] || value > property->range[1]) in logicvc_of_property_parse_u32()
/drivers/infiniband/core/
umem_odp.c
333 struct hmm_range range = {}; in ib_umem_odp_map_dma_and_lock() local
353 range.notifier = &umem_odp->notifier; in ib_umem_odp_map_dma_and_lock()
354 range.start = ALIGN_DOWN(user_virt, 1UL << page_shift); in ib_umem_odp_map_dma_and_lock()
355 range.end = ALIGN(user_virt + bcnt, 1UL << page_shift); in ib_umem_odp_map_dma_and_lock()
357 num_pfns = (range.end - range.start) >> PAGE_SHIFT; in ib_umem_odp_map_dma_and_lock()
359 range.default_flags = HMM_PFN_REQ_FAULT; in ib_umem_odp_map_dma_and_lock()
362 range.default_flags |= HMM_PFN_REQ_WRITE; in ib_umem_odp_map_dma_and_lock()
369 current_seq = range.notifier_seq = in ib_umem_odp_map_dma_and_lock()
373 ret = hmm_range_fault(&range); in ib_umem_odp_map_dma_and_lock()
399 if (!(range.hmm_pfns[pfn_index] & HMM_PFN_VALID)) in ib_umem_odp_map_dma_and_lock()
[all …]
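
ib_umem_odp_map_dma_and_lock() follows the documented hmm_range_fault() pattern: snapshot the notifier sequence, fault the pages under mmap_read_lock(), and retry on -EBUSY or if the notifier invalidated the range in the meantime. A condensed sketch of that loop, based on the HMM documentation; the driver-side page-table locking that must bracket the retry check is elided, and the function name is invented.

```c
#include <linux/hmm.h>
#include <linux/mmu_notifier.h>
#include <linux/mm.h>

/* Illustrative fault loop over [start, end) for a registered
 * struct mmu_interval_notifier; pfns must hold one entry per page.
 */
static int fault_user_range(struct mm_struct *mm,
			    struct mmu_interval_notifier *notifier,
			    unsigned long start, unsigned long end,
			    unsigned long *pfns)
{
	struct hmm_range range = {
		.notifier	= notifier,
		.start		= start,
		.end		= end,
		.hmm_pfns	= pfns,
		.default_flags	= HMM_PFN_REQ_FAULT | HMM_PFN_REQ_WRITE,
	};
	int ret;

	do {
		range.notifier_seq = mmu_interval_read_begin(notifier);
		mmap_read_lock(mm);
		ret = hmm_range_fault(&range);
		mmap_read_unlock(mm);
		if (ret && ret != -EBUSY)
			return ret;
	} while (ret == -EBUSY ||
		 mmu_interval_read_retry(notifier, range.notifier_seq));

	return 0;
}
```
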
/drivers/gpu/drm/vkms/tests/
vkms_format_test.c
44 enum drm_color_range range; member
76 .range = DRM_COLOR_YCBCR_FULL_RANGE,
101 .range = DRM_COLOR_YCBCR_LIMITED_RANGE,
126 .range = DRM_COLOR_YCBCR_FULL_RANGE,
151 .range = DRM_COLOR_YCBCR_LIMITED_RANGE,
176 .range = DRM_COLOR_YCBCR_FULL_RANGE,
201 .range = DRM_COLOR_YCBCR_LIMITED_RANGE,
237 (DRM_FORMAT_NV12, param->encoding, param->range, &matrix); in vkms_format_test_yuv_u8_to_argb_u16()
260 drm_get_color_encoding_name(t->encoding), drm_get_color_range_name(t->range)); in vkms_format_test_yuv_u8_to_argb_u16_case_desc()
/drivers/gpu/drm/i915/selftests/
intel_uncore.c
77 const struct i915_range *range; in intel_shadow_table_check() local
82 range = range_lists[j].regs; in intel_shadow_table_check()
83 for (i = 0, prev = -1; i < range_lists[j].size; i++, range++) { in intel_shadow_table_check()
84 if (range->end < range->start) { in intel_shadow_table_check()
86 __func__, i, range->start, range->end); in intel_shadow_table_check()
90 if (prev >= (s32)range->start) { in intel_shadow_table_check()
92 __func__, i, range->start, range->end, prev); in intel_shadow_table_check()
96 if (range->start % 4) { in intel_shadow_table_check()
98 __func__, i, range->start, range->end); in intel_shadow_table_check()
102 prev = range->end; in intel_shadow_table_check()
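
The selftest above walks a shadow-register table and rejects entries whose end precedes their start, that overlap or fall out of order relative to the previous entry, or whose start is not 4-byte aligned. The same validation can be written as a standalone check; a sketch with an invented range type rather than struct i915_range.

```c
#include <stdbool.h>
#include <stdint.h>
#include <stddef.h>

struct reg_range { uint32_t start; uint32_t end; };

/* Illustrative version of the checks in intel_shadow_table_check():
 * each entry must be well-formed, sorted after its predecessor, and
 * aligned to a 4-byte register boundary.
 */
static bool reg_table_valid(const struct reg_range *tab, size_t n)
{
	int64_t prev = -1;

	for (size_t i = 0; i < n; i++) {
		if (tab[i].end < tab[i].start)
			return false;		/* inverted entry */
		if (prev >= (int64_t)tab[i].start)
			return false;		/* overlap / not ascending */
		if (tab[i].start % 4)
			return false;		/* misaligned */
		prev = tab[i].end;
	}
	return true;
}
```
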
/drivers/clk/at91/
clk-generated.c
24 struct clk_range range; member
146 if (gck->range.max && req->rate > gck->range.max) in clk_generated_determine_rate()
147 req->rate = gck->range.max; in clk_generated_determine_rate()
148 if (gck->range.min && req->rate < gck->range.min) in clk_generated_determine_rate()
149 req->rate = gck->range.min; in clk_generated_determine_rate()
162 (gck->range.max && min_rate > gck->range.max)) in clk_generated_determine_rate()
212 if (best_rate < 0 || (gck->range.max && best_rate > gck->range.max)) in clk_generated_determine_rate()
253 if (gck->range.max && rate > gck->range.max) in clk_generated_set_rate()
324 const struct clk_range *range, in at91_clk_register_generated() argument
354 gck->range = *range; in at91_clk_register_generated()
