/linux/drivers/clk/renesas/
rcar-gen4-cpg.c
    109  min_mult = max(div64_ul(req->min_rate, prate), 1ULL);  in cpg_pll_8_25_clk_determine_rate()
    110  max_mult = min(div64_ul(req->max_rate, prate), 256ULL);  in cpg_pll_8_25_clk_determine_rate()
    115  ni = div64_ul(req->rate, prate);  in cpg_pll_8_25_clk_determine_rate()
    121  nf = div64_ul((u64)(req->rate - prate * ni) << 24,  in cpg_pll_8_25_clk_determine_rate()
    144  ni = div64_ul(rate, prate);  in cpg_pll_8_25_clk_set_rate()
    150  nf = div64_ul((u64)(rate - prate * ni) << 24,  in cpg_pll_8_25_clk_set_rate()
    311  min_mult = max(div64_ul(req->min_rate * 32ULL, prate), 1ULL);  in cpg_z_clk_determine_rate()
    312  max_mult = min(div64_ul(req->max_rate * 32ULL, prate), 32ULL);  in cpg_z_clk_determine_rate()
|
rcar-gen2-cpg.c
    72  min_mult = max(div64_ul(req->min_rate * 32ULL, prate), 1ULL);  in cpg_z_clk_determine_rate()
    73  max_mult = min(div64_ul(req->max_rate * 32ULL, prate), 32ULL);  in cpg_z_clk_determine_rate()
    77  mult = div64_ul(req->rate * 32ULL, prate);  in cpg_z_clk_determine_rate()
    92  mult = div64_ul(rate * 32ULL, parent_rate);  in cpg_z_clk_set_rate()
|
rcar-gen3-cpg.c
    73   min_mult = max(div64_ul(req->min_rate, prate), 1ULL);  in cpg_pll_clk_determine_rate()
    74   max_mult = min(div64_ul(req->max_rate, prate), 128ULL);  in cpg_pll_clk_determine_rate()
    208  min_mult = max(div64_ul(req->min_rate * 32ULL, prate), 1ULL);  in cpg_z_clk_determine_rate()
    209  max_mult = min(div64_ul(req->max_rate * 32ULL, prate), 32ULL);  in cpg_z_clk_determine_rate()
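
The three Renesas CPG drivers above share one pattern: scale the requested rate into an integer multiplier of the parent rate with div64_ul(), then clamp it to what the PLL or Z-clock divider can express. A minimal sketch of that pattern follows; the helper name and the 1..128 multiplier range are illustrative assumptions, not values taken from the drivers themselves.

#include <linux/math64.h>
#include <linux/minmax.h>
#include <linux/types.h>

/*
 * Illustrative helper: pick an integer PLL multiplier for a requested
 * rate. The 1..128 range is an assumed hardware limit, not a value
 * from the rcar-gen*-cpg drivers listed above.
 */
static u64 pll_pick_mult(u64 req_rate, unsigned long prate)
{
	/* 64-bit dividend, unsigned long divisor: the div64_ul() case */
	u64 mult = div64_ul(req_rate, prate);

	return clamp(mult, 1ULL, 128ULL);
}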
|
/linux/drivers/gpu/drm/i915/gt/
selftest_timeline.c
    317  __func__, count, (long long)div64_ul(ktime_to_ns(kt), count));  in bench_sync()
    318  prng32_1M = div64_ul(ktime_to_ns(kt) << 20, count);  in bench_sync()
    334  __func__, count, (long long)div64_ul(ktime_to_ns(kt), count));  in bench_sync()
    352  __func__, count, (long long)div64_ul(ktime_to_ns(kt), count));  in bench_sync()
    368  __func__, count, (long long)div64_ul(ktime_to_ns(kt), count));  in bench_sync()
    382  __func__, count, (long long)div64_ul(ktime_to_ns(kt), count));  in bench_sync()
    406  __func__, count, (long long)div64_ul(ktime_to_ns(kt), count));  in bench_sync()
    435  (long long)div64_ul(ktime_to_ns(kt), count));  in bench_sync()
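
The benchmark divides a measured ktime span by the iteration count to report nanoseconds per operation. A minimal sketch of that averaging step, assuming the count is nonzero; the helper name is illustrative.

#include <linux/ktime.h>
#include <linux/math64.h>

/* Average nanoseconds per iteration over a measured span. */
static u64 avg_ns_per_op(ktime_t span, unsigned long count)
{
	return div64_ul(ktime_to_ns(span), count);
}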
|
/linux/scripts/coccinelle/misc/
do_div.cocci
    106  coccilib.org.print_todo(p[0], construct_warnings("div64_ul"))
    134  coccilib.report.print_report(p[0], construct_warnings("div64_ul"))
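
This Coccinelle script warns when do_div() is used with a divisor that is not a 32-bit value (do_div() silently truncates the divisor) and suggests div64_ul() or a related helper instead. A hedged sketch of the kind of rewrite the warning points at; the function and parameter names are illustrative.

#include <linux/math64.h>
#include <linux/types.h>

static u64 bytes_per_us(u64 total_bytes, unsigned long interval_us)
{
	/*
	 * With do_div() the divisor would be truncated to 32 bits:
	 *
	 *	do_div(total_bytes, interval_us);
	 *	return total_bytes;
	 *
	 * div64_ul() keeps the full unsigned long divisor.
	 */
	return div64_ul(total_bytes, interval_us);
}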
|
/linux/drivers/clk/imx/
clk-pllv3.c
    251  temp64 = div64_ul(temp64, parent_rate);  in clk_pllv3_av_round_rate()
    281  temp64 = div64_ul(temp64, parent_rate);  in clk_pllv3_av_set_rate()
    338  temp64 = div64_ul(temp64, parent_rate);  in clk_pllv3_vf610_rate_to_mf()
|
/linux/include/linux/
math64.h
    13  #define div64_ul(x, y) div64_u64((x), (y))  macro
    87  #define div64_ul(x, y) div_u64((x), (y))  macro
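
These are the two definitions of the same macro: where BITS_PER_LONG is 64, div64_ul() expands to div64_u64(), and where it is 32, to div_u64(), so callers can divide a 64-bit dividend by an unsigned long divisor without caring about the word size. A minimal usage sketch, assuming the scaled dividend fits in 64 bits; the helper name is illustrative.

#include <linux/math64.h>
#include <linux/types.h>

/* Average throughput in bytes per second over elapsed_ms milliseconds. */
static u64 bytes_per_second(u64 bytes, unsigned long elapsed_ms)
{
	return div64_ul(bytes * 1000, elapsed_ms);
}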
|
/linux/drivers/pwm/
pwm-sifive.c
    95   scale_pow = div64_ul(ddata->approx_period * (u64)rate, NSEC_PER_SEC);  in pwm_sifive_update_clock()
    104  ddata->real_period = div64_ul(num, rate);  in pwm_sifive_update_clock()
|
pwm-meson.c
    304  return div64_ul(NSEC_PER_SEC * (u64)cnt, fin_freq);  in meson_pwm_cnt_to_ns()
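
Both PWM drivers above use div64_ul() to convert between a period in nanoseconds and a cycle count at the input clock rate: multiply first so the intermediate value stays in 64 bits, then divide. A sketch of the two directions, with illustrative helper names and assuming the products fit in 64 bits.

#include <linux/math64.h>
#include <linux/time64.h>
#include <linux/types.h>

/* Clock cycles needed to cover period_ns at rate_hz. */
static u64 period_to_cycles(u64 period_ns, unsigned long rate_hz)
{
	return div64_ul(period_ns * rate_hz, NSEC_PER_SEC);
}

/* Period in nanoseconds represented by cnt cycles at rate_hz. */
static u64 cycles_to_ns(u32 cnt, unsigned long rate_hz)
{
	return div64_ul(NSEC_PER_SEC * (u64)cnt, rate_hz);
}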
|
/linux/kernel/dma/
map_benchmark.c
    82  map_100ns = div64_ul(map_delta, 100);  in map_benchmark_thread()
    83  unmap_100ns = div64_ul(unmap_delta, 100);  in map_benchmark_thread()
|
/linux/tools/testing/selftests/bpf/progs/
bpf_cubic.c
    128  #define div64_ul div64_u64  macro
    451  div64_ul((__u64)GSO_MAX_SIZE * 4 * USEC_PER_SEC, rate));  in hystart_ack_delay()
|
/linux/drivers/watchdog/
davinci_wdt.c
    137  timer_counter = div64_ul(timer_counter, freq);  in davinci_wdt_get_timeleft()
|
rzg2l_wdt.c
    71  return div64_ul(timer_cycle_us, cycle);  in rzg2l_wdt_get_cycle_usec()
|
/linux/drivers/pci/
setup-bus.c
    1984  io_per_b = div64_ul(resource_size(&io), hotplug_bridges);  in pci_bus_distribute_available_resources()
    1985  mmio_per_b = div64_ul(resource_size(&mmio), hotplug_bridges);  in pci_bus_distribute_available_resources()
    1986  mmio_pref_per_b = div64_ul(resource_size(&mmio_pref),  in pci_bus_distribute_available_resources()
    1989  io_per_b = div64_ul(resource_size(&io), normal_bridges);  in pci_bus_distribute_available_resources()
    1990  mmio_per_b = div64_ul(resource_size(&mmio), normal_bridges);  in pci_bus_distribute_available_resources()
    1991  mmio_pref_per_b = div64_ul(resource_size(&mmio_pref),  in pci_bus_distribute_available_resources()
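
Here div64_ul() splits the remaining I/O, MMIO, and prefetchable MMIO windows evenly between child bridges. A minimal sketch of that per-bridge share calculation, assuming the bridge count is nonzero; the helper name is illustrative.

#include <linux/ioport.h>
#include <linux/math64.h>

/* Even share of a parent window for each of `count` child bridges. */
static resource_size_t per_bridge_share(const struct resource *win,
					unsigned int count)
{
	return div64_ul(resource_size(win), count);
}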
|
/linux/net/ipv4/
inetpeer.c
    79  nr_entries = div64_ul((u64)totalram_pages() << PAGE_SHIFT,  in inet_initpeers()
|
tcp_cubic.c
    383  div64_ul((u64)sk->sk_gso_max_size * 4 * USEC_PER_SEC, rate));  in hystart_ack_delay()
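
Both the in-kernel tcp_cubic.c and its bpf_cubic.c selftest copy compute the HyStart ACK-delay bound as roughly the time, in microseconds, needed to send four maximum-size GSO chunks at the current rate. A sketch of just the bytes-at-a-byte-rate-to-microseconds conversion; the helper name and the zero-rate guard are illustrative assumptions.

#include <linux/math64.h>
#include <linux/time64.h>
#include <linux/types.h>

/* Microseconds needed to transmit `bytes` at `rate` bytes per second. */
static u64 tx_time_us(u64 bytes, unsigned long rate)
{
	if (!rate)
		return 0;

	return div64_ul(bytes * USEC_PER_SEC, rate);
}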
|
/linux/drivers/memory/
jz4780-nemc.c
    150  return div64_ul(1000000000000ull, rate);  in jz4780_nemc_clk_period()
|
/linux/drivers/gpu/drm/msm/
msm_gpu_devfreq.c
    106  busy_time = div64_ul(busy_time, sample_rate);  in msm_devfreq_get_dev_status()
|
/linux/mm/
page-writeback.c
    201   min = div64_ul(min, tot_bw);  in wb_min_max_ratio()
    205   max = div64_ul(max, tot_bw);  in wb_min_max_ratio()
    932   wb_thresh = div64_ul(wb_thresh, denominator);  in __wb_calc_thresh()
    1269  bw = div64_ul(bw, elapsed);  in wb_update_write_bandwidth()
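
page-writeback.c uses div64_ul() for proportional scaling: a device's min/max ratio is weighted by its share of the total write bandwidth, and the write-bandwidth estimate itself is written bytes divided by elapsed time. A sketch of the proportional step, assuming tot_bw is nonzero; the helper name is illustrative.

#include <linux/math64.h>
#include <linux/types.h>

/*
 * Scale a global ratio by this device's share of the total bandwidth:
 * ratio * this_bw / tot_bw, multiplying first so the 64-bit
 * intermediate keeps precision.
 */
static unsigned long scale_by_bw_share(unsigned long ratio,
				       unsigned long this_bw,
				       unsigned long tot_bw)
{
	return div64_ul((u64)ratio * this_bw, tot_bw);
}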
|
/linux/drivers/i2c/busses/
i2c-rzv2m.c
    146  hold_time_ns = div64_ul((u64)(priv->iicb0wl >> 2) * NSEC_PER_SEC, pclk_hz);  in rzv2m_i2c_clock_calculate()
|
/linux/fs/nilfs2/
ioctl.c
    1128  minseg = div64_ul(minseg, segbytes);  in nilfs_ioctl_set_alloc_range()
    1137  maxseg = div64_ul(maxseg, segbytes);  in nilfs_ioctl_set_alloc_range()
|
dat.c
    460  first = div64_ul(first, entries_per_block);  in nilfs_dat_get_vinfo()
|
the_nilfs.c
    414  max_count = div64_ul(max_count, nilfs->ns_blocks_per_segment);  in nilfs_max_segment_count()
|
/linux/drivers/char/
hpet.c
    547  return div64_ul(m, dis);  in hpet_time_div()
|
/linux/block/
bfq-wf2q.c
    288  return div64_ul((u64)service << WFQ_SERVICE_SHIFT, weight);  in bfq_delta()
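
bfq_delta() turns an amount of service into a virtual-time delta with a fixed-point division: the service is shifted left by WFQ_SERVICE_SHIFT before dividing by the entity's weight, so low weights still yield fractional resolution. A sketch of the same fixed-point idea; the shift value and names below are illustrative, not BFQ's actual definitions.

#include <linux/math64.h>
#include <linux/types.h>

#define VTIME_SHIFT	22	/* fraction bits; illustrative, not BFQ's value */

/* Virtual-time delta corresponding to `service` units at `weight`. */
static u64 vtime_delta(unsigned long service, unsigned long weight)
{
	return div64_ul((u64)service << VTIME_SHIFT, weight);
}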
|