Searched refs: load_sum (results 1 – 4 of 4, sorted by relevance)
/linux/kernel/sched/

pelt.c
  115   sa->load_sum = decay_load(sa->load_sum, periods);                          in accumulate_sum()
  142   sa->load_sum += load * contrib;                                            in accumulate_sum()
  264   sa->load_avg = div_u64(load * sa->load_sum, divider);                      in ___update_load_avg()
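
For context, the three pelt.c hits above are the core PELT update: the old sum is decayed for the elapsed periods, the newly accumulated contribution is added, and the average is recomputed from the sum. Below is a minimal, illustrative C sketch of that sequence; the toy_* names and the floating-point decay are made up for readability (the kernel uses fixed-point arithmetic and a precomputed decay table), so treat it as a sketch of the idea, not the kernel's implementation.

#include <stdint.h>
#include <math.h>

#define TOY_HALFLIFE 32   /* PELT picks y so that y^32 == 1/2 */

struct toy_sched_avg {
	uint64_t load_sum;   /* geometrically decayed sum of load */
	uint64_t load_avg;   /* load_sum rescaled into an average */
};

/* decay 'sum' over 'periods' elapsed periods: sum * y^periods */
static uint64_t toy_decay_load(uint64_t sum, unsigned int periods)
{
	double y = pow(0.5, 1.0 / TOY_HALFLIFE);

	return (uint64_t)((double)sum * pow(y, periods));
}

/*
 * One update step, mirroring the three hits above:
 * decay the old sum, add the new contribution, recompute the average.
 */
static void toy_update(struct toy_sched_avg *sa, unsigned int periods,
		       uint64_t load, uint64_t contrib, uint64_t divider)
{
	sa->load_sum = toy_decay_load(sa->load_sum, periods);   /* cf. pelt.c:115 */
	sa->load_sum += load * contrib;                          /* cf. pelt.c:142 */
	sa->load_avg = (load * sa->load_sum) / divider;          /* cf. pelt.c:264 */
}

The net effect is that recent activity dominates load_sum while older periods fade geometrically, which is why a single decay call per update is enough.
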
fair.c
  3749  cfs_rq->avg.load_sum += se_weight(se) * se->avg.load_sum;                  in enqueue_load_avg()
  3756  sub_positive(&cfs_rq->avg.load_sum, se_weight(se) * se->avg.load_sum);     in dequeue_load_avg()
  3758  cfs_rq->avg.load_sum = max_t(u32, cfs_rq->avg.load_sum,                    in dequeue_load_avg()
  4106  if (sa->load_sum)                                                          in load_avg_is_decayed()
  4426  u64 load_sum = 0;                                                          in update_tg_cfs_load() (local)
  4454  load_sum = div_u64(gcfs_rq->avg.load_sum,                                  in update_tg_cfs_load()
  4459  runnable_sum = min(se->avg.load_sum, load_sum);                            in update_tg_cfs_load()
  4478  delta_sum = load_sum - (s64)se_weight(se) * se->avg.load_sum;              in update_tg_cfs_load()
  4485  cfs_rq->avg.load_sum = max_t(u32, cfs_rq->avg.load_sum,                    in update_tg_cfs_load()
  4688  sa->load_sum = max_t(u32, sa->load_sum, sa->load_avg * PELT_MIN_DIVIDER);  in update_cfs_rq_load_avg()
  [all …]
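
The fair.c hits at 3749, 3756/3758 and 4688 show an entity's weighted, decayed sum being folded into (and removed from) the runqueue-level aggregate, with a floor so load_sum never falls below what load_avg implies. A hedged sketch of that pattern, using made-up toy_* helpers and a stand-in constant for PELT_MIN_DIVIDER:

#include <stdint.h>

#define TOY_MIN_DIVIDER 1024   /* illustrative stand-in for PELT_MIN_DIVIDER */

struct toy_avg {
	uint64_t load_sum;
	uint64_t load_avg;   /* assumed maintained elsewhere */
};

/* add a weighted entity's decayed sum into the queue-level aggregate */
static void toy_enqueue_load_sum(struct toy_avg *rq, const struct toy_avg *se,
				 uint64_t se_weight)
{
	rq->load_sum += se_weight * se->load_sum;               /* cf. fair.c:3749 */
}

/* remove it again, keeping sum and avg mutually consistent */
static void toy_dequeue_load_sum(struct toy_avg *rq, const struct toy_avg *se,
				 uint64_t se_weight)
{
	uint64_t delta = se_weight * se->load_sum;

	/* subtract, saturating at zero, like the kernel's sub_positive() */
	rq->load_sum = rq->load_sum > delta ? rq->load_sum - delta : 0;

	/* cf. fair.c:3758 and 4688: floor the sum at load_avg * divider */
	if (rq->load_sum < rq->load_avg * TOY_MIN_DIVIDER)
		rq->load_sum = rq->load_avg * TOY_MIN_DIVIDER;
}

The clamp matters because sum and avg are updated with rounding at different times; without the floor, load_avg_is_decayed()-style checks could see an average with no backing sum.
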
debug.c
  1246  P(se.avg.load_sum);                                                        in proc_sched_show_task()
/linux/include/linux/

sched.h
  478   u64 load_sum;                                                              (member)
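
The sched.h hit is the declaration of the per-entity load_sum accumulator itself. As rough orientation only, and assuming a recent kernel (the exact field set varies by version; only the load_sum member at line 478 is confirmed by this search), the surrounding tracking structure looks roughly like the excerpt below, where u64/u32 are the kernel's fixed-width integer typedefs:

struct sched_avg {
	u64		last_update_time;
	u64		load_sum;        /* the member found at line 478 */
	u64		runnable_sum;
	u32		util_sum;
	u32		period_contrib;
	unsigned long	load_avg;
	unsigned long	runnable_avg;
	unsigned long	util_avg;
	/* ... */
};
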
Completed in 43 milliseconds