Lines matching refs:rq in kernel/sched/pelt.h (Linux scheduler PELT helpers)
int update_rt_rq_load_avg(u64 now, struct rq *rq, int running);
int update_dl_rq_load_avg(u64 now, struct rq *rq, int running);
#ifdef CONFIG_SCHED_THERMAL_PRESSURE
int update_thermal_load_avg(u64 now, struct rq *rq, u64 capacity);
static inline u64 thermal_load_avg(struct rq *rq)
{
	return READ_ONCE(rq->avg_thermal.load_avg);
}
#else
static inline int update_thermal_load_avg(u64 now, struct rq *rq, u64 capacity) { return 0; }
static inline u64 thermal_load_avg(struct rq *rq) { return 0; }
#endif

#ifdef CONFIG_HAVE_SCHED_AVG_IRQ
int update_irq_load_avg(struct rq *rq, u64 running);
#else
static inline int update_irq_load_avg(struct rq *rq, u64 running) { return 0; }
#endif
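All of these update_*_load_avg() entry points feed the same PELT (Per-Entity Load Tracking) machinery: running time is accumulated in roughly 1 ms segments and decayed geometrically so that a contribution halves after 32 periods. The following is only a toy user-space sketch of that decay, using floating point for clarity; the kernel uses fixed-point sums and a precomputed decay table, and the 50% duty cycle below is an arbitrary example.

#include <math.h>
#include <stdio.h>

int main(void)
{
	const double y = pow(0.5, 1.0 / 32.0);	/* decay factor: y^32 == 0.5 */
	double util_avg = 0.0;			/* scaled 0..1024 like the kernel's util_avg */

	for (int period = 1; period <= 128; period++) {
		double running = 0.5;		/* task busy 50% of this ~1 ms period */

		/* exponentially weighted average of the per-period running time */
		util_avg = util_avg * y + running * 1024.0 * (1.0 - y);
		if (period % 32 == 0)
			printf("after %3d periods: util_avg ~= %4.0f\n", period, util_avg);
	}
	return 0;	/* converges toward ~512, i.e. half of full utilization */
}

Build with cc pelt_toy.c -lm; the printed values climb toward 512, mirroring how a 50%-busy task's utilization settles at half scale.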
static inline void update_rq_clock_pelt(struct rq *rq, s64 delta)
{
	if (unlikely(is_idle_task(rq->curr))) {
		/* The rq is idle: sync clock_pelt with clock_task. */
		rq->clock_pelt = rq_clock_task(rq);
		return;
	}
	/* Scale the elapsed time by CPU compute capacity and current frequency. */
	delta = cap_scale(delta, arch_scale_cpu_capacity(cpu_of(rq)));
	delta = cap_scale(delta, arch_scale_freq_capacity(cpu_of(rq)));
	rq->clock_pelt += delta;
}
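cap_scale() is a scheduler-internal helper not shown in this listing; it multiplies a value by a capacity expressed against SCHED_CAPACITY_SCALE (1024) and shifts back down. Here is a stand-alone sketch of the double scaling above, with made-up capacities (512 for a half-capacity CPU, 768 for 75% of max frequency); the helper is re-implemented only to illustrate the idea.

#include <stdint.h>
#include <stdio.h>

#define SCHED_CAPACITY_SHIFT	10	/* SCHED_CAPACITY_SCALE == 1024 */

/* Illustrative re-implementation of the cap_scale() idea. */
static uint64_t cap_scale(uint64_t delta, unsigned long cap)
{
	return (delta * cap) >> SCHED_CAPACITY_SHIFT;
}

int main(void)
{
	uint64_t delta = 1000000;	/* 1 ms of task clock, in ns */
	unsigned long cpu_cap = 512;	/* assumed: CPU has half the max compute capacity */
	unsigned long freq_cap = 768;	/* assumed: CPU runs at 75% of its max frequency */

	/* Same double scaling as update_rq_clock_pelt(): 1 ms of wall-clock work
	 * on this CPU counts as ~375 us of work on a full-capacity CPU at fmax. */
	uint64_t scaled = cap_scale(cap_scale(delta, cpu_cap), freq_cap);

	printf("scaled delta = %llu ns\n", (unsigned long long)scaled);
	return 0;
}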
static inline void update_idle_rq_clock_pelt(struct rq *rq)
{
	u32 divider = ((LOAD_AVG_MAX - 1024) << SCHED_CAPACITY_SHIFT) - LOAD_AVG_MAX;
	u32 util_sum = rq->cfs.avg.util_sum;
	util_sum += rq->avg_rt.util_sum;
	util_sum += rq->avg_dl.util_sum;
	/* Only a nearly saturated rq has "lost" idle time worth accounting. */
	if (util_sum >= divider)
		rq->lost_idle_time += rq_clock_task(rq) - rq->clock_pelt;
}
static inline u64 rq_clock_pelt(struct rq *rq)
{
	lockdep_assert_rq_held(rq);
	assert_clock_updated(rq);

	return rq->clock_pelt - rq->lost_idle_time;
}
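Putting the three clocks together: clock_pelt advances more slowly than clock_task on a small or down-clocked CPU, the accumulated deficit is remembered in lost_idle_time when a nearly saturated rq goes idle, and rq_clock_pelt() is what PELT consumers read. A toy user-space walk-through under the same assumed half-capacity CPU as above (not kernel code, just the bookkeeping):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t clock_task = 0, clock_pelt = 0, lost_idle_time = 0;
	const unsigned long cpu_cap = 512, scale = 1024;	/* assumed half-capacity CPU */

	/* 4 ms of busy time: clock_pelt advances at half the rate of clock_task. */
	for (int i = 0; i < 4; i++) {
		clock_task += 1000000;
		clock_pelt += (1000000 * cpu_cap) / scale;
	}

	/* The rq goes idle while (we assume) nearly saturated: record the deficit,
	 * then sync clock_pelt to clock_task as update_rq_clock_pelt() does. */
	lost_idle_time += clock_task - clock_pelt;
	clock_pelt = clock_task;

	/* what rq_clock_pelt() would return: 4 ms of task clock, 2 ms of PELT clock */
	printf("clock_task = %llu, pelt clock = %llu\n",
	       (unsigned long long)clock_task,
	       (unsigned long long)(clock_pelt - lost_idle_time));
	return 0;
}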
/* !CONFIG_SMP: PELT is compiled out; callers get no-op stubs. */
static inline int update_rt_rq_load_avg(u64 now, struct rq *rq, int running) { return 0; }
static inline int update_dl_rq_load_avg(u64 now, struct rq *rq, int running) { return 0; }
static inline int update_thermal_load_avg(u64 now, struct rq *rq, u64 capacity) { return 0; }
static inline u64 thermal_load_avg(struct rq *rq) { return 0; }
static inline int update_irq_load_avg(struct rq *rq, u64 running) { return 0; }

static inline u64 rq_clock_pelt(struct rq *rq)
{
	return rq_clock_task(rq);
}

static inline void update_rq_clock_pelt(struct rq *rq, s64 delta) { }
static inline void update_idle_rq_clock_pelt(struct rq *rq) { }
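Keeping these variants as zero-returning or empty static inlines lets the rest of the scheduler call them unconditionally; on uniprocessor builds the compiler simply optimizes the calls away.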