Lines matching refs:vmpr (references to the identifier 'vmpr', a struct vmpressure pointer, in mm/vmpressure.c). The leading number is the line in that file; the trailing "in func()" names the enclosing function, and "argument"/"local" marks how vmpr is declared there.

75 static struct vmpressure *vmpressure_parent(struct vmpressure *vmpr)  in vmpressure_parent()  argument
77 struct mem_cgroup *memcg = vmpressure_to_memcg(vmpr); in vmpressure_parent()
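For orientation, a sketch of the function these two hits come from: vmpressure_parent() maps vmpr back to its mem_cgroup, steps one level up the cgroup hierarchy, and returns NULL at the root so the caller's walk terminates. Reconstructed from mm/vmpressure.c; exact wording may differ between kernel versions.

static struct vmpressure *vmpressure_parent(struct vmpressure *vmpr)
{
        /* Map the vmpressure state back to its owning memory cgroup. */
        struct mem_cgroup *memcg = vmpressure_to_memcg(vmpr);

        /* Step one level up; NULL at the root stops the caller's walk. */
        memcg = parent_mem_cgroup(memcg);
        if (!memcg)
                return NULL;

        return memcg_to_vmpressure(memcg);
}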
157 static bool vmpressure_event(struct vmpressure *vmpr, in vmpressure_event() argument
164 mutex_lock(&vmpr->events_lock); in vmpressure_event()
165 list_for_each_entry(ev, &vmpr->events, node) { in vmpressure_event()
175 mutex_unlock(&vmpr->events_lock); in vmpressure_event()
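The three hits above sit inside the notification loop: with events_lock held, every registered listener whose mode and level match is signalled through its eventfd, and the return value tells the caller whether anyone was notified. A sketch matching recent kernels (the mode filtering and the eventfd_signal() count argument vary across versions):

static bool vmpressure_event(struct vmpressure *vmpr,
                             const enum vmpressure_levels level,
                             bool ancestor, bool signalled)
{
        struct vmpressure_event *ev;
        bool ret = false;

        mutex_lock(&vmpr->events_lock);
        list_for_each_entry(ev, &vmpr->events, node) {
                /* "local" listeners ignore pressure reported by ancestors. */
                if (ancestor && ev->mode == VMPRESSURE_LOCAL)
                        continue;
                /* "no passthrough" listeners skip already-signalled events. */
                if (signalled && ev->mode == VMPRESSURE_NO_PASSTHROUGH)
                        continue;
                /* Only fire for levels at or above the registered one. */
                if (level < ev->level)
                        continue;
                eventfd_signal(ev->efd, 1); /* newest kernels: eventfd_signal(ev->efd) */
                ret = true;
        }
        mutex_unlock(&vmpr->events_lock);

        return ret;
}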
182 struct vmpressure *vmpr = work_to_vmpressure(work); in vmpressure_work_fn() local
189 spin_lock(&vmpr->sr_lock); in vmpressure_work_fn()
198 scanned = vmpr->tree_scanned; in vmpressure_work_fn()
200 spin_unlock(&vmpr->sr_lock); in vmpressure_work_fn()
204 reclaimed = vmpr->tree_reclaimed; in vmpressure_work_fn()
205 vmpr->tree_scanned = 0; in vmpressure_work_fn()
206 vmpr->tree_reclaimed = 0; in vmpressure_work_fn()
207 spin_unlock(&vmpr->sr_lock); in vmpressure_work_fn()
212 if (vmpressure_event(vmpr, level, ancestor, signalled)) in vmpressure_work_fn()
215 } while ((vmpr = vmpressure_parent(vmpr))); in vmpressure_work_fn()
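Read in order, the vmpressure_work_fn() hits form this shape: snapshot and clear the hierarchical counters under sr_lock, bail out if nothing was scanned since the work was queued, turn the scanned/reclaimed ratio into a pressure level, then notify this group and every ancestor via vmpressure_parent(). A sketch, leaving the ratio-to-level mapping to vmpressure_calc_level() as the source does:

static void vmpressure_work_fn(struct work_struct *work)
{
        struct vmpressure *vmpr = work_to_vmpressure(work);
        unsigned long scanned;
        unsigned long reclaimed;
        enum vmpressure_levels level;
        bool ancestor = false;
        bool signalled = false;

        spin_lock(&vmpr->sr_lock);
        scanned = vmpr->tree_scanned;
        if (!scanned) {
                /* Nothing accumulated since the work was queued. */
                spin_unlock(&vmpr->sr_lock);
                return;
        }

        reclaimed = vmpr->tree_reclaimed;
        vmpr->tree_scanned = 0;
        vmpr->tree_reclaimed = 0;
        spin_unlock(&vmpr->sr_lock);

        level = vmpressure_calc_level(scanned, reclaimed);

        do {
                if (vmpressure_event(vmpr, level, ancestor, signalled))
                        signalled = true;
                ancestor = true;
        } while ((vmpr = vmpressure_parent(vmpr)));
}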
242 struct vmpressure *vmpr; in vmpressure() local
247 vmpr = memcg_to_vmpressure(memcg); in vmpressure()
275 spin_lock(&vmpr->sr_lock); in vmpressure()
276 scanned = vmpr->tree_scanned += scanned; in vmpressure()
277 vmpr->tree_reclaimed += reclaimed; in vmpressure()
278 spin_unlock(&vmpr->sr_lock); in vmpressure()
282 schedule_work(&vmpr->work); in vmpressure()
290 spin_lock(&vmpr->sr_lock); in vmpressure()
291 scanned = vmpr->scanned += scanned; in vmpressure()
292 reclaimed = vmpr->reclaimed += reclaimed; in vmpressure()
294 spin_unlock(&vmpr->sr_lock); in vmpressure()
297 vmpr->scanned = vmpr->reclaimed = 0; in vmpressure()
298 spin_unlock(&vmpr->sr_lock); in vmpressure()
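vmpressure() itself, where these hits live, splits into two accounting paths: with tree == true it accumulates into tree_scanned/tree_reclaimed and defers notification to the work item drained above; with tree == false it keeps per-group counters and computes the level inline. A simplified sketch; the gfp-mask and zero-scanned early returns, the root-memcg check, and the socket-pressure handling of newer kernels are elided:

void vmpressure(gfp_t gfp, struct mem_cgroup *memcg, bool tree,
                unsigned long scanned, unsigned long reclaimed)
{
        struct vmpressure *vmpr;

        if (mem_cgroup_disabled())
                return;

        vmpr = memcg_to_vmpressure(memcg);

        /* (gfp-mask and !scanned early-return checks elided) */

        if (tree) {
                /*
                 * Hierarchical accounting: accumulate, and once a full
                 * window has been scanned let the workqueue notify this
                 * group and all of its ancestors.
                 */
                spin_lock(&vmpr->sr_lock);
                scanned = vmpr->tree_scanned += scanned;
                vmpr->tree_reclaimed += reclaimed;
                spin_unlock(&vmpr->sr_lock);

                if (scanned < vmpressure_win)
                        return;
                schedule_work(&vmpr->work);
        } else {
                /*
                 * Local accounting: only this group's counters, consumed
                 * inline once a full window has been scanned.
                 */
                enum vmpressure_levels level;

                spin_lock(&vmpr->sr_lock);
                scanned = vmpr->scanned += scanned;
                reclaimed = vmpr->reclaimed += reclaimed;
                if (scanned < vmpressure_win) {
                        spin_unlock(&vmpr->sr_lock);
                        return;
                }
                vmpr->scanned = vmpr->reclaimed = 0;
                spin_unlock(&vmpr->sr_lock);

                level = vmpressure_calc_level(scanned, reclaimed);
                /* (per-memcg socket-pressure handling that uses 'level' elided) */
        }
}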
369 struct vmpressure *vmpr = memcg_to_vmpressure(memcg); in vmpressure_register_event() local
407 mutex_lock(&vmpr->events_lock); in vmpressure_register_event()
408 list_add(&ev->node, &vmpr->events); in vmpressure_register_event()
409 mutex_unlock(&vmpr->events_lock); in vmpressure_register_event()
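vmpressure_register_event() is reached from the cgroup v1 memory controller when userspace writes to cgroup.event_control (cgroup v2 dropped this interface in favour of PSI). A minimal listener, assuming a v1 memory hierarchy mounted at /sys/fs/cgroup/memory and a group named mygroup (both assumptions), with error handling omitted:

/* Minimal cgroup-v1 memory pressure listener; checks omitted for brevity. */
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>
#include <stdint.h>
#include <sys/eventfd.h>

int main(void)
{
        const char *grp = "/sys/fs/cgroup/memory/mygroup"; /* assumption */
        char path[256], cmd[64];
        int efd, lfd, cfd;
        uint64_t cnt;

        efd = eventfd(0, 0);

        snprintf(path, sizeof(path), "%s/memory.pressure_level", grp);
        lfd = open(path, O_RDONLY);

        snprintf(path, sizeof(path), "%s/cgroup.event_control", grp);
        cfd = open(path, O_WRONLY);

        /*
         * "<event_fd> <pressure_level_fd> <level>" registers the eventfd;
         * this write ends up in vmpressure_register_event(). Accepted
         * levels are "low", "medium" and "critical".
         */
        snprintf(cmd, sizeof(cmd), "%d %d medium", efd, lfd);
        write(cfd, cmd, strlen(cmd));

        for (;;) {
                read(efd, &cnt, sizeof(cnt)); /* blocks until signalled */
                printf("memory pressure: medium (count %llu)\n",
                       (unsigned long long)cnt);
        }
        return 0;
}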
430 struct vmpressure *vmpr = memcg_to_vmpressure(memcg); in vmpressure_unregister_event() local
433 mutex_lock(&vmpr->events_lock); in vmpressure_unregister_event()
434 list_for_each_entry(ev, &vmpr->events, node) { in vmpressure_unregister_event()
441 mutex_unlock(&vmpr->events_lock); in vmpressure_unregister_event()
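Unregistration, triggered when the eventfd side is torn down, is the mirror image: under the same events_lock, find the entry whose eventfd matches, unlink it and free it. A sketch from the same file:

void vmpressure_unregister_event(struct mem_cgroup *memcg,
                                 struct eventfd_ctx *eventfd)
{
        struct vmpressure *vmpr = memcg_to_vmpressure(memcg);
        struct vmpressure_event *ev;

        mutex_lock(&vmpr->events_lock);
        list_for_each_entry(ev, &vmpr->events, node) {
                if (ev->efd != eventfd)
                        continue;
                list_del(&ev->node);
                kfree(ev);
                break;
        }
        mutex_unlock(&vmpr->events_lock);
}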
451 void vmpressure_init(struct vmpressure *vmpr) in vmpressure_init() argument
453 spin_lock_init(&vmpr->sr_lock); in vmpressure_init()
454 mutex_init(&vmpr->events_lock); in vmpressure_init()
455 INIT_LIST_HEAD(&vmpr->events); in vmpressure_init()
456 INIT_WORK(&vmpr->work, vmpressure_work_fn); in vmpressure_init()
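vmpressure_init() touches every field these hits reference, so the structure itself is the best map of them. Roughly as declared in include/linux/vmpressure.h (exact types and comments vary a little by kernel version):

struct vmpressure {
        unsigned long scanned;          /* local (non-tree) window */
        unsigned long reclaimed;

        unsigned long tree_scanned;     /* hierarchical window */
        unsigned long tree_reclaimed;
        /* The lock is used to keep the scanned/reclaimed above in sync. */
        spinlock_t sr_lock;

        /* The list of vmpressure_event structs. */
        struct list_head events;
        /* Have to grab the lock on events traversal or modifications. */
        struct mutex events_lock;

        struct work_struct work;        /* runs vmpressure_work_fn() */
};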
466 void vmpressure_cleanup(struct vmpressure *vmpr) in vmpressure_cleanup() argument
472 flush_work(&vmpr->work); in vmpressure_cleanup()
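vmpressure_init() and vmpressure_cleanup() bracket a memcg's lifetime: in recent kernels mem_cgroup_alloc() runs vmpressure_init(&memcg->vmpressure) and mem_cgroup_css_free() runs vmpressure_cleanup(&memcg->vmpressure), so the flush_work() on line 472 guarantees no queued vmpressure_work_fn() is still running when the group's eventfd infrastructure goes away.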