Lines matching refs:cpu (per-CPU LRU batch drain paths in mm/swap.c)
321 static void folio_activate_drain(int cpu) in folio_activate_drain() argument
323 struct folio_batch *fbatch = &per_cpu(cpu_fbatches.lru_activate, cpu); in folio_activate_drain()
338 static inline void folio_activate_drain(int cpu) in folio_activate_drain() argument
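The first two hits are the real folio_activate_drain(): it looks up this CPU's cpu_fbatches.lru_activate folio_batch with per_cpu() and, if the batch holds anything, flushes its folios onto the LRU in one pass. The later static inline declaration is almost certainly the no-op stub compiled in when the per-CPU batching path is configured out. Below is a minimal user-space sketch of the "flush one CPU's batch only if it is non-empty" pattern; struct batch, batch_drain() and BATCH_SIZE are hypothetical names, not the kernel's folio_batch API.

        /*
         * User-space sketch only: a small fixed-size per-CPU batch that is
         * flushed in one go.  Not the kernel's folio_batch implementation.
         */
        #include <stdio.h>

        #define BATCH_SIZE 15                   /* a handful of entries per CPU */

        struct batch {
                unsigned int count;
                void *items[BATCH_SIZE];
        };

        /* Drain one CPU's batch, but only if it actually holds something. */
        static void batch_drain(struct batch *b, int cpu)
        {
                if (!b->count)
                        return;
                printf("cpu %d: draining %u batched items\n", cpu, b->count);
                b->count = 0;                   /* items handed off in one pass */
        }

        int main(void)
        {
                struct batch per_cpu_batch[2] = { { .count = 3 }, { .count = 0 } };

                batch_drain(&per_cpu_batch[0], 0);      /* flushes 3 items */
                batch_drain(&per_cpu_batch[1], 1);      /* empty, nothing to do */
                return 0;
        }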
641 void lru_add_drain_cpu(int cpu) in lru_add_drain_cpu() argument
643 struct cpu_fbatches *fbatches = &per_cpu(cpu_fbatches, cpu); in lru_add_drain_cpu()
672 folio_activate_drain(cpu); in lru_add_drain_cpu()
773 static bool cpu_needs_drain(unsigned int cpu) in cpu_needs_drain() argument
775 struct cpu_fbatches *fbatches = &per_cpu(cpu_fbatches, cpu); in cpu_needs_drain()
784 need_mlock_drain(cpu) || in cpu_needs_drain()
785 has_bh_in_lru(cpu, NULL); in cpu_needs_drain()
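lru_add_drain_cpu() pulls this CPU's whole cpu_fbatches structure out of per-CPU storage, flushes each of its batches, and finishes with folio_activate_drain(cpu). cpu_needs_drain() reads the same structure and ORs in need_mlock_drain(cpu) and has_bh_in_lru(cpu, NULL), so a caller can tell, without touching the batches, whether a given CPU has anything worth draining. A user-space sketch of that pair follows, assuming a hypothetical cpu_batches struct with just three batches (the real cpu_fbatches has more fields and different flush paths):

        #include <stdbool.h>
        #include <stdio.h>

        struct batch { unsigned int count; };

        /* Hypothetical stand-in for cpu_fbatches: a few per-CPU batches. */
        struct cpu_batches {
                struct batch lru_add;
                struct batch lru_deactivate;
                struct batch lru_activate;
        };

        static void batch_drain(struct batch *b) { b->count = 0; }

        /* Analogue of lru_add_drain_cpu(): flush every batch this CPU owns. */
        static void drain_cpu(struct cpu_batches *cb)
        {
                batch_drain(&cb->lru_add);
                batch_drain(&cb->lru_deactivate);
                batch_drain(&cb->lru_activate); /* folio_activate_drain() step */
        }

        /* Analogue of cpu_needs_drain(): a read-only "anything pending?" test. */
        static bool needs_drain(const struct cpu_batches *cb)
        {
                return cb->lru_add.count || cb->lru_deactivate.count ||
                       cb->lru_activate.count;
        }

        int main(void)
        {
                struct cpu_batches cb = { .lru_activate = { .count = 2 } };

                if (needs_drain(&cb))
                        drain_cpu(&cb);
                printf("pending after drain: %d\n", needs_drain(&cb));
                return 0;
        }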
810 unsigned cpu, this_gen; in __lru_add_drain_all() local
869 for_each_online_cpu(cpu) { in __lru_add_drain_all()
870 struct work_struct *work = &per_cpu(lru_add_drain_work, cpu); in __lru_add_drain_all()
872 if (cpu_needs_drain(cpu)) { in __lru_add_drain_all()
874 queue_work_on(cpu, mm_percpu_wq, work); in __lru_add_drain_all()
875 __cpumask_set_cpu(cpu, &has_work); in __lru_add_drain_all()
879 for_each_cpu(cpu, &has_work) in __lru_add_drain_all()
880 flush_work(&per_cpu(lru_add_drain_work, cpu)); in __lru_add_drain_all()
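__lru_add_drain_all() is the system-wide drain: it walks every online CPU, and for each CPU whose cpu_needs_drain() reports pending work it queues that CPU's lru_add_drain_work item on mm_percpu_wq with queue_work_on() and marks the CPU in a local has_work cpumask; it then flush_work()s only the marked entries, so the caller returns once every drain it actually scheduled has finished. The sketch below mimics that "dispatch only where needed, then wait for exactly what was dispatched" shape with POSIX threads standing in for the per-CPU workqueue; it is illustrative only, and NR_CPUS, pending[] and drain_work() are made-up names.

        /* Build with: cc -pthread.  Not kernel code, just the same pattern. */
        #include <pthread.h>
        #include <stdbool.h>
        #include <stdio.h>

        #define NR_CPUS 4

        static unsigned int pending[NR_CPUS] = { 3, 0, 1, 0 };

        static bool cpu_needs_drain(int cpu) { return pending[cpu] != 0; }

        /* Stand-in for the queued lru_add_drain work item. */
        static void *drain_work(void *arg)
        {
                int cpu = (int)(long)arg;

                printf("cpu %d: draining %u batched items\n", cpu, pending[cpu]);
                pending[cpu] = 0;
                return NULL;
        }

        int main(void)
        {
                pthread_t worker[NR_CPUS];
                bool has_work[NR_CPUS] = { false };

                /* Queue drain work only on CPUs that actually need it. */
                for (int cpu = 0; cpu < NR_CPUS; cpu++) {
                        if (cpu_needs_drain(cpu)) {
                                pthread_create(&worker[cpu], NULL, drain_work,
                                               (void *)(long)cpu);
                                has_work[cpu] = true;
                        }
                }

                /* Flush: wait only for the work that was queued. */
                for (int cpu = 0; cpu < NR_CPUS; cpu++)
                        if (has_work[cpu])
                                pthread_join(worker[cpu], NULL);

                return 0;
        }

The has_work bookkeeping mirrors the cpumask in the listing above: an idle CPU is never disturbed and never waited on, which is the whole point of checking cpu_needs_drain() before queueing.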