Lines matching refs:paravirt (identifier cross-reference; the hits below are in the powerpc queued spinlock implementation, arch/powerpc/lib/qspinlock.c)
76 static __always_inline int get_steal_spins(bool paravirt, bool sleepy) in get_steal_spins() argument
78 if (paravirt && sleepy) in get_steal_spins()
84 static __always_inline int get_remote_steal_spins(bool paravirt, bool sleepy) in get_remote_steal_spins() argument
86 if (paravirt && sleepy) in get_remote_steal_spins()
92 static __always_inline int get_head_spins(bool paravirt, bool sleepy) in get_head_spins() argument
94 if (paravirt && sleepy) in get_head_spins()
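
The three helpers above compute spin budgets, and each scales its budget up when running paravirtualized (paravirt) against a lock whose holder has been seen preempted (sleepy). A minimal standalone sketch of that pattern; the base budget of 256 and the factor of 64 are illustrative stand-ins, not the kernel's tunable values:

    /* Sketch of the get_*_spins() pattern: scale the spin budget when a
     * paravirt guest contends a "sleepy" lock. Values are illustrative. */
    #include <stdio.h>
    #include <stdbool.h>

    static int steal_spins = 256;          /* assumed base budget */
    static int pv_sleepy_lock_factor = 64; /* assumed multiplier */

    static inline int get_steal_spins_model(bool paravirt, bool sleepy)
    {
        if (paravirt && sleepy)
            return steal_spins * pv_sleepy_lock_factor;
        return steal_spins;
    }

    int main(void)
    {
        printf("bare metal:        %d spins\n", get_steal_spins_model(false, false));
        printf("paravirt + sleepy: %d spins\n", get_steal_spins_model(true, true));
        return 0;
    }

Spinning longer is worthwhile there because a preempted lock holder can take a long time to run again, and giving up early to queue behind it would be worse.
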
284 static __always_inline bool __yield_to_locked_owner(struct qspinlock *lock, u32 val, bool paravirt, bool mustq) in __yield_to_locked_owner() argument
292 if (!paravirt) in __yield_to_locked_owner()
337 static __always_inline bool yield_to_locked_owner(struct qspinlock *lock, u32 val, bool paravirt) in yield_to_locked_owner() argument
339 return __yield_to_locked_owner(lock, val, paravirt, false); in yield_to_locked_owner()
343 static __always_inline bool yield_head_to_locked_owner(struct qspinlock *lock, u32 val, bool paravirt) in yield_head_to_locked_owner() argument
350 return __yield_to_locked_owner(lock, val, paravirt, mustq); in yield_head_to_locked_owner()
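
__yield_to_locked_owner() is the common body; the two wrappers at lines 337 and 343 differ only in the mustq flag, which a queue-head waiter can use to force itself to queue rather than keep spinning after yielding. A user-space model of the gating seen at line 292, with the hypervisor calls stubbed out (nothing here is the kernel's API, and the yield_count handshake the kernel uses to close races is omitted):

    /* Sketch: yield to a lock owner whose vCPU looks preempted. */
    #include <stdbool.h>
    #include <sched.h>

    static bool stub_vcpu_is_preempted(int cpu) { (void)cpu; return false; }
    static void stub_yield_to(int cpu)          { (void)cpu; sched_yield(); }

    /* Returns true if we yielded, i.e. the owner looked preempted. */
    static inline bool yield_to_locked_owner_model(int owner_cpu,
                                                   bool paravirt, bool mustq)
    {
        if (!paravirt)
            return false;                 /* bare metal: just keep spinning */

        if (stub_vcpu_is_preempted(owner_cpu)) {
            (void)mustq;                  /* head waiter may latch must-queue */
            stub_yield_to(owner_cpu);     /* donate our timeslice */
            return true;
        }
        return false;
    }
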
353 static __always_inline void propagate_sleepy(struct qnode *node, u32 val, bool paravirt) in propagate_sleepy() argument
358 if (!paravirt) in propagate_sleepy()
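
propagate_sleepy() forwards the preempted-owner observation to the next node in the MCS queue, so later waiters also pick the larger budgets from get_*_spins(). A simplified sketch of that hand-off; the qnode layout and the preemption probe are stand-ins:

    /* Sketch: pass the "owner looks preempted" hint down the queue. */
    #include <stdbool.h>
    #include <stddef.h>

    struct qnode_model {
        struct qnode_model *next;
        bool sleepy;
    };

    static bool stub_owner_preempted(void) { return false; } /* stand-in */

    static inline void propagate_sleepy_model(struct qnode_model *node,
                                              bool paravirt)
    {
        struct qnode_model *next;

        if (!paravirt)
            return;              /* the hint only matters under a hypervisor */

        next = node->next;       /* the kernel reads this with READ_ONCE() */
        if (!next || next->sleepy)
            return;              /* nobody queued, or already hinted */

        if (stub_owner_preempted())
            next->sleepy = true; /* let the next waiter spin longer */
    }
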
376 static __always_inline bool yield_to_prev(struct qspinlock *lock, struct qnode *node, int prev_cpu, bool paravirt) in yield_to_prev() argument
381 if (!paravirt) in yield_to_prev()
407 preempted = yield_to_locked_owner(lock, val, paravirt); in yield_to_prev()
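
In yield_to_prev() a queued waiter spins on its predecessor's node rather than the lock word, so when that predecessor's vCPU is preempted the waiter yields to prev_cpu; the call at line 407 shows it can also end up yielding to the lock owner. A compact model of the predecessor side, with the same caveats as above (stubbed hypervisor calls, yield_count handshake omitted):

    /* Sketch: a queued waiter donates its timeslice to the CPU it is
     * queued behind. */
    #include <stdbool.h>
    #include <sched.h>

    static bool stub_cpu_preempted(int cpu) { (void)cpu; return false; }
    static void stub_yield_to_cpu(int cpu)  { (void)cpu; sched_yield(); }

    static inline bool yield_to_prev_model(int prev_cpu, bool paravirt)
    {
        if (!paravirt)
            return false;                /* nothing to yield to on bare metal */

        if (stub_cpu_preempted(prev_cpu)) {
            stub_yield_to_cpu(prev_cpu); /* help the predecessor run again */
            return true;                 /* caller treats this as "preempted" */
        }
        return false;
    }
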
442 static __always_inline bool steal_break(u32 val, int iters, bool paravirt, bool sleepy) in steal_break() argument
444 if (iters >= get_steal_spins(paravirt, sleepy)) in steal_break()
448 (iters >= get_remote_steal_spins(paravirt, sleepy))) { in steal_break()
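
steal_break() decides when to stop lock stealing: the full budget at line 444, plus a second check at line 448 that is assumed here to cut stealing off earlier when the owner sits on a remote NUMA node, where spinning pays off less. A standalone model with illustrative budgets and an externally supplied remoteness test:

    /* Sketch: two-tier steal cut-off, local vs. remote owner. The
     * budgets, factor, and remote threshold are all illustrative. */
    #include <stdbool.h>

    static inline int budget_model(bool paravirt, bool sleepy, int base)
    {
        return (paravirt && sleepy) ? base * 64 : base; /* factor assumed */
    }

    static inline bool steal_break_model(int iters, bool owner_is_remote,
                                         bool paravirt, bool sleepy)
    {
        if (iters >= budget_model(paravirt, sleepy, 256))  /* full budget */
            return true;
        if (owner_is_remote &&
            iters >= budget_model(paravirt, sleepy, 64))   /* tighter remote budget */
            return true;
        return false;
    }
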
456 static __always_inline bool try_to_steal_lock(struct qspinlock *lock, bool paravirt) in try_to_steal_lock() argument
484 preempted = yield_to_locked_owner(lock, val, paravirt); in try_to_steal_lock()
487 if (paravirt && pv_sleepy_lock) { in try_to_steal_lock()
519 } while (!steal_break(val, iters, paravirt, sleepy)); in try_to_steal_lock()
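
try_to_steal_lock() is the loop that drives the helpers above: probe the lock, yield when the holder looks preempted (line 484), latch the sleepy state under pv_sleepy_lock (line 487), and retry until steal_break() fires (line 519). The shape of that loop, modeled on a bare atomic flag rather than the kernel's qspinlock, reusing steal_break_model() from the previous sketch:

    /* Sketch: the steal loop. Sleepy detection is elided; sched_yield()
     * stands in for yield_to_locked_owner(). */
    #include <stdatomic.h>
    #include <stdbool.h>
    #include <sched.h>

    static inline bool try_to_steal_lock_model(atomic_bool *locked, bool paravirt)
    {
        bool sleepy = false;  /* would be fed by the _Q_SLEEPY_VAL hint */
        int iters = 0;

        do {
            bool expected = false;
            if (atomic_compare_exchange_weak(locked, &expected, true))
                return true;          /* stole the lock */
            if (paravirt)
                sched_yield();        /* models yielding to the holder */
            iters++;
        } while (!steal_break_model(iters, false, paravirt, sleepy));

        return false;                 /* budget spent: fall back to queueing */
    }
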
526 static __always_inline void queued_spin_lock_mcs_queue(struct qspinlock *lock, bool paravirt) in queued_spin_lock_mcs_queue() argument
586 if (yield_to_prev(lock, node, prev_cpu, paravirt)) in queued_spin_lock_mcs_queue()
617 if (paravirt && pv_sleepy_lock && maybe_stealers) { in queued_spin_lock_mcs_queue()
633 propagate_sleepy(node, val, paravirt); in queued_spin_lock_mcs_queue()
634 preempted = yield_head_to_locked_owner(lock, val, paravirt); in queued_spin_lock_mcs_queue()
641 if (paravirt && preempted) { in queued_spin_lock_mcs_queue()
650 if (!mustq && iters >= get_head_spins(paravirt, sleepy)) { in queued_spin_lock_mcs_queue()
686 if (paravirt && pv_prod_head) { in queued_spin_lock_mcs_queue()
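
queued_spin_lock_mcs_queue() ties everything together for the slow path: yield to the predecessor while queued (line 586), track and propagate sleepiness once at the head (lines 617-641), stop head spinning when the budget runs out unless must-queue is set (line 650), and finally hand the lock over. The pv_prod_head branch at line 686 additionally wakes the next waiter's vCPU if the hypervisor preempted it. A sketch of that hand-over step; the preemption test and prod_cpu() are stand-ins for the hypervisor facilities:

    /* Sketch: pass the lock to the next MCS waiter; under paravirt,
     * optionally "prod" its vCPU awake if it was preempted. */
    #include <stdatomic.h>
    #include <stdbool.h>

    struct mcs_node_model {
        _Atomic int locked;  /* the next waiter spins on this */
        int cpu;
    };

    static bool stub_vcpu_preempted(int cpu) { (void)cpu; return false; }
    static void stub_prod_cpu(int cpu)       { (void)cpu; } /* hypervisor wake */

    static inline void pass_lock_to_next_model(struct mcs_node_model *next,
                                               bool paravirt, bool pv_prod_head)
    {
        atomic_store_explicit(&next->locked, 1, memory_order_release);
        if (paravirt && pv_prod_head && stub_vcpu_preempted(next->cpu))
            stub_prod_cpu(next->cpu); /* kick the preempted waiter awake */
    }
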