Lines matching refs: lock

In queued_fetch_set_pending_acquire():
   14  static __always_inline u32 queued_fetch_set_pending_acquire(struct qspinlock *lock)
   23          val = GEN_BINARY_RMWcc(LOCK_PREFIX "btsl", lock->val.counter, c,
   25          val |= atomic_read(&lock->val) & ~_Q_PENDING_MASK;
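This helper sets the pending bit in the lock word and returns the previous value, with acquire ordering; the x86 lines above do it with a LOCK-prefixed btsl on the pending bit and then OR in a separate read of the non-pending bits. As a rough, self-contained userspace analogue (the struct, constant, and function names below are invented for illustration, and the real lock-word layout is more involved):

    #include <stdatomic.h>
    #include <stdint.h>

    #define Q_PENDING_VAL  (1U << 8)   /* assumed: pending bit sits just above the locked byte */

    struct qspinlock_sketch {
            _Atomic uint32_t val;      /* whole 32-bit lock word */
    };

    /* Set the pending bit with acquire ordering and return the old word;
     * the arch-independent kernel code does the equivalent job with a
     * single atomic fetch-or on the lock word. */
    static inline uint32_t fetch_set_pending_acquire(struct qspinlock_sketch *lock)
    {
            return atomic_fetch_or_explicit(&lock->val, Q_PENDING_VAL,
                                            memory_order_acquire);
    }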
File-scope declarations:
   31  extern void native_queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);
   33  extern void __pv_queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);
   34  extern void __raw_callee_save___pv_queued_spin_unlock(struct qspinlock *lock);

In native_queued_spin_unlock():
   44  static inline void native_queued_spin_unlock(struct qspinlock *lock)
   46          smp_store_release(&lock->locked, 0);
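The unlock on line 46 is nothing more than a release store of 0 to the low "locked" byte, which the current owner holds exclusively; the pending and tail fields in the rest of the word are left untouched for contending CPUs. A minimal userspace sketch of that idea (the field layout is shown in a simplified little-endian view, and all names here are illustrative, not the kernel's):

    #include <stdatomic.h>
    #include <stdint.h>

    /* Simplified little-endian view of the lock word: owner-only locked
     * byte first, then the fields used by waiters. */
    struct qspinlock_bytes_sketch {
            _Atomic uint8_t locked;    /* low byte: non-zero while held  */
            uint8_t         pending;   /* claimed by the first spinner   */
            uint16_t        tail;      /* encodes the waiter-queue tail  */
    };

    static inline void sketch_native_unlock(struct qspinlock_bytes_sketch *lock)
    {
            /* Mirrors smp_store_release(&lock->locked, 0): all writes made
             * inside the critical section become visible before the byte
             * clears. */
            atomic_store_explicit(&lock->locked, 0, memory_order_release);
    }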
In queued_spin_lock_slowpath() and queued_spin_unlock():
   49  static inline void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val)
   51          pv_queued_spin_lock_slowpath(lock, val);
   54  static inline void queued_spin_unlock(struct qspinlock *lock)
   57          pv_queued_spin_unlock(lock);
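Lines 49-57 show that, in this build, both the slow path and the unlock are routed through pv_ hooks rather than called directly: on bare metal those hooks end up in the native code, while under a hypervisor they can be switched to variants that halt a waiting vCPU and kick it on release. The kernel patches these call sites rather than taking an indirect call each time, but a plain function-pointer table is a reasonable userspace analogue (every name below is invented for the sketch):

    #include <stdint.h>

    struct qspinlock_sketch;   /* as in the earlier sketch */

    /* Hypothetical stand-in for the paravirt hooks. */
    struct pv_lock_ops_sketch {
            void (*lock_slowpath)(struct qspinlock_sketch *lock, uint32_t val);
            void (*unlock)(struct qspinlock_sketch *lock);
    };

    extern struct pv_lock_ops_sketch pv_lock_ops;

    static inline void sketch_queued_spin_lock_slowpath(struct qspinlock_sketch *lock,
                                                         uint32_t val)
    {
            /* Native queued slow path, or a hypervisor-aware one that
             * halts the vCPU instead of spinning. */
            pv_lock_ops.lock_slowpath(lock, val);
    }

    static inline void sketch_queued_spin_unlock(struct qspinlock_sketch *lock)
    {
            /* Native release store, or a variant that also wakes a halted
             * waiter via the hypervisor. */
            pv_lock_ops.unlock(lock);
    }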
In virt_spin_lock():
   88  static inline bool virt_spin_lock(struct qspinlock *lock)
  102          val = atomic_read(&lock->val);
  104          if (val || !atomic_try_cmpxchg(&lock->val, &val, _Q_LOCKED_VAL)) {
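virt_spin_lock() is the fallback for guests whose hypervisor offers no paravirt spinlock support: rather than queueing fairly, which interacts badly with vCPU preemption, it degrades to a simple test-and-set lock that spins with plain reads and retries the cmpxchg only when the word is zero. A self-contained userspace version of that retry loop follows (the static-key check that gates the real function is omitted, and the names are illustrative):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define Q_LOCKED_VAL 1U            /* stand-in for _Q_LOCKED_VAL */

    struct tas_lock_sketch {
            _Atomic uint32_t val;
    };

    static bool sketch_virt_spin_lock(struct tas_lock_sketch *lock)
    {
            uint32_t val;

            for (;;) {
                    /* Read first so failed cmpxchg attempts do not bounce
                     * the cache line while the lock is held. */
                    val = atomic_load_explicit(&lock->val, memory_order_relaxed);
                    if (val == 0 &&
                        atomic_compare_exchange_strong_explicit(&lock->val, &val,
                                                                Q_LOCKED_VAL,
                                                                memory_order_acquire,
                                                                memory_order_relaxed))
                            return true;
                    /* cpu_relax() in the kernel; a pause/yield hint here. */
            }
    }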