Lines matching refs:lock, grouped by function:

static __always_inline void ticket_spin_lock(arch_spinlock_t *lock)
        u32 val = atomic_fetch_add(1<<16, &lock->val);
        atomic_cond_read_acquire(&lock->val, ticket == (u16)VAL);

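Even from the matched lines alone the protocol is visible: the high 16 bits of lock->val hold the next ticket to hand out, the low 16 bits the ticket currently being served. ticket_spin_lock() takes a ticket with a single atomic_fetch_add(1<<16, ...), then waits until the served half equals its ticket (in the kernel source the atomic_cond_read_acquire() spin is followed by a full fence, since acquire alone gives only RCpc ordering). Below is a minimal userspace sketch of the same scheme in C11 stdatomic; ticket_lock_t and ticket_lock_acquire are names invented here, not kernel API:

    #include <stdatomic.h>
    #include <stdint.h>

    /* Assumed layout, mirroring the fragments above: next ticket in the
     * high 16 bits, currently served ticket in the low 16 bits. */
    typedef struct { _Atomic uint32_t val; } ticket_lock_t;

    static void ticket_lock_acquire(ticket_lock_t *l)
    {
        /* Take a ticket: bump the "next" half, keeping the old word. */
        uint32_t val = atomic_fetch_add_explicit(&l->val, 1u << 16,
                                                 memory_order_acquire);
        uint16_t ticket = (uint16_t)(val >> 16);

        /* Fast path: the served half already matches our ticket. */
        if (ticket == (uint16_t)val)
            return;

        /* Slow path: tickets are served strictly in order (FIFO). */
        while ((uint16_t)atomic_load_explicit(&l->val,
                                              memory_order_acquire) != ticket)
            ; /* spin */
    }
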
static __always_inline bool ticket_spin_trylock(arch_spinlock_t *lock)
        u32 old = atomic_read(&lock->val);
        return atomic_try_cmpxchg(&lock->val, &old, old + (1<<16)); /* SC, for RCsc */

static __always_inline void ticket_spin_unlock(arch_spinlock_t *lock)
        u16 *ptr = (u16 *)lock + IS_ENABLED(CONFIG_CPU_BIG_ENDIAN);
        u32 val = atomic_read(&lock->val);

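ticket_spin_trylock() succeeds only when the word looks unlocked (next == owner) and a single compare-and-swap claims the next ticket; the /* SC, for RCsc */ comment records that the kernel deliberately uses a fully ordered cmpxchg here. ticket_spin_unlock() bumps the served half with a 16-bit store-release, the (u16 *)lock + IS_ENABLED(CONFIG_CPU_BIG_ENDIAN) arithmetic selecting the low-order halfword on either endianness. A hedged continuation of the sketch above, under the same assumed layout; C11 offers no portable halfword view of an atomic u32, so this unlock substitutes a CAS loop for the kernel's halfword store:

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    /* Same hypothetical layout as the sketch above. */
    typedef struct { _Atomic uint32_t val; } ticket_lock_t;

    /* Trylock: succeed only if the lock looks free (next == owner) and
     * one CAS can claim the next ticket before anyone else does. */
    static bool ticket_lock_trylock(ticket_lock_t *l)
    {
        uint32_t old = atomic_load_explicit(&l->val, memory_order_relaxed);

        if ((old >> 16) != (old & 0xffffu))
            return false; /* held, or waiters are queued */

        /* Claiming the ticket and taking the lock are one atomic step. */
        return atomic_compare_exchange_strong_explicit(&l->val, &old,
                                                       old + (1u << 16),
                                                       memory_order_acquire,
                                                       memory_order_relaxed);
    }

    /* Unlock: advance the served half. The kernel does this with a
     * 16-bit store-release to the owner halfword; this portable sketch
     * uses a CAS loop instead, masking so a carry cannot leak into the
     * "next" half. */
    static void ticket_lock_unlock(ticket_lock_t *l)
    {
        uint32_t old = atomic_load_explicit(&l->val, memory_order_relaxed);
        uint32_t new;

        do {
            new = (old & 0xffff0000u) | ((old + 1) & 0xffffu);
        } while (!atomic_compare_exchange_weak_explicit(&l->val, &old, new,
                                                        memory_order_release,
                                                        memory_order_relaxed));
    }
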
static __always_inline int ticket_spin_value_unlocked(arch_spinlock_t lock)
        u32 val = lock.val.counter;

static __always_inline int ticket_spin_is_locked(arch_spinlock_t *lock)
        arch_spinlock_t val = READ_ONCE(*lock);

static __always_inline int ticket_spin_is_contended(arch_spinlock_t *lock)
        u32 val = atomic_read(&lock->val);

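The three query helpers read the same word: the lock is unlocked when next == owner, ticket_spin_is_locked() negates that on a READ_ONCE() snapshot, and it is contended when more than one ticket is outstanding, i.e. someone is queued behind the current holder. A sketch of the same predicates under the layout assumed above (names again invented here):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    typedef struct { _Atomic uint32_t val; } ticket_lock_t; /* as above */

    /* Unlocked when no tickets are outstanding: next == owner. */
    static bool ticket_lock_value_unlocked(uint32_t val)
    {
        return (val >> 16) == (val & 0xffffu);
    }

    static bool ticket_lock_is_locked(ticket_lock_t *l)
    {
        uint32_t val = atomic_load_explicit(&l->val, memory_order_relaxed);

        return !ticket_lock_value_unlocked(val);
    }

    /* Contended when more than one ticket is outstanding; a held but
     * uncontended lock has exactly one. */
    static bool ticket_lock_is_contended(ticket_lock_t *l)
    {
        uint32_t val = atomic_load_explicit(&l->val, memory_order_relaxed);

        return (int16_t)((val >> 16) - (val & 0xffffu)) > 1;
    }

Casting the difference to a signed 16-bit value, as the kernel's (s16) cast does, keeps the "more than one outstanding" comparison correct even after the 16-bit ticket counters wrap around.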