Lines Matching refs:lock
40 extern int resilient_tas_spin_lock(rqspinlock_t *lock);
42 extern int resilient_queued_spin_lock_slowpath(rqspinlock_t *lock, u32 val);
53 static __always_inline int resilient_virt_spin_lock(rqspinlock_t *lock)
76 static __always_inline void grab_held_lock_entry(void *lock)
95         this_cpu_write(rqspinlock_held_locks.locks[cnt - 1], lock);
174 static __always_inline int res_spin_lock(rqspinlock_t *lock)
178         if (likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL))) {
179                 grab_held_lock_entry(lock);
182         return resilient_queued_spin_lock_slowpath(lock, val);
187 #define res_spin_lock(lock) resilient_tas_spin_lock(lock)
191 static __always_inline void res_spin_unlock(rqspinlock_t *lock)
216         smp_store_release(&lock->locked, 0);
221 #define raw_res_spin_lock_init(lock) ({ *(lock) = (rqspinlock_t)__ARCH_SPIN_LOCK_UNLOCKED; })
223 #define raw_res_spin_lock_init(lock) ({ *(lock) = (rqspinlock_t){0}; })
226 #define raw_res_spin_lock(lock) \
230                 __ret = res_spin_lock(lock); \
236 #define raw_res_spin_unlock(lock) ({ res_spin_unlock(lock); preempt_enable(); })
238 #define raw_res_spin_lock_irqsave(lock, flags) \
242                 __ret = raw_res_spin_lock(lock); \
248 #define raw_res_spin_unlock_irqrestore(lock, flags) ({ raw_res_spin_unlock(lock); local_irq_restore(flags); })
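
The listing above covers the user-facing surface of the resilient spinlock API: the slowpath entry points, the per-CPU held-lock bookkeeping in grab_held_lock_entry(), the res_spin_lock()/res_spin_unlock() pair, and the raw_res_* wrappers that add preempt and IRQ handling. The two raw_res_spin_lock_init() definitions at lines 221 and 223, like the res_spin_lock() define at line 187, are presumably alternative variants selected by kernel configuration (queued versus test-and-set fallback). Below is a minimal usage sketch, not taken from the header: only rqspinlock_t, raw_res_spin_lock_init(), raw_res_spin_lock_irqsave() and raw_res_spin_unlock_irqrestore() come from the listing; everything named example_* is hypothetical, and the include path is an assumption.

/*
 * Minimal usage sketch, not part of the header above. Everything named
 * example_* is hypothetical, and the include path is an assumption.
 */
#include <asm-generic/rqspinlock.h>     /* assumed include path */

struct example_bucket {
        rqspinlock_t lock;              /* lock type from the listing */
        unsigned long nr_items;
};

static void example_bucket_init(struct example_bucket *b)
{
        raw_res_spin_lock_init(&b->lock);
        b->nr_items = 0;
}

static int example_bucket_add(struct example_bucket *b)
{
        unsigned long flags;
        int ret;

        /*
         * Unlike raw_spin_lock_irqsave(), the resilient variant can fail,
         * so the return value must be checked. Assumption: on failure the
         * lock is not held and IRQs are not left disabled, so the error is
         * simply propagated and the critical section is skipped.
         */
        ret = raw_res_spin_lock_irqsave(&b->lock, flags);
        if (ret)
                return ret;

        b->nr_items++;                  /* critical section */

        raw_res_spin_unlock_irqrestore(&b->lock, flags);
        return 0;
}

The non-void return type visible throughout the listing (resilient_tas_spin_lock(), resilient_queued_spin_lock_slowpath(), res_spin_lock(), and the __ret statement expressions in the raw_res_* macros) is the defining property of this API: an acquisition can fail instead of spinning forever, for example when the slowpath detects a deadlock or times out, so every caller needs an error path like the one sketched above.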