Lines matching refs:val (cross-reference hits for the identifier val in the resilient spinlock code):
94 if (!(atomic_read_acquire(&lock->val) & (mask))) in is_lock_released()
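The is_lock_released() hit above is the release probe a waiter runs while spinning. A minimal userspace sketch of the pattern, assuming C11 atomics and an illustrative lock layout (the struct and function names here are mine, not the kernel's):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    struct rqspinlock_sketch { _Atomic uint32_t val; };

    static bool is_lock_released_sketch(struct rqspinlock_sketch *lock,
                                        uint32_t mask)
    {
        /* The acquire load pairs with the owner's release store on
         * unlock, so a waiter that sees the masked bits clear also
         * sees the critical section's prior writes. */
        return !(atomic_load_explicit(&lock->val,
                                      memory_order_acquire) & mask);
    }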
278 int val, ret = 0; in resilient_tas_spin_lock() local
290 val = atomic_read(&lock->val); in resilient_tas_spin_lock()
292 if (val || !atomic_try_cmpxchg(&lock->val, &val, 1)) { in resilient_tas_spin_lock()
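The three resilient_tas_spin_lock() hits show the test-and-set fallback: read val, attempt the 0 -> 1 cmpxchg only when the lock looks free, and give up after a deadline instead of spinning forever. A hedged C11 sketch; the fixed spin budget is a crude stand-in for the kernel's RES_CHECK_TIMEOUT machinery, and the function name is illustrative:

    #include <stdatomic.h>
    #include <stdint.h>
    #include <errno.h>

    static int resilient_tas_spin_lock_sketch(_Atomic uint32_t *val_p)
    {
        unsigned long spins = 0;
        uint32_t val;

        for (;;) {
            val = atomic_load_explicit(val_p, memory_order_relaxed);
            /* Try 0 -> 1 only when the lock appears free; acquire on
             * success orders the critical section after the lock. */
            if (!val &&
                atomic_compare_exchange_strong_explicit(val_p, &val, 1,
                                                        memory_order_acquire,
                                                        memory_order_relaxed))
                return 0;
            if (++spins > (1UL << 24))  /* stand-in for RES_CHECK_TIMEOUT */
                return -ETIMEDOUT;
        }
    }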
348 int __lockfunc resilient_queued_spin_lock_slowpath(rqspinlock_t *lock, u32 val) in resilient_queued_spin_lock_slowpath() argument
368 if (val == _Q_PENDING_VAL) { in resilient_queued_spin_lock_slowpath()
370 val = atomic_cond_read_relaxed(&lock->val, in resilient_queued_spin_lock_slowpath()
377 if (val & ~_Q_LOCKED_MASK) in resilient_queued_spin_lock_slowpath()
385 val = queued_fetch_set_pending_acquire(lock); in resilient_queued_spin_lock_slowpath()
394 if (unlikely(val & ~_Q_LOCKED_MASK)) { in resilient_queued_spin_lock_slowpath()
397 if (!(val & _Q_PENDING_MASK)) in resilient_queued_spin_lock_slowpath()
419 if (val & _Q_LOCKED_MASK) { in resilient_queued_spin_lock_slowpath()
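The slowpath hits up to this point trace the pending-bit protocol: wait out a transient all-pending word, fall back to the queue when anything beyond the locked byte is set, claim the pending bit with a fetch-and-set, undo it on a lost race, and finally wait for the owner to leave. A condensed userspace sketch; the mask values are assumptions that mirror the qspinlock word layout (locked byte, pending byte, tail in the upper half; the kernel's exact masks depend on NR_CPUS):

    #include <stdatomic.h>
    #include <stdint.h>

    #define _Q_LOCKED_VAL    0x00000001U
    #define _Q_LOCKED_MASK   0x000000ffU
    #define _Q_PENDING_VAL   0x00000100U
    #define _Q_PENDING_MASK  0x0000ff00U

    enum pend_result { PEND_ACQUIRED, PEND_MUST_QUEUE };

    static enum pend_result pending_path_sketch(_Atomic uint32_t *lockval,
                                                uint32_t val)
    {
        /* 1. A word equal to _Q_PENDING_VAL is the transient 0,1,0
         *    state: the pending owner is about to take the lock.
         *    Wait for it to settle before deciding. */
        if (val == _Q_PENDING_VAL)
            while ((val = atomic_load_explicit(lockval,
                            memory_order_relaxed)) == _Q_PENDING_VAL)
                ;

        /* 2. Pending taken or tail non-empty: must queue instead. */
        if (val & ~_Q_LOCKED_MASK)
            return PEND_MUST_QUEUE;

        /* 3. Claim the pending bit; val receives the old word. */
        val = atomic_fetch_or_explicit(lockval, _Q_PENDING_VAL,
                                       memory_order_acquire);
        if (val & ~_Q_LOCKED_MASK) {
            /* Lost the race: clear pending only if we set it. */
            if (!(val & _Q_PENDING_MASK))
                atomic_fetch_and_explicit(lockval, ~_Q_PENDING_MASK,
                                          memory_order_relaxed);
            return PEND_MUST_QUEUE;
        }

        /* 4. We own pending: wait for the owner to release (the
         *    kernel bounds this spin with RES_CHECK_TIMEOUT). */
        while (atomic_load_explicit(lockval, memory_order_acquire)
               & _Q_LOCKED_MASK)
            ;

        /* 5. Flip 0,1,0 -> 0,0,1 without disturbing the tail bits:
         *    adding (1 - 0x100) clears pending and sets locked. */
        atomic_fetch_add_explicit(lockval,
                                  _Q_LOCKED_VAL - _Q_PENDING_VAL,
                                  memory_order_relaxed);
        return PEND_ACQUIRED;
    }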
534 int val; in resilient_queued_spin_lock_slowpath() local
541 val = arch_mcs_spin_lock_contended(&node->locked); in resilient_queued_spin_lock_slowpath()
542 if (val == RES_TIMEOUT_VAL) { in resilient_queued_spin_lock_slowpath()
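The arch_mcs_spin_lock_contended()/RES_TIMEOUT_VAL pair is what makes the MCS queue resilient: a timed-out predecessor writes a sentinel into its successor's node instead of the normal grant, and each successor converts that into -EDEADLK rather than waiting forever. Sketch, assuming the sentinel is simply a value distinct from the normal grant of 1 (the kernel defines RES_TIMEOUT_VAL for this purpose):

    #include <stdatomic.h>
    #include <errno.h>

    #define RES_TIMEOUT_VAL  2   /* assumed sentinel, distinct from 1 */

    struct mcs_node_sketch { _Atomic int locked; };

    static int wait_for_predecessor(struct mcs_node_sketch *node)
    {
        int val;

        /* Spin on our own node until the predecessor writes either a
         * normal grant (1) or the timeout sentinel. */
        while (!(val = atomic_load_explicit(&node->locked,
                                            memory_order_acquire)))
            ;
        return (val == RES_TIMEOUT_VAL) ? -EDEADLK : 0;
    }

    /* A timed-out waiter propagates failure instead of granting: */
    static void pass_timeout(struct mcs_node_sketch *next)
    {
        atomic_store_explicit(&next->locked, RES_TIMEOUT_VAL,
                              memory_order_release);
    }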
575 val = res_atomic_cond_read_acquire(&lock->val, !(VAL & _Q_LOCKED_PENDING_MASK) || in resilient_queued_spin_lock_slowpath()
628 if ((val & _Q_TAIL_MASK) == tail) { in resilient_queued_spin_lock_slowpath()
629 if (atomic_try_cmpxchg_relaxed(&lock->val, &val, _Q_LOCKED_VAL)) in resilient_queued_spin_lock_slowpath()
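The last three hits are the queue head's exit: spin until both the locked byte and the pending bit are clear, then, if the tail still points at this waiter, one cmpxchg to _Q_LOCKED_VAL takes the lock and empties the queue in a single step; otherwise only the locked byte is set and the successor's MCS node is granted. Sketch, reusing the assumed masks from the pending-bit sketch and the node type from the MCS sketch above, plus an assumed 16-bit tail in the upper half of the word:

    #define _Q_TAIL_MASK  0xffff0000U   /* assumed CPU+idx tail encoding */

    static void queue_head_exit_sketch(_Atomic uint32_t *lockval,
                                       uint32_t tail,
                                       struct mcs_node_sketch *next)
    {
        uint32_t val;

        /* Wait for the owner and any pending waiter to go away (the
         * kernel bounds this wait with a timeout, which is where the
         * deadlock resilience comes from). */
        while ((val = atomic_load_explicit(lockval, memory_order_acquire))
               & (_Q_LOCKED_MASK | _Q_PENDING_MASK))
            ;

        /* No successor: claim the lock and clear the tail at once. */
        if ((val & _Q_TAIL_MASK) == tail &&
            atomic_compare_exchange_strong_explicit(lockval, &val,
                                                    _Q_LOCKED_VAL,
                                                    memory_order_relaxed,
                                                    memory_order_relaxed))
            return;

        /* A successor queued meanwhile: set only the locked byte,
         * then hand the MCS baton on. */
        atomic_fetch_or_explicit(lockval, _Q_LOCKED_VAL,
                                 memory_order_relaxed);
        atomic_store_explicit(&next->locked, 1, memory_order_release);
    }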