
Searched refs:lock (Results 1 – 25 of 322) sorted by relevance


/arch/alpha/include/asm/
spinlock.h
21 return lock.lock == 0; in arch_spin_value_unlocked()
27 lock->lock = 0; in arch_spin_unlock()
46 : "=&r" (tmp), "=m" (lock->lock) in arch_spin_lock()
47 : "m"(lock->lock) : "memory"); in arch_spin_lock()
52 return !test_and_set_bit(0, &lock->lock); in arch_spin_trylock()
74 : "m" (*lock) : "memory"); in arch_read_lock()
94 : "m" (*lock) : "memory"); in arch_write_lock()
114 : "m" (*lock) : "memory"); in arch_read_trylock()
136 : "m" (*lock) : "memory"); in arch_write_trylock()
154 : "m" (*lock) : "memory"); in arch_read_unlock()
[all …]
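
Aside: the alpha hits above show the simplest pattern in this listing, a test-and-set spinlock (trylock is test_and_set_bit, unlock stores 0). A minimal user-space C11 sketch of that pattern, with hypothetical toy_* names rather than the kernel API:

#include <stdatomic.h>
#include <stdbool.h>

/* Hypothetical test-and-set spinlock; not the kernel implementation. */
typedef struct { atomic_flag flag; } toy_spinlock_t;
#define TOY_SPINLOCK_INIT { ATOMIC_FLAG_INIT }

static inline bool toy_spin_trylock(toy_spinlock_t *l)
{
        /* Succeeds only if the flag was previously clear. */
        return !atomic_flag_test_and_set_explicit(&l->flag,
                                                  memory_order_acquire);
}

static inline void toy_spin_lock(toy_spinlock_t *l)
{
        while (!toy_spin_trylock(l))
                ; /* busy-wait until the holder releases */
}

static inline void toy_spin_unlock(toy_spinlock_t *l)
{
        atomic_flag_clear_explicit(&l->flag, memory_order_release);
}
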
/arch/hexagon/include/asm/
spinlock.h
28 static inline void arch_read_lock(arch_rwlock_t *lock) in arch_read_lock() argument
37 : "r" (&lock->lock) in arch_read_lock()
51 : "r" (&lock->lock) in arch_read_unlock()
69 : "r" (&lock->lock) in arch_read_trylock()
85 : "r" (&lock->lock) in arch_write_lock()
102 : "r" (&lock->lock) in arch_write_trylock()
112 lock->lock = 0; in arch_write_unlock()
124 : "r" (&lock->lock) in arch_spin_lock()
133 lock->lock = 0; in arch_spin_unlock()
147 : "r" (&lock->lock) in arch_spin_trylock()
[all …]
/arch/powerpc/include/asm/
simple_spinlock.h
37 return lock.slock == 0; in arch_spin_value_unlocked()
101 splpar_spin_yield(lock); in spin_yield()
109 splpar_rw_yield(lock); in rw_yield()
122 splpar_spin_yield(lock); in arch_spin_lock()
133 lock->slock = 0; in arch_spin_unlock()
173 : "r" (&rw->lock), [eh] "n" (eh) in __arch_read_trylock()
253 : "r"(&rw->lock) in arch_read_unlock()
261 rw->lock = 0; in arch_write_unlock()
264 #define arch_spin_relax(lock) spin_yield(lock) argument
265 #define arch_read_relax(lock) rw_yield(lock) argument
[all …]
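
Aside: simple_spinlock.h wires arch_spin_relax()/arch_read_relax() to yield helpers so a spinner on a shared-processor LPAR can direct cycles to the lock holder. A hedged C11 sketch of that relax-hook shape, using sched_yield() as a stand-in for splpar_spin_yield() and hypothetical toy_* names:

#include <stdatomic.h>
#include <sched.h>

typedef struct { atomic_int slock; } toy_spinlock_t;

static void toy_spin_relax(toy_spinlock_t *l)
{
        (void)l;
        sched_yield(); /* stand-in for splpar_spin_yield(lock) */
}

static void toy_spin_lock(toy_spinlock_t *l)
{
        int expected = 0;
        while (!atomic_compare_exchange_weak_explicit(&l->slock, &expected, 1,
                                                      memory_order_acquire,
                                                      memory_order_relaxed)) {
                expected = 0;
                toy_spin_relax(l); /* the arch_spin_relax(lock) hook above */
        }
}
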
qspinlock.h
73 return READ_ONCE(lock->val); in queued_spin_is_locked()
78 return !lock.val; in queued_spin_value_unlocked()
83 return !!(READ_ONCE(lock->val) & _Q_TAIL_CPU_MASK); in queued_spin_is_contended()
107 : "r" (&lock->val), "r" (new), in __queued_spin_trylock_nosteal()
131 : "r" (&lock->val), "r" (new), "r" (_Q_TAIL_CPU_MASK), in __queued_spin_trylock_steal()
141 return __queued_spin_trylock_nosteal(lock); in queued_spin_trylock()
143 return __queued_spin_trylock_steal(lock); in queued_spin_trylock()
146 void queued_spin_lock_slowpath(struct qspinlock *lock);
150 if (!queued_spin_trylock(lock)) in queued_spin_lock()
151 queued_spin_lock_slowpath(lock); in queued_spin_lock()
[all …]
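
Aside: queued_spin_lock() above is the canonical fast-path/slow-path split: one inlined trylock CAS, with all queueing out of line. A self-contained C11 sketch of that structure (the real slow path queues waiters on MCS nodes; this toy_* version merely spins):

#include <stdatomic.h>
#include <stdbool.h>

typedef struct { atomic_int val; } toy_qspinlock_t;

static inline bool toy_qspin_trylock(toy_qspinlock_t *l)
{
        int unlocked = 0;
        /* 0 -> 1 transition with acquire semantics on success */
        return atomic_compare_exchange_strong_explicit(&l->val, &unlocked, 1,
                        memory_order_acquire, memory_order_relaxed);
}

/* Out of line in the real code; the kernel version queues waiters. */
static void toy_qspin_lock_slowpath(toy_qspinlock_t *l)
{
        while (!toy_qspin_trylock(l))
                ;
}

static inline void toy_qspin_lock(toy_qspinlock_t *l)
{
        if (!toy_qspin_trylock(l)) /* uncontended fast path: one CAS */
                toy_qspin_lock_slowpath(l);
}
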
/arch/sh/include/asm/
spinlock-cas.h
26 #define arch_spin_is_locked(x) ((x)->lock <= 0)
28 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
30 while (!__sl_cas(&lock->lock, 1, 0)); in arch_spin_lock()
35 __sl_cas(&lock->lock, 0, 1); in arch_spin_unlock()
40 return __sl_cas(&lock->lock, 1, 0); in arch_spin_trylock()
55 do old = rw->lock; in arch_read_lock()
62 do old = rw->lock; in arch_read_unlock()
63 while (__sl_cas(&rw->lock, old, old+1) != old); in arch_read_unlock()
73 __sl_cas(&rw->lock, 0, RW_LOCK_BIAS); in arch_write_unlock()
79 do old = rw->lock; in arch_read_trylock()
[all …]
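
Aside: spinlock-cas.h implements rwlocks with a biased counter: the word starts at RW_LOCK_BIAS, each reader takes one unit, and a writer claims the whole bias. A C11 sketch of that scheme, with a hypothetical TOY_RW_BIAS value and toy_* names:

#include <stdatomic.h>
#include <stdbool.h>

#define TOY_RW_BIAS 0x01000000
typedef struct { atomic_uint lock; } toy_rwlock_t;

static void toy_read_lock(toy_rwlock_t *rw)
{
        unsigned old;
        do {
                old = atomic_load_explicit(&rw->lock, memory_order_relaxed);
        } while (old == 0 || /* writer holds every unit */
                 !atomic_compare_exchange_weak_explicit(&rw->lock, &old,
                        old - 1, memory_order_acquire, memory_order_relaxed));
}

static void toy_read_unlock(toy_rwlock_t *rw)
{
        atomic_fetch_add_explicit(&rw->lock, 1, memory_order_release);
}

static bool toy_write_trylock(toy_rwlock_t *rw)
{
        unsigned expected = TOY_RW_BIAS; /* only when no readers or writer */
        return atomic_compare_exchange_strong_explicit(&rw->lock, &expected,
                        0, memory_order_acquire, memory_order_relaxed);
}

static void toy_write_unlock(toy_rwlock_t *rw)
{
        atomic_store_explicit(&rw->lock, TOY_RW_BIAS, memory_order_release);
}
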
spinlock-llsc.h
18 #define arch_spin_is_locked(x) ((x)->lock <= 0)
26 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
41 : "r" (&lock->lock) in arch_spin_lock()
56 : "r" (&lock->lock) in arch_spin_unlock()
74 : "r" (&lock->lock) in arch_spin_trylock()
103 : "r" (&rw->lock) in arch_read_lock()
119 : "r" (&rw->lock) in arch_read_unlock()
137 : "r" (&rw->lock), "r" (RW_LOCK_BIAS) in arch_write_lock()
147 : "r" (&rw->lock), "r" (RW_LOCK_BIAS) in arch_write_unlock()
168 : "r" (&rw->lock) in arch_read_trylock()
[all …]
/arch/arm/include/asm/
spinlock.h
62 prefetchw(&lock->slock); in arch_spin_lock()
110 lock->tickets.owner++; in arch_spin_unlock()
116 return lock.tickets.owner == lock.tickets.next; in arch_spin_value_unlocked()
143 prefetchw(&rw->lock); in arch_write_lock()
162 prefetchw(&rw->lock); in arch_write_trylock()
211 prefetchw(&rw->lock); in arch_read_lock()
221 : "r" (&rw->lock) in arch_read_lock()
233 prefetchw(&rw->lock); in arch_read_unlock()
241 : "r" (&rw->lock) in arch_read_unlock()
252 prefetchw(&rw->lock); in arch_read_trylock()
[all …]
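
Aside: the arm spinlock.h hits are a ticket lock: arch_spin_unlock() bumps tickets.owner, and the lock is free when owner == next. A C11 sketch of the ticket discipline (toy_* names hypothetical, fields assumed zero-initialized):

#include <stdatomic.h>

typedef struct {
        atomic_uint next;  /* next ticket to hand out */
        atomic_uint owner; /* ticket currently being served */
} toy_ticketlock_t;

static void toy_ticket_lock(toy_ticketlock_t *l)
{
        unsigned me = atomic_fetch_add_explicit(&l->next, 1,
                                                memory_order_relaxed);
        while (atomic_load_explicit(&l->owner, memory_order_acquire) != me)
                ; /* spin until it is our turn (FIFO fairness) */
}

static void toy_ticket_unlock(toy_ticketlock_t *l)
{
        /* the equivalent of lock->tickets.owner++ above */
        atomic_fetch_add_explicit(&l->owner, 1, memory_order_release);
}
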
mcs_spinlock.h
9 #define arch_mcs_spin_lock_contended(lock) \ argument
13 while (!(smp_load_acquire(lock))) \
17 #define arch_mcs_spin_unlock_contended(lock) \ argument
19 smp_store_release(lock, 1); \
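
Aside: these two macros are just an acquire/release handoff on a per-waiter flag; the holder publishes 1 to its successor's node. The same contended MCS handoff in C11 (toy_* names hypothetical):

#include <stdatomic.h>

static void toy_mcs_wait(atomic_int *node_locked)
{
        /* arch_mcs_spin_lock_contended(): spin with acquire loads */
        while (!atomic_load_explicit(node_locked, memory_order_acquire))
                ;
}

static void toy_mcs_grant(atomic_int *next_node_locked)
{
        /* arch_mcs_spin_unlock_contended(): release-store 1 to successor */
        atomic_store_explicit(next_node_locked, 1, memory_order_release);
}
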
/arch/sparc/include/asm/
spinlock_32.h
16 #define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0) argument
34 : "r" (lock) in arch_spin_lock()
43 : "r" (lock) in arch_spin_trylock()
92 #define arch_read_lock(lock) \ argument
95 __arch_read_lock(lock); \
112 #define arch_read_unlock(lock) \ argument
115 __arch_read_unlock(lock); \
138 : "r" (lock) in arch_write_unlock()
148 : "r" (&rw->lock) in arch_write_trylock()
152 val = rw->lock & ~0xff; in arch_write_trylock()
[all …]
/arch/x86/include/asm/
qspinlock.h
23 val = GEN_BINARY_RMWcc(LOCK_PREFIX "btsl", lock->val.counter, c, in queued_fetch_set_pending_acquire()
25 val |= atomic_read(&lock->val) & ~_Q_PENDING_MASK; in queued_fetch_set_pending_acquire()
33 extern void __pv_queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);
44 static inline void native_queued_spin_unlock(struct qspinlock *lock) in native_queued_spin_unlock() argument
46 smp_store_release(&lock->locked, 0); in native_queued_spin_unlock()
51 pv_queued_spin_lock_slowpath(lock, val); in queued_spin_lock_slowpath()
54 static inline void queued_spin_unlock(struct qspinlock *lock) in queued_spin_unlock() argument
57 pv_queued_spin_unlock(lock); in queued_spin_unlock()
88 static inline bool virt_spin_lock(struct qspinlock *lock) in virt_spin_lock() argument
102 val = atomic_read(&lock->val); in virt_spin_lock()
[all …]
cmpxchg.h
42 #define __xchg_op(ptr, arg, op, lock) \ argument
47 asm_inline volatile (lock #op "b %b0, %1" \
52 asm_inline volatile (lock #op "w %w0, %1" \
57 asm_inline volatile (lock #op "l %0, %1" \
62 asm_inline volatile (lock #op "q %q0, %1" \
85 #define __raw_cmpxchg(ptr, old, new, size, lock) \ argument
94 asm_inline volatile(lock "cmpxchgb %2, %1" \
103 asm_inline volatile(lock "cmpxchgw %2, %1" \
112 asm_inline volatile(lock "cmpxchgl %2, %1" \
121 asm_inline volatile(lock "cmpxchgq %2, %1" \
[all …]
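
Aside: __xchg_op() and __raw_cmpxchg() above hand-dispatch one LOCK-prefixed instruction per operand width (b/w/l/q). A compiler builtin does the same dispatch generically; a hedged user-space equivalent for the 4-byte case:

#include <stdint.h>

/* Returns the previous value, like the kernel's cmpxchg(). */
static inline uint32_t toy_cmpxchg32(uint32_t *ptr, uint32_t old,
                                     uint32_t new_)
{
        uint32_t expected = old;

        /* One CAS instruction per width; the builtin picks it by type. */
        __atomic_compare_exchange_n(ptr, &expected, new_, 0,
                                    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
        return expected;
}
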
/arch/powerpc/lib/
qspinlock.c
17 struct qspinlock *lock; member
241 try_set_sleepy(lock, val); in seen_sleepy_owner()
276 if (qnode->lock == lock) in get_tail_qnode()
320 clear_mustq(lock); in __yield_to_locked_owner()
323 set_mustq(lock); in __yield_to_locked_owner()
556 node->lock = lock; in queued_spin_lock_mcs_queue()
652 set_mustq(lock); in queued_spin_lock_mcs_queue()
706 node->lock = NULL; in queued_spin_lock_mcs_queue()
747 static DEFINE_MUTEX(lock); in steal_spins_set()
756 mutex_lock(&lock); in steal_spins_set()
[all …]
locks.c
21 void splpar_spin_yield(arch_spinlock_t *lock) in splpar_spin_yield() argument
25 lock_value = lock->slock; in splpar_spin_yield()
35 if (lock->slock != lock_value) in splpar_spin_yield()
51 lock_value = rw->lock; in splpar_rw_yield()
61 if (rw->lock != lock_value) in splpar_rw_yield()
/arch/arm64/kvm/hyp/include/nvhe/
spinlock.h
44 static inline void hyp_spin_lock(hyp_spinlock_t *lock) in hyp_spin_lock() argument
77 : "=&r" (lockval), "=&r" (newval), "=&r" (tmp), "+Q" (*lock) in hyp_spin_lock()
78 : "Q" (lock->owner) in hyp_spin_lock()
82 static inline void hyp_spin_unlock(hyp_spinlock_t *lock) in hyp_spin_unlock() argument
96 : "=Q" (lock->owner), "=&r" (tmp) in hyp_spin_unlock()
101 static inline bool hyp_spin_is_locked(hyp_spinlock_t *lock) in hyp_spin_is_locked() argument
103 hyp_spinlock_t lockval = READ_ONCE(*lock); in hyp_spin_is_locked()
109 static inline void hyp_assert_lock_held(hyp_spinlock_t *lock) in hyp_assert_lock_held() argument
119 BUG_ON(!hyp_spin_is_locked(lock)); in hyp_assert_lock_held()
122 static inline void hyp_assert_lock_held(hyp_spinlock_t *lock) { } in hyp_assert_lock_held() argument
/arch/mips/include/asm/octeon/
cvmx-spinlock.h
63 static inline void cvmx_spinlock_init(cvmx_spinlock_t *lock) in cvmx_spinlock_init() argument
65 lock->value = CVMX_SPINLOCK_UNLOCKED_VAL; in cvmx_spinlock_init()
74 static inline int cvmx_spinlock_locked(cvmx_spinlock_t *lock) in cvmx_spinlock_locked() argument
76 return lock->value != CVMX_SPINLOCK_UNLOCKED_VAL; in cvmx_spinlock_locked()
84 static inline void cvmx_spinlock_unlock(cvmx_spinlock_t *lock) in cvmx_spinlock_unlock() argument
87 lock->value = 0; in cvmx_spinlock_unlock()
103 static inline unsigned int cvmx_spinlock_trylock(cvmx_spinlock_t *lock) in cvmx_spinlock_trylock() argument
117 [val] "+m"(lock->value), [tmp] "=&r"(tmp) in cvmx_spinlock_trylock()
128 static inline void cvmx_spinlock_lock(cvmx_spinlock_t *lock) in cvmx_spinlock_lock() argument
140 [val] "+m"(lock->value), [tmp] "=&r"(tmp) in cvmx_spinlock_lock()
/arch/s390/lib/
spinlock.c
93 static inline int arch_load_niai4(int *lock) in arch_load_niai4() argument
100 : [owner] "=d" (owner) : [lock] "R" (*lock) : "memory"); in arch_load_niai4()
113 : [old] "+d" (old), [lock] "+Q" (*lock), "=@cc" (cc) in arch_try_cmpxchg_niai8()
128 : [old] "+d" (old), [lock] "+Q" (*lock) in arch_try_cmpxchg_niai8()
147 if (lock & _Q_LOCK_CPU_MASK) in arch_spin_yield_target()
148 return lock & _Q_LOCK_CPU_MASK; in arch_spin_yield_target()
169 old = READ_ONCE(lp->lock); in arch_spin_lock_queued()
189 if (arch_try_cmpxchg(&lp->lock, &old, new)) in arch_spin_lock_queued()
221 old = READ_ONCE(lp->lock); in arch_spin_lock_queued()
263 old = arch_load_niai4(&lp->lock); in arch_spin_lock_classic()
[all …]
/arch/arc/include/asm/
spinlock.h
17 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
28 : [slock] "r" (&(lock->slock)), in arch_spin_lock()
44 static inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
58 : [slock] "r" (&(lock->slock)), in arch_spin_trylock()
67 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() argument
71 WRITE_ONCE(lock->slock, __ARCH_SPIN_LOCK_UNLOCKED__); in arch_spin_unlock()
222 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
237 : "r"(&(lock->slock)), "ir"(__ARCH_SPIN_LOCK_LOCKED__) in arch_spin_lock()
244 static inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
253 : "r"(&(lock->slock)) in arch_spin_trylock()
[all …]
/arch/s390/include/asm/
spinlock.h
52 void arch_spin_relax(arch_spinlock_t *lock);
64 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() argument
66 return lock.lock == 0; in arch_spin_value_unlocked()
71 return READ_ONCE(lp->lock) != 0; in arch_spin_is_locked()
79 return likely(arch_try_cmpxchg(&lp->lock, &old, spinlock_lockval())); in arch_spin_trylock_once()
97 typecheck(int, lp->lock); in arch_spin_unlock()
102 : [lock] "=Q" (((unsigned short *)&lp->lock)[1]) in arch_spin_unlock()
/arch/arm/mach-spear/
pl080.c
21 static spinlock_t lock = __SPIN_LOCK_UNLOCKED(x);
33 spin_lock_irqsave(&lock, flags); in pl080_get_signal()
38 spin_unlock_irqrestore(&lock, flags); in pl080_get_signal()
58 spin_unlock_irqrestore(&lock, flags); in pl080_get_signal()
67 spin_lock_irqsave(&lock, flags); in pl080_put_signal()
75 spin_unlock_irqrestore(&lock, flags); in pl080_put_signal()
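
Aside: pl080.c shows the canonical spin_lock_irqsave() pattern, saving and disabling local interrupts so the same lock may also be taken from IRQ context. A kernel-context sketch of that pattern, assuming <linux/spinlock.h> and hypothetical toy_* names:

#include <linux/spinlock.h>

static DEFINE_SPINLOCK(toy_lock); /* hypothetical lock, as in pl080.c */
static int toy_counter;

static void toy_touch_counter(void)
{
        unsigned long flags;

        spin_lock_irqsave(&toy_lock, flags);      /* IRQs off, lock held */
        toy_counter++;                            /* critical section */
        spin_unlock_irqrestore(&toy_lock, flags); /* restore IRQ state */
}
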
/arch/sparc/kernel/
ebus.c
76 spin_lock_irqsave(&p->lock, flags); in ebus_dma_irq()
79 spin_unlock_irqrestore(&p->lock, flags); in ebus_dma_irq()
135 spin_lock_irqsave(&p->lock, flags); in ebus_dma_irq_enable()
141 spin_lock_irqsave(&p->lock, flags); in ebus_dma_irq_enable()
162 spin_lock_irqsave(&p->lock, flags); in ebus_dma_unregister()
169 spin_unlock_irqrestore(&p->lock, flags); in ebus_dma_unregister()
185 spin_lock_irqsave(&p->lock, flags); in ebus_dma_request()
199 spin_unlock_irqrestore(&p->lock, flags); in ebus_dma_request()
210 spin_lock_irqsave(&p->lock, flags); in ebus_dma_prepare()
225 spin_unlock_irqrestore(&p->lock, flags); in ebus_dma_prepare()
[all …]
/arch/powerpc/platforms/44x/
uic.c
46 raw_spinlock_t lock; member
60 raw_spin_lock_irqsave(&uic->lock, flags); in uic_unmask_irq()
77 raw_spin_lock_irqsave(&uic->lock, flags); in uic_mask_irq()
90 raw_spin_lock_irqsave(&uic->lock, flags); in uic_ack_irq()
103 raw_spin_lock_irqsave(&uic->lock, flags); in uic_mask_ack_irq()
151 raw_spin_lock_irqsave(&uic->lock, flags); in uic_set_irq_type()
204 raw_spin_lock(&desc->lock); in uic_irq_cascade()
209 raw_spin_unlock(&desc->lock); in uic_irq_cascade()
220 raw_spin_lock(&desc->lock); in uic_irq_cascade()
225 raw_spin_unlock(&desc->lock); in uic_irq_cascade()
[all …]
/arch/powerpc/platforms/52xx/
mpc52xx_gpt.c
91 raw_spinlock_t lock; member
142 raw_spin_lock_irqsave(&gpt->lock, flags); in mpc52xx_gpt_irq_unmask()
152 raw_spin_lock_irqsave(&gpt->lock, flags); in mpc52xx_gpt_irq_mask()
172 raw_spin_lock_irqsave(&gpt->lock, flags); in mpc52xx_gpt_irq_set_type()
262 raw_spin_lock_irqsave(&gpt->lock, flags); in mpc52xx_gpt_irq_setup()
293 raw_spin_lock_irqsave(&gpt->lock, flags); in mpc52xx_gpt_gpio_set()
307 raw_spin_lock_irqsave(&gpt->lock, flags); in mpc52xx_gpt_gpio_dir_in()
433 raw_spin_lock_irqsave(&gpt->lock, flags); in mpc52xx_gpt_do_start()
473 raw_spin_lock_irqsave(&gpt->lock, flags); in mpc52xx_gpt_stop_timer()
497 raw_spin_lock_irqsave(&gpt->lock, flags); in mpc52xx_gpt_timer_period()
[all …]
/arch/x86/hyperv/
hv_spinlock.c
82 pv_ops.lock.queued_spin_lock_slowpath = __pv_queued_spin_lock_slowpath; in hv_init_spinlocks()
83 pv_ops.lock.queued_spin_unlock = PV_CALLEE_SAVE(__pv_queued_spin_unlock); in hv_init_spinlocks()
84 pv_ops.lock.wait = hv_qlock_wait; in hv_init_spinlocks()
85 pv_ops.lock.kick = hv_qlock_kick; in hv_init_spinlocks()
86 pv_ops.lock.vcpu_is_preempted = PV_CALLEE_SAVE(hv_vcpu_is_preempted); in hv_init_spinlocks()
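
Aside: hv_init_spinlocks() retargets the lock slow path by rewriting function pointers in pv_ops. A minimal C sketch of that dispatch shape, with a hypothetical toy_lock_ops table standing in for the real pv_ops machinery:

/* Hypothetical dispatch table; the real pv_ops is more involved. */
struct toy_lock_ops {
        void (*queued_spin_unlock)(void *lock);
};

static void toy_native_unlock(void *lock) { (void)lock; /* release store */ }
static void toy_hv_unlock(void *lock)     { (void)lock; /* hypercall-aware */ }

static struct toy_lock_ops toy_ops = {
        .queued_spin_unlock = toy_native_unlock,
};

static void toy_init_hv_locks(void)
{
        /* analogous to pv_ops.lock.queued_spin_unlock = ... above */
        toy_ops.queued_spin_unlock = toy_hv_unlock;
}
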
/arch/x86/kernel/
paravirt-spinlocks.c
12 __visible void __native_queued_spin_unlock(struct qspinlock *lock) in __native_queued_spin_unlock() argument
14 native_queued_spin_unlock(lock); in __native_queued_spin_unlock()
20 return pv_ops.lock.queued_spin_unlock.func == in pv_is_native_spin_unlock()
32 return pv_ops.lock.vcpu_is_preempted.func == in pv_is_native_vcpu_is_preempted()
/arch/s390/hypfs/
hypfs_dbfs.c
42 if (mutex_lock_interruptible(&df->lock)) in dbfs_read()
47 mutex_unlock(&df->lock); in dbfs_read()
52 mutex_unlock(&df->lock); in dbfs_read()
56 mutex_unlock(&df->lock); in dbfs_read()
68 mutex_lock(&df->lock); in dbfs_ioctl()
73 mutex_unlock(&df->lock); in dbfs_ioctl()
86 mutex_init(&df->lock); in hypfs_dbfs_create_file()
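
Aside: dbfs_read() takes its mutex with mutex_lock_interruptible() so a blocked reader can be interrupted by a signal. A kernel-context sketch of that pattern, assuming <linux/mutex.h> and hypothetical toy_* names:

#include <linux/errno.h>
#include <linux/mutex.h>

static DEFINE_MUTEX(toy_mutex); /* hypothetical, cf. mutex_init(&df->lock) */

static int toy_read(void)
{
        if (mutex_lock_interruptible(&toy_mutex))
                return -ERESTARTSYS; /* a signal arrived while sleeping */
        /* ... read under the lock ... */
        mutex_unlock(&toy_mutex);
        return 0;
}
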

Completed in 39 milliseconds
