/linux-6.3-rc2/arch/arc/include/asm/

spinlock.h
     40  smp_mb();  in arch_spin_lock()
     62  smp_mb();  in arch_spin_trylock()
     69  smp_mb();  in arch_spin_unlock()
    105  smp_mb();  in arch_read_lock()
    129  smp_mb();  in arch_read_trylock()
    163  smp_mb();  in arch_write_lock()
    188  smp_mb();  in arch_write_trylock()
    197  smp_mb();  in arch_read_unlock()
    215  smp_mb();  in arch_write_unlock()
    231  smp_mb();  in arch_spin_lock()
    [all …]

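These hits share one pattern: on an architecture whose atomic primitives are unordered, a lock needs a full barrier after it is taken and another before it is released, so the critical section cannot leak out in either direction. A minimal sketch of that shape, built from generic kernel primitives rather than ARC's actual LLOCK/SCOND assembly (the toy_* names are invented):

    /* Illustrative sketch only, not the real ARC implementation. */
    static inline void toy_spin_lock(atomic_t *lock)
    {
            /* Spin until the old value was 0, i.e. we took the lock. */
            while (atomic_xchg_relaxed(lock, 1))
                    cpu_relax();
            smp_mb();       /* critical section stays after the acquire */
    }

    static inline void toy_spin_unlock(atomic_t *lock)
    {
            smp_mb();       /* critical section completes before release */
            atomic_set(lock, 0);
    }
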
futex.h
     20  smp_mb(); \
     42  smp_mb() \
     48  smp_mb(); \
     69  smp_mb() \
    133  smp_mb();  in futex_atomic_cmpxchg_inatomic()
    160  smp_mb();  in futex_atomic_cmpxchg_inatomic()

atomic64-arcv2.h
    145  smp_mb();  in ATOMIC64_OPS()
    158  smp_mb();  in ATOMIC64_OPS()
    167  smp_mb();  in arch_atomic64_xchg()
    178  smp_mb();  in arch_atomic64_xchg()
    195  smp_mb();  in arch_atomic64_dec_if_positive()
    209  smp_mb();  in arch_atomic64_dec_if_positive()
    228  smp_mb();  in arch_atomic64_fetch_add_unless()
    244  smp_mb();  in arch_atomic64_fetch_add_unless()

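Here the barriers come in pairs because each fully ordered atomic brackets an unordered LL/SC retry loop with smp_mb() on both sides. A rough sketch of the shape, assuming a relaxed variant of the operation is available (this is not ARC's actual inline assembly):

    /* Sketch: a fully ordered RMW built from an unordered core. */
    static inline s64 toy_atomic64_add_return(s64 i, atomic64_t *v)
    {
            s64 ret;

            smp_mb();       /* earlier accesses complete before the RMW */
            ret = atomic64_add_return_relaxed(i, v);
            smp_mb();       /* the RMW completes before later accesses */
            return ret;
    }
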
/linux-6.3-rc2/arch/arm/include/asm/

spinlock.h
     78  smp_mb();  in arch_spin_lock()
    100  smp_mb();  in arch_spin_trylock()
    109  smp_mb();  in arch_spin_unlock()
    155  smp_mb();  in arch_write_lock()
    175  smp_mb();  in arch_write_trylock()
    184  smp_mb();  in arch_write_unlock()
    224  smp_mb();  in arch_read_lock()
    231  smp_mb();  in arch_read_unlock()
    266  smp_mb();  in arch_read_trylock()

futex.h
     28  smp_mb(); \
     56  smp_mb();  in futex_atomic_cmpxchg_inatomic()
     73  smp_mb();  in futex_atomic_cmpxchg_inatomic()

atomic.h
    133  smp_mb();  in arch_atomic_fetch_add_unless()
    150  smp_mb();  in arch_atomic_fetch_add_unless()
    452  smp_mb();  in arch_atomic64_dec_if_positive()
    469  smp_mb();  in arch_atomic64_dec_if_positive()
    480  smp_mb();  in arch_atomic64_fetch_add_unless()
    499  smp_mb();  in arch_atomic64_fetch_add_unless()

/linux-6.3-rc2/arch/alpha/include/asm/

atomic.h
     72  smp_mb(); \
     90  smp_mb(); \
    126  smp_mb(); \
    145  smp_mb(); \
    225  smp_mb();  in ATOMIC_OPS()
    240  smp_mb();  in ATOMIC_OPS()
    257  smp_mb();  in arch_atomic64_fetch_add_unless()
    272  smp_mb();  in arch_atomic64_fetch_add_unless()
    287  smp_mb();  in arch_atomic64_dec_if_positive()
    301  smp_mb();  in arch_atomic64_dec_if_positive()

cmpxchg.h
     49  smp_mb(); \
     52  smp_mb(); \
     61  smp_mb(); \
     64  smp_mb(); \

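The four hits are the two barriers in each of the xchg and cmpxchg wrappers: Alpha's raw LL/SC primitives are unordered, and the macros make them fully ordered by fencing both sides. Schematically, with the inner primitive stood in for by the real cmpxchg_relaxed() API and the toy_ name invented:

    #define toy_cmpxchg(ptr, old, new)                              \
    ({                                                              \
            __typeof__(*(ptr)) _ret;                                \
            smp_mb();       /* prior accesses before the cmpxchg */ \
            _ret = cmpxchg_relaxed((ptr), (old), (new));            \
            smp_mb();       /* cmpxchg before later accesses */     \
            _ret;                                                   \
    })
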
/linux-6.3-rc2/tools/testing/selftests/rcutorture/formal/srcu-cbmc/src/

barriers.h
      8  #define smp_mb() __sync_synchronize()   macro
     15  #define smp_mb() __CPROVER_fence("WWfence", "RRfence", "RWfence", "WRfence", \   macro
     27  #define sync_smp_mb() smp_mb()
     33  #define rs_smp_mb() smp_mb()

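This header shows the two-world trick used by the CBMC harness: a real GCC full barrier when the test runs natively, and an explicit model-checker fence when it runs under CBMC. A condensed sketch of that idea (the RUN_CBMC guard name is made up, and the real __CPROVER_fence call continues past the four fence kinds shown above):

    #ifdef RUN_CBMC                 /* hypothetical guard */
    #define smp_mb() __CPROVER_fence("WWfence", "RRfence", \
                                     "RWfence", "WRfence")
    #else
    #define smp_mb() __sync_synchronize()   /* GCC full barrier */
    #endif
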
/linux-6.3-rc2/tools/include/asm/

barrier.h
     44  #ifndef smp_mb
     45  # define smp_mb() mb()   macro
     51  smp_mb(); \
     60  smp_mb(); \

/linux-6.3-rc2/kernel/sched/

membarrier.c
    167  smp_mb(); /* IPIs should be serializing but paranoid. */  in ipi_mb()
    182  smp_mb(); /* IPIs should be serializing but paranoid. */  in ipi_sync_core()
    196  smp_mb();  in ipi_rseq()
    214  smp_mb();  in ipi_sync_rq_state()
    224  smp_mb();  in membarrier_exec_mmap()
    257  smp_mb(); /* system call entry is not a mb. */  in membarrier_global_expedited()
    306  smp_mb(); /* exit from system call is not a mb */  in membarrier_global_expedited()
    345  smp_mb(); /* system call entry is not a mb. */  in membarrier_private_expedited()
    421  smp_mb(); /* exit from system call is not a mb */  in membarrier_private_expedited()
    442  smp_mb();  in sync_runqueues_membarrier_state()

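The "system call entry is not a mb" comments spell out the contract these barriers implement: every thread that was running when membarrier() was invoked observes a full barrier before the call returns, which lets the other side of the pairing use only a cheap compiler barrier on its fast path. The user-space counterpart might look like this (error handling elided; see membarrier(2)):

    #include <linux/membarrier.h>
    #include <sys/syscall.h>
    #include <unistd.h>

    static int membarrier(int cmd, unsigned int flags, int cpu_id)
    {
            return syscall(__NR_membarrier, cmd, flags, cpu_id);
    }

    static void heavyweight_fence(void)
    {
            /* Acts as an smp_mb() on all concurrently running threads. */
            membarrier(MEMBARRIER_CMD_GLOBAL, 0, 0);
    }
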
/linux-6.3-rc2/tools/memory-model/litmus-tests/

IRIW+fencembonceonces+OnceOnce.litmus
      6  * Test of independent reads from independent writes with smp_mb()
      7  * between each pairs of reads. In other words, is smp_mb() sufficient to
     26  smp_mb();
     41  smp_mb();

R+fencembonceonces.litmus
      6  * This is the fully ordered (via smp_mb()) version of one of the classic
     17  smp_mb();
     26  smp_mb();

SB+fencembonceonces.litmus
     19  smp_mb();
     28  smp_mb();

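Written out as ordinary kernel C rather than herd7 litmus syntax, the store-buffering shape this test fences looks like the sketch below; without both smp_mb() calls, the outcome r0 == 0 && r1 == 0 is permitted on real hardware, because each CPU's store can sit in its store buffer while the subsequent load runs.

    int x, y;
    int r0, r1;

    static void cpu0(void)
    {
            WRITE_ONCE(x, 1);
            smp_mb();       /* order the store to x before the load of y */
            r0 = READ_ONCE(y);
    }

    static void cpu1(void)
    {
            WRITE_ONCE(y, 1);
            smp_mb();       /* order the store to y before the load of x */
            r1 = READ_ONCE(x);
    }
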
README
     24  Test of independent reads from independent writes with smp_mb()
     25  between each pairs of reads. In other words, is smp_mb()
     41  separated by smp_mb(). This addition of an external process to
     53  Does a control dependency and an smp_mb() suffice for the
    109  This is the fully ordered (via smp_mb()) version of one of
    114  As above, but without the smp_mb() invocations.
    117  This is the fully ordered (again, via smp_mb() version of store
    122  As above, but without the smp_mb() invocations.

/linux-6.3-rc2/arch/sh/kernel/

ftrace.c
    137  smp_mb();  in arch_ftrace_nmi_enter()
    143  smp_mb();  in arch_ftrace_nmi_exit()
    174  smp_mb();  in do_ftrace_mod_code()
    179  smp_mb();  in do_ftrace_mod_code()
    184  smp_mb();  in do_ftrace_mod_code()

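The do_ftrace_mod_code() hits belong to a handshake that keeps NMIs from executing half-patched code: the patcher publishes the new instruction data, then a full barrier orders that data before the flag that tells NMI context to act, and the NMI side uses the mirror-image barrier. In outline, with all variable and function names invented (the real file's handshake is more involved):

    static void *patch_site;            /* invented names throughout */
    static unsigned long patch_bytes;
    static int patch_pending;

    static void start_patch(void *site, unsigned long insn)
    {
            patch_site  = site;
            patch_bytes = insn;
            smp_mb();               /* data visible before the flag */
            WRITE_ONCE(patch_pending, 1);
    }

    static void nmi_side(void)
    {
            if (READ_ONCE(patch_pending)) {
                    smp_mb();       /* flag before the data it guards */
                    apply_patch(patch_site, patch_bytes);   /* hypothetical */
            }
    }
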
/linux-6.3-rc2/include/asm-generic/

barrier.h
     98  #ifndef smp_mb
     99  #define smp_mb() do { kcsan_mb(); __smp_mb(); } while (0)   macro
    112  #ifndef smp_mb
    113  #define smp_mb() barrier()   macro

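These two definitions are the ends of a fallback chain: on SMP builds, smp_mb() wraps the architecture's __smp_mb() with a KCSAN annotation; on uniprocessor builds it collapses to a pure compiler barrier, since there is no other CPU to order against. Roughly how the layers stack (the arm64 line shows the real instruction, simplified out of its actual macro context):

    /* arch header supplies the instruction, e.g. arm64: */
    #define __smp_mb()      asm volatile("dmb ish" ::: "memory")

    /* asm-generic/barrier.h, CONFIG_SMP: */
    #define smp_mb()        do { kcsan_mb(); __smp_mb(); } while (0)

    /* asm-generic/barrier.h, !CONFIG_SMP: */
    #define smp_mb()        barrier()
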
/linux-6.3-rc2/tools/virtio/ringtest/

ring.c
    183  smp_mb();  in enable_call()
    193  smp_mb();  in kick_available()
    215  smp_mb();  in enable_kick()
    260  smp_mb();  in call_used()

main.h
    114  #define smp_mb() asm volatile("lock; addl $0,-132(%%rsp)" ::: "memory", "cc")   macro
    120  #define smp_mb() __sync_synchronize()   macro
    177  smp_mb(); /* Enforce dependency ordering from x */ \

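Both definitions above are user-space stand-ins: the x86 one exploits the fact that a locked read-modify-write (here to a dead stack slot) is a full fence and is typically cheaper than mfence, while __sync_synchronize() is the portable GCC fallback. The calls in ring.c and virtio_ring_0_9.c guard the classic store-then-load window between publishing a ring entry and checking the peer's notification flag; a sketch with an invented ring layout:

    struct toy_ring {                   /* invented, not the real layout */
            void *desc[64];
            unsigned int head;
            int peer_wants_kick;
    };

    static void notify_peer(struct toy_ring *r);    /* hypothetical wakeup */

    static void publish_and_maybe_kick(struct toy_ring *r, void *buf)
    {
            r->desc[r->head++ & 63] = buf;  /* make the entry visible */
            smp_mb();       /* ring store before the flag load */
            if (READ_ONCE(r->peer_wants_kick))
                    notify_peer(r);
    }
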
virtio_ring_0_9.c
    222  smp_mb();  in enable_call()
    232  smp_mb();  in kick_available()
    254  smp_mb();  in enable_kick()
    325  smp_mb();  in call_used()

/linux-6.3-rc2/arch/arm64/include/asm/vdso/

compat_barrier.h
     27  #undef smp_mb
     31  #define smp_mb() aarch32_smp_mb()   macro

/linux-6.3-rc2/drivers/comedi/drivers/

dyna_pci10xx.c
     80  smp_mb();  in dyna_pci10xx_insn_read_ai()
    111  smp_mb();  in dyna_pci10xx_insn_write_ao()
    130  smp_mb();  in dyna_pci10xx_di_insn_bits()
    150  smp_mb();  in dyna_pci10xx_do_insn_bits()

/linux-6.3-rc2/arch/hexagon/include/asm/

spinlock.h
    111  smp_mb();  in arch_write_unlock()
    132  smp_mb();  in arch_spin_unlock()

/linux-6.3-rc2/arch/riscv/kernel/

cpu_ops_sbi.c
     73  smp_mb();  in sbi_cpu_start()
     77  smp_mb();  in sbi_cpu_start()

/linux-6.3-rc2/tools/memory-model/Documentation/

recipes.txt
    149  smp_mb();
    183  smp_mb();
    337  * smp_wmb() (B)     smp_mb() (D)
    346  smp_wmb() would also work with smp_mb() replacing either or both of the
    374  smp_mb();
    392  * smp_wmb() (B)     smp_mb() (D)
    457  smp_mb();
    483  smp_mb();
    490  smp_mb();
    494  Omitting either smp_mb() will allow both r0 and r1 to have final
    [all …]

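The "smp_wmb() (B) ... smp_mb() (D)" lines are the message-passing recipe: the writer orders its data store before the flag store, the reader orders its flag load before the data load, and the surrounding text notes that smp_mb() can stand in for either lighter barrier. Spelled out as a self-contained sketch:

    int data, flag;
    int r0, r1;

    static void writer(void)
    {
            WRITE_ONCE(data, 42);
            smp_wmb();              /* (B): data store before flag store */
            WRITE_ONCE(flag, 1);
    }

    static void reader(void)
    {
            r0 = READ_ONCE(flag);
            smp_mb();               /* (D): flag load before data load */
            r1 = READ_ONCE(data);   /* r0 == 1 guarantees r1 == 42 */
    }
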