/arch/arm64/include/asm/vdso/

  compat_barrier.h
      17  #ifdef dmb
      18  #undef dmb
      21  #define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")    (macro)
      23  #define aarch32_smp_mb()  dmb(ish)
      24  #define aarch32_smp_rmb() dmb(ishld)
      25  #define aarch32_smp_wmb() dmb(ishst)
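
The compat header re-creates the AArch32 barrier macros for arm64's 32-bit
vDSO: any prior dmb definition is #undef'd, then rebuilt by pasting the
barrier option into the mnemonic with the preprocessor's # stringify
operator, so dmb(ish) emits the literal instruction "dmb ish". A minimal
sketch of that pattern and a typical use (builds only with an Arm target
compiler; publish(), data and flag are hypothetical, not from the header):

    #define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")

    #define aarch32_smp_mb()  dmb(ish)    /* full barrier, inner shareable */
    #define aarch32_smp_rmb() dmb(ishld)  /* orders loads                  */
    #define aarch32_smp_wmb() dmb(ishst)  /* orders stores                 */

    int data, flag;

    void publish(int v)
    {
        data = v;
        aarch32_smp_wmb();  /* data store is visible before the flag store */
        flag = 1;
    }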
|
/arch/arm/include/asm/

  barrier.h
      21  #define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")    (macro)
      33  #define dmb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \    (macro)
      40  #define dmb(x) __asm__ __volatile__ ("" : : : "memory")    (macro)
      45  #define dmb(x) __asm__ __volatile__ ("" : : : "memory")    (macro)
      67  #define dma_rmb() dmb(osh)
      68  #define dma_wmb() dmb(oshst)
      77  #define __smp_mb()  dmb(ish)
      79  #define __smp_wmb() dmb(ishst)
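
The 32-bit header picks one of several dmb implementations depending on
the CPU generation, as the four definitions above show: ARMv7 has a native
DMB instruction, ARMv6 spells the same barrier as a CP15 cache-operations
register write, and older cores fall back to a pure compiler barrier. A
sketch of that ladder, keyed off the compiler's __ARM_ARCH rather than the
kernel's real CONFIG_CPU_32v* symbols (so the guards here are assumptions,
not the kernel's):

    #if defined(__ARM_ARCH) && __ARM_ARCH >= 7
    /* ARMv7+: native barrier instruction */
    #define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")
    #elif defined(__ARM_ARCH) && __ARM_ARCH == 6
    /* ARMv6: DMB is a write to CP15 c7, c10, 5 */
    #define dmb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
                                         : : "r" (0) : "memory")
    #else
    /* older cores: compiler barrier only */
    #define dmb(x) __asm__ __volatile__ ("" : : : "memory")
    #endif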
|
  assembler.h
     381  ALT_SMP(dmb ish)
     383  ALT_SMP(W(dmb) ish)
     386  ALT_SMP(mcr p15, 0, r0, c7, c10, 5)    @ dmb
     404  dmb ish
     406  W(dmb) ish
     409  mcr p15, 0, r0, c7, c10, 5    @ dmb
|
/arch/arm64/include/asm/

  barrier.h
      28  #define dmb(opt) asm volatile("dmb " #opt : : : "memory")    (macro)
      67  #define __dma_mb()  dmb(osh)
      68  #define __dma_rmb() dmb(oshld)
      69  #define __dma_wmb() dmb(oshst)
     126  #define __smp_mb()  dmb(ish)
     127  #define __smp_rmb() dmb(ishld)
     128  #define __smp_wmb() dmb(ishst)
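
The arm64 wrappers choose the DMB option along two axes: shareability
domain (osh reaches outer-shareable observers such as DMA masters; ish
covers the inner-shareable domain, i.e. the other CPUs) and access type
(ld orders prior loads, st orders stores, no suffix orders both). A sketch
of the macro plus a typical dma_wmb()-style use; the struct and function
names below are made up for illustration:

    #define dmb(opt) asm volatile("dmb " #opt : : : "memory")

    struct desc { unsigned int len; unsigned int ready; };

    void post_descriptor(volatile struct desc *d, unsigned int len)
    {
        d->len = len;
        dmb(oshst);     /* descriptor body reaches the device ... */
        d->ready = 1;   /* ... before the ready flag does         */
    }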
|
  atomic_ll_sc.h
      86  ATOMIC_OP_RETURN( , dmb ish, , l, "memory", __VA_ARGS__)\
      90  ATOMIC_FETCH_OP ( , dmb ish, , l, "memory", __VA_ARGS__)\
     101  ATOMIC_FETCH_OP ( , dmb ish, , l, "memory", __VA_ARGS__)\    in ATOMIC_OPS()
     182  ATOMIC64_OP_RETURN(, dmb ish, , l, "memory", __VA_ARGS__) \
     186  ATOMIC64_FETCH_OP (, dmb ish, , l, "memory", __VA_ARGS__) \
     197  ATOMIC64_FETCH_OP (, dmb ish, , l, "memory", __VA_ARGS__) \
     290  __CMPXCHG_CASE(w, b, mb_,  8, dmb ish, , l, "memory", K)
     291  __CMPXCHG_CASE(w, h, mb_, 16, dmb ish, , l, "memory", K)
     292  __CMPXCHG_CASE(w,  , mb_, 32, dmb ish, , l, "memory", K)
     293  __CMPXCHG_CASE( ,  , mb_, 64, dmb ish, , l, "memory", L)
          [all …]
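
In these macro tables the argument triple "dmb ish, , l" selects the fully
ordered ("mb_") variants: no acquire on the load-exclusive, release ("l")
on the store-exclusive, and a trailing "dmb ish" to complete the ordering.
A sketch of the loop those macros expand to, modelled on the kernel's
LL/SC atomics but hand-written here (AArch64 only, illustrative):

    /* fully ordered atomic add-return, LL/SC style */
    static inline int my_atomic_add_return(int i, int *v)
    {
        int result, tmp;

        asm volatile(
        "1: ldxr    %w0, %2\n"          /* load-exclusive               */
        "   add     %w0, %w0, %w3\n"
        "   stlxr   %w1, %w0, %2\n"     /* store-release exclusive      */
        "   cbnz    %w1, 1b\n"          /* retry if exclusivity lost    */
        "   dmb     ish\n"              /* the "dmb ish" from the table */
        : "=&r" (result), "=&r" (tmp), "+Q" (*v)
        : "r" (i)
        : "memory");

        return result;
    }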
|
  cmpxchg.h
      57  __XCHG_CASE(w, b, mb_,  8, dmb ish, nop, , a, l, "memory")
      58  __XCHG_CASE(w, h, mb_, 16, dmb ish, nop, , a, l, "memory")
      59  __XCHG_CASE(w,  , mb_, 32, dmb ish, nop, , a, l, "memory")
      60  __XCHG_CASE( ,  , mb_, 64, dmb ish, nop, , a, l, "memory")
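
__XCHG_CASE follows the same scheme: on the LL/SC path the "mb_" rows get
"dmb ish" as the trailing barrier, while the remaining columns feed the
LSE alternative. A hand-written sketch of the 32-bit fully ordered
exchange under those assumptions (AArch64 only, illustrative):

    static inline unsigned int my_xchg32(unsigned int new, unsigned int *p)
    {
        unsigned int old, tmp;

        asm volatile(
        "1: ldxr    %w0, %2\n"          /* load-exclusive old value   */
        "   stlxr   %w1, %w3, %2\n"     /* store-release new value    */
        "   cbnz    %w1, 1b\n"          /* retry on contention        */
        "   dmb     ish\n"              /* complete the full ordering */
        : "=&r" (old), "=&r" (tmp), "+Q" (*p)
        : "r" (new)
        : "memory");

        return old;
    }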
|
  insn.h
     430  __AARCH64_INSN_FUNCS(dmb, 0xFFFFF0FF, 0xD50330BF)
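
__AARCH64_INSN_FUNCS generates an aarch64_insn_is_dmb() classifier from a
(mask, value) pair: a 32-bit word is a DMB iff (insn & 0xFFFFF0FF) ==
0xD50330BF, and the four bits excluded from the mask (bits 11:8, the CRm
field) carry the barrier option. A host-runnable sketch of that check
(the constant and function names here are mine, not the kernel's):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define DMB_MASK  0xFFFFF0FFu
    #define DMB_VALUE 0xD50330BFu

    static bool insn_is_dmb(uint32_t insn)
    {
        return (insn & DMB_MASK) == DMB_VALUE;
    }

    static unsigned int dmb_option(uint32_t insn)
    {
        return (insn >> 8) & 0xF;   /* CRm: 0xB = ish, 0xF = sy, ... */
    }

    int main(void)
    {
        uint32_t dmb_ish = 0xD5033BBF;  /* encoding of "dmb ish" */
        printf("is_dmb=%d option=%#x\n",
               insn_is_dmb(dmb_ish), dmb_option(dmb_ish));
        return 0;
    }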
|
/arch/arm/common/

  vlock.S
      31  dmb
      35  dmb
      82  dmb
      95  dmb
|
  mcpm_head.S
     123  dmb
     138  dmb
     150  dmb
     154  dmb
     175  dmb
     184  dmb
     198  dmb
|
  mcpm_entry.c
      49  dmb();    in __mcpm_cpu_down()
      65  dmb();    in __mcpm_outbound_leave_critical()
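
Both call sites use a bare dmb() to publish state: stores made by a CPU on
its way down, or leaving the cluster-critical section, must be visible to
the other CPUs before the state flag that advertises them. A sketch of
that pattern with hypothetical names (the real code writes into the mcpm
sync structures and follows up with cache maintenance):

    static inline void barrier_dmb(void)
    {
        __asm__ __volatile__ ("dmb" : : : "memory");  /* ARMv7: dmb sy */
    }

    volatile int cpu_state[4];

    void mark_cpu_down(int cpu)
    {
        /* ... tear-down stores happen here ... */
        barrier_dmb();        /* make them visible first     */
        cpu_state[cpu] = 0;   /* then publish the DOWN state */
    }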
|
/arch/arm/mm/

  cache-b15-rac.c
      67  dmb();    in __b15_rac_disable()
      81  dmb();    in __b15_rac_flush()
|
  cache-v7.S
     100  dmb    @ ensure ordering with previous memory accesses
     128  dmb    @ ensure ordering with previous memory accesses
|
  cache-v7m.S
     176  dmb    @ ensure ordering with previous memory accesses
|
/arch/arm/mach-omap2/

  sleep34xx.S
      97  dmb    @ data memory barrier
     213  dmb
     418  dmb    @ data memory barrier
     429  dmb    @ data memory barrier
     444  dmb    @ data memory barrier
|
  omap-smc.S
      52  dmb
|
  sleep33xx.S
     133  dmb
|
  sleep44xx.S
     350  dmb
|
  sleep43xx.S
     263  dmb
|
/arch/arm64/mm/

  flush.c
      92  dmb(osh);    in arch_wb_cache_pmem()
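
arch_wb_cache_pmem() cleans the data cache by line over the given range
and then issues dmb(osh) so the cleans complete before the caller
proceeds. A sketch of that loop shape; the fixed 64-byte line size and the
plain "dc cvac" are simplifications (the kernel reads the real line size
from CTR_EL0 and prefers dc cvap/cvadp for persistent memory):

    static inline void my_wb_cache_range(void *addr, unsigned long size)
    {
        const unsigned long line = 64;   /* assumed cache line size */
        unsigned long p   = (unsigned long)addr & ~(line - 1);
        unsigned long end = (unsigned long)addr + size;

        for (; p < end; p += line)
            asm volatile("dc cvac, %0" : : "r" (p) : "memory"); /* clean line */

        asm volatile("dmb osh" : : : "memory"); /* wait for the cleans */
    }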
|
/arch/arm64/kernel/

  head.S
     102  dmb sy
     178  dmb sy    // needed before dc ivac with
     441  dmb sy
|
  armv8_deprecated.c
     276  dmb(sy);    in cp15barrier_handler()
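
cp15barrier_handler() keeps deprecated AArch32 code working: when user
space traps on the removed "mcr p15, 0, Rt, c7, c10, 5" (CP15DMB)
encoding, the kernel performs a real dmb(sy) on its behalf and skips the
instruction. A sketch of the core of such an emulation hook; the
pc-advance and return convention here are stand-ins, not the kernel's
undef-hook API:

    #define dmb(opt) asm volatile("dmb " #opt : : : "memory")

    static int my_cp15barrier_handler(unsigned long *pc)
    {
        dmb(sy);      /* sy is at least as strong as CP15DMB promised */
        *pc += 4;     /* retire the emulated 4-byte A32 instruction   */
        return 0;
    }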
|
/arch/arm/mach-socfpga/

  self-refresh.S
      85  dmb
|
/arch/arm/mach-tegra/

  sleep.S
      37  dmb    @ ensure ordering
|
/arch/arm/kernel/

  smp_tlb.c
     153  dmb();    in ipi_flush_tlb_a15_erratum()
|
/arch/arm/mach-bcm/

  platsmp-brcmstb.c
      67  dmb();    in per_cpu_sw_state_wr()
|