/arch/x86/include/asm/
atomic.h
    33: asm_inline volatile(LOCK_PREFIX "addl %1, %0"    in arch_atomic_add()
    40: asm_inline volatile(LOCK_PREFIX "subl %1, %0"    in arch_atomic_sub()
    47: return GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, e, "er", i);    in arch_atomic_sub_and_test()
    53: asm_inline volatile(LOCK_PREFIX "incl %0"    in arch_atomic_inc()
    60: asm_inline volatile(LOCK_PREFIX "decl %0"    in arch_atomic_dec()
    67: return GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, e);    in arch_atomic_dec_and_test()
    73: return GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, e);    in arch_atomic_inc_and_test()
    79: return GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, s, "er", i);    in arch_atomic_add_negative()
   119: asm_inline volatile(LOCK_PREFIX "andl %1, %0"    in arch_atomic_and()
   137: asm_inline volatile(LOCK_PREFIX "orl %1, %0"    in arch_atomic_or()
    … (additional matches not shown)
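The hits above are the core 32-bit atomic_t primitives: each is an ordinary read-modify-write instruction (addl, subl, incl, decl, andl, orl) made atomic by prepending LOCK_PREFIX, and the *_and_test()/add_negative() variants go through the GEN_*_RMWcc helpers so the condition flag produced by the locked instruction is handed straight back to the caller. Below is a minimal user-space sketch of the arch_atomic_add() pattern; it hardcodes "lock;" instead of going through LOCK_PREFIX, and the type and function names are illustrative only.

/*
 * Sketch of the arch_atomic_add() pattern: a plain "addl" made atomic by the
 * lock prefix.  The kernel routes the prefix through LOCK_PREFIX so it can be
 * patched away on uniprocessor systems; here it is hardcoded for simplicity.
 */
typedef struct { volatile int counter; } my_atomic_t;

static inline void my_atomic_add(int i, my_atomic_t *v)
{
        asm volatile("lock; addl %1, %0"
                     : "+m" (v->counter)       /* counter is read and written */
                     : "er" (i)                /* addend: immediate or register */
                     : "memory");
}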
|
atomic64_64.h
    25: asm_inline volatile(LOCK_PREFIX "addq %1, %0"    in arch_atomic64_add()
    32: asm_inline volatile(LOCK_PREFIX "subq %1, %0"    in arch_atomic64_sub()
    39: return GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, e, "er", i);    in arch_atomic64_sub_and_test()
    45: asm_inline volatile(LOCK_PREFIX "incq %0"    in arch_atomic64_inc()
    53: asm_inline volatile(LOCK_PREFIX "decq %0"    in arch_atomic64_dec()
    61: return GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, e);    in arch_atomic64_dec_and_test()
    67: return GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, e);    in arch_atomic64_inc_and_test()
    73: return GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, s, "er", i);    in arch_atomic64_add_negative()
   113: asm_inline volatile(LOCK_PREFIX "andq %1, %0"    in arch_atomic64_and()
   131: asm_inline volatile(LOCK_PREFIX "orq %1, %0"    in arch_atomic64_or()
    … (additional matches not shown)
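atomic64_64.h is the 64-bit twin of the file above, using the q-suffixed forms of the same instructions. The *_and_test() hits show GEN_UNARY_RMWcc / GEN_BINARY_RMWcc at work: the macro emits the locked instruction and returns the resulting flag directly, on current compilers via asm-goto or a flag-output constraint rather than a separate compare. Here is a simplified sketch of what arch_atomic64_dec_and_test() boils down to, assuming GCC/Clang flag-output support ("=@cce"); the names are illustrative.

/*
 * Rough equivalent of GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, e):
 * "lock decq" decrements the counter and the ZF it sets is returned to the
 * caller through the flag-output constraint, with no extra test instruction.
 */
typedef struct { volatile long counter; } my_atomic64_t;

static inline _Bool my_atomic64_dec_and_test(my_atomic64_t *v)
{
        _Bool zero;

        asm volatile("lock; decq %[cnt]"
                     : [cnt] "+m" (v->counter),
                       "=@cce" (zero)          /* true if the result was zero */
                     : : "memory");
        return zero;
}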
|
bitops.h
    55: asm_inline volatile(LOCK_PREFIX "orb %b1,%0"    in arch_set_bit()
    60: asm_inline volatile(LOCK_PREFIX __ASM_SIZE(bts) " %1,%0"    in arch_set_bit()
    75: asm_inline volatile(LOCK_PREFIX "andb %b1,%0"    in arch_clear_bit()
    79: asm_inline volatile(LOCK_PREFIX __ASM_SIZE(btr) " %1,%0"    in arch_clear_bit()
   101: asm_inline volatile(LOCK_PREFIX "xorb %2,%1"    in arch_xor_unlock_is_negative_byte()
   125: asm_inline volatile(LOCK_PREFIX "xorb %b1,%0"    in arch_change_bit()
   129: asm_inline volatile(LOCK_PREFIX __ASM_SIZE(btc) " %1,%0"    in arch_change_bit()
   137: return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts), *addr, c, "Ir", nr);    in arch_test_and_set_bit()
   161: return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btr), *addr, c, "Ir", nr);    in arch_test_and_clear_bit()
   200: return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc), *addr, c, "Ir", nr);    in arch_test_and_change_bit()
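arch_set_bit(), arch_clear_bit() and arch_change_bit() each appear twice because the kernel special-cases a compile-time-constant bit number (a locked orb/andb/xorb on the containing byte) and otherwise falls back to a locked bts/btr/btc. The test_and_* variants use GEN_BINARY_RMWcc so the carry flag produced by bts/btr/btc becomes the return value. A hedged sketch of the test_and_set case for a 64-bit build follows; the bitmap handling is simplified and the names are made up.

/*
 * Sketch of the arch_test_and_set_bit() idea: "lock bts" sets bit 'nr' and
 * copies its previous value into CF, which the flag-output constraint returns
 * directly.  The real kernel code also has the constant-nr fast path and
 * describes the whole bitmap to the compiler more precisely.
 */
static inline _Bool my_test_and_set_bit(long nr, volatile unsigned long *addr)
{
        _Bool oldbit;

        asm volatile("lock; btsq %[nr], %[mem]"
                     : [mem] "+m" (*addr),
                       "=@ccc" (oldbit)        /* CF = previous bit value */
                     : [nr] "r" (nr)
                     : "memory");
        return oldbit;
}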
|
futex.h
    35: "3:\t" LOCK_PREFIX "cmpxchgl %3, %2\n" \
    61: unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval,    in arch_futex_atomic_op_inuser()
    94: "1:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"    in futex_atomic_cmpxchg_inatomic()
|
alternative.h
    52: #define LOCK_PREFIX LOCK_PREFIX_HERE "\n\tlock "    (macro definition)
    56: #define LOCK_PREFIX ""    (macro definition)
   321: .macro LOCK_PREFIX
   329: .macro LOCK_PREFIX
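These are the definitions that everything else in this list expands. With CONFIG_SMP, LOCK_PREFIX emits the "lock" prefix preceded by LOCK_PREFIX_HERE, which records the prefix's address in the .smp_locks section so the alternatives code can patch locked instructions when only one CPU is present; without CONFIG_SMP it is empty. The .macro forms at lines 321/329 are the counterparts used from .S files. A much-simplified sketch of the C-side shape follows; the section and label details are from memory and may differ between kernel versions.

/*
 * Simplified sketch of the two #define hits above.  The .pushsection block is
 * roughly what LOCK_PREFIX_HERE looks like: it stores the offset of the lock
 * prefix into .smp_locks for runtime patching.  Not a verbatim copy.
 */
#ifdef CONFIG_SMP
#define LOCK_PREFIX_HERE                                  \
        ".pushsection .smp_locks,\"a\"\n"                 \
        ".balign 4\n"                                     \
        ".long 671f - .\n"  /* offset to the prefix */    \
        ".popsection\n"                                   \
        "671:"
#define LOCK_PREFIX LOCK_PREFIX_HERE "\n\tlock "
#else
#define LOCK_PREFIX ""
#endif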
|
cmpxchg_64.h
    52: return __arch_cmpxchg128(ptr, old, new, LOCK_PREFIX);    in arch_cmpxchg128()
    84: return __arch_try_cmpxchg128(ptr, oldp, new, LOCK_PREFIX);    in arch_try_cmpxchg128()
|
cmpxchg.h
   134: __raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)
   222: __raw_try_cmpxchg((ptr), (pold), (new), (size), LOCK_PREFIX)
   246: #define xadd(ptr, inc) __xadd((ptr), (inc), LOCK_PREFIX)
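cmpxchg.h is where the generic cmpxchg()/try_cmpxchg() wrappers pass LOCK_PREFIX down to the size-dispatching __raw_* helpers, and where xadd() is defined for the fetch-and-add style operations (arch_atomic_add_return() and friends). A small sketch of the xadd idea for the int case; the names and the hardcoded lock prefix are illustrative.

/*
 * Sketch of xadd(ptr, inc): "lock xadd" adds 'inc' to *p and writes the
 * previous value of *p back into the register holding 'inc', so one locked
 * instruction yields a fetch-and-add.
 */
static inline int my_xadd(int *p, int inc)
{
        asm volatile("lock; xaddl %0, %1"
                     : "+r" (inc), "+m" (*p)
                     : : "memory");
        return inc;                       /* previous value of *p */
}

/* Example use, an add_return built on the fetch-and-add: */
static inline int my_add_return(int i, int *p)
{
        return my_xadd(p, i) + i;
}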
|
qspinlock_paravirt.h
    45: LOCK_PREFIX "cmpxchg %dl,(%rdi)\n\t" \
|
cmpxchg_32.h
    34: return __arch_cmpxchg64(ptr, old, new, LOCK_PREFIX);    in __cmpxchg64()
    64: return __arch_try_cmpxchg64(ptr, oldp, new, LOCK_PREFIX);    in __try_cmpxchg64()
|
qspinlock.h
    23: val = GEN_BINARY_RMWcc(LOCK_PREFIX "btsl", lock->val.counter, c,    in queued_fetch_set_pending_acquire()
|
uaccess.h
   379: "1: " LOCK_PREFIX "cmpxchg"itype" %[new], %[ptr]\n" \
   398: "1: " LOCK_PREFIX "cmpxchg8b %[ptr]\n" \
   419: "1: " LOCK_PREFIX "cmpxchg"itype" %[new], %[ptr]\n" \
   450: "1: " LOCK_PREFIX "cmpxchg8b %[ptr]\n" \
|
/arch/x86/lib/
atomic64_cx8_32.S
    lines 15, 41, 65, 93, 118, 148, 177: LOCK_PREFIX
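On 32-bit kernels the 64-bit atomic64_* operations cannot use a single locked q-suffixed instruction, so this assembly file implements them as retry loops around a locked cmpxchg8b; each hit is the LOCK_PREFIX emitted in front of one of those cmpxchg8b instructions. Below is a hedged C sketch of the same retry-loop idea, written with the compiler builtin (which GCC/Clang lower to "lock cmpxchg8b" when targeting an i586-class or later 32-bit CPU); the function name is illustrative, not the kernel's.

/*
 * Retry-loop sketch of a 64-bit atomic add on 32-bit x86: recompute the new
 * value and retry the 8-byte compare-exchange until it succeeds.  On failure
 * the builtin reloads 'old' with the current memory contents, so a stale or
 * torn initial snapshot is harmless.
 */
static void my_atomic64_add(long long i, long long *p)
{
        long long old = *p;     /* snapshot; corrected by the CAS on failure */
        long long new;

        do {
                new = old + i;
        } while (!__atomic_compare_exchange_n(p, &old, new, 0,
                                              __ATOMIC_SEQ_CST,
                                              __ATOMIC_SEQ_CST));
}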
|
/arch/x86/kvm/
xen.c
   667: asm volatile(LOCK_PREFIX "orq %0, %1\n"    in kvm_xen_inject_pending_events()
   669: LOCK_PREFIX "andq %0, %2\n"    in kvm_xen_inject_pending_events()
   679: asm volatile(LOCK_PREFIX "orl %0, %1\n"    in kvm_xen_inject_pending_events()
   681: LOCK_PREFIX "andl %0, %2\n"    in kvm_xen_inject_pending_events()
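kvm_xen_inject_pending_events() works word-at-a-time rather than bit-at-a-time: a locked OR publishes pending event-channel selector bits into memory shared with the guest, and a locked AND updates another word against those bits, with q-suffixed and l-suffixed variants for the 64-bit and 32-bit guest layouts. A hedged sketch of one plausible use of that or/and pairing follows (publish bits, then clear the published bits from a shadow word); the names are invented and this is not a copy of the KVM code.

/*
 * Sketch of a "lock orq" / "lock andq" pairing: publish 'bits' into a word
 * the guest can see, then clear those same bits from a private shadow word.
 * The two locked operations are individually atomic, not one combined unit.
 */
static inline void publish_and_clear(unsigned long bits,
                                     volatile unsigned long *shared_pending,
                                     unsigned long *shadow_pending)
{
        asm volatile("lock; orq %0, %1\n\t"
                     "notq %0\n\t"
                     "lock; andq %0, %2"
                     : "+r" (bits),
                       "+m" (*shared_pending),
                       "+m" (*shadow_pending)
                     : : "memory");
}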
|