
Searched refs:prev (Results 1 – 25 of 169) sorted by relevance


/arch/powerpc/include/asm/
cmpxchg.h
99 return prev; in __xchg_u8_local()
115 return prev; in __xchg_u8_relaxed()
131 return prev; in __xchg_u16_local()
147 return prev; in __xchg_u16_relaxed()
164 return prev; in __xchg_u32_local()
180 return prev; in __xchg_u32_relaxed()
197 return prev; in __xchg_u64_local()
213 return prev; in __xchg_u64_relaxed()
301 return prev; in __cmpxchg_u8()
321 return prev; in __cmpxchg_u8_local()
[all …]
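All of these hits reflect the same contract: the xchg/cmpxchg primitives return prev, the value that was in memory immediately before the operation, so the caller can see what it displaced and, for cmpxchg, whether the swap actually happened. A minimal userspace sketch of that contract, written against GCC's __atomic builtins rather than the powerpc inline assembly this header actually contains:

/* Sketch of the xchg/cmpxchg "return prev" contract, using GCC's
 * __atomic builtins instead of the powerpc ll/sc assembly. */
#include <stdint.h>
#include <stdio.h>

static uint32_t sketch_xchg_u32(uint32_t *p, uint32_t newval)
{
	/* Atomically store newval and hand back whatever was there before. */
	return __atomic_exchange_n(p, newval, __ATOMIC_SEQ_CST);
}

static uint32_t sketch_cmpxchg_u32(uint32_t *p, uint32_t old, uint32_t newval)
{
	uint32_t prev = old;

	/* On success *p becomes newval; in both cases prev ends up holding
	 * the value observed in memory, which is what we return. */
	__atomic_compare_exchange_n(p, &prev, newval, 0,
				    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
	return prev;
}

int main(void)
{
	uint32_t v = 5;
	uint32_t was;

	was = sketch_xchg_u32(&v, 7);
	printf("xchg: prev=%u, now=%u\n", (unsigned)was, (unsigned)v);

	was = sketch_cmpxchg_u32(&v, 7, 9);
	printf("cmpxchg: prev=%u, now=%u\n", (unsigned)was, (unsigned)v);
	return 0;
}

In the header itself this shape is repeated per operand size (u8/u16/u32/u64) and per ordering variant (local/relaxed/full), which is why the hit list above is so regular.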
qspinlock.h
95 u32 prev; in __queued_spin_trylock_nosteal() local
106 : "=&r" (prev) in __queued_spin_trylock_nosteal()
111 return likely(prev == 0); in __queued_spin_trylock_nosteal()
117 u32 prev, tmp; in __queued_spin_trylock_steal() local
130 : "=&r" (prev), "=&r" (tmp) in __queued_spin_trylock_steal()
135 return likely(!(prev & ~_Q_TAIL_CPU_MASK)); in __queued_spin_trylock_steal()
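The two trylock variants use prev to decide whether the acquisition worked: __queued_spin_trylock_nosteal() succeeds only if the whole lock word was previously zero, while the stealing variant succeeds as long as prev had no bits set outside _Q_TAIL_CPU_MASK. A rough single-word analogue of the first case, assuming a plain 0-means-unlocked lock word rather than the powerpc qspinlock encoding:

/* Sketch of a cmpxchg-based trylock: we own the lock only if the value
 * we displaced was the unlocked (zero) word. */
#include <stdbool.h>
#include <stdint.h>

static bool sketch_trylock(uint32_t *lock, uint32_t locked_val)
{
	uint32_t prev = 0;	/* expected: lock word is free */

	__atomic_compare_exchange_n(lock, &prev, locked_val, false,
				    __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
	/* Mirrors "return likely(prev == 0)" above. */
	return prev == 0;
}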
pgtable-be-types.h
84 __be64 prev; in pte_xchg() local
87 prev = (__force __be64)__cmpxchg_u64(p, (__force unsigned long)pte_raw(old), in pte_xchg()
90 return pte_raw(old) == prev; in pte_xchg()
96 __be64 prev; in pmd_xchg() local
98 prev = (__force __be64)__cmpxchg_u64(p, (__force unsigned long)pmd_raw(old), in pmd_xchg()
101 return pmd_raw(old) == prev; in pmd_xchg()
mmu_context.h
55 extern void radix__switch_mmu_context(struct mm_struct *prev,
57 static inline void switch_mmu_context(struct mm_struct *prev, in switch_mmu_context() argument
62 return radix__switch_mmu_context(prev, next); in switch_mmu_context()
100 extern void switch_mmu_context(struct mm_struct *prev, struct mm_struct *next,
223 extern void switch_mm_irqs_off(struct mm_struct *prev, struct mm_struct *next,
226 static inline void switch_mm(struct mm_struct *prev, struct mm_struct *next, in switch_mm() argument
232 switch_mm_irqs_off(prev, next, tsk); in switch_mm()
242 static inline void activate_mm(struct mm_struct *prev, struct mm_struct *next) in activate_mm() argument
244 switch_mm_irqs_off(prev, next, current); in activate_mm()
/arch/mips/include/asm/
switch_to.h
52 #define __mips_mt_fpaff_switch_to(prev) \ argument
58 (!(KSTK_STATUS(prev) & ST0_CU1))) { \
60 prev->cpus_mask = prev->thread.user_cpus_allowed; \
66 #define __mips_mt_fpaff_switch_to(prev) do { (void) (prev); } while (0) argument
109 #define switch_to(prev, next, last) \ argument
111 __mips_mt_fpaff_switch_to(prev); \
112 lose_fpu_inatomic(1, prev); \
116 __save_dsp(prev); \
123 if ((KSTK_STATUS(prev) & ST0_CU2)) { \
125 KSTK_STATUS(prev) &= ~ST0_CU2; \
[all …]
/arch/arm64/kvm/hyp/nvhe/
list_debug.c
30 bool __list_add_valid_or_report(struct list_head *new, struct list_head *prev, in __list_add_valid_or_report() argument
33 if (NVHE_CHECK_DATA_CORRUPTION(next->prev != prev) || in __list_add_valid_or_report()
34 NVHE_CHECK_DATA_CORRUPTION(prev->next != next) || in __list_add_valid_or_report()
35 NVHE_CHECK_DATA_CORRUPTION(new == prev || new == next)) in __list_add_valid_or_report()
44 struct list_head *prev, *next; in __list_del_entry_valid_or_report() local
46 prev = entry->prev; in __list_del_entry_valid_or_report()
50 NVHE_CHECK_DATA_CORRUPTION(prev == LIST_POISON2) || in __list_del_entry_valid_or_report()
51 NVHE_CHECK_DATA_CORRUPTION(prev->next != entry) || in __list_del_entry_valid_or_report()
52 NVHE_CHECK_DATA_CORRUPTION(next->prev != entry)) in __list_del_entry_valid_or_report()
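In the nVHE list-debug helpers, prev and next are the would-be neighbours of the entry being linked or unlinked, and the operation is refused if those neighbours no longer point back at each other or at the entry. A stripped-down sketch of the same sanity checks on a plain doubly linked list, with the NVHE_CHECK_DATA_CORRUPTION reporting and LIST_POISON checks reduced to a boolean result:

/* Sketch of the list-corruption checks: linking or unlinking is allowed
 * only while the neighbouring nodes still agree about adjacency. */
#include <stdbool.h>
#include <stddef.h>

struct node {
	struct node *prev, *next;
};

static bool add_valid(struct node *new_node, struct node *prev,
		      struct node *next)
{
	/* prev and next must still be adjacent, and the new node must not
	 * already be one of them. */
	return next->prev == prev &&
	       prev->next == next &&
	       new_node != prev && new_node != next;
}

static bool del_entry_valid(struct node *entry)
{
	struct node *prev = entry->prev;
	struct node *next = entry->next;

	/* Both neighbours must still point at the entry being removed. */
	return prev != NULL && next != NULL &&
	       prev->next == entry && next->prev == entry;
}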
/arch/s390/lib/
uaccess.c
158 unsigned int prev; in __cmpxchg_user_key_small() local
190 [prev] "=&d" (prev), in __cmpxchg_user_key_small()
201 *uval = prev; in __cmpxchg_user_key_small()
219 *uval = prev >> shift; in __cmpxchg_user_key1()
236 *uval = prev >> shift; in __cmpxchg_user_key2()
261 [prev] "+&d" (prev), in __cmpxchg_user_key4()
268 *uval = prev; in __cmpxchg_user_key4()
293 [prev] "+&d" (prev), in __cmpxchg_user_key8()
300 *uval = prev; in __cmpxchg_user_key8()
325 [prev] "+&d" (prev), in __cmpxchg_user_key16()
[all …]
/arch/riscv/include/asm/
atomic.h
216 int prev, rc; local
220 return prev;
227 s64 prev; in arch_atomic64_fetch_add_unless() local
232 return prev; in arch_atomic64_fetch_add_unless()
254 int prev, rc; in arch_atomic_inc_unless_negative() local
280 int prev, rc; in arch_atomic_dec_unless_positive() local
310 return prev - 1; in arch_atomic_dec_if_positive()
318 s64 prev; in arch_atomic64_inc_unless_negative() local
330 s64 prev; in arch_atomic64_dec_unless_positive() local
342 s64 prev; in arch_atomic64_dec_if_positive() local
[all …]
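The riscv conditional-atomic helpers (fetch_add_unless, inc_unless_negative, dec_unless_positive, dec_if_positive, and their 64-bit twins) share one loop shape: load prev, stop if the guard condition fails, otherwise try to install the updated value and retry if another CPU won the race, then return prev or something derived from it (dec_if_positive returns prev - 1, per line 310 above). A portable sketch of fetch_add_unless built on a CAS loop, not the riscv lr/sc assembly:

/* Sketch: add a to *v unless it currently equals u, returning the value
 * observed before the (possibly skipped) addition. */
#include <stdint.h>

static int32_t sketch_fetch_add_unless(int32_t *v, int32_t a, int32_t u)
{
	int32_t prev = __atomic_load_n(v, __ATOMIC_RELAXED);

	while (prev != u) {
		/* On failure prev is refreshed with the current value and
		 * the guard is re-checked on the next iteration. */
		if (__atomic_compare_exchange_n(v, &prev, prev + a, false,
						__ATOMIC_SEQ_CST,
						__ATOMIC_RELAXED))
			break;
	}
	return prev;
}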
/arch/csky/include/asm/
switch_to.h
9 static inline void __switch_to_fpu(struct task_struct *prev, in __switch_to_fpu() argument
12 save_to_user_fp(&prev->thread.user_fp); in __switch_to_fpu()
16 static inline void __switch_to_fpu(struct task_struct *prev, in __switch_to_fpu() argument
27 #define switch_to(prev, next, last) \ argument
29 struct task_struct *__prev = (prev); \
32 ((last) = __switch_to((prev), (next))); \
/arch/alpha/include/asm/
cmpxchg.h
119 unsigned long prev, tmp, cmp, addr64; in ____cmpxchg_u8() local
136 : "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64) in ____cmpxchg_u8()
139 return prev; in ____cmpxchg_u8()
145 unsigned long prev, tmp, cmp, addr64; in ____cmpxchg_u16() local
165 return prev; in ____cmpxchg_u16()
171 unsigned long prev, cmp; in ____cmpxchg_u32() local
184 : "=&r"(prev), "=&r"(cmp), "=m"(*m) in ____cmpxchg_u32()
187 return prev; in ____cmpxchg_u32()
193 unsigned long prev, cmp; in ____cmpxchg_u64() local
206 : "=&r"(prev), "=&r"(cmp), "=m"(*m) in ____cmpxchg_u64()
[all …]
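The extra temporaries in ____cmpxchg_u8() and ____cmpxchg_u16() (tmp, cmp, addr64) come from carrying out the narrow compare-and-swap on a wider aligned access with shifting and masking. A rough C sketch of that idea, emulating a 1-byte cmpxchg with a 32-bit CAS on the containing word; this is a simplified analogue of the alpha LL/SC sequences, with little-endian byte numbering assumed for the shift:

/* Sketch: emulate cmpxchg on a byte using a CAS on the aligned 32-bit
 * word that contains it. */
#include <stdint.h>

static uint8_t sketch_cmpxchg_u8(uint8_t *p, uint8_t old, uint8_t newval)
{
	uintptr_t addr = (uintptr_t)p;
	uint32_t *word = (uint32_t *)(addr & ~(uintptr_t)3);
	unsigned int shift = (addr & 3) * 8;	/* little-endian assumed */
	uint32_t mask = 0xffu << shift;
	uint32_t cur = __atomic_load_n(word, __ATOMIC_RELAXED);

	for (;;) {
		uint8_t prev = (cur & mask) >> shift;

		/* Give up (returning what we saw) if the byte no longer
		 * matches the expected old value. */
		if (prev != old)
			return prev;

		uint32_t desired = (cur & ~mask) | ((uint32_t)newval << shift);

		/* On failure cur is refreshed and the byte is re-checked. */
		if (__atomic_compare_exchange_n(word, &cur, desired, false,
						__ATOMIC_SEQ_CST,
						__ATOMIC_RELAXED))
			return old;
	}
}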
/arch/loongarch/include/asm/
switch_to.h
25 extern asmlinkage struct task_struct *__switch_to(struct task_struct *prev,
35 #define switch_to(prev, next, last) \ argument
37 lose_fpu_inatomic(1, prev); \
38 lose_lbt_inatomic(1, prev); \
40 (last) = __switch_to(prev, next, task_thread_info(next), \
/arch/powerpc/platforms/cell/spufs/
switch.c
1797 save_mfc_cntl(prev, spu); /* Step 8. */ in quiece_spu()
1831 setup_mfc_sr1(prev, spu); /* Step 30. */ in save_csa()
1832 save_spu_npc(prev, spu); /* Step 31. */ in save_csa()
1835 save_spu_lslr(prev, spu); /* Step 34. */ in save_csa()
1837 save_spu_cfg(prev, spu); /* Step 36. */ in save_csa()
1838 save_pm_trace(prev, spu); /* Step 37. */ in save_csa()
1839 save_mfc_rag(prev, spu); /* Step 38. */ in save_csa()
1861 save_ls_16kb(prev, spu); /* Step 50. */ in save_lscsa()
1863 set_signot1(prev, spu); /* Step 52. */ in save_lscsa()
1864 set_signot2(prev, spu); /* Step 53. */ in save_lscsa()
[all …]
/arch/parisc/include/asm/
mmu_context.h
50 static inline void switch_mm_irqs_off(struct mm_struct *prev, in switch_mm_irqs_off() argument
53 if (prev != next) { in switch_mm_irqs_off()
65 static inline void switch_mm(struct mm_struct *prev, in switch_mm() argument
70 if (prev == next) in switch_mm()
74 switch_mm_irqs_off(prev, next, tsk); in switch_mm()
80 static inline void activate_mm(struct mm_struct *prev, struct mm_struct *next) in activate_mm() argument
95 switch_mm(prev,next,current); in activate_mm()
/arch/sh/include/asm/
switch_to_32.h
64 struct task_struct *__switch_to(struct task_struct *prev,
70 #define switch_to(prev, next, last) \ argument
80 if (is_dsp_enabled(prev)) \
81 __save_dsp(prev); \
85 __ts1 = (u32 *)&prev->thread.sp; \
86 __ts2 = (u32 *)&prev->thread.pc; \
87 __ts4 = (u32 *)prev; \
futex-irq.h
11 u32 prev = 0; in atomic_futex_op_cmpxchg_inatomic() local
15 ret = get_user(prev, uaddr); in atomic_futex_op_cmpxchg_inatomic()
16 if (!ret && oldval == prev) in atomic_futex_op_cmpxchg_inatomic()
21 *uval = prev; in atomic_futex_op_cmpxchg_inatomic()
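On this non-SMP sh configuration the futex cmpxchg needs no atomic instruction at all: the helper reads the user word into prev, writes the new value only when prev equals oldval, and hands prev back through *uval. A hedged userspace analogue of the same idea, with a pthread mutex standing in for the kernel's interrupt-level exclusion and a plain pointer standing in for get_user()/put_user():

/* Sketch: compare-and-exchange emulated under a lock. The mutex plays the
 * role of the UP kernel's "no concurrent access" guarantee. */
#include <pthread.h>
#include <stdint.h>

static pthread_mutex_t emu_lock = PTHREAD_MUTEX_INITIALIZER;

static int sketch_futex_cmpxchg(uint32_t *uval, uint32_t *uaddr,
				uint32_t oldval, uint32_t newval)
{
	uint32_t prev;

	pthread_mutex_lock(&emu_lock);
	prev = *uaddr;
	if (prev == oldval)
		*uaddr = newval;
	pthread_mutex_unlock(&emu_lock);

	*uval = prev;	/* report what was observed, like "*uval = prev" above */
	return 0;
}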
/arch/hexagon/include/asm/
mmu_context.h
29 static inline void switch_mm(struct mm_struct *prev, struct mm_struct *next, in switch_mm() argument
38 if (next->context.generation < prev->context.generation) { in switch_mm()
42 next->context.generation = prev->context.generation; in switch_mm()
52 static inline void activate_mm(struct mm_struct *prev, struct mm_struct *next) in activate_mm() argument
57 switch_mm(prev, next, current_thread_info()->task); in activate_mm()
/arch/arc/include/asm/
switch_to.h
17 #define switch_to(prev, next, last) \ argument
19 dsp_save_restore(prev, next); \
20 fpu_save_restore(prev, next); \
21 last = __switch_to(prev, next);\
/arch/powerpc/perf/
8xx-pmu.c
122 s64 prev, val = 0, delta = 0; in mpc8xx_pmu_read() local
128 prev = local64_read(&event->hw.prev_count); in mpc8xx_pmu_read()
132 delta = 16 * (val - prev); in mpc8xx_pmu_read()
136 delta = prev - val; in mpc8xx_pmu_read()
142 delta = (s64)((s32)val - (s32)prev); in mpc8xx_pmu_read()
146 delta = (s64)((s32)val - (s32)prev); in mpc8xx_pmu_read()
149 } while (local64_cmpxchg(&event->hw.prev_count, prev, val) != prev); in mpc8xx_pmu_read()
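mpc8xx_pmu_read() is the usual lock-free perf read loop: copy the last published snapshot into prev, read the hardware counter into val, derive a delta (scaled or sign-extended per event type, as the four delta computations above show), then try to publish val as the new snapshot with a cmpxchg, repeating if somebody else published first. A simplified sketch of that loop; read_hw_counter() is a made-up stand-in for the real counter-register reads:

/* Sketch of the lock-free counter-read loop: the compare-and-swap on
 * prev_count only succeeds if no other reader published a newer snapshot
 * between our load of prev and now. */
#include <stdint.h>

struct sketch_event {
	uint64_t prev_count;	/* last hardware value folded into count */
	uint64_t count;		/* accumulated event total */
};

/* Hypothetical hardware counter; a real driver reads a counter register. */
static uint32_t read_hw_counter(void)
{
	static uint32_t ticks;
	return ticks += 7;
}

static void sketch_event_read(struct sketch_event *ev)
{
	uint64_t prev, val;
	int64_t delta;

	do {
		prev = __atomic_load_n(&ev->prev_count, __ATOMIC_RELAXED);
		val = read_hw_counter();
		/* 32-bit signed difference tolerates counter wrap, like the
		 * (s32)val - (s32)prev cases in mpc8xx_pmu_read(). */
		delta = (int64_t)((int32_t)val - (int32_t)prev);
	} while (!__atomic_compare_exchange_n(&ev->prev_count, &prev, val,
					      false, __ATOMIC_SEQ_CST,
					      __ATOMIC_RELAXED));

	ev->count += (uint64_t)delta;
}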
/arch/s390/include/asm/
mmu_context.h
71 static inline void switch_mm_irqs_off(struct mm_struct *prev, struct mm_struct *next, in switch_mm_irqs_off() argument
84 if (prev != next) in switch_mm_irqs_off()
85 cpumask_clear_cpu(cpu, &prev->context.cpu_attach_mask); in switch_mm_irqs_off()
89 static inline void switch_mm(struct mm_struct *prev, struct mm_struct *next, in switch_mm() argument
95 switch_mm_irqs_off(prev, next, tsk); in switch_mm()
124 static inline void activate_mm(struct mm_struct *prev, in activate_mm() argument
127 switch_mm_irqs_off(prev, next, current); in activate_mm()
/arch/x86/include/asm/
switch_to.h
9 struct task_struct *__switch_to_asm(struct task_struct *prev,
12 __visible struct task_struct *__switch_to(struct task_struct *prev,
16 __visible void ret_from_fork(struct task_struct *prev, struct pt_regs *regs,
49 #define switch_to(prev, next, last) \ argument
51 ((last) = __switch_to_asm((prev), (next))); \
/arch/powerpc/mm/
mmu_context.c
43 void switch_mm_irqs_off(struct mm_struct *prev, struct mm_struct *next, in switch_mm_irqs_off() argument
87 if (prev == next) in switch_mm_irqs_off()
98 membarrier_arch_switch_mm(prev, next, tsk); in switch_mm_irqs_off()
104 switch_mmu_context(prev, next, tsk); in switch_mm_irqs_off()
106 VM_WARN_ON_ONCE(!cpumask_test_cpu(cpu, mm_cpumask(prev))); in switch_mm_irqs_off()
/arch/s390/kernel/
process.c
52 void __ret_from_fork(struct task_struct *prev, struct pt_regs *regs) in __ret_from_fork() argument
56 schedule_tail(prev); in __ret_from_fork()
191 struct task_struct *__switch_to(struct task_struct *prev, struct task_struct *next) in __switch_to() argument
194 save_kernel_fpu_regs(&prev->thread); in __switch_to()
195 save_access_regs(&prev->thread.acrs[0]); in __switch_to()
196 save_ri_cb(prev->thread.ri_cb); in __switch_to()
197 save_gs_cb(prev->thread.gs_cb); in __switch_to()
201 restore_ri_cb(next->thread.ri_cb, prev->thread.ri_cb); in __switch_to()
203 return __switch_to_asm(prev, next); in __switch_to()
/arch/powerpc/boot/
ops.h
46 void *(*find_node_by_prop_value)(const void *prev,
49 void *(*find_node_by_compatible)(const void *prev,
144 static inline void *find_node_by_prop_value(const void *prev, in find_node_by_prop_value() argument
149 return dt_ops.find_node_by_prop_value(prev, propname, in find_node_by_prop_value()
155 static inline void *find_node_by_prop_value_str(const void *prev, in find_node_by_prop_value_str() argument
159 return find_node_by_prop_value(prev, propname, propval, in find_node_by_prop_value_str()
163 static inline void *find_node_by_devtype(const void *prev, in find_node_by_devtype() argument
166 return find_node_by_prop_value_str(prev, "device_type", type); in find_node_by_devtype()
182 static inline void *find_node_by_compatible(const void *prev, in find_node_by_compatible() argument
186 return dt_ops.find_node_by_compatible(prev, compat); in find_node_by_compatible()
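In the boot wrapper's device-tree helpers, prev is an iteration cursor rather than a displaced value: each find_node_by_* call takes the node returned by the previous call (or NULL to start from the beginning) and resumes the search from there, so a caller can walk every matching node with a simple loop. A small sketch of that prev-cursor convention over a flat array; the names and types here are illustrative, not the dt_ops interface:

/* Sketch of a prev-cursor search: pass NULL for the first match, then the
 * last result to continue from just past it. */
#include <stdio.h>
#include <string.h>

struct sketch_node {
	const char *name;
	const char *device_type;
};

static const struct sketch_node nodes[] = {
	{ "serial@0", "serial"  },
	{ "eth@1",    "network" },
	{ "serial@2", "serial"  },
};

static const struct sketch_node *
find_next_by_devtype(const struct sketch_node *prev, const char *type)
{
	const struct sketch_node *n = prev ? prev + 1 : nodes;
	const struct sketch_node *end = nodes + sizeof(nodes) / sizeof(nodes[0]);

	for (; n < end; n++)
		if (strcmp(n->device_type, type) == 0)
			return n;
	return NULL;	/* no further matches */
}

int main(void)
{
	const struct sketch_node *n = NULL;

	while ((n = find_next_by_devtype(n, "serial")) != NULL)
		printf("found %s\n", n->name);
	return 0;
}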
/arch/arc/kernel/
fpu.c
32 void fpu_save_restore(struct task_struct *prev, struct task_struct *next) in fpu_save_restore() argument
34 unsigned int *saveto = &prev->thread.fpu.aux_dpfp[0].l; in fpu_save_restore()
69 void fpu_save_restore(struct task_struct *prev, struct task_struct *next) in fpu_save_restore() argument
71 struct arc_fpu *save = &prev->thread.fpu; in fpu_save_restore()
/arch/microblaze/include/asm/
switch_to.h
12 extern struct task_struct *_switch_to(struct thread_info *prev,
15 #define switch_to(prev, next, last) \ argument
17 (last) = _switch_to(task_thread_info(prev), \

Completed in 37 milliseconds
