Home
last modified time | relevance | path

Searched refs:__always_inline (Results 1 – 25 of 767) sorted by relevance

1 2 3 4 5 6 7 8 9 10 >> ... 31

/linux/include/linux/atomic/
A D atomic-instrumented.h 29 static __always_inline int
46 static __always_inline int
64 static __always_inline void
82 static __always_inline void
101 static __always_inline void
119 static __always_inline int
138 static __always_inline int
156 static __always_inline int
175 static __always_inline int
193 static __always_inline int
[all …]
A D atomic-long.h 34 static __always_inline long
54 static __always_inline long
75 static __always_inline void
96 static __always_inline void
117 static __always_inline void
138 static __always_inline long
159 static __always_inline long
180 static __always_inline long
201 static __always_inline long
222 static __always_inline long
[all …]
A D atomic-arch-fallback.h 454 static __always_inline int
470 static __always_inline int
560 static __always_inline int
587 static __always_inline int
614 static __always_inline int
640 static __always_inline int
663 static __always_inline int
690 static __always_inline int
717 static __always_inline int
743 static __always_inline int
[all …]
/linux/arch/powerpc/include/asm/
A D kup.h 12 static __always_inline bool kuap_is_disabled(void);
51 static __always_inline bool kuap_is_disabled(void)
58 static __always_inline bool kuap_is_disabled(void) { return true; }
60 static __always_inline bool
83 static __always_inline bool
92 static __always_inline void kuap_lock(void)
102 static __always_inline void kuap_save_and_lock(struct pt_regs *regs)
129 static __always_inline void kuap_assert_locked(void)
169 static __always_inline void prevent_current_access_user(void)
174 static __always_inline void prevent_current_read_from_user(void)
[all …]
/linux/include/linux/
A D cpumask.h 175 static __always_inline
189 static __always_inline
216 static __always_inline
232 static __always_inline
243 static __always_inline
249 static __always_inline
256 static __always_inline
276 static __always_inline
298 static __always_inline
401 static __always_inline
[all …]
A D context_tracking_state.h 52 static __always_inline int __ct_state(void) in __ct_state()
59 static __always_inline int ct_rcu_watching(void) in ct_rcu_watching()
64 static __always_inline int ct_rcu_watching_cpu(int cpu) in ct_rcu_watching_cpu()
71 static __always_inline int ct_rcu_watching_cpu_acquire(int cpu) in ct_rcu_watching_cpu_acquire()
78 static __always_inline long ct_nesting(void) in ct_nesting()
83 static __always_inline long ct_nesting_cpu(int cpu) in ct_nesting_cpu()
90 static __always_inline long ct_nmi_nesting(void) in ct_nmi_nesting()
95 static __always_inline long ct_nmi_nesting_cpu(int cpu) in ct_nmi_nesting_cpu()
106 static __always_inline bool context_tracking_enabled(void) in context_tracking_enabled()
111 static __always_inline bool context_tracking_enabled_cpu(int cpu) in context_tracking_enabled_cpu()
[all …]
A D nodemask.h 196 static __always_inline void __nodes_complement(nodemask_t *dstp, in __nodes_complement()
204 static __always_inline bool __nodes_equal(const nodemask_t *src1p, in __nodes_equal()
246 static __always_inline void __nodes_shift_right(nodemask_t *dstp, in __nodes_shift_right()
254 static __always_inline void __nodes_shift_left(nodemask_t *dstp, in __nodes_shift_left()
355 static __always_inline int __node_remap(int oldbit, in __node_remap()
436 static __always_inline int num_node_state(enum node_states state) in num_node_state()
446 static __always_inline unsigned int next_online_node(int nid) in next_online_node()
450 static __always_inline unsigned int next_memory_node(int nid) in next_memory_node()
458 static __always_inline void node_set_online(int nid) in node_set_online()
464 static __always_inline void node_set_offline(int nid) in node_set_offline()
[all …]
A D rwlock_rt.h 35 static __always_inline void read_lock(rwlock_t *rwlock) in read_lock()
40 static __always_inline void read_lock_bh(rwlock_t *rwlock) in read_lock_bh()
46 static __always_inline void read_lock_irq(rwlock_t *rwlock) in read_lock_irq()
60 static __always_inline void read_unlock(rwlock_t *rwlock) in read_unlock()
65 static __always_inline void read_unlock_bh(rwlock_t *rwlock) in read_unlock_bh()
71 static __always_inline void read_unlock_irq(rwlock_t *rwlock) in read_unlock_irq()
82 static __always_inline void write_lock(rwlock_t *rwlock) in write_lock()
96 static __always_inline void write_lock_bh(rwlock_t *rwlock) in write_lock_bh()
102 static __always_inline void write_lock_irq(rwlock_t *rwlock) in write_lock_irq()
126 static __always_inline void write_unlock(rwlock_t *rwlock) in write_unlock()
[all …]
A D context_tracking.h 39 static __always_inline void user_enter_irqoff(void) in user_enter_irqoff()
45 static __always_inline void user_exit_irqoff(void) in user_exit_irqoff()
75 static __always_inline bool context_tracking_guest_enter(void) in context_tracking_guest_enter()
83 static __always_inline bool context_tracking_guest_exit(void) in context_tracking_guest_exit()
103 static __always_inline bool context_tracking_guest_exit(void) { return false; } in context_tracking_guest_exit()
126 static __always_inline bool rcu_is_watching_curr_cpu(void) in rcu_is_watching_curr_cpu()
135 static __always_inline unsigned long ct_state_inc(int incby) in ct_state_inc()
140 static __always_inline bool warn_rcu_enter(void) in warn_rcu_enter()
157 static __always_inline void warn_rcu_exit(bool rcu) in warn_rcu_exit()
168 static __always_inline bool warn_rcu_enter(void) { return false; } in warn_rcu_enter()
[all …]
A D kdev_t.h 24 static __always_inline bool old_valid_dev(dev_t dev) in old_valid_dev()
29 static __always_inline u16 old_encode_dev(dev_t dev) in old_encode_dev()
34 static __always_inline dev_t old_decode_dev(u16 val) in old_decode_dev()
39 static __always_inline u32 new_encode_dev(dev_t dev) in new_encode_dev()
46 static __always_inline dev_t new_decode_dev(u32 dev) in new_decode_dev()
53 static __always_inline u64 huge_encode_dev(dev_t dev) in huge_encode_dev()
58 static __always_inline dev_t huge_decode_dev(u64 dev) in huge_decode_dev()
63 static __always_inline int sysv_valid_dev(dev_t dev) in sysv_valid_dev()
68 static __always_inline u32 sysv_encode_dev(dev_t dev) in sysv_encode_dev()
73 static __always_inline unsigned sysv_major(u32 dev) in sysv_major()
[all …]
/linux/arch/x86/include/asm/
A D irqflags.h 17 extern __always_inline unsigned long native_save_fl(void) in native_save_fl()
35 static __always_inline void native_irq_disable(void) in native_irq_disable()
40 static __always_inline void native_irq_enable(void) in native_irq_enable()
45 static __always_inline void native_safe_halt(void) in native_safe_halt()
51 static __always_inline void native_halt(void) in native_halt()
90 static __always_inline void arch_local_irq_disable(void) in arch_local_irq_disable()
95 static __always_inline void arch_local_irq_enable(void) in arch_local_irq_enable()
104 static __always_inline void arch_safe_halt(void) in arch_safe_halt()
113 static __always_inline void halt(void) in halt()
121 static __always_inline unsigned long arch_local_irq_save(void) in arch_local_irq_save()
[all …]
A D bitops.h 51 static __always_inline void
65 static __always_inline void
71 static __always_inline void
84 static __always_inline void
91 static __always_inline void
109 static __always_inline void
115 static __always_inline void
121 static __always_inline void
134 static __always_inline bool
140 static __always_inline bool
[all …]
A D atomic.h 17 static __always_inline int arch_atomic_read(const atomic_t *v) in arch_atomic_read()
26 static __always_inline void arch_atomic_set(atomic_t *v, int i) in arch_atomic_set()
31 static __always_inline void arch_atomic_add(int i, atomic_t *v) in arch_atomic_add()
38 static __always_inline void arch_atomic_sub(int i, atomic_t *v) in arch_atomic_sub()
51 static __always_inline void arch_atomic_inc(atomic_t *v) in arch_atomic_inc()
58 static __always_inline void arch_atomic_dec(atomic_t *v) in arch_atomic_dec()
65 static __always_inline bool arch_atomic_dec_and_test(atomic_t *v) in arch_atomic_dec_and_test()
71 static __always_inline bool arch_atomic_inc_and_test(atomic_t *v) in arch_atomic_inc_and_test()
117 static __always_inline void arch_atomic_and(int i, atomic_t *v) in arch_atomic_and()
135 static __always_inline void arch_atomic_or(int i, atomic_t *v) in arch_atomic_or()
[all …]
A D atomic64_64.h 13 static __always_inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
18 static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
23 static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v) in arch_atomic64_add()
30 static __always_inline void arch_atomic64_sub(s64 i, atomic64_t *v) in arch_atomic64_sub()
43 static __always_inline void arch_atomic64_inc(atomic64_t *v) in arch_atomic64_inc()
51 static __always_inline void arch_atomic64_dec(atomic64_t *v) in arch_atomic64_dec()
59 static __always_inline bool arch_atomic64_dec_and_test(atomic64_t *v) in arch_atomic64_dec_and_test()
65 static __always_inline bool arch_atomic64_inc_and_test(atomic64_t *v) in arch_atomic64_inc_and_test()
111 static __always_inline void arch_atomic64_and(s64 i, atomic64_t *v) in arch_atomic64_and()
129 static __always_inline void arch_atomic64_or(s64 i, atomic64_t *v) in arch_atomic64_or()
[all …]
A D atomic64_32.h 102 static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n) in arch_atomic64_xchg()
114 static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
123 static __always_inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
148 static __always_inline s64 arch_atomic64_inc_return(atomic64_t *v) in arch_atomic64_inc_return()
157 static __always_inline s64 arch_atomic64_dec_return(atomic64_t *v) in arch_atomic64_dec_return()
166 static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v) in arch_atomic64_add()
173 static __always_inline void arch_atomic64_sub(s64 i, atomic64_t *v) in arch_atomic64_sub()
180 static __always_inline void arch_atomic64_inc(atomic64_t *v) in arch_atomic64_inc()
187 static __always_inline void arch_atomic64_dec(atomic64_t *v) in arch_atomic64_dec()
226 static __always_inline void arch_atomic64_and(s64 i, atomic64_t *v) in arch_atomic64_and()
[all …]
/linux/arch/s390/include/asm/
A D preempt.h 16 static __always_inline int preempt_count(void) in preempt_count()
21 static __always_inline void preempt_count_set(int pc) in preempt_count_set()
33 static __always_inline void set_preempt_need_resched(void) in set_preempt_need_resched()
38 static __always_inline void clear_preempt_need_resched(void) in clear_preempt_need_resched()
43 static __always_inline bool test_preempt_need_resched(void) in test_preempt_need_resched()
48 static __always_inline void __preempt_count_add(int val) in __preempt_count_add()
63 static __always_inline void __preempt_count_sub(int val) in __preempt_count_sub()
83 static __always_inline int preempt_count(void) in preempt_count()
88 static __always_inline void preempt_count_set(int pc) in preempt_count_set()
106 static __always_inline void __preempt_count_add(int val) in __preempt_count_add()
[all …]
A D fpu-insn.h 39 static __always_inline void fpu_cefbr(u8 f1, s32 val) in fpu_cefbr()
58 static __always_inline void fpu_debr(u8 f1, u8 f2) in fpu_debr()
75 static __always_inline void fpu_ldgr(u8 f1, u32 val) in fpu_ldgr()
83 static __always_inline void fpu_lfpc(unsigned int *fpc) in fpu_lfpc()
129 static __always_inline void fpu_sfpc(unsigned int fpc) in fpu_sfpc()
137 static __always_inline void fpu_stfpc(unsigned int *fpc) in fpu_stfpc()
230 static __always_inline u64 fpu_vlgvf(u8 v, u16 index) in fpu_vlgvf()
317 static __always_inline void fpu_vlr(u8 v1, u8 v2) in fpu_vlr()
349 static __always_inline void fpu_vrepib(u8 v1, s16 i2) in fpu_vrepib()
461 static __always_inline void fpu_vupllf(u8 v1, u8 v2) in fpu_vupllf()
[all …]
A D atomic.h 18 static __always_inline int arch_atomic_read(const atomic_t *v) in arch_atomic_read()
24 static __always_inline void arch_atomic_set(atomic_t *v, int i) in arch_atomic_set()
30 static __always_inline int arch_atomic_add_return(int i, atomic_t *v) in arch_atomic_add_return()
36 static __always_inline int arch_atomic_fetch_add(int i, atomic_t *v) in arch_atomic_fetch_add()
42 static __always_inline void arch_atomic_add(int i, atomic_t *v) in arch_atomic_add()
53 static __always_inline void arch_atomic_##op(int i, atomic_t *v) \
57 static __always_inline int arch_atomic_fetch_##op(int i, atomic_t *v) \
85 static __always_inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
91 static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
109 static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v) in arch_atomic64_add()
[all …]
/linux/arch/arm64/include/asm/
A D irqflags.h 23 static __always_inline void __daif_local_irq_enable(void) in __daif_local_irq_enable()
30 static __always_inline void __pmr_local_irq_enable(void) in __pmr_local_irq_enable()
52 static __always_inline void __daif_local_irq_disable(void) in __daif_local_irq_disable()
59 static __always_inline void __pmr_local_irq_disable(void) in __pmr_local_irq_disable()
80 static __always_inline unsigned long __daif_local_save_flags(void) in __daif_local_save_flags()
85 static __always_inline unsigned long __pmr_local_save_flags(void) in __pmr_local_save_flags()
121 static __always_inline bool __daif_irqs_disabled(void) in __daif_irqs_disabled()
126 static __always_inline bool __pmr_irqs_disabled(void) in __pmr_irqs_disabled()
140 static __always_inline unsigned long __daif_local_irq_save(void) in __daif_local_irq_save()
149 static __always_inline unsigned long __pmr_local_irq_save(void) in __pmr_local_irq_save()
[all …]
/linux/arch/x86/kvm/vmx/
A D vmx_onhyperv.h 21 static __always_inline bool kvm_is_using_evmcs(void) in kvm_is_using_evmcs()
26 static __always_inline int get_evmcs_offset(unsigned long field, in get_evmcs_offset()
35 static __always_inline void evmcs_write64(unsigned long field, u64 value) in evmcs_write64()
48 static __always_inline void evmcs_write32(unsigned long field, u32 value) in evmcs_write32()
60 static __always_inline void evmcs_write16(unsigned long field, u16 value) in evmcs_write16()
72 static __always_inline u64 evmcs_read64(unsigned long field) in evmcs_read64()
82 static __always_inline u32 evmcs_read32(unsigned long field) in evmcs_read32()
92 static __always_inline u16 evmcs_read16(unsigned long field) in evmcs_read16()
123 static __always_inline bool kvm_is_using_evmcs(void) { return false; } in kvm_is_using_evmcs()
127 static __always_inline u64 evmcs_read64(unsigned long field) { return 0; } in evmcs_read64()
[all …]
/linux/arch/powerpc/include/asm/nohash/
A D kup-booke.h 21 static __always_inline void __kuap_lock(void)
28 static __always_inline void __kuap_save_and_lock(struct pt_regs *regs)
36 static __always_inline void kuap_user_restore(struct pt_regs *regs)
55 static __always_inline unsigned long __kuap_get_and_assert_locked(void)
64 static __always_inline void uaccess_begin_booke(unsigned long val)
70 static __always_inline void uaccess_end_booke(void)
76 static __always_inline void allow_user_access(void __user *to, const void __user *from,
82 static __always_inline void prevent_user_access(unsigned long dir)
87 static __always_inline unsigned long prevent_user_access_return(void)
96 static __always_inline void restore_user_access(unsigned long flags)
[all …]
/linux/include/net/
A D checksum.h 27 static __always_inline
38 static __always_inline __wsum csum_and_copy_to_user in csum_and_copy_to_user()
50 static __always_inline __wsum
86 static __always_inline __wsum csum_shift(__wsum sum, int offset) in csum_shift()
95 static __always_inline __wsum
101 static __always_inline __wsum
107 static __always_inline __wsum
113 static __always_inline __wsum csum_unfold(__sum16 n) in csum_unfold()
118 static __always_inline
163 static __always_inline
[all …]
/linux/arch/powerpc/include/asm/nohash/32/
A D kup-8xx.h 14 static __always_inline void __kuap_save_and_lock(struct pt_regs *regs) in __kuap_save_and_lock()
21 static __always_inline void kuap_user_restore(struct pt_regs *regs) in kuap_user_restore()
25 static __always_inline void __kuap_kernel_restore(struct pt_regs *regs, unsigned long kuap) in __kuap_kernel_restore()
31 static __always_inline unsigned long __kuap_get_and_assert_locked(void) in __kuap_get_and_assert_locked()
40 static __always_inline void uaccess_begin_8xx(unsigned long val) in uaccess_begin_8xx()
46 static __always_inline void uaccess_end_8xx(void) in uaccess_end_8xx()
52 static __always_inline void allow_user_access(void __user *to, const void __user *from, in allow_user_access()
58 static __always_inline void prevent_user_access(unsigned long dir) in prevent_user_access()
63 static __always_inline unsigned long prevent_user_access_return(void) in prevent_user_access_return()
74 static __always_inline void restore_user_access(unsigned long flags) in restore_user_access()
[all …]
/linux/include/asm-generic/
A D preempt.h 9 static __always_inline int preempt_count(void) in preempt_count()
14 static __always_inline volatile int *preempt_count_ptr(void) in preempt_count_ptr()
19 static __always_inline void preempt_count_set(int pc) in preempt_count_set()
35 static __always_inline void set_preempt_need_resched(void) in set_preempt_need_resched()
39 static __always_inline void clear_preempt_need_resched(void) in clear_preempt_need_resched()
43 static __always_inline bool test_preempt_need_resched(void) in test_preempt_need_resched()
52 static __always_inline void __preempt_count_add(int val) in __preempt_count_add()
57 static __always_inline void __preempt_count_sub(int val) in __preempt_count_sub()
62 static __always_inline bool __preempt_count_dec_and_test(void) in __preempt_count_dec_and_test()
75 static __always_inline bool should_resched(int preempt_offset) in should_resched()
A D pgtable_uffd.h 5 static __always_inline int pte_uffd_wp(pte_t pte) in pte_uffd_wp()
10 static __always_inline int pmd_uffd_wp(pmd_t pmd) in pmd_uffd_wp()
15 static __always_inline pte_t pte_mkuffd_wp(pte_t pte) in pte_mkuffd_wp()
20 static __always_inline pmd_t pmd_mkuffd_wp(pmd_t pmd) in pmd_mkuffd_wp()
25 static __always_inline pte_t pte_clear_uffd_wp(pte_t pte) in pte_clear_uffd_wp()
30 static __always_inline pmd_t pmd_clear_uffd_wp(pmd_t pmd) in pmd_clear_uffd_wp()
35 static __always_inline pte_t pte_swp_mkuffd_wp(pte_t pte) in pte_swp_mkuffd_wp()
40 static __always_inline int pte_swp_uffd_wp(pte_t pte) in pte_swp_uffd_wp()
45 static __always_inline pte_t pte_swp_clear_uffd_wp(pte_t pte) in pte_swp_clear_uffd_wp()

Completed in 64 milliseconds

1 2 3 4 5 6 7 8 9 10 >> ... 31