
Searched refs:p (Results 1 – 25 of 337) sorted by relevance

/include/linux/
unaligned.h
78 return p[0] << 16 | p[1] << 8 | p[2]; in __get_unaligned_be24()
88 return p[0] | p[1] << 8 | p[2] << 16; in __get_unaligned_le24()
98 *p++ = (val >> 16) & 0xff; in __put_unaligned_be24()
99 *p++ = (val >> 8) & 0xff; in __put_unaligned_be24()
100 *p++ = val & 0xff; in __put_unaligned_be24()
110 *p++ = val & 0xff; in __put_unaligned_le24()
111 *p++ = (val >> 8) & 0xff; in __put_unaligned_le24()
126 *p++ = (val >> 8) & 0xff; in __put_unaligned_be48()
127 *p++ = val & 0xff; in __put_unaligned_be48()
137 return (u64)p[0] << 40 | (u64)p[1] << 32 | (u64)p[2] << 24 | in __get_unaligned_be48()
[all …]
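
The matches above compose and decompose 24-bit values one byte at a time. A minimal userspace sketch of the same byte arithmetic (get_be24/put_be24 are illustrative names, not the kernel helpers):

#include <stdint.h>
#include <stdio.h>

static uint32_t get_be24(const uint8_t *p)
{
	/* big endian: most significant byte first, as in __get_unaligned_be24() */
	return (uint32_t)p[0] << 16 | (uint32_t)p[1] << 8 | p[2];
}

static void put_be24(uint32_t val, uint8_t *p)
{
	*p++ = (val >> 16) & 0xff;
	*p++ = (val >> 8) & 0xff;
	*p++ = val & 0xff;
}

int main(void)
{
	uint8_t buf[3];

	put_be24(0x123456, buf);
	printf("0x%06x\n", get_be24(buf));	/* round-trips to 0x123456 */
	return 0;
}
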
parport_pc.h
9 #define ECONTROL(p) ((p)->base_hi + 0x2) argument
10 #define CONFIGB(p) ((p)->base_hi + 0x1) argument
11 #define CONFIGA(p) ((p)->base_hi + 0x0) argument
12 #define FIFO(p) ((p)->base_hi + 0x0) argument
13 #define EPPDATA(p) ((p)->base + 0x4) argument
14 #define EPPADDR(p) ((p)->base + 0x3) argument
15 #define CONTROL(p) ((p)->base + 0x2) argument
16 #define STATUS(p) ((p)->base + 0x1) argument
17 #define DATA(p) ((p)->base + 0x0) argument
170 p->name, p->cad->name); in parport_pc_write_control()
[all …]
fortify-string.h
257 #define strlen(p) \ argument
259 __builtin_strlen(p), __fortify_strlen(p))
391 p[actual] = '\0'; in strlcat()
420 return p; in strcat()
452 p_len = strlen(p); in strncat()
459 return p; in strncat()
512 #define memset(p, c, s) __fortify_memset_chk(p, c, s, \ argument
513 __struct_size(p), __member_size(p))
690 #define memcpy(p, q, s) __fortify_memcpy_chk(p, q, s, \ argument
694 #define memmove(p, q, s) __fortify_memcpy_chk(p, q, s, \ argument
[all …]
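
The fortify-string.h matches route strlen()/memset()/memcpy() through wrappers that compare the requested size against the compiler-known object size. A simplified, hedged userspace illustration of that idea (checked_memset is a made-up name, not the kernel's __fortify_memset_chk):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define checked_memset(p, c, s)						\
({									\
	size_t __bos = __builtin_object_size((p), 0);			\
	if (__bos != (size_t)-1 && (s) > __bos) {			\
		fprintf(stderr, "memset overflow detected\n");		\
		abort();						\
	}								\
	memset((p), (c), (s));						\
})

int main(void)
{
	char buf[8];

	checked_memset(buf, 0, sizeof(buf));	/* size known and in bounds */
	/* checked_memset(buf, 0, 16) would trip the check at run time */
	printf("ok\n");
	return 0;
}
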
rcupdate.h
503 ((void)(((typeof(*p) space *)p) == p))
510 typeof(*p) *local = (typeof(*p) *__force)(p); \
521 #define unrcu_pointer(p) __unrcu_pointer(p, __UNIQUE_ID(rcu)) argument
525 typeof(*p) *local = (typeof(*p) *__force)READ_ONCE(p); \
532 typeof(*p) *local = (typeof(*p) *__force)READ_ONCE(p); \
541 ((typeof(*p) __force __kernel *)(p)); \
546 typeof(p) local = READ_ONCE(p); \
594 WRITE_ONCE((p), (typeof(p))(_r_a_p__v)); \
752 #define rcu_dereference(p) rcu_dereference_check(p, 0) argument
760 #define rcu_dereference_bh(p) rcu_dereference_bh_check(p, 0) argument
[all …]
parport.h
515 #define parport_read_data(p) parport_pc_read_data(p) argument
528 #define parport_write_data(p,x) (p)->ops->write_data(p,x) argument
529 #define parport_read_data(p) (p)->ops->read_data(p) argument
530 #define parport_write_control(p,x) (p)->ops->write_control(p,x) argument
531 #define parport_read_control(p) (p)->ops->read_control(p) argument
532 #define parport_frob_control(p,m,v) (p)->ops->frob_control(p,m,v) argument
533 #define parport_read_status(p) (p)->ops->read_status(p) argument
534 #define parport_enable_irq(p) (p)->ops->enable_irq(p) argument
535 #define parport_disable_irq(p) (p)->ops->disable_irq(p) argument
536 #define parport_data_forward(p) (p)->ops->data_forward(p) argument
[all …]
u64_stats_sync.h
77 static inline u64 u64_stats_read(const u64_stats_t *p) in u64_stats_read() argument
79 return local64_read(&p->v); in u64_stats_read()
84 local64_set(&p->v, val); in u64_stats_set()
89 local64_add(val, &p->v); in u64_stats_add()
92 static inline void u64_stats_inc(u64_stats_t *p) in u64_stats_inc() argument
94 local64_inc(&p->v); in u64_stats_inc()
120 return p->v; in u64_stats_read()
125 p->v = val; in u64_stats_set()
130 p->v += val; in u64_stats_add()
133 static inline void u64_stats_inc(u64_stats_t *p) in u64_stats_inc() argument
[all …]
kasan-checks.h
19 bool __kasan_check_read(const volatile void *p, unsigned int size);
20 bool __kasan_check_write(const volatile void *p, unsigned int size);
22 static inline bool __kasan_check_read(const volatile void *p, unsigned int size) in __kasan_check_read() argument
26 static inline bool __kasan_check_write(const volatile void *p, unsigned int size) in __kasan_check_write() argument
40 static inline bool kasan_check_read(const volatile void *p, unsigned int size) in kasan_check_read() argument
44 static inline bool kasan_check_write(const volatile void *p, unsigned int size) in kasan_check_write() argument
/include/linux/ceph/
decode.h
22 *p += sizeof(u64); in ceph_decode_64()
39 u8 v = *(u8 *)*p; in ceph_decode_8()
40 (*p)++; in ceph_decode_8()
46 *p += n; in ceph_decode_copy()
54 return end >= *p && n <= end - *p; in ceph_has_room()
111 void *sp = *p; in ceph_extract_encoded_string()
126 *p = (char *) *p + sizeof (u32) + len; in ceph_extract_encoded_string()
271 (*p)++; in ceph_encode_8()
276 *p += len; in ceph_encode_copy()
292 *p += len; in ceph_encode_filepath()
[all …]
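
The decode.h matches all follow one cursor pattern: check that the buffer has room, read a field through the cursor, then advance it. A rough userspace sketch of that pattern, with made-up names (has_room/decode_8), not the ceph API itself:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

static bool has_room(const void **p, const void *end, size_t n)
{
	return (const char *)end >= (const char *)*p &&
	       n <= (size_t)((const char *)end - (const char *)*p);
}

static uint8_t decode_8(const void **p)
{
	uint8_t v = *(const uint8_t *)*p;

	*p = (const char *)*p + 1;	/* advance the cursor past the field */
	return v;
}

int main(void)
{
	uint8_t buf[] = { 0xab, 0xcd };
	const void *p = buf, *end = buf + sizeof(buf);

	while (has_room(&p, end, 1))
		printf("0x%02x\n", decode_8(&p));
	return 0;
}
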
/include/linux/usb/
ehci_def.h
22 #define HC_LENGTH(ehci, p) (0x00ff&((p) >> /* bits 7:0 / offset 00h */ \ argument
29 #define HCS_N_CC(p) (((p)>>12)&0xf) /* bits 15:12, #companion HCs */ argument
30 #define HCS_N_PCC(p) (((p)>>8)&0xf) /* bits 11:8, ports per CC */ argument
31 #define HCS_PORTROUTED(p) ((p)&(1 << 7)) /* true: port routing */ argument
32 #define HCS_PPC(p) ((p)&(1 << 4)) /* true: port power control */ argument
33 #define HCS_N_PORTS(p) (((p)>>0)&0xf) /* bits 3:0, ports on HC */ argument
38 #define HCC_32FRAME_PERIODIC_LIST(p) ((p)&(1 << 19)) argument
39 #define HCC_PER_PORT_CHANGE_EVENT(p) ((p)&(1 << 18)) argument
40 #define HCC_LPM(p) ((p)&(1 << 17)) argument
41 #define HCC_HW_PREFETCH(p) ((p)&(1 << 16)) argument
[all …]
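
The ehci_def.h matches are shift-and-mask extractors over the controller's capability registers. The same idiom on a made-up register value (macro names and the sample value are illustrative, not taken from hardware):

#include <stdint.h>
#include <stdio.h>

#define FIELD_N_PORTS(p)	(((p) >> 0) & 0xf)	/* bits 3:0 */
#define FIELD_N_PCC(p)		(((p) >> 8) & 0xf)	/* bits 11:8 */
#define FIELD_N_CC(p)		(((p) >> 12) & 0xf)	/* bits 15:12 */

int main(void)
{
	uint32_t hcsparams = 0x00003208;	/* example value only */

	printf("ports=%u companion HCs=%u ports per CC=%u\n",
	       (unsigned)FIELD_N_PORTS(hcsparams),
	       (unsigned)FIELD_N_CC(hcsparams),
	       (unsigned)FIELD_N_PCC(hcsparams));
	return 0;
}
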
/include/linux/sunrpc/xdrgen/
_builtins.h
32 if (unlikely(!p)) in xdrgen_decode_bool()
43 if (unlikely(!p)) in xdrgen_encode_bool()
54 if (unlikely(!p)) in xdrgen_decode_int()
65 if (unlikely(!p)) in xdrgen_encode_int()
76 if (unlikely(!p)) in xdrgen_decode_unsigned_int()
87 if (unlikely(!p)) in xdrgen_encode_unsigned_int()
98 if (unlikely(!p)) in xdrgen_decode_long()
109 if (unlikely(!p)) in xdrgen_encode_long()
120 if (unlikely(!p)) in xdrgen_decode_unsigned_long()
184 __be32 *p; in xdrgen_decode_string() local
[all …]
/include/asm-generic/bitops/
atomic.h
15 arch_set_bit(unsigned int nr, volatile unsigned long *p) in arch_set_bit() argument
17 p += BIT_WORD(nr); in arch_set_bit()
18 raw_atomic_long_or(BIT_MASK(nr), (atomic_long_t *)p); in arch_set_bit()
22 arch_clear_bit(unsigned int nr, volatile unsigned long *p) in arch_clear_bit() argument
24 p += BIT_WORD(nr); in arch_clear_bit()
25 raw_atomic_long_andnot(BIT_MASK(nr), (atomic_long_t *)p); in arch_clear_bit()
31 p += BIT_WORD(nr); in arch_change_bit()
32 raw_atomic_long_xor(BIT_MASK(nr), (atomic_long_t *)p); in arch_change_bit()
41 p += BIT_WORD(nr); in arch_test_and_set_bit()
52 p += BIT_WORD(nr); in arch_test_and_clear_bit()
[all …]
lock.h
19 arch_test_and_set_bit_lock(unsigned int nr, volatile unsigned long *p) in arch_test_and_set_bit_lock() argument
24 p += BIT_WORD(nr); in arch_test_and_set_bit_lock()
25 if (READ_ONCE(*p) & mask) in arch_test_and_set_bit_lock()
28 old = raw_atomic_long_fetch_or_acquire(mask, (atomic_long_t *)p); in arch_test_and_set_bit_lock()
41 arch_clear_bit_unlock(unsigned int nr, volatile unsigned long *p) in arch_clear_bit_unlock() argument
43 p += BIT_WORD(nr); in arch_clear_bit_unlock()
59 arch___clear_bit_unlock(unsigned int nr, volatile unsigned long *p) in arch___clear_bit_unlock() argument
63 p += BIT_WORD(nr); in arch___clear_bit_unlock()
64 old = READ_ONCE(*p); in arch___clear_bit_unlock()
66 raw_atomic_long_set_release((atomic_long_t *)p, old); in arch___clear_bit_unlock()
[all …]
generic-non-atomic.h
31 unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr); in generic___set_bit() local
33 *p |= mask; in generic___set_bit()
42 *p &= ~mask; in generic___clear_bit()
60 *p ^= mask; in generic___change_bit()
77 unsigned long old = *p; in generic___test_and_set_bit()
79 *p = old | mask; in generic___test_and_set_bit()
97 unsigned long old = *p; in generic___test_and_clear_bit()
99 *p = old & ~mask; in generic___test_and_clear_bit()
109 unsigned long old = *p; in generic___test_and_change_bit()
111 *p = old ^ mask; in generic___test_and_change_bit()
[all …]
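
The atomic.h, lock.h and generic-non-atomic.h matches all address bits the same way: BIT_WORD() picks the word, BIT_MASK() picks the bit within it. A userspace sketch of the plain, non-atomic form (the atomic variants replace the |= with an atomic OR); helper names here are illustrative:

#include <limits.h>
#include <stdbool.h>
#include <stdio.h>

#define BITS_PER_LONG	(sizeof(unsigned long) * CHAR_BIT)
#define BIT_WORD(nr)	((nr) / BITS_PER_LONG)
#define BIT_MASK(nr)	(1UL << ((nr) % BITS_PER_LONG))

static void set_bit_nonatomic(unsigned int nr, unsigned long *addr)
{
	unsigned long *p = addr + BIT_WORD(nr);

	*p |= BIT_MASK(nr);
}

static bool test_bit_nonatomic(unsigned int nr, const unsigned long *addr)
{
	return addr[BIT_WORD(nr)] & BIT_MASK(nr);
}

int main(void)
{
	unsigned long map[2] = { 0 };	/* at least 64 bits on any host */

	set_bit_nonatomic(40, map);
	printf("%d\n", test_bit_nonatomic(40, map));	/* prints 1 */
	return 0;
}
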
/include/net/
red.h
242 p->qth_delta = delta; in red_set_parms()
264 memcpy(p->Stab, stab, sizeof(p->Stab)); in red_set_parms()
316 shift = p->Stab[(us_idle >> p->Scell_log) & RED_STAB_MASK]; in red_calc_qavg_from_idle_time()
389 return !(((qavg - p->qth_min) >> p->Wlog) * v->qcount < v->qR); in red_mark_probability()
400 if (qavg < p->qth_min) in red_cmp_thresh()
454 qavg >>= p->Wlog; in red_adaptative_algo()
456 if (qavg > p->target_max && p->max_P <= MAX_P_MAX) in red_adaptative_algo()
457 p->max_P += MAX_P_ALPHA(p->max_P); /* maxp = maxp + alpha */ in red_adaptative_algo()
458 else if (qavg < p->target_min && p->max_P >= MAX_P_MIN) in red_adaptative_algo()
459 p->max_P = (p->max_P/10)*9; /* maxp = maxp * Beta */ in red_adaptative_algo()
[all …]
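
The red_adaptative_algo() match shows how adaptive RED steers its marking probability: additive increase when the average queue sits above target_max, multiplicative decrease to 90% when it falls below target_min. A toy sketch of that loop shape with made-up constants (the real MAX_P_ALPHA() step differs):

#include <stdio.h>

int main(void)
{
	unsigned int max_p = 1000;
	unsigned int target_min = 20, target_max = 40;
	unsigned int qavg_samples[] = { 50, 45, 30, 10, 10 };
	unsigned int i;

	for (i = 0; i < 5; i++) {
		unsigned int qavg = qavg_samples[i];

		if (qavg > target_max)
			max_p += max_p / 4;		/* stand-in for MAX_P_ALPHA() */
		else if (qavg < target_min)
			max_p = (max_p / 10) * 9;	/* maxp = maxp * 0.9 */
		printf("qavg=%u max_p=%u\n", qavg, max_p);
	}
	return 0;
}
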
/include/uapi/linux/
swab.h
176 return __swab16(*p); in __swab16p()
243 __arch_swab16s(p); in __swab16s()
245 *p = __swab16p(p); in __swab16s()
255 __arch_swab32s(p); in __swab32s()
257 *p = __swab32p(p); in __swab32s()
268 __arch_swab64s(p); in __swab64s()
270 *p = __swab64p(p); in __swab64s()
283 __arch_swahw32s(p); in __swahw32s()
285 *p = __swahw32p(p); in __swahw32s()
298 __arch_swahb32s(p); in __swahb32s()
[all …]
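
The swab.h matches are the in-place fallbacks: swap the value through a helper and store it back (*p = __swab16p(p)). A minimal sketch of that shape for the 16-bit case (swab16/swab16s are illustrative names):

#include <stdint.h>
#include <stdio.h>

static uint16_t swab16(uint16_t x)
{
	return (uint16_t)((x & 0x00ff) << 8 | (x & 0xff00) >> 8);
}

static void swab16s(uint16_t *p)
{
	*p = swab16(*p);	/* mirrors the "*p = __swab16p(p)" fallback */
}

int main(void)
{
	uint16_t v = 0x1234;

	swab16s(&v);
	printf("0x%04x\n", v);	/* 0x3412 */
	return 0;
}
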
/include/linux/pinctrl/
consumer.h
161 struct pinctrl *p; in pinctrl_get_select() local
165 p = pinctrl_get(dev); in pinctrl_get_select()
166 if (IS_ERR(p)) in pinctrl_get_select()
167 return p; in pinctrl_get_select()
171 pinctrl_put(p); in pinctrl_get_select()
177 pinctrl_put(p); in pinctrl_get_select()
181 return p; in pinctrl_get_select()
192 struct pinctrl *p; in devm_pinctrl_get_select() local
197 if (IS_ERR(p)) in devm_pinctrl_get_select()
198 return p; in devm_pinctrl_get_select()
[all …]
/include/linux/unaligned/
packed_struct.h
10 static inline u16 __get_unaligned_cpu16(const void *p) in __get_unaligned_cpu16() argument
12 const struct __una_u16 *ptr = (const struct __una_u16 *)p; in __get_unaligned_cpu16()
16 static inline u32 __get_unaligned_cpu32(const void *p) in __get_unaligned_cpu32() argument
18 const struct __una_u32 *ptr = (const struct __una_u32 *)p; in __get_unaligned_cpu32()
22 static inline u64 __get_unaligned_cpu64(const void *p) in __get_unaligned_cpu64() argument
24 const struct __una_u64 *ptr = (const struct __una_u64 *)p; in __get_unaligned_cpu64()
28 static inline void __put_unaligned_cpu16(u16 val, void *p) in __put_unaligned_cpu16() argument
30 struct __una_u16 *ptr = (struct __una_u16 *)p; in __put_unaligned_cpu16()
34 static inline void __put_unaligned_cpu32(u32 val, void *p) in __put_unaligned_cpu32() argument
36 struct __una_u32 *ptr = (struct __una_u32 *)p; in __put_unaligned_cpu32()
[all …]
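
packed_struct.h relies on one trick: wrap the scalar in a packed struct so the compiler itself emits a safe unaligned load or store. A userspace sketch assuming a GCC/Clang-style packed attribute (struct and function names are illustrative):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct una_u32 {
	uint32_t x;
} __attribute__((packed));

static uint32_t get_unaligned_u32(const void *p)
{
	const struct una_u32 *ptr = (const struct una_u32 *)p;

	return ptr->x;	/* compiler handles the misalignment */
}

int main(void)
{
	uint8_t buf[8] = { 0 };
	uint32_t v = 0xdeadbeef;

	memcpy(buf + 1, &v, sizeof(v));		/* deliberately misaligned */
	printf("0x%08x\n", get_unaligned_u32(buf + 1));	/* 0xdeadbeef */
	return 0;
}
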
/include/linux/sunrpc/
xdr.h
156 return p + 2; in xdr_encode_hyper()
163 return p + 2; in xdr_decode_hyper()
394 *p = xdr_one; in xdr_stream_encode_item_present()
428 return p; in xdr_encode_bool()
489 *p = n; in xdr_stream_encode_be32()
535 *ptr = ++p; in xdr_stream_encode_opaque_inline()
692 *ptr = *p; in xdr_stream_decode_be32()
757 __be32 *p; in xdr_stream_decode_opaque_inline() local
769 *ptr = p; in xdr_stream_decode_opaque_inline()
789 __be32 *p; in xdr_stream_decode_uint32_array() local
[all …]
rpc_rdma.h
137 static inline __be32 *xdr_encode_rdma_segment(__be32 *p, u32 handle, in xdr_encode_rdma_segment() argument
140 *p++ = cpu_to_be32(handle); in xdr_encode_rdma_segment()
141 *p++ = cpu_to_be32(length); in xdr_encode_rdma_segment()
142 return xdr_encode_hyper(p, offset); in xdr_encode_rdma_segment()
160 *p++ = cpu_to_be32(position); in xdr_encode_read_segment()
161 return xdr_encode_rdma_segment(p, handle, length, offset); in xdr_encode_read_segment()
177 *handle = be32_to_cpup(p++); in xdr_decode_rdma_segment()
178 *length = be32_to_cpup(p++); in xdr_decode_rdma_segment()
179 return xdr_decode_hyper(p, offset); in xdr_decode_rdma_segment()
197 *position = be32_to_cpup(p++); in xdr_decode_read_segment()
[all …]
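
The xdr.h and rpc_rdma.h matches encode 32-bit fields big-endian and split a 64-bit "hyper" across two words, which is why xdr_encode_hyper() returns p + 2. A hedged userspace sketch with htonl() standing in for cpu_to_be32() (function names are illustrative):

#include <arpa/inet.h>
#include <stdint.h>
#include <stdio.h>

static uint32_t *encode_hyper(uint32_t *p, uint64_t val)
{
	*p++ = htonl((uint32_t)(val >> 32));
	*p++ = htonl((uint32_t)(val & 0xffffffff));
	return p;			/* advanced by two 32-bit words */
}

static uint32_t *encode_rdma_segment(uint32_t *p, uint32_t handle,
				     uint32_t length, uint64_t offset)
{
	*p++ = htonl(handle);
	*p++ = htonl(length);
	return encode_hyper(p, offset);
}

int main(void)
{
	uint32_t buf[4];
	uint32_t *end = encode_rdma_segment(buf, 0x1, 4096, 0x100000000ULL);

	printf("encoded %d words\n", (int)(end - buf));	/* 4 */
	return 0;
}
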
/include/asm-generic/
barrier.h
143 WRITE_ONCE(*p, v); \
150 __unqual_scalar_typeof(*p) ___p1 = READ_ONCE(*p); \
153 (typeof(*p))___p1; \
172 #define smp_store_release(p, v) do { kcsan_release(); __smp_store_release(p, v); } while (0) argument
176 #define smp_load_acquire(p) __smp_load_acquire(p) argument
197 WRITE_ONCE(*p, v); \
202 #define smp_load_acquire(p) \ argument
204 __unqual_scalar_typeof(*p) ___p1 = READ_ONCE(*p); \
206 (typeof(*p))___p1; \
219 #define virt_store_release(p, v) do { kcsan_release(); __smp_store_release(p, v); } while (0) argument
[all …]
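
The barrier.h matches define smp_store_release()/smp_load_acquire() around WRITE_ONCE()/READ_ONCE(). A hedged userspace analogue of the publish/observe ordering they provide, using C11 atomics rather than the kernel primitives:

#include <stdatomic.h>
#include <stdio.h>

static int payload;
static atomic_int ready;

static void producer(void)
{
	payload = 42;				/* plain store */
	atomic_store_explicit(&ready, 1, memory_order_release);
}

static int consumer(void)
{
	if (atomic_load_explicit(&ready, memory_order_acquire))
		return payload;			/* ordered after the flag load */
	return -1;
}

int main(void)
{
	producer();
	printf("%d\n", consumer());
	return 0;
}
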
/include/linux/sched/
rt.h
23 static inline bool rt_task(struct task_struct *p) in rt_task() argument
25 return rt_prio(p->prio); in rt_task()
33 static inline bool rt_or_dl_task(struct task_struct *p) in rt_or_dl_task() argument
35 return rt_or_dl_prio(p->prio); in rt_or_dl_task()
61 static inline struct task_struct *rt_mutex_get_top_task(struct task_struct *p) in rt_mutex_get_top_task() argument
63 return p->pi_top_task; in rt_mutex_get_top_task()
65 extern void rt_mutex_setprio(struct task_struct *p, struct task_struct *pi_task);
66 extern void rt_mutex_adjust_pi(struct task_struct *p);
72 # define rt_mutex_adjust_pi(p) do { } while (0) argument
autogroup.h
11 extern void sched_autogroup_create_attach(struct task_struct *p);
12 extern void sched_autogroup_detach(struct task_struct *p);
15 extern void sched_autogroup_exit_task(struct task_struct *p);
17 extern void proc_sched_autogroup_show_task(struct task_struct *p, struct seq_file *m);
18 extern int proc_sched_autogroup_set_nice(struct task_struct *p, int nice);
21 static inline void sched_autogroup_create_attach(struct task_struct *p) { } in sched_autogroup_create_attach() argument
22 static inline void sched_autogroup_detach(struct task_struct *p) { } in sched_autogroup_detach() argument
25 static inline void sched_autogroup_exit_task(struct task_struct *p) { } in sched_autogroup_exit_task() argument
/include/keys/
trusted-type.h
62 int (*seal)(struct trusted_key_payload *p, char *datablob);
65 int (*unseal)(struct trusted_key_payload *p, char *datablob);
84 static inline void dump_payload(struct trusted_key_payload *p) in dump_payload() argument
86 pr_info("key_len %d\n", p->key_len); in dump_payload()
88 16, 1, p->key, p->key_len, 0); in dump_payload()
89 pr_info("bloblen %d\n", p->blob_len); in dump_payload()
91 16, 1, p->blob, p->blob_len, 0); in dump_payload()
92 pr_info("migratable %d\n", p->migratable); in dump_payload()
95 static inline void dump_payload(struct trusted_key_payload *p) in dump_payload() argument
/include/uapi/linux/byteorder/
big_endian.h
47 return (__force __le64)__swab64p(p); in __cpu_to_le64p()
51 return __swab64p((__u64 *)p); in __le64_to_cpup()
55 return (__force __le32)__swab32p(p); in __cpu_to_le32p()
59 return __swab32p((__u32 *)p); in __le32_to_cpup()
67 return __swab16p((__u16 *)p); in __le16_to_cpup()
71 return (__force __be64)*p; in __cpu_to_be64p()
75 return (__force __u64)*p; in __be64_to_cpup()
79 return (__force __be32)*p; in __cpu_to_be32p()
83 return (__force __u32)*p; in __be32_to_cpup()
87 return (__force __be16)*p; in __cpu_to_be16p()
[all …]
little_endian.h
47 return (__force __le64)*p; in __cpu_to_le64p()
51 return (__force __u64)*p; in __le64_to_cpup()
55 return (__force __le32)*p; in __cpu_to_le32p()
59 return (__force __u32)*p; in __le32_to_cpup()
63 return (__force __le16)*p; in __cpu_to_le16p()
67 return (__force __u16)*p; in __le16_to_cpup()
71 return (__force __be64)__swab64p(p); in __cpu_to_be64p()
75 return __swab64p((__u64 *)p); in __be64_to_cpup()
79 return (__force __be32)__swab32p(p); in __cpu_to_be32p()
83 return __swab32p((__u32 *)p); in __be32_to_cpup()
[all …]
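
big_endian.h and little_endian.h are two sides of the same conversion: when the host order already matches, the helper is a plain dereference; otherwise it is a byte swap. A small illustration using the standard htonl() helper, which behaves the same way:

#include <arpa/inet.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t host = 0x12345678;
	uint32_t be = htonl(host);

	/* On a little-endian host the bytes are swapped; on a big-endian
	 * host htonl() is the identity, mirroring the "*p" pass-through in
	 * the matches above. */
	printf("host=0x%08x network=0x%08x\n", host, be);
	return 0;
}
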
