Searched refs:likely (Results 1 – 25 of 98) sorted by relevance
/include/asm-generic/

qrwlock.h
     49  if (likely(!(cnts & _QW_WMASK))) {   in queued_read_trylock()
     51  if (likely(!(cnts & _QW_WMASK)))   in queued_read_trylock()
     71  return likely(atomic_try_cmpxchg_acquire(&lock->cnts, &cnts,   in queued_write_trylock()
     83  if (likely(!(cnts & _QW_WMASK)))   in queued_read_lock()
     98  if (likely(atomic_try_cmpxchg_acquire(&lock->cnts, &cnts, _QW_LOCKED)))   in queued_write_lock()

qspinlock.h
     97  return likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL));   in queued_spin_trylock()
    111  if (likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL)))   in queued_spin_lock()
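The qrwlock.h and qspinlock.h hits share one pattern: the uncontended lock or
trylock path is a single acquire compare-and-exchange, and likely() tells the
compiler that this fast path is the expected outcome. A minimal user-space
sketch of the same shape, using C11 atomics rather than the kernel's
atomic_try_cmpxchg_acquire(); toy_spinlock and toy_spin_trylock are
illustrative names, not kernel APIs:

    #include <stdatomic.h>
    #include <stdbool.h>

    #define likely(x)   __builtin_expect(!!(x), 1)

    struct toy_spinlock {
            atomic_uint val;                /* 0 = unlocked, 1 = locked */
    };

    static inline bool toy_spin_trylock(struct toy_spinlock *lock)
    {
            unsigned int expected = 0;

            /* The caller usually wins the compare-exchange, so the
             * success branch is annotated as the hot path. */
            return likely(atomic_compare_exchange_strong_explicit(
                            &lock->val, &expected, 1,
                            memory_order_acquire, memory_order_relaxed));
    }

    static inline void toy_spin_unlock(struct toy_spinlock *lock)
    {
            atomic_store_explicit(&lock->val, 0, memory_order_release);
    }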
access_ok.h
     45  #define access_ok(addr, size) likely(__access_ok(addr, size))

mmiowb.h
     39  if (likely(ms->nesting_count))   in mmiowb_set_pending()
/include/linux/

indirect_call_wrapper.h
     19  likely(f == f1) ? f1(__VA_ARGS__) : f(__VA_ARGS__); \
     23  likely(f == f2) ? f2(__VA_ARGS__) : \
     28  likely(f == f3) ? f3(__VA_ARGS__) : \
     33  likely(f == f4) ? f4(__VA_ARGS__) : \
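The indirect_call_wrapper.h hits use likely() to devirtualize an indirect
call: the function pointer is compared against the expected callee, so the
common case becomes a direct call the compiler can inline (and one that skips
retpoline overhead on mitigated kernels). A hedged sketch of the same shape;
CALL_LIKELY_1 and the handler names are illustrative, not the kernel's
INDIRECT_CALL_1 API:

    #define likely(x) __builtin_expect(!!(x), 1)

    /* Call f1 directly when f points at it, otherwise fall back to the
     * ordinary indirect call through f. */
    #define CALL_LIKELY_1(f, f1, ...) \
            (likely((f) == (f1)) ? f1(__VA_ARGS__) : (f)(__VA_ARGS__))

    static int handle_fast(int x) { return x + 1; }

    static int dispatch(int (*handler)(int), int x)
    {
            /* Most callers install handle_fast, so test for it first. */
            return CALL_LIKELY_1(handler, handle_fast, x);
    }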
rcuref.h
     87  if (likely(!atomic_add_negative_relaxed(1, &ref->refcnt)))   in rcuref_get()
    110  if (likely(cnt >= 0))   in __rcuref_put()

device_cgroup.h
     21  if (likely(!S_ISBLK(inode->i_mode) && !S_ISCHR(inode->i_mode)))   in devcgroup_inode_permission()
     24  if (likely(!inode->i_rdev))   in devcgroup_inode_permission()

file_ref.h
    183  if (likely(old == FILE_REF_ONEREF)) {   in file_ref_put_close()
    184  if (likely(atomic_long_try_cmpxchg(&ref->refcnt, &old, FILE_REF_DEAD)))   in file_ref_put_close()

random.h
     79  if (likely(is_power_of_2(ceil) || (u8)mult >= (1U << 8) % ceil))   in get_random_u32_below()
     83  if (likely(is_power_of_2(ceil) || (u16)mult >= (1U << 16) % ceil))   in get_random_u32_below()
     87  if (likely(is_power_of_2(ceil) || (u32)mult >= -ceil % ceil))   in get_random_u32_below()
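The random.h hits are the acceptance test of a multiply-and-shift bounded
random generator: the product of the bound and a fresh random word is kept
unless its low half lands in the small region that would bias the result, and
likely() marks acceptance as the common case. A hedged user-space sketch of
that rejection idea, not the kernel's get_random_u32_below(); rng32() is an
assumed uniform 32-bit generator:

    #include <stdint.h>

    #define likely(x) __builtin_expect(!!(x), 1)

    extern uint32_t rng32(void);    /* assumed: uniform over [0, 2^32) */

    /* Uniform value in [0, ceil); ceil must be nonzero. */
    static uint32_t random_below(uint32_t ceil)
    {
            for (;;) {
                    uint64_t mult = (uint64_t)ceil * rng32();

                    /* "-ceil % ceil" is 2^32 mod ceil in 32-bit arithmetic;
                     * reject only when the low half falls below it. */
                    if (likely((uint32_t)mult >= -ceil % ceil))
                            return mult >> 32;
            }
    }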
percpu-rwsem.h
     64  if (likely(rcu_sync_is_idle(&sem->rss)))   in percpu_down_read_internal()
     94  if (likely(rcu_sync_is_idle(&sem->rss)))   in percpu_down_read_trylock()
    118  if (likely(rcu_sync_is_idle(&sem->rss))) {   in percpu_up_read()

coresight.h
    452  if (likely(csa->io_mem))   in csdev_access_relaxed_read32()
    521  if (likely(csa->io_mem)) {   in csdev_access_relaxed_read_pair()
    532  if (likely(csa->io_mem)) {   in csdev_access_relaxed_write_pair()
    543  if (likely(csa->io_mem))   in csdev_access_read32()
    552  if (likely(csa->io_mem))   in csdev_access_relaxed_write32()
    560  if (likely(csa->io_mem))   in csdev_access_write32()
    571  if (likely(csa->io_mem))   in csdev_access_relaxed_read64()
    579  if (likely(csa->io_mem))   in csdev_access_read64()
    588  if (likely(csa->io_mem))   in csdev_access_relaxed_write64()
    596  if (likely(csa->io_mem))   in csdev_access_write64()

compiler.h
     43  # ifndef likely
     44  # define likely(x) (__branch_check__(x, 1, __builtin_constant_p(x)))   macro
     76  # define likely(x) __builtin_expect(!!(x), 1)   macro
     78  # define likely_notrace(x) likely(x)
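compiler.h is where likely() is actually defined: with branch profiling
enabled it routes through __branch_check__(), otherwise it is plain
__builtin_expect(!!(x), 1), where the !! normalizes any truthy value to 1 so
it compares equal to the expected constant. A minimal sketch of the
non-tracing definition and a typical call site (process(), do_fast_path()
and do_slow_path() are made-up names for illustration):

    #define likely(x)   __builtin_expect(!!(x), 1)
    #define unlikely(x) __builtin_expect(!!(x), 0)

    extern int do_fast_path(void *buf);
    extern int do_slow_path(void *buf);

    int process(void *buf)
    {
            if (unlikely(!buf))                     /* error path: predicted not taken */
                    return -1;

            if (likely(do_fast_path(buf) == 0))     /* hot path: predicted taken */
                    return 0;

            return do_slow_path(buf);
    }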
freezer.h
     54  if (likely(!freezing(current)))   in try_to_freeze()

cookie.h
     35  if (likely(local_inc_return(&local->nesting) == 1)) {   in gen_cookie_next()

eventpoll.h
     49  if (likely(!READ_ONCE(file->f_ep)))   in eventpoll_release()

rcupdate_trace.h
     80  if (likely(!READ_ONCE(t->trc_reader_special.s)) || nesting) {   in rcu_read_unlock_trace()

if_vlan.h
    366  if (likely(mac_len > ETH_TLEN))   in __vlan_insert_inner_tag()
    374  if (likely(mac_len >= ETH_TLEN)) {   in __vlan_insert_inner_tag()
    523  if (likely(skb))   in __vlan_hwaccel_push_inside()
    758  likely(!eth_type_vlan(skb->protocol)))   in skb_vlan_tagged()
    778  if (likely(!eth_type_vlan(protocol)))   in skb_vlan_tagged_multi()

iov_iter.h
     51  if (likely(part)) {   in iterate_iovec()
     85  if (likely(part)) {   in iterate_kvec()
    299  if (likely(iter_is_ubuf(iter)))   in iterate_and_advance2()
    301  if (likely(iter_is_iovec(iter)))   in iterate_and_advance2()

kfence.h
    127  if (likely(atomic_read(&kfence_allocation_gate) > 0))   in kfence_alloc()
/include/net/

tipc.h
     54  if (likely(!keepalive_msg))   in tipc_hdr_rps_key()

ip6_tunnel.h
    102  if (likely(ops && ops->encap_hlen))   in ip6_encap_hlen()
    123  if (likely(ops && ops->build_header))   in ip6_tnl_encap()
/include/kunit/

static_stub.h
     64  if (likely(!current_test)) \
/include/net/libeth/

xsk.h
     33  if (likely(!xdp_buff_has_frags(&xdp->base)))   in libeth_xsk_tx_queue_head()
    465  if (likely(act == LIBETH_XDP_REDIRECT))   in __libeth_xsk_run_pass()
    649  if (likely(this > n))   in libeth_xskfqe_alloc()
    656  for (u32 i = 0, ntu = fq->ntu; likely(i < ret); i++)   in libeth_xskfqe_alloc()
    662  if (likely(fq->ntu < fq->count) || unlikely(ret < this))   in libeth_xskfqe_alloc()

rx.h
    291  return likely(pt.inner_prot > LIBETH_RX_PT_INNER_NONE &&   in libeth_rx_pt_has_checksum()
    298  return likely(pt.payload_layer > LIBETH_RX_PT_PAYLOAD_NONE &&   in libeth_rx_pt_has_hash()
/include/linux/atomic/

atomic-arch-fallback.h
    195  likely(___r == ___o); \
    213  likely(___r == ___o); \
    231  likely(___r == ___o); \
    246  likely(___r == ___o); \
    262  likely(___r == ___o); \
    280  likely(___r == ___o); \
    298  likely(___r == ___o); \
    313  likely(___r == ___o); \
    329  likely(___r == ___o); \
    347  likely(___r == ___o); \
    [all …]
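Every hit in atomic-arch-fallback.h is the same closing line of the generated
try_cmpxchg fallbacks: perform a plain cmpxchg, write the observed value back
to the caller on failure, and report success with likely() because callers
expect the exchange to win. A hedged sketch of that shape using C11 atomics;
my_cmpxchg() and my_try_cmpxchg() are illustrative names, not kernel APIs:

    #include <stdatomic.h>
    #include <stdbool.h>

    #define likely(x)   __builtin_expect(!!(x), 1)
    #define unlikely(x) __builtin_expect(!!(x), 0)

    /* Plain cmpxchg flavour: returns the value that was actually in *v. */
    static inline int my_cmpxchg(atomic_int *v, int old, int newval)
    {
            atomic_compare_exchange_strong(v, &old, newval);
            return old;     /* on failure, old now holds the observed value */
    }

    /* try_cmpxchg flavour: on failure, write the observed value back
     * through *oldp so the caller can retry without an extra load. */
    static inline bool my_try_cmpxchg(atomic_int *v, int *oldp, int newval)
    {
            int o = *oldp;
            int r = my_cmpxchg(v, o, newval);

            if (unlikely(r != o))
                    *oldp = r;
            return likely(r == o);
    }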
Completed in 107 milliseconds