Searched refs:bucket (Results 1 – 25 of 44) sorted by relevance

/drivers/md/dm-vdo/
int-map.c
73 struct bucket { struct
276 struct bucket *bucket = dereference_hop(neighborhood, next_hop); in insert_in_hop_list() local
323 static struct bucket *search_hop_list(struct bucket *bucket, u64 key, in search_hop_list() argument
334 struct bucket *entry = dereference_hop(bucket, next_hop); in search_hop_list()
453 static struct bucket *move_empty_bucket(struct bucket *hole) in move_empty_bucket()
461 struct bucket *bucket; in move_empty_bucket() local
463 for (bucket = &hole[1 - NEIGHBORHOOD]; bucket < hole; bucket++) { in move_empty_bucket()
468 struct bucket *new_hole = dereference_hop(bucket, bucket->first_hop); in move_empty_bucket()
607 struct bucket *neighborhood, *bucket; in vdo_int_map_put() local
674 struct bucket *bucket = select_bucket(map, key); in vdo_int_map_remove() local
[all …]
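The int-map.c hits above come from a hopscotch-style hash table in which each bucket records hop offsets to the other entries that hash into its neighborhood. Below is a minimal userspace sketch of that hop-list walk; the field names (first_hop, next_hop) and the 1-based hop encoding with 0 meaning "end of list" are assumptions for illustration, not a copy of the dm-vdo code.

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

struct bucket {
        uint64_t key;
        uint64_t value;
        uint8_t first_hop;      /* 1-based offset of the first entry in this neighborhood, 0 = none */
        uint8_t next_hop;       /* 1-based offset of the next entry sharing the neighborhood */
};

/* Translate a 1-based hop offset into a bucket pointer (0 means no entry). */
static struct bucket *dereference_hop(struct bucket *neighborhood, unsigned int hop)
{
        return hop ? &neighborhood[hop - 1] : NULL;
}

/* Walk the hop list anchored at *neighborhood looking for key. */
static struct bucket *search_hop_list(struct bucket *neighborhood, uint64_t key)
{
        struct bucket *entry = dereference_hop(neighborhood, neighborhood->first_hop);

        while (entry != NULL) {
                if (entry->key == key)
                        return entry;
                entry = dereference_hop(neighborhood, entry->next_hop);
        }
        return NULL;
}

int main(void)
{
        struct bucket table[4] = {
                /* neighborhood anchor: first entry is the anchor itself (hop 1), next entry at hop 3 */
                [0] = { .key = 17, .value = 100, .first_hop = 1, .next_hop = 3 },
                [2] = { .key = 42, .value = 200 },
        };
        struct bucket *hit = search_hop_list(table, 42);

        printf("found %llu\n", hit ? (unsigned long long)hit->value : 0ULL);
        return 0;
}

Keeping all colliding entries within a small neighborhood of the home bucket is what lets this kind of map stay cache-friendly while still resolving collisions with a linked list.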
priority-table.c
23 struct bucket { struct
44 struct bucket buckets[];
69 struct bucket *bucket = &table->buckets[priority]; in vdo_make_priority_table() local
71 bucket->priority = priority; in vdo_make_priority_table()
72 INIT_LIST_HEAD(&bucket->queue); in vdo_make_priority_table()
140 static inline void mark_bucket_empty(struct priority_table *table, struct bucket *bucket) in mark_bucket_empty() argument
157 struct bucket *bucket; in vdo_priority_table_dequeue() local
173 bucket = &table->buckets[top_priority]; in vdo_priority_table_dequeue()
174 entry = bucket->queue.next; in vdo_priority_table_dequeue()
178 if (list_empty(&bucket->queue)) in vdo_priority_table_dequeue()
[all …]
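The priority-table.c hits above suggest one queue bucket per priority plus a way to mark buckets empty, so that dequeue can jump straight to the highest non-empty priority. The following is a minimal sketch of that idea using a bitmask as the search structure; the fixed-capacity array queues and the helper names are illustrative assumptions, not the VDO implementation.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define MAX_PRIORITY 63
#define BUCKET_CAPACITY 8

struct bucket {
        int items[BUCKET_CAPACITY];     /* simplified FIFO, no overflow handling */
        int head, tail;
};

struct priority_table {
        uint64_t search_vector;         /* bit p set => bucket p is non-empty */
        struct bucket buckets[MAX_PRIORITY + 1];
};

static void priority_table_enqueue(struct priority_table *t, unsigned int priority, int item)
{
        struct bucket *b = &t->buckets[priority];

        b->items[b->tail++] = item;
        t->search_vector |= 1ULL << priority;   /* bucket is now non-empty */
}

static int priority_table_dequeue(struct priority_table *t)
{
        struct bucket *b;
        unsigned int top;
        int item;

        if (t->search_vector == 0)
                return -1;                      /* table is empty */

        /* Highest set bit of the search vector = highest non-empty priority. */
        top = 63 - __builtin_clzll(t->search_vector);
        b = &t->buckets[top];
        item = b->items[b->head++];
        if (b->head == b->tail)
                t->search_vector &= ~(1ULL << top);     /* mark the bucket empty */
        return item;
}

int main(void)
{
        struct priority_table t;

        memset(&t, 0, sizeof(t));
        priority_table_enqueue(&t, 3, 30);
        priority_table_enqueue(&t, 10, 100);
        printf("%d\n", priority_table_dequeue(&t));     /* 100: priority 10 served first */
        printf("%d\n", priority_table_dequeue(&t));     /* 30 */
        printf("%d\n", priority_table_dequeue(&t));     /* -1: empty */
        return 0;
}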
/drivers/interconnect/qcom/
bcm-voter.c
65 int bucket, i; in bcm_aggregate_mask() local
67 for (bucket = 0; bucket < QCOM_ICC_NUM_BUCKETS; bucket++) { in bcm_aggregate_mask()
68 bcm->vote_x[bucket] = 0; in bcm_aggregate_mask()
69 bcm->vote_y[bucket] = 0; in bcm_aggregate_mask()
75 if (node->sum_avg[bucket] || node->max_peak[bucket]) { in bcm_aggregate_mask()
76 bcm->vote_x[bucket] = 0; in bcm_aggregate_mask()
77 bcm->vote_y[bucket] = bcm->enable_mask; in bcm_aggregate_mask()
94 size_t i, bucket; in bcm_aggregate() local
99 for (bucket = 0; bucket < QCOM_ICC_NUM_BUCKETS; bucket++) { in bcm_aggregate()
104 agg_avg[bucket] = max(agg_avg[bucket], temp); in bcm_aggregate()
[all …]
/drivers/infiniband/sw/rdmavt/
trace_qp.h
18 TP_PROTO(struct rvt_qp *qp, u32 bucket),
19 TP_ARGS(qp, bucket),
23 __field(u32, bucket)
28 __entry->bucket = bucket;
34 __entry->bucket
39 TP_PROTO(struct rvt_qp *qp, u32 bucket),
40 TP_ARGS(qp, bucket));
43 TP_PROTO(struct rvt_qp *qp, u32 bucket),
44 TP_ARGS(qp, bucket));
/drivers/cpuidle/governors/
menu.c
75 unsigned int bucket; member
83 int bucket = 0; in which_bucket() local
86 return bucket; in which_bucket()
88 return bucket + 1; in which_bucket()
90 return bucket + 2; in which_bucket()
92 return bucket + 3; in which_bucket()
94 return bucket + 4; in which_bucket()
95 return bucket + 5; in which_bucket()
256 data->bucket = which_bucket(data->next_timer_ns); in menu_select()
261 data->correction_factor[data->bucket], in menu_select()
[all …]
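The menu.c hits above show the cpuidle menu governor classifying the predicted idle duration into one of six buckets, each of which gets its own correction factor. A minimal sketch of such a threshold cascade follows; the concrete cut-off values are illustrative assumptions, not the governor's actual thresholds.

#include <stdio.h>

typedef unsigned long long u64;

/* Map a predicted sleep length (in nanoseconds) to a bucket index 0..5. */
static int which_bucket(u64 duration_ns)
{
        int bucket = 0;

        if (duration_ns < 10ULL * 1000)                 /* < 10 us */
                return bucket;
        if (duration_ns < 100ULL * 1000)                /* < 100 us */
                return bucket + 1;
        if (duration_ns < 1000ULL * 1000)               /* < 1 ms */
                return bucket + 2;
        if (duration_ns < 10ULL * 1000 * 1000)          /* < 10 ms */
                return bucket + 3;
        if (duration_ns < 100ULL * 1000 * 1000)         /* < 100 ms */
                return bucket + 4;
        return bucket + 5;
}

int main(void)
{
        printf("%d %d %d\n", which_bucket(5000), which_bucket(2000000), which_bucket(500000000));
        return 0;
}

Tracking a separate correction factor per bucket lets the governor learn that, say, very short predicted sleeps tend to be cut even shorter by interrupts, without polluting the statistics for long sleeps.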
/drivers/md/
dm-clone-target.c
588 bucket = clone->ht + i; in hash_table_init()
883 bucket_lock_irq(bucket); in hydrate_bio_region()
889 bucket_unlock_irq(bucket); in hydrate_bio_region()
895 bucket_unlock_irq(bucket); in hydrate_bio_region()
904 bucket_unlock_irq(bucket); in hydrate_bio_region()
909 bucket_lock_irq(bucket); in hydrate_bio_region()
913 bucket_unlock_irq(bucket); in hydrate_bio_region()
923 bucket_unlock_irq(bucket); in hydrate_bio_region()
935 bucket_unlock_irq(bucket); in hydrate_bio_region()
949 bucket_unlock_irq(bucket); in hydrate_bio_region()
[all …]
dm-ps-historical-service-time.c
150 int bucket = clamp(delta >> HST_BUCKET_SHIFT, 0ULL, in hst_weight() local
153 return s->weights[bucket]; in hst_weight()
dm-region-hash.c
277 struct list_head *bucket = rh->buckets + rh_hash(rh, region); in __rh_lookup() local
279 list_for_each_entry(reg, bucket, hash_list) in __rh_lookup()
/drivers/md/bcache/
alloc.c
76 uint8_t bch_inc_gen(struct cache *ca, struct bucket *b) in bch_inc_gen()
89 struct bucket *b; in bch_rescale_priorities()
179 struct bucket *b; in invalidate_buckets_lru()
216 struct bucket *b; in invalidate_buckets_fifo()
239 struct bucket *b; in invalidate_buckets_random()
305 if (fifo_push(&ca->free[RESERVE_PRIO], bucket)) in bch_allocator_push()
309 if (fifo_push(&ca->free[i], bucket)) in bch_allocator_push()
328 long bucket; in bch_allocator_thread() local
330 if (!fifo_pop(&ca->free_inc, bucket)) in bch_allocator_thread()
336 bucket_to_sector(ca->set, bucket), in bch_allocator_thread()
[all …]
bcache.h
197 struct bucket { struct
211 BITMASK(GC_MARK, struct bucket, gc_mark, 0, 2);
218 BITMASK(GC_MOVE, struct bucket, gc_mark, 15, 1);
459 struct bucket *buckets;
461 DECLARE_HEAP(struct bucket *, heap);
820 static inline struct bucket *PTR_BUCKET(struct cache_set *c, in PTR_BUCKET()
916 static inline uint8_t bucket_gc_gen(struct bucket *b) in bucket_gc_gen()
980 uint8_t bch_inc_gen(struct cache *ca, struct bucket *b);
983 bool bch_can_invalidate_bucket(struct cache *ca, struct bucket *b);
984 void __bch_invalidate_one_bucket(struct cache *ca, struct bucket *b);
[all …]
extents.c
54 size_t bucket = PTR_BUCKET_NR(c, k, i); in __ptr_invalid() local
58 bucket < ca->sb.first_bucket || in __ptr_invalid()
59 bucket >= ca->sb.nbuckets) in __ptr_invalid()
75 size_t bucket = PTR_BUCKET_NR(c, k, i); in bch_ptr_status() local
80 if (bucket < ca->sb.first_bucket) in bch_ptr_status()
82 if (bucket >= ca->sb.nbuckets) in bch_ptr_status()
177 struct bucket *g; in btree_ptr_bad_expensive()
510 struct bucket *g = PTR_BUCKET(b->c, k, ptr); in bch_extent_bad_expensive()
movinggc.c
185 static bool bucket_cmp(struct bucket *l, struct bucket *r) in bucket_cmp()
192 struct bucket *b; in bucket_heap_top()
200 struct bucket *b; in bch_moving_gc()
super.c
613 struct bucket *b; in bch_prio_write()
643 long bucket; in bch_prio_write() local
660 BUG_ON(bucket == -1); in bch_prio_write()
663 prio_io(ca, bucket, REQ_OP_WRITE); in bch_prio_write()
666 ca->prio_buckets[i] = bucket; in bch_prio_write()
667 atomic_dec_bug(&ca->buckets[bucket].pin); in bch_prio_write()
695 struct bucket *b; in prio_read()
703 ca->prio_buckets[bucket_nr] = bucket; in prio_read()
707 prio_io(ca, bucket, REQ_OP_READ); in prio_read()
720 bucket = p->next_bucket; in prio_read()
[all …]
/drivers/net/ethernet/freescale/fman/
fman_dtsec.c
461 int reg_idx = (bucket >> 5) & 0xf; in set_bucket()
462 int bit_idx = bucket & 0x1f; in set_bucket()
1017 s32 bucket; in dtsec_add_hash_mac_address() local
1045 bucket = (s32)((crc >> 23) & 0x1ff); in dtsec_add_hash_mac_address()
1047 bucket = (s32)((crc >> 24) & 0xff); in dtsec_add_hash_mac_address()
1052 bucket += 0x100; in dtsec_add_hash_mac_address()
1055 set_bucket(dtsec->regs, bucket, true); in dtsec_add_hash_mac_address()
1120 s32 bucket; in dtsec_del_hash_mac_address() local
1138 bucket = (s32)((crc >> 23) & 0x1ff); in dtsec_del_hash_mac_address()
1140 bucket = (s32)((crc >> 24) & 0xff); in dtsec_del_hash_mac_address()
[all …]
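In the fman_dtsec.c hits above, a CRC of the MAC address selects a bucket in a hash filter, and set_bucket() splits that bucket index into a register index and a bit index ((bucket >> 5) & 0xf and bucket & 0x1f, i.e. sixteen 32-bit registers covering 512 buckets). A minimal sketch of that decomposition is below; the in-memory array stands in for the device registers and is purely illustrative.

#include <stdint.h>
#include <stdio.h>

#define NUM_FILTER_REGS 16

static uint32_t hash_filter[NUM_FILTER_REGS];   /* stand-in for the hardware filter registers */

static void set_bucket(int bucket, int enable)
{
        int reg_idx = (bucket >> 5) & 0xf;      /* which 32-bit register holds this bucket */
        int bit_idx = bucket & 0x1f;            /* which bit within that register */
        uint32_t mask = 1u << bit_idx;

        if (enable)
                hash_filter[reg_idx] |= mask;
        else
                hash_filter[reg_idx] &= ~mask;
}

int main(void)
{
        set_bucket(0x123, 1);   /* bucket 291 -> register 9, bit 3 */
        printf("reg9 = 0x%08x\n", hash_filter[9]);
        return 0;
}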
/drivers/misc/vmw_vmci/
vmci_doorbell.c
120 u32 bucket = VMCI_DOORBELL_HASH(idx); in dbell_index_table_find() local
123 hlist_for_each_entry(dbell, &vmci_doorbell_it.entries[bucket], in dbell_index_table_find()
139 u32 bucket; in dbell_index_table_add() local
187 bucket = VMCI_DOORBELL_HASH(entry->idx); in dbell_index_table_add()
188 hlist_add_head(&entry->node, &vmci_doorbell_it.entries[bucket]); in dbell_index_table_add()
338 u32 bucket = VMCI_DOORBELL_HASH(notify_idx); in dbell_fire_entries() local
343 hlist_for_each_entry(dbell, &vmci_doorbell_it.entries[bucket], node) { in dbell_fire_entries()
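The vmci_doorbell.c hits above follow the common pattern of hashing a key to pick a bucket and then walking that bucket's collision chain. A minimal userspace sketch of the pattern, using a trivial modulo hash and singly linked chains in place of the kernel's hlist machinery and VMCI_DOORBELL_HASH(), is shown below; all names are illustrative.

#include <stdint.h>
#include <stdio.h>

#define NUM_BUCKETS 64

struct dbell {
        uint32_t idx;
        struct dbell *next;     /* next entry in the same bucket's chain */
};

static struct dbell *index_table[NUM_BUCKETS];

static unsigned int hash_idx(uint32_t idx)
{
        return idx % NUM_BUCKETS;       /* stand-in for the real hash function */
}

static void table_add(struct dbell *entry)
{
        unsigned int bucket = hash_idx(entry->idx);

        entry->next = index_table[bucket];      /* push onto the bucket's chain */
        index_table[bucket] = entry;
}

static struct dbell *table_find(uint32_t idx)
{
        unsigned int bucket = hash_idx(idx);
        struct dbell *entry;

        for (entry = index_table[bucket]; entry; entry = entry->next)
                if (entry->idx == idx)
                        return entry;
        return NULL;
}

int main(void)
{
        struct dbell a = { .idx = 7 }, b = { .idx = 71 };       /* both hash to bucket 7 */

        table_add(&a);
        table_add(&b);
        printf("%u\n", table_find(71) ? table_find(71)->idx : 0);
        return 0;
}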
/drivers/net/wireguard/
ratelimiter.c
92 struct hlist_head *bucket; in wg_ratelimiter_allow() local
97 bucket = &table_v4[hsiphash_2u32(net_word, ip, &key) & in wg_ratelimiter_allow()
104 bucket = &table_v6[hsiphash_3u32(net_word, ip >> 32, ip, &key) & in wg_ratelimiter_allow()
111 hlist_for_each_entry_rcu(entry, bucket, hash) { in wg_ratelimiter_allow()
149 hlist_add_head_rcu(&entry->hash, bucket); in wg_ratelimiter_allow()
/drivers/net/dsa/
vitesse-vsc73xx-core.c
2071 int bucket, ret; in vsc73xx_fdb_del_entry() local
2079 for (bucket = 0; bucket < VSC73XX_NUM_BUCKETS; bucket++) { in vsc73xx_fdb_del_entry()
2080 if (fdb[bucket].valid && fdb[bucket].port == port && in vsc73xx_fdb_del_entry()
2104 int bucket, ret; in vsc73xx_fdb_add_entry() local
2113 for (bucket = 0; bucket < VSC73XX_NUM_BUCKETS; bucket++) { in vsc73xx_fdb_add_entry()
2114 if (!fdb[bucket].valid) in vsc73xx_fdb_add_entry()
2184 u16 i, bucket; in vsc73xx_port_fdb_dump() local
2194 for (bucket = 0; bucket < VSC73XX_NUM_BUCKETS; bucket++) { in vsc73xx_port_fdb_dump()
2195 if (!fdb[bucket].valid || fdb[bucket].port != port) in vsc73xx_port_fdb_dump()
2200 fdb[bucket].vid = 0; in vsc73xx_port_fdb_dump()
[all …]
/drivers/md/persistent-data/
dm-transaction-manager.c
109 unsigned int bucket = dm_hash_block(b, DM_HASH_MASK); in is_shadow() local
113 node = &tm->buckets[bucket].rb_node; in is_shadow()
137 unsigned int bucket; in insert_shadow() local
144 bucket = dm_hash_block(b, DM_HASH_MASK); in insert_shadow()
147 node = &tm->buckets[bucket].rb_node; in insert_shadow()
159 rb_insert_color(&si->node, &tm->buckets[bucket]); in insert_shadow()
/drivers/net/wireless/broadcom/brcm80211/brcmfmac/
pno.c
408 gsz = struct_size(gscan_cfg, bucket, n_buckets); in brcmf_pno_config_sched_scans()
437 memcpy(gscan_cfg->bucket, buckets, in brcmf_pno_config_sched_scans()
552 u64 brcmf_pno_find_reqid_by_bucket(struct brcmf_pno_info *pi, u32 bucket) in brcmf_pno_find_reqid_by_bucket() argument
558 if (bucket < pi->n_reqs) in brcmf_pno_find_reqid_by_bucket()
559 reqid = pi->reqs[bucket]->reqid; in brcmf_pno_find_reqid_by_bucket()
pno.h
61 u64 brcmf_pno_find_reqid_by_bucket(struct brcmf_pno_info *pi, u32 bucket);
/drivers/crypto/nx/
nx-common-pseries.c
155 int bucket = fls(time); in ibm_nx842_incr_hist() local
157 if (bucket) in ibm_nx842_incr_hist()
158 bucket = min((NX842_HIST_SLOTS - 1), bucket - 1); in ibm_nx842_incr_hist()
160 atomic64_inc(&times[bucket]); in ibm_nx842_incr_hist()
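The nx-common-pseries.c hits above bucket a measured time into a histogram slot via fls(), i.e. by the position of its highest set bit, clamped to the number of slots. A minimal sketch of that log2-style bucketing is below, with a portable stand-in for fls(); the slot count and names are illustrative assumptions.

#include <stdio.h>

#define HIST_SLOTS 8

static unsigned long long hist[HIST_SLOTS];

/* Stand-in for the kernel's fls(): 1-based index of the highest set bit, 0 for 0. */
static int fls_compat(unsigned int x)
{
        return x ? 32 - __builtin_clz(x) : 0;
}

static void incr_hist(unsigned int time)
{
        int bucket = fls_compat(time);

        if (bucket)
                bucket = bucket - 1 < HIST_SLOTS - 1 ? bucket - 1 : HIST_SLOTS - 1;
        hist[bucket]++;         /* time == 0 also lands in slot 0 */
}

int main(void)
{
        incr_hist(1);           /* fls = 1  -> slot 0 */
        incr_hist(6);           /* fls = 3  -> slot 2 */
        incr_hist(1u << 30);    /* fls = 31 -> clamped to the last slot */
        printf("%llu %llu %llu\n", hist[0], hist[2], hist[HIST_SLOTS - 1]);
        return 0;
}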
/drivers/scsi/lpfc/
lpfc_vmid.c
300 u32 bucket, i, cpu; in lpfc_reinit_vmid() local
322 hash_for_each_safe(vport->hash_table, bucket, tmp, cur, hnode) in lpfc_reinit_vmid()
/drivers/gpu/drm/radeon/
radeon_cs.c
48 struct list_head bucket[RADEON_CS_NUM_BUCKETS]; member
56 INIT_LIST_HEAD(&b->bucket[i]); in radeon_cs_buckets_init()
67 list_add_tail(item, &b->bucket[min(priority, RADEON_CS_MAX_PRIORITY)]); in radeon_cs_buckets_add()
77 list_splice(&b->bucket[i], out_list); in radeon_cs_buckets_get_list()
/drivers/net/ethernet/mellanox/mlx4/
en_netdev.c
710 struct hlist_head *bucket; in mlx4_en_replace_mac() local
717 hlist_for_each_entry_safe(entry, tmp, bucket, hlist) { in mlx4_en_replace_mac()
1148 struct hlist_head *bucket; in mlx4_en_do_uc_filter() local
1159 bucket = &priv->mac_hash[i]; in mlx4_en_do_uc_filter()
1160 hlist_for_each_entry_safe(entry, tmp, bucket, hlist) { in mlx4_en_do_uc_filter()
1204 hlist_for_each_entry(entry, bucket, hlist) { in mlx4_en_do_uc_filter()
1244 bucket = &priv->mac_hash[mac_hash]; in mlx4_en_do_uc_filter()
1245 hlist_add_head_rcu(&entry->hlist, bucket); in mlx4_en_do_uc_filter()
1348 struct hlist_head *bucket; in mlx4_en_delete_rss_steer_rules() local
1353 bucket = &priv->mac_hash[i]; in mlx4_en_delete_rss_steer_rules()
[all …]
/drivers/target/iscsi/cxgbit/
cxgbit_cm.c
85 int bucket = cxgbit_np_hashfn(cnp); in cxgbit_np_hash_add() local
90 p->next = cdev->np_hash_tab[bucket]; in cxgbit_np_hash_add()
91 cdev->np_hash_tab[bucket] = p; in cxgbit_np_hash_add()
101 int stid = -1, bucket = cxgbit_np_hashfn(cnp); in cxgbit_np_hash_find() local
105 for (p = cdev->np_hash_tab[bucket]; p; p = p->next) { in cxgbit_np_hash_find()
118 int stid = -1, bucket = cxgbit_np_hashfn(cnp); in cxgbit_np_hash_del() local
119 struct np_info *p, **prev = &cdev->np_hash_tab[bucket]; in cxgbit_np_hash_del()
