
Searched refs:rb_node (Results 1 – 25 of 106) sorted by relevance


/drivers/infiniband/sw/rdmavt/
mcast.c
109 struct rb_node *n; in rvt_mcast_find()
114 n = ibp->mcast_tree.rb_node; in rvt_mcast_find()
119 mcast = rb_entry(n, struct rvt_mcast, rb_node); in rvt_mcast_find()
154 struct rb_node **n = &ibp->mcast_tree.rb_node; in rvt_mcast_add()
155 struct rb_node *pn = NULL; in rvt_mcast_add()
219 rb_link_node(&mcast->rb_node, pn, n); in rvt_mcast_add()
308 struct rb_node *n; in rvt_detach_mcast()
318 n = ibp->mcast_tree.rb_node; in rvt_detach_mcast()
325 mcast = rb_entry(n, struct rvt_mcast, rb_node); in rvt_detach_mcast()
356 rb_erase(&mcast->rb_node, &ibp->mcast_tree); in rvt_detach_mcast()
[all …]
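
The rdmavt hits above show the canonical kernel rbtree lookup: start at root->rb_node, descend left or right by comparing keys, and map the matching rb_node back to its container with rb_entry(). A minimal sketch of that pattern, assuming a hypothetical struct item keyed by a plain int (the struct and key are illustrative, not taken from rdmavt):

#include <linux/rbtree.h>

/* Hypothetical object with an embedded rb_node, keyed by an int. */
struct item {
	struct rb_node rb_node;
	int key;
};

/* Descend from root->rb_node comparing keys, the shape of
 * rvt_mcast_find() above; rb_entry() recovers the container. */
static struct item *item_find(struct rb_root *root, int key)
{
	struct rb_node *n = root->rb_node;

	while (n) {
		struct item *it = rb_entry(n, struct item, rb_node);

		if (key < it->key)
			n = n->rb_left;
		else if (key > it->key)
			n = n->rb_right;
		else
			return it;
	}
	return NULL;
}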
/drivers/android/
binder_alloc.c
73 struct rb_node **p = &alloc->free_buffers.rb_node; in binder_insert_free_buffer()
74 struct rb_node *parent = NULL; in binder_insert_free_buffer()
106 struct rb_node **p = &alloc->allocated_buffers.rb_node; in binder_insert_allocated_buffer_locked()
107 struct rb_node *parent = NULL; in binder_insert_allocated_buffer_locked()
132 struct rb_node *n = alloc->allocated_buffers.rb_node; in binder_alloc_prepare_to_free_locked()
435 struct rb_node *n; in debug_no_space_locked()
475 struct rb_node *n; in debug_low_async_space_locked()
521 struct rb_node *n = alloc->free_buffers.rb_node; in binder_alloc_new_buf_locked()
966 struct rb_node *n; in binder_alloc_deferred_release()
1045 struct rb_node *n; in binder_alloc_print_allocated()
[all …]
binder_internal.h
235 struct rb_node rb_node; member
330 struct rb_node rb_node_desc;
331 struct rb_node rb_node_node;
494 struct rb_node rb_node; member
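
The binder hits pair the embedded-node idiom (struct rb_node rb_node; as a struct member, as in binder_internal.h) with the standard insert walk: keep a struct rb_node ** link slot plus its parent while descending, then splice with rb_link_node() and rebalance with rb_insert_color(). A hedged sketch over the same hypothetical struct item:

#include <linux/rbtree.h>
#include <linux/types.h>

struct item {
	struct rb_node rb_node;
	int key;
};

/* The same shape as binder_insert_free_buffer(): track parent and
 * link slot on the way down, then link and recolor. Rejecting
 * duplicate keys is a sketch policy, not binder's. */
static bool item_insert(struct rb_root *root, struct item *new)
{
	struct rb_node **p = &root->rb_node, *parent = NULL;

	while (*p) {
		struct item *it = rb_entry(*p, struct item, rb_node);

		parent = *p;
		if (new->key < it->key)
			p = &(*p)->rb_left;
		else if (new->key > it->key)
			p = &(*p)->rb_right;
		else
			return false;
	}
	rb_link_node(&new->rb_node, parent, p);
	rb_insert_color(&new->rb_node, root);
	return true;
}

The double-pointer walk is what lets rb_link_node() attach the new node without a second traversal.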
/drivers/block/drbd/
drbd_interval.c
10 sector_t interval_end(struct rb_node *node) in interval_end()
27 struct rb_node **new = &root->rb_node, *parent = NULL; in drbd_insert_interval()
72 struct rb_node *node = root->rb_node; in drbd_contains_interval()
120 struct rb_node *node = root->rb_node; in drbd_find_overlap()
151 struct rb_node *node; in drbd_next_overlap()
/drivers/gpu/drm/
drm_vma_manager.c
146 struct rb_node *iter; in drm_vma_offset_lookup_locked()
149 iter = mgr->vm_addr_space_mm.interval_tree.rb_root.rb_node; in drm_vma_offset_lookup_locked()
247 struct rb_node **iter; in vma_node_allow()
248 struct rb_node *parent = NULL; in vma_node_allow()
260 iter = &node->vm_files.rb_node; in vma_node_allow()
362 struct rb_node *iter; in drm_vma_node_revoke()
366 iter = node->vm_files.rb_node; in drm_vma_node_revoke()
403 struct rb_node *iter; in drm_vma_node_is_allowed()
407 iter = node->vm_files.rb_node; in drm_vma_node_is_allowed()
drm_mm.c
168 struct rb_node **link, *rb; in drm_mm_interval_tree_add_node()
190 link = &mm->interval_tree.rb_root.rb_node; in drm_mm_interval_tree_add_node()
215 static u64 rb_to_hole_size(struct rb_node *rb) in rb_to_hole_size()
223 struct rb_node **link = &root->rb_root.rb_node, *rb = NULL; in insert_hole_size()
247 struct rb_node **link = &root->rb_node, *rb_parent = NULL; in RB_DECLARE_CALLBACKS_MAX()
295 static inline struct drm_mm_node *rb_hole_size_to_node(struct rb_node *rb) in rb_hole_size_to_node()
307 struct rb_node *rb = mm->holes_size.rb_root.rb_node; in best_hole()
325 static bool usable_hole_addr(struct rb_node *rb, u64 size) in usable_hole_addr()
332 struct rb_node *rb = mm->holes_addr.rb_node; in find_hole_addr()
392 struct rb_node *parent, *node = &entry->rb_hole_addr; \
[all …]
drm_prime.c
92 struct rb_node dmabuf_rb;
93 struct rb_node handle_rb;
100 struct rb_node **p, *rb; in drm_prime_add_buf_handle()
111 p = &prime_fpriv->dmabufs.rb_node; in drm_prime_add_buf_handle()
126 p = &prime_fpriv->handles.rb_node; in drm_prime_add_buf_handle()
146 struct rb_node *rb; in drm_prime_lookup_buf_by_handle()
148 rb = prime_fpriv->handles.rb_node; in drm_prime_lookup_buf_by_handle()
168 struct rb_node *rb; in drm_prime_lookup_buf_handle()
170 rb = prime_fpriv->dmabufs.rb_node; in drm_prime_lookup_buf_handle()
191 struct rb_node *rb; in drm_prime_remove_buf_handle()
[all …]
/drivers/md/
dm-writecache.c
82 struct rb_node rb_node; member
610 struct rb_node *node = wc->tree.rb_node; in writecache_find_entry()
621 e->rb_node.rb_left : e->rb_node.rb_right); in writecache_find_entry()
656 struct rb_node **node = &wc->tree.rb_node, *parent = NULL; in writecache_insert_entry()
660 parent = &e->rb_node; in writecache_insert_entry()
681 struct rb_node **node = &wc->freetree.rb_node, *parent = NULL; in writecache_add_to_freelist()
837 struct rb_node *rb_node = rb_prev(&e->rb_node); in writecache_flush() local
839 if (rb_node) { in writecache_flush()
840 e2 = container_of(rb_node, struct wc_entry, rb_node); in writecache_flush()
890 struct rb_node *node = rb_next(&e->rb_node); in writecache_discard()
[all …]
dm-dust.c
18 struct rb_node node;
38 struct rb_node *node = root->rb_node; in dust_rb_search()
57 struct rb_node **link = &root->rb_node, *parent = NULL; in dust_rb_insert()
243 struct rb_node *node = NULL, *nnode = NULL; in __dust_clear_badblocks()
259 BUG_ON(tree->rb_node != NULL); in __dust_clear_badblocks()
292 struct rb_node *node; in dust_list_badblocks()
dm-cache-background-tracker.c
80 struct rb_node **new = &b->pending.rb_node, *parent = NULL; in __insert_pending()
109 struct rb_node **new = &b->pending.rb_node; in __find_pending()
/drivers/base/regmap/
regcache-rbtree.c
31 struct rb_node node;
66 struct rb_node *node; in regcache_rbtree_lookup()
78 node = rbtree_ctx->root.rb_node; in regcache_rbtree_lookup()
99 struct rb_node **new, *parent; in regcache_rbtree_insert()
105 new = &root->rb_node; in regcache_rbtree_insert()
137 struct rb_node *node; in rbtree_show()
215 struct rb_node *next; in regcache_rbtree_exit()
371 struct rb_node *node; in regcache_rbtree_write()
400 node = rbtree_ctx->root.rb_node; in regcache_rbtree_write()
467 struct rb_node *node; in regcache_rbtree_sync()
[all …]
/drivers/net/ethernet/mellanox/mlx5/core/
pagealloc.c
58 struct rb_node rb_node; member
127 struct rb_node *parent = NULL; in insert_page()
129 struct rb_node **new; in insert_page()
138 new = &root->rb_node; in insert_page()
162 rb_link_node(&nfp->rb_node, parent, new); in insert_page()
163 rb_insert_color(&nfp->rb_node, root); in insert_page()
174 struct rb_node *tmp; in find_fw_page()
181 tmp = root->rb_node; in find_fw_page()
263 rb_erase(&fwp->rb_node, root); in free_fwp()
439 struct rb_node *p; in release_all_pages()
[all …]
/drivers/iommu/
iova.c
32 static struct iova *to_iova(struct rb_node *node) in to_iova()
62 static struct rb_node *
101 struct rb_node *node, *next; in iova_find_limit()
113 node = iovad->rbroot.rb_node; in iova_find_limit()
139 struct rb_node *start) in iova_insert_rbtree()
141 struct rb_node **new, *parent = NULL; in iova_insert_rbtree()
143 new = (start) ? &start : &(root->rb_node); in iova_insert_rbtree()
168 struct rb_node *curr, *prev; in __alloc_and_insert_iova_range()
278 struct rb_node *node = iovad->rbroot.rb_node; in private_find_iova()
459 __is_range_overlap(struct rb_node *node, in __is_range_overlap()
[all …]
/drivers/infiniband/ulp/ipoib/
ipoib_multicast.c
161 struct rb_node *n = priv->multicast_tree.rb_node; in __ipoib_mcast_find()
167 mcast = rb_entry(n, struct ipoib_mcast, rb_node); in __ipoib_mcast_find()
185 struct rb_node **n = &priv->multicast_tree.rb_node, *pn = NULL; in __ipoib_mcast_add()
192 tmcast = rb_entry(pn, struct ipoib_mcast, rb_node); in __ipoib_mcast_add()
204 rb_link_node(&mcast->rb_node, pn, n); in __ipoib_mcast_add()
718 rb_erase(&mcast->rb_node, &priv->multicast_tree); in ipoib_check_and_add_mcast_sendonly()
837 rb_erase(&mcast->rb_node, &priv->multicast_tree); in ipoib_mcast_dev_flush()
935 rb_replace_node(&mcast->rb_node, in ipoib_mcast_restart_task()
936 &nmcast->rb_node, in ipoib_mcast_restart_task()
1001 struct rb_node *n; in ipoib_mcast_iter_next()
[all …]
/drivers/mtd/ubi/
fastmap-wl.c
28 struct rb_node *p; in find_anchor_wl_entry()
79 if (!ubi->free.rb_node) in ubi_wl_get_fm_peb()
151 if (!ubi->free.rb_node) in left_free_count()
275 while (!ubi->free.rb_node && ubi->works_count) { in produce_free_peb()
388 if (!ubi->used.rb_node) in need_wear_leveling()
393 if (!ubi->free.rb_node) in need_wear_leveling()
399 if (ubi->free.rb_node) { in need_wear_leveling()
563 e = rb_entry(rb_next(root->rb_node), in may_reserve_for_fm()
wl.c
143 p = &root->rb_node; in wl_tree_add()
244 struct rb_node *p; in in_wl_tree()
246 p = root->rb_node; in in_wl_tree()
329 struct rb_node *p; in find_wl_entry()
336 p = root->rb_node; in find_wl_entry()
688 if (!ubi->free.rb_node ||
690 (!ubi->used.rb_node && !ubi->scrub.rb_node)) {
702 !ubi->free.rb_node, !ubi->used.rb_node);
1051 if (!ubi->used.rb_node || !ubi->free.rb_node)
1638 struct rb_node *rb;
[all …]
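
The UBI hits above repeatedly open-code the emptiness test as !root->rb_node; <linux/rbtree.h> also provides RB_EMPTY_ROOT() for the same check. A one-function illustration (tree_has_entries is a hypothetical name):

#include <linux/rbtree.h>
#include <linux/types.h>

/* Equivalent to the !ubi->free.rb_node tests in the hits above. */
static bool tree_has_entries(const struct rb_root *root)
{
	return !RB_EMPTY_ROOT(root);
}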
/drivers/staging/media/atomisp/pci/hmm/
hmm_bo.c
65 struct rb_node *node, unsigned int pgnr) in __bo_search_and_remove_from_free_rbtree()
118 struct rb_node *n = root->rb_node; in __bo_search_by_addr()
143 struct rb_node *n = root->rb_node; in __bo_search_by_addr_in_range()
168 struct rb_node **new = &root->rb_node; in __bo_insert_to_free_rbtree()
169 struct rb_node *parent = NULL; in __bo_insert_to_free_rbtree()
201 struct rb_node **new = &root->rb_node; in __bo_insert_to_alloc_rbtree()
202 struct rb_node *parent = NULL; in __bo_insert_to_alloc_rbtree()
397 bo = __bo_search_and_remove_from_free_rbtree(root->rb_node, pgnr); in hmm_bo_alloc()
504 rbtree_node_to_hmm_bo(bdev->allocated_rbtree.rb_node)); in hmm_bo_device_exit()
/drivers/fpga/
dfl-afu-dma-region.c
147 struct rb_node **new, *parent = NULL; in afu_dma_region_add()
152 new = &afu->dma_regions.rb_node; in afu_dma_region_add()
206 struct rb_node *node = rb_first(&afu->dma_regions); in afu_dma_region_destroy()
248 struct rb_node *node = afu->dma_regions.rb_node; in afu_dma_region_find()
/drivers/infiniband/sw/rxe/
rxe_mcast.c
84 struct rb_node **link = &tree->rb_node; in __rxe_insert_mcg()
85 struct rb_node *node = NULL; in __rxe_insert_mcg()
128 struct rb_node *node; in __rxe_lookup_mcg()
131 node = tree->rb_node; in __rxe_lookup_mcg()
/drivers/gpu/drm/nouveau/nvkm/core/
object.c
37 struct rb_node *node = client->objroot.rb_node; in nvkm_object_search()
76 struct rb_node **ptr; in nvkm_object_insert()
77 struct rb_node *parent = NULL; in nvkm_object_insert()
81 ptr = &object->client->objroot.rb_node; in nvkm_object_insert()
/drivers/gpu/drm/i915/
i915_active.c
27 struct rb_node node;
143 ref->cache = fetch_node(ref->tree.rb_node); in __active_retire()
152 rb_link_node(&ref->cache->node, NULL, &ref->tree.rb_node); in __active_retire()
154 GEM_BUG_ON(ref->tree.rb_node != &ref->cache->node); in __active_retire()
272 it = fetch_node(ref->tree.rb_node); in __active_lookup()
292 struct rb_node **p, *parent; in active_instance()
302 p = &ref->tree.rb_node; in active_instance()
758 struct rb_node *prev, *p; in reuse_idle_barrier()
778 p = ref->tree.rb_node; in reuse_idle_barrier()
937 struct rb_node **p, *parent; in i915_active_acquire_barrier()
[all …]
/drivers/md/persistent-data/
dm-transaction-manager.c
81 struct rb_node node;
110 struct rb_node **node; in is_shadow()
113 node = &tm->buckets[bucket].rb_node; in is_shadow()
142 struct rb_node **node, *parent; in insert_shadow()
147 node = &tm->buckets[bucket].rb_node; in insert_shadow()
172 rb_entry(tm->buckets[i].rb_node, struct shadow_info, node); in wipe_shadow_table()
/drivers/xen/
evtchn.c
84 struct rb_node node;
110 struct rb_node **new = &(u->evtchns.rb_node), *parent = NULL; in add_evtchn()
145 struct rb_node *node = u->evtchns.rb_node; in find_evtchn()
671 struct rb_node *node; in evtchn_release()
673 while ((node = u->evtchns.rb_node)) { in evtchn_release()
/drivers/infiniband/hw/hfi1/
pin_system.c
97 struct mmu_rb_node *rb_node; in find_system_node() local
101 rb_node = hfi1_mmu_rb_get_first(handler, start, (end - start)); in find_system_node()
102 if (!rb_node) { in find_system_node()
108 kref_get(&rb_node->refcount); in find_system_node()
111 return container_of(rb_node, struct sdma_mmu_node, rb); in find_system_node()
/drivers/vfio/
vfio_iommu_type1.c
87 struct rb_node node;
126 struct rb_node node;
166 struct rb_node *node = iommu->dma_list.rb_node; in vfio_find_dma()
186 struct rb_node *node = iommu->dma_list.rb_node; in vfio_find_dma_first_node()
209 struct rb_node **link = &iommu->dma_list.rb_node, *parent = NULL; in vfio_link_dma()
260 struct rb_node *p; in vfio_dma_populate_bitmap()
272 struct rb_node *n; in vfio_iommu_populate_bitmap_full()
284 struct rb_node *n; in vfio_dma_bitmap_alloc_all()
309 struct rb_node *n; in vfio_dma_bitmap_free_all()
324 struct rb_node *node = dma->pfn_list.rb_node; in vfio_find_vpfn()
[all …]
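
The remaining pieces of the API recur across these results: rb_erase() to unlink (rdmavt, mlx5, ipoib), rb_first()/rb_next() for in-order iteration (dfl-afu, vfio), and rb_prev()/rb_replace_node() for neighbor access and in-place swaps (dm-writecache, ipoib). A minimal teardown sketch in the style of afu_dma_region_destroy(), again over the hypothetical struct item:

#include <linux/rbtree.h>
#include <linux/slab.h>

struct item {
	struct rb_node rb_node;
	int key;
};

/* In-order teardown: fetch rb_next() before rb_erase()
 * invalidates the current node, then free the container. */
static void item_destroy_all(struct rb_root *root)
{
	struct rb_node *node = rb_first(root);

	while (node) {
		struct item *it = rb_entry(node, struct item, rb_node);

		node = rb_next(node);
		rb_erase(&it->rb_node, root);
		kfree(it);
	}
}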

Completed in 75 milliseconds
