Lines matching refs:xa in include/linux/xarray.h
void *xa_find(struct xarray *xa, unsigned long *index,
		unsigned long max, xa_mark_t filter);
void *xa_find_after(struct xarray *xa, unsigned long *index,
		unsigned long max, xa_mark_t filter);
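These two are the primitive search operations that the xa_for_each*() macros below are built on. A minimal sketch of driving them by hand (scan() and its output are illustrative):

#include <linux/xarray.h>

static void scan(struct xarray *xa)
{
	unsigned long index = 0;
	void *entry;

	/* xa_find() returns the first present entry at or after *index,
	 * updating *index to its location; xa_find_after() continues
	 * strictly after the previous hit. */
	for (entry = xa_find(xa, &index, ULONG_MAX, XA_PRESENT); entry;
	     entry = xa_find_after(xa, &index, ULONG_MAX, XA_PRESENT))
		pr_info("entry %p at index %lu\n", entry, index);
}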
static inline void xa_init_flags(struct xarray *xa, gfp_t flags)
{
	spin_lock_init(&xa->xa_lock);
	xa->xa_flags = flags;
	xa->xa_head = NULL;
}

static inline void xa_init(struct xarray *xa)
{
	xa_init_flags(xa, 0);
}

static inline bool xa_empty(const struct xarray *xa)
{
	return xa->xa_head == NULL;
}

static inline bool xa_marked(const struct xarray *xa, xa_mark_t mark)
{
	return xa->xa_flags & XA_FLAGS_MARK(mark);
}
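A minimal sketch of initialising an array and probing its state; my_array and the flag choice are hypothetical:

#include <linux/xarray.h>

static struct xarray my_array;

static int my_setup(void)
{
	/* XA_FLAGS_LOCK_IRQ because this array will also be updated from
	 * interrupt context; otherwise plain xa_init() is enough. */
	xa_init_flags(&my_array, XA_FLAGS_LOCK_IRQ);

	WARN_ON(!xa_empty(&my_array));			/* freshly initialised */
	WARN_ON(xa_marked(&my_array, XA_MARK_0));	/* no marks set yet */
	return 0;
}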
#define xa_for_each_range(xa, index, entry, start, last)		\
	for (index = start,						\
	     entry = xa_find(xa, &index, last, XA_PRESENT);		\
	     entry;							\
	     entry = xa_find_after(xa, &index, last, XA_PRESENT))

#define xa_for_each_start(xa, index, entry, start) \
	xa_for_each_range(xa, index, entry, start, ULONG_MAX)

#define xa_for_each(xa, index, entry) \
	xa_for_each_start(xa, index, entry, 0)

#define xa_for_each_marked(xa, index, entry, filter) \
	for (index = 0, entry = xa_find(xa, &index, ULONG_MAX, filter); \
	     entry; entry = xa_find_after(xa, &index, ULONG_MAX, filter))
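A sketch of the three iteration styles over a hypothetical array; each step performs its own lookup under the RCU read lock, so entries added or removed concurrently may or may not be visited:

#include <linux/xarray.h>

static void dump_entries(struct xarray *xa)
{
	unsigned long index;
	void *entry;

	xa_for_each(xa, index, entry)			/* every present entry */
		pr_info("%lu: %p\n", index, entry);

	xa_for_each_range(xa, index, entry, 16, 31)	/* a window of indices */
		pr_info("windowed %lu\n", index);

	xa_for_each_marked(xa, index, entry, XA_MARK_0)	/* filtered by mark */
		pr_info("marked %lu\n", index);
}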
#define xa_trylock(xa)		spin_trylock(&(xa)->xa_lock)
#define xa_lock(xa)		spin_lock(&(xa)->xa_lock)
#define xa_unlock(xa)		spin_unlock(&(xa)->xa_lock)
#define xa_lock_bh(xa)		spin_lock_bh(&(xa)->xa_lock)
#define xa_unlock_bh(xa)	spin_unlock_bh(&(xa)->xa_lock)
#define xa_lock_irq(xa)		spin_lock_irq(&(xa)->xa_lock)
#define xa_unlock_irq(xa)	spin_unlock_irq(&(xa)->xa_lock)
#define xa_lock_irqsave(xa, flags) \
				spin_lock_irqsave(&(xa)->xa_lock, flags)
#define xa_unlock_irqrestore(xa, flags) \
				spin_unlock_irqrestore(&(xa)->xa_lock, flags)
#define xa_lock_nested(xa, subclass) \
				spin_lock_nested(&(xa)->xa_lock, subclass)
#define xa_lock_bh_nested(xa, subclass) \
				spin_lock_bh_nested(&(xa)->xa_lock, subclass)
#define xa_lock_irq_nested(xa, subclass) \
				spin_lock_irq_nested(&(xa)->xa_lock, subclass)
#define xa_lock_irqsave_nested(xa, flags, subclass) \
		spin_lock_irqsave_nested(&(xa)->xa_lock, flags, subclass)
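All of these resolve to the array's internal spinlock, and they pair with the __xa_*() operations that expect the lock to be held. A sketch of batching two updates under one acquisition (move_entry() is hypothetical, and a real caller would recover the entry dropped when the insert fails):

#include <linux/xarray.h>

static int move_entry(struct xarray *xa, unsigned long from, unsigned long to)
{
	void *entry;
	int err = 0;

	xa_lock(xa);
	entry = __xa_erase(xa, from);
	if (entry)
		/* GFP_ATOMIC: we cannot sleep while holding xa_lock. */
		err = __xa_insert(xa, to, entry, GFP_ATOMIC);
	xa_unlock(xa);
	return err;
}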
static inline void *xa_store_bh(struct xarray *xa, unsigned long index,
		void *entry, gfp_t gfp)
{
	void *curr;

	might_alloc(gfp);
	xa_lock_bh(xa);
	curr = __xa_store(xa, index, entry, gfp);
	xa_unlock_bh(xa);

	return curr;
}

static inline void *xa_store_irq(struct xarray *xa, unsigned long index,
		void *entry, gfp_t gfp)
{
	void *curr;

	might_alloc(gfp);
	xa_lock_irq(xa);
	curr = __xa_store(xa, index, entry, gfp);
	xa_unlock_irq(xa);

	return curr;
}
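The _bh and _irq wrappers are for arrays that are also updated from softirq or hardirq context; such arrays should be initialised with the matching XA_FLAGS_LOCK_* flag. A hypothetical sketch:

#include <linux/xarray.h>

static DEFINE_XARRAY_FLAGS(pending, XA_FLAGS_LOCK_IRQ);

static int track_request(unsigned long id, void *req)
{
	/* Disables interrupts around the store, so an interrupt handler
	 * touching the same array cannot deadlock against us. */
	void *old = xa_store_irq(&pending, id, req, GFP_KERNEL);

	return xa_err(old);	/* 0, or a negative errno on failure */
}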
static inline void *xa_erase_bh(struct xarray *xa, unsigned long index)
{
	void *entry;

	xa_lock_bh(xa);
	entry = __xa_erase(xa, index);
	xa_unlock_bh(xa);

	return entry;
}

static inline void *xa_erase_irq(struct xarray *xa, unsigned long index)
{
	void *entry;

	xa_lock_irq(xa);
	entry = __xa_erase(xa, index);
	xa_unlock_irq(xa);

	return entry;
}
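Erase returns the entry that was removed, which keeps teardown simple. A sketch with hypothetical names:

#include <linux/slab.h>
#include <linux/xarray.h>

static DEFINE_XARRAY_FLAGS(requests, XA_FLAGS_LOCK_BH);

static void request_done(unsigned long id)
{
	void *req = xa_erase_bh(&requests, id);

	kfree(req);	/* kfree(NULL) is a harmless no-op */
}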
static inline void *xa_cmpxchg(struct xarray *xa, unsigned long index,
			void *old, void *entry, gfp_t gfp)
{
	void *curr;

	might_alloc(gfp);
	xa_lock(xa);
	curr = __xa_cmpxchg(xa, index, old, entry, gfp);
	xa_unlock(xa);

	return curr;
}

static inline void *xa_cmpxchg_bh(struct xarray *xa, unsigned long index,
			void *old, void *entry, gfp_t gfp)
{
	void *curr;

	might_alloc(gfp);
	xa_lock_bh(xa);
	curr = __xa_cmpxchg(xa, index, old, entry, gfp);
	xa_unlock_bh(xa);

	return curr;
}

static inline void *xa_cmpxchg_irq(struct xarray *xa, unsigned long index,
			void *old, void *entry, gfp_t gfp)
{
	void *curr;

	might_alloc(gfp);
	xa_lock_irq(xa);
	curr = __xa_cmpxchg(xa, index, old, entry, gfp);
	xa_unlock_irq(xa);

	return curr;
}
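As with cmpxchg on scalars, the store only happens while the slot still holds the expected old pointer. A hypothetical sketch of a race-aware replacement:

#include <linux/xarray.h>

static int swap_object(struct xarray *xa, unsigned long index,
		       void *old, void *new)
{
	void *curr = xa_cmpxchg(xa, index, old, new, GFP_KERNEL);

	if (xa_is_err(curr))
		return xa_err(curr);		/* e.g. -ENOMEM */
	return curr == old ? 0 : -EAGAIN;	/* lost a race */
}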
static inline int __must_check xa_insert(struct xarray *xa,
		unsigned long index, void *entry, gfp_t gfp)
{
	int err;

	might_alloc(gfp);
	xa_lock(xa);
	err = __xa_insert(xa, index, entry, gfp);
	xa_unlock(xa);

	return err;
}

static inline int __must_check xa_insert_bh(struct xarray *xa,
		unsigned long index, void *entry, gfp_t gfp)
{
	int err;

	might_alloc(gfp);
	xa_lock_bh(xa);
	err = __xa_insert(xa, index, entry, gfp);
	xa_unlock_bh(xa);

	return err;
}

static inline int __must_check xa_insert_irq(struct xarray *xa,
		unsigned long index, void *entry, gfp_t gfp)
{
	int err;

	might_alloc(gfp);
	xa_lock_irq(xa);
	err = __xa_insert(xa, index, entry, gfp);
	xa_unlock_irq(xa);

	return err;
}
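xa_insert() refuses to overwrite, failing with -EBUSY, which makes it a natural "claim this index" primitive. A sketch; claim_slot() is illustrative:

#include <linux/xarray.h>

static int claim_slot(struct xarray *xa, unsigned long index, void *obj)
{
	int err = xa_insert(xa, index, obj, GFP_KERNEL);

	if (err == -EBUSY)
		pr_warn("index %lu already claimed\n", index);
	return err;
}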
static inline __must_check int xa_alloc(struct xarray *xa, u32 *id,
		void *entry, struct xa_limit limit, gfp_t gfp)
{
	int err;

	might_alloc(gfp);
	xa_lock(xa);
	err = __xa_alloc(xa, id, entry, limit, gfp);
	xa_unlock(xa);

	return err;
}

static inline int __must_check xa_alloc_bh(struct xarray *xa, u32 *id,
		void *entry, struct xa_limit limit, gfp_t gfp)
{
	int err;

	might_alloc(gfp);
	xa_lock_bh(xa);
	err = __xa_alloc(xa, id, entry, limit, gfp);
	xa_unlock_bh(xa);

	return err;
}

static inline int __must_check xa_alloc_irq(struct xarray *xa, u32 *id,
		void *entry, struct xa_limit limit, gfp_t gfp)
{
	int err;

	might_alloc(gfp);
	xa_lock_irq(xa);
	err = __xa_alloc(xa, id, entry, limit, gfp);
	xa_unlock_irq(xa);

	return err;
}
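The allocating API picks a free index for the caller; the array must have been initialised with XA_FLAGS_ALLOC, which DEFINE_XARRAY_ALLOC() does. A hypothetical sketch:

#include <linux/xarray.h>

static DEFINE_XARRAY_ALLOC(ids);

static int new_id(void *obj, u32 *id)
{
	/* Search for a free index between 1 and 4095 inclusive;
	 * xa_limit_32b would allow the whole u32 range instead. */
	return xa_alloc(&ids, id, obj, XA_LIMIT(1, 4095), GFP_KERNEL);
}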
static inline int xa_alloc_cyclic(struct xarray *xa, u32 *id, void *entry,
		struct xa_limit limit, u32 *next, gfp_t gfp)
{
	int err;

	might_alloc(gfp);
	xa_lock(xa);
	err = __xa_alloc_cyclic(xa, id, entry, limit, next, gfp);
	xa_unlock(xa);

	return err;
}

static inline int xa_alloc_cyclic_bh(struct xarray *xa, u32 *id, void *entry,
		struct xa_limit limit, u32 *next, gfp_t gfp)
{
	int err;

	might_alloc(gfp);
	xa_lock_bh(xa);
	err = __xa_alloc_cyclic(xa, id, entry, limit, next, gfp);
	xa_unlock_bh(xa);

	return err;
}

static inline int xa_alloc_cyclic_irq(struct xarray *xa, u32 *id, void *entry,
		struct xa_limit limit, u32 *next, gfp_t gfp)
{
	int err;

	might_alloc(gfp);
	xa_lock_irq(xa);
	err = __xa_alloc_cyclic(xa, id, entry, limit, next, gfp);
	xa_unlock_irq(xa);

	return err;
}
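The cyclic variants resume searching after the previous allocation, carried in @next, so recently freed IDs are not reused immediately; a return value of 1 means the search wrapped and is not an error. A hypothetical sketch:

#include <linux/xarray.h>

static DEFINE_XARRAY_ALLOC(sessions);
static u32 session_next;

static int new_session(void *s, u32 *id)
{
	int err = xa_alloc_cyclic(&sessions, id, s, xa_limit_31b,
				  &session_next, GFP_KERNEL);

	return err < 0 ? err : 0;	/* treat a wrap (1) as success */
}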
static inline __must_check
int xa_reserve(struct xarray *xa, unsigned long index, gfp_t gfp)
{
	return xa_err(xa_cmpxchg(xa, index, NULL, XA_ZERO_ENTRY, gfp));
}

static inline __must_check
int xa_reserve_bh(struct xarray *xa, unsigned long index, gfp_t gfp)
{
	return xa_err(xa_cmpxchg_bh(xa, index, NULL, XA_ZERO_ENTRY, gfp));
}

static inline __must_check
int xa_reserve_irq(struct xarray *xa, unsigned long index, gfp_t gfp)
{
	return xa_err(xa_cmpxchg_irq(xa, index, NULL, XA_ZERO_ENTRY, gfp));
}

static inline void xa_release(struct xarray *xa, unsigned long index)
{
	xa_cmpxchg(xa, index, XA_ZERO_ENTRY, NULL, 0);
}
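xa_reserve() preallocates memory so a later store at that index can succeed from atomic context, and xa_release() hands an unused reservation back; a reserved slot still reads back as NULL. A sketch with illustrative names:

#include <linux/xarray.h>

static int prepare_slot(struct xarray *xa, unsigned long index)
{
	return xa_reserve(xa, index, GFP_KERNEL);	/* may sleep */
}

static void commit_or_abort(struct xarray *xa, unsigned long index, void *obj)
{
	if (obj)
		xa_store(xa, index, obj, GFP_ATOMIC);	/* fills the reserved slot */
	else
		xa_release(xa, index);			/* returns the reservation */
}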
#ifdef XA_DEBUG
#define XA_BUG_ON(xa, x) do {					\
		if (x) {					\
			xa_dump(xa);				\
			BUG();					\
		}						\
	} while (0)
#else
#define XA_BUG_ON(xa, x)	do { } while (0)
#endif
/* Private: implementation-internal slot accessors. The plain versions may
 * be called under rcu_read_lock() or the xa_lock; the _locked versions
 * require the xa_lock to be held. */
static inline void *xa_head(const struct xarray *xa)
{
	return rcu_dereference_check(xa->xa_head,
			lockdep_is_held(&xa->xa_lock));
}

static inline void *xa_head_locked(const struct xarray *xa)
{
	return rcu_dereference_protected(xa->xa_head,
			lockdep_is_held(&xa->xa_lock));
}

static inline void *xa_entry(const struct xarray *xa,
		const struct xa_node *node, unsigned int offset)
{
	return rcu_dereference_check(node->slots[offset],
			lockdep_is_held(&xa->xa_lock));
}

static inline void *xa_entry_locked(const struct xarray *xa,
		const struct xa_node *node, unsigned int offset)
{
	return rcu_dereference_protected(node->slots[offset],
			lockdep_is_held(&xa->xa_lock));
}

static inline struct xa_node *xa_parent(const struct xarray *xa,
		const struct xa_node *node)
{
	return rcu_dereference_check(node->parent,
			lockdep_is_held(&xa->xa_lock));
}

static inline struct xa_node *xa_parent_locked(const struct xarray *xa,
		const struct xa_node *node)
{
	return rcu_dereference_protected(node->parent,
			lockdep_is_held(&xa->xa_lock));
}
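The split these accessors encode (rcu_dereference_check() for readers holding either the RCU read lock or the spinlock, rcu_dereference_protected() when the lock is certainly held) generalises beyond the XArray; a hypothetical miniature:

#include <linux/rcupdate.h>
#include <linux/spinlock.h>

struct cache {
	spinlock_t lock;
	void __rcu *slot;
};

static void *cache_peek(struct cache *c)	/* RCU or c->lock held */
{
	return rcu_dereference_check(c->slot, lockdep_is_held(&c->lock));
}

static void *cache_peek_locked(struct cache *c)	/* c->lock held */
{
	/* Skips the READ_ONCE(); lockdep asserts the lock. */
	return rcu_dereference_protected(c->slot, lockdep_is_held(&c->lock));
}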
/* struct xa_state, the advanced API's cursor, carries a back-pointer to
 * the array it walks, which the __XA_STATE() initialiser fills in: */
	struct xarray *xa;	/* member of struct xa_state */
	.xa = array,		/* in the __XA_STATE() initialiser */
#define xas_marked(xas, mark)	xa_marked((xas)->xa, (mark))
#define xas_trylock(xas)	xa_trylock((xas)->xa)
#define xas_lock(xas)		xa_lock((xas)->xa)
#define xas_unlock(xas)		xa_unlock((xas)->xa)
#define xas_lock_bh(xas)	xa_lock_bh((xas)->xa)
#define xas_unlock_bh(xas)	xa_unlock_bh((xas)->xa)
#define xas_lock_irq(xas)	xa_lock_irq((xas)->xa)
#define xas_unlock_irq(xas)	xa_unlock_irq((xas)->xa)
#define xas_lock_irqsave(xas, flags) \
				xa_lock_irqsave((xas)->xa, flags)
#define xas_unlock_irqrestore(xas, flags) \
				xa_unlock_irqrestore((xas)->xa, flags)
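XA_STATE() declares the on-stack cursor these macros lock. The canonical store loop of the advanced API, sketched with illustrative names:

#include <linux/xarray.h>

static int store_at(struct xarray *xa, unsigned long index, void *obj)
{
	XA_STATE(xas, xa, index);

	do {
		xas_lock(&xas);
		xas_store(&xas, obj);
		xas_unlock(&xas);
		/* On -ENOMEM, xas_nomem() allocates outside the lock
		 * and asks us to retry. */
	} while (xas_nomem(&xas, GFP_KERNEL));

	return xas_error(&xas);
}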
static inline int xa_get_order(struct xarray *xa, unsigned long index)
{
	return 0;	/* stub when CONFIG_XARRAY_MULTI is disabled */
}
/* The remaining matches are single lines inside the advanced helpers. */

/* xas_reload(): re-read the entry under the cursor (e.g. after RCU): */
		return xa_head(xas->xa);
		entry = xa_entry(xas->xa, node, offset);
	return xa_entry(xas->xa, node, offset);

/* xas_next_entry() and xas_next_marked(): step within the current node: */
		entry = xa_entry(xas->xa, node, xas->xa_offset + 1);
		entry = xa_entry(xas->xa, node, offset);

/* xas_prev() and xas_next(): move one index and return the entry there: */
	return xa_entry(xas->xa, node, xas->xa_offset);
	return xa_entry(xas->xa, node, xas->xa_offset);
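A sketch of single-stepping the cursor, assuming a plain (non-multi-order) array; the walk must run under rcu_read_lock() or the xa_lock:

#include <linux/xarray.h>

static void print_window(struct xarray *xa, unsigned long start,
			 unsigned long end)
{
	XA_STATE(xas, xa, start);
	void *entry;

	rcu_read_lock();
	entry = xas_load(&xas);
	while (xas.xa_index < end) {
		if (entry)
			pr_info("%lu: %p\n", xas.xa_index, entry);
		entry = xas_next(&xas);	/* move one index forward */
	}
	rcu_read_unlock();
}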