Lines matching the identifier ref in the enumerated_ref helpers (the bcachefs enumerated reference-count code). The listing interleaves two build variants: a debug variant that backs each index with its own atomic_long counter, and a regular variant backed by a single percpu_ref. Preprocessor guards and lines that do not mention ref do not appear in the listing.

/* Take a reference on index idx; the index must be valid. */
void enumerated_ref_get(struct enumerated_ref *ref, unsigned idx)
{
	BUG_ON(idx >= ref->nr);
	atomic_long_inc(&ref->refs[idx]);
}
/* Try to take a reference; fails only if the count for idx has already hit zero. */
bool __enumerated_ref_tryget(struct enumerated_ref *ref, unsigned idx)
{
	BUG_ON(idx >= ref->nr);
	return atomic_long_inc_not_zero(&ref->refs[idx]);
}
/* As above, but also fails once the ref has been marked dying. */
bool enumerated_ref_tryget(struct enumerated_ref *ref, unsigned idx)
{
	BUG_ON(idx >= ref->nr);
	return !ref->dying &&
		atomic_long_inc_not_zero(&ref->refs[idx]);
}
void enumerated_ref_put(struct enumerated_ref *ref, unsigned idx)
{
	BUG_ON(idx >= ref->nr);
	long v = atomic_long_dec_return(&ref->refs[idx]);

	if (v)
		return;

	/* This index hit zero: only finish if every other index is zero too. */
	for (unsigned i = 0; i < ref->nr; i++)
		if (atomic_long_read(&ref->refs[i]))
			return;

	if (ref->stop_fn)
		ref->stop_fn(ref);
	complete(&ref->stop_complete);
}
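For orientation, a caller-side sketch of how the per-index counters above are typically used: each index names a reason for holding a reference, and an operation brackets its work with tryget/put. Everything here except the enumerated_ref_* calls (the my_dev structure, the index enum, the header name, the error code) is hypothetical.

#include "enumerated_ref.h"	/* assumed header declaring struct enumerated_ref */

/* Hypothetical per-device refs: one counter per reason for holding the device. */
enum my_dev_ref {
	MY_DEV_REF_ioctl,
	MY_DEV_REF_background_work,
	MY_DEV_REF_NR,			/* passed to enumerated_ref_init() as nr */
};

struct my_dev {
	struct enumerated_ref	active;
	/* ... device state ... */
};

static int my_dev_ioctl(struct my_dev *d)
{
	/* Fails once the ref is dying or this index has drained to zero. */
	if (!enumerated_ref_tryget(&d->active, MY_DEV_REF_ioctl))
		return -ENODEV;

	/* ... do the actual work ... */

	enumerated_ref_put(&d->active, MY_DEV_REF_ioctl);
	return 0;
}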
/* Release callback for the backing percpu_ref: runs once the last reference
 * has been dropped after percpu_ref_kill(). */
static void enumerated_ref_kill_cb(struct percpu_ref *percpu_ref)
{
	struct enumerated_ref *ref =
		container_of(percpu_ref, struct enumerated_ref, ref);

	if (ref->stop_fn)
		ref->stop_fn(ref);
	complete(&ref->stop_complete);
}
void enumerated_ref_stop_async(struct enumerated_ref *ref)
{
	reinit_completion(&ref->stop_complete);

	/* Regular builds: kill the backing percpu_ref; the release callback
	 * above fires once outstanding references drain. */
	percpu_ref_kill(&ref->ref);

	/* Debug builds instead mark the ref dying and drop the initial
	 * reference held on every index; the last enumerated_ref_put()
	 * completes stop_complete. */
	ref->dying = true;
	for (unsigned i = 0; i < ref->nr; i++)
		enumerated_ref_put(ref, i);
}
void enumerated_ref_stop(struct enumerated_ref *ref,
			 const char * const names[])
{
	enumerated_ref_stop_async(ref);
	while (!wait_for_completion_timeout(&ref->stop_complete, HZ * 10)) {
		struct printbuf buf = PRINTBUF;

		/* Not drained after ten seconds: report which named
		 * references are still held, then keep waiting. */
		enumerated_ref_to_text(&buf, ref, names);
		/* ... */
	}
}
void enumerated_ref_start(struct enumerated_ref *ref)
{
	/* Regular builds: bring the percpu_ref back to life. */
	percpu_ref_reinit(&ref->ref);

	/* Debug builds: clear the dying flag and re-take the initial
	 * reference on every index, all of which must be at zero. */
	ref->dying = false;
	for (unsigned i = 0; i < ref->nr; i++) {
		BUG_ON(atomic_long_read(&ref->refs[i]));
		atomic_long_inc(&ref->refs[i]);
	}
}
void enumerated_ref_exit(struct enumerated_ref *ref)
{
	/* Regular builds: release the percpu_ref. */
	percpu_ref_exit(&ref->ref);

	/* Debug builds: free the per-index counters. */
	kfree(ref->refs);
	ref->refs = NULL;
	ref->nr = 0;
}
int enumerated_ref_init(struct enumerated_ref *ref, unsigned nr,
			void (*stop_fn)(struct enumerated_ref *))
{
	init_completion(&ref->stop_complete);
	ref->stop_fn = stop_fn;

	/* Regular builds: one percpu_ref backs all nr indices. */
	return percpu_ref_init(&ref->ref, enumerated_ref_kill_cb,
			       /* ... */);

	/* Debug builds: allocate an atomic_long counter per index. */
	ref->refs = kzalloc(sizeof(ref->refs[0]) * nr, GFP_KERNEL);
	if (!ref->refs)
		return -ENOMEM;

	ref->nr = nr;
	return 0;
}
void enumerated_ref_to_text(struct printbuf *out,
			    struct enumerated_ref *ref,
			    const char * const names[])
{
	/* Print each index's name and its current count. */
	for (unsigned i = 0; i < ref->nr; i++)
		prt_printf(out, "%s\t%li\n", names[i],
			   atomic_long_read(&ref->refs[i]));
}
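To round the listing off, a sketch of the whole lifecycle from a caller's point of view, reusing the hypothetical my_dev type and MY_DEV_REF_* indices from the earlier sketch. The names array, the stop_fn callback and its message are likewise illustrative; only the enumerated_ref_* entry points come from the code above.

/* Names matching the MY_DEV_REF_* indices, for enumerated_ref_stop()/to_text(). */
static const char * const my_dev_ref_names[] = {
	"ioctl",
	"background_work",
};

/* Runs once every index has dropped to zero (debug builds) or the backing
 * percpu_ref has drained (regular builds). */
static void my_dev_refs_stopped(struct enumerated_ref *ref)
{
	struct my_dev *d = container_of(ref, struct my_dev, active);

	pr_debug("my_dev %p quiesced\n", d);
}

static int my_dev_bring_up(struct my_dev *d)
{
	int ret = enumerated_ref_init(&d->active, MY_DEV_REF_NR,
				      my_dev_refs_stopped);
	if (ret)
		return ret;

	/* Take the initial reference on every index; new work may now proceed. */
	enumerated_ref_start(&d->active);
	return 0;
}

static void my_dev_tear_down(struct my_dev *d)
{
	/* Drops the initial references, waits for outstanding users, and
	 * periodically reports any still-held references by name (see
	 * enumerated_ref_stop() above). */
	enumerated_ref_stop(&d->active, my_dev_ref_names);
	enumerated_ref_exit(&d->active);
}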