
Searched refs:node (Results 1–25 of 93), sorted by relevance


/kernel/gcov/
fs.c
393 if (node) { in gcov_seq_write()
545 if (!node) in new_node()
565 return node; in new_node()
568 kfree(node); in new_node()
599 kfree(node); in release_node()
700 if (!node) { in add_node()
702 if (!node) in add_node()
709 if (!node) in add_node()
822 node->loaded_info[i] = node->loaded_info[node->num_loaded - 1]; in remove_info()
846 if (node) in gcov_event()
[all …]
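The new_node()/release_node() hits trace the usual allocate-initialize-unwind shape: every failure path after the allocation frees the partially built node before returning. A minimal userspace sketch of that shape, with hypothetical demo_* names standing in for the gcov types:

#include <stdlib.h>
#include <string.h>

struct demo_node {
        char *name;
        struct demo_node *parent;
};

static struct demo_node *demo_new_node(struct demo_node *parent, const char *name)
{
        struct demo_node *node = calloc(1, sizeof(*node));

        if (!node)
                return NULL;
        node->name = strdup(name);
        if (!node->name)
                goto err_free;          /* unwind the partial allocation */
        node->parent = parent;
        return node;

err_free:
        free(node);
        return NULL;
}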
/kernel/bpf/
bpf_lru_list.c
373 if (node) in __local_list_pop_free()
376 return node; in __local_list_pop_free()
392 return node; in __local_list_pop_pending()
433 return node; in bpf_percpu_lru_pop_free()
451 if (!node) { in bpf_common_lru_pop_free()
456 if (node) in bpf_common_lru_pop_free()
461 if (node) in bpf_common_lru_pop_free()
462 return node; in bpf_common_lru_pop_free()
480 if (!node) in bpf_common_lru_pop_free()
490 if (node) { in bpf_common_lru_pop_free()
[all …]
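The __local_list_pop_free()/bpf_common_lru_pop_free() hits show a tiered pop: try the CPU's free list, fall back to its pending list, then steal from a sibling CPU. A serial sketch of that fallback order with simplified stand-in types; the real code is per-CPU, lock-protected, and rotates which CPU it steals from:

struct lru_node {
        struct lru_node *next;
};

struct local_lists {
        struct lru_node *free;          /* nodes ready for reuse */
        struct lru_node *pending;       /* nodes awaiting cleanup */
};

static struct lru_node *list_pop(struct lru_node **head)
{
        struct lru_node *node = *head;

        if (node)
                *head = node->next;
        return node;
}

static struct lru_node *lru_pop(struct local_lists *local, struct local_lists *sibling)
{
        struct lru_node *node = list_pop(&local->free);

        if (!node)
                node = list_pop(&local->pending);       /* second choice */
        if (!node)
                node = list_pop(&sibling->free);        /* steal as a last resort */
        return node;
}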
lpm_trie.c
282 node = rcu_dereference_check(node->child[next_bit], in trie_lookup_elem()
299 if (!node) in lpm_trie_node_alloc()
308 return node; in lpm_trie_node_alloc()
498 if (!node || node->prefixlen != key->prefixlen || in trie_delete_elem()
524 !node->child[0] && !node->child[1]) { in trie_delete_elem()
693 for (node = search_root; node;) { in trie_get_next_key()
701 node = rcu_dereference(node->child[next_bit]); in trie_get_next_key()
703 if (!node || node->prefixlen != matchlen || in trie_get_next_key()
735 for (node = search_root; node;) { in trie_get_next_key()
737 node = rcu_dereference(node->child[0]); in trie_get_next_key()
[all …]
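trie_lookup_elem() descends by the key bit just past the current node's prefix (next_bit) and remembers the deepest node whose prefix still covers the key. A simplified, non-RCU sketch for a 32-bit key; the node layout and the is_entry flag are assumptions, not the kernel's exact representation:

#include <stdint.h>
#include <stddef.h>

struct lpm_node {
        struct lpm_node *child[2];
        uint32_t prefix;                /* value bits, left-aligned */
        unsigned int prefixlen;         /* number of significant bits */
        int is_entry;                   /* real entry vs. intermediate node */
        void *value;
};

static int prefix_matches(const struct lpm_node *node, uint32_t key)
{
        if (node->prefixlen == 0)
                return 1;
        /* compare only the top prefixlen bits */
        uint32_t mask = ~0u << (32 - node->prefixlen);
        return ((key ^ node->prefix) & mask) == 0;
}

static void *lpm_lookup(struct lpm_node *root, uint32_t key)
{
        struct lpm_node *node, *found = NULL;

        for (node = root; node; ) {
                if (!prefix_matches(node, key))
                        break;
                if (node->is_entry)
                        found = node;           /* longest match so far */
                if (node->prefixlen == 32)
                        break;
                /* next_bit: the key bit just past this node's prefix */
                unsigned int next_bit = (key >> (31 - node->prefixlen)) & 1;
                node = node->child[next_bit];
        }
        return found ? found->value : NULL;
}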
percpu_freelist.c
31 node->next = head->first; in pcpu_freelist_push_node()
32 WRITE_ONCE(head->first, node); in pcpu_freelist_push_node()
36 struct pcpu_freelist_node *node) in ___pcpu_freelist_push() argument
40 pcpu_freelist_push_node(head, node); in ___pcpu_freelist_push()
46 struct pcpu_freelist_node *node) in __pcpu_freelist_push() argument
69 struct pcpu_freelist_node *node) in pcpu_freelist_push() argument
74 __pcpu_freelist_push(s, node); in pcpu_freelist_push()
112 node = head->first; in ___pcpu_freelist_pop()
113 if (node) { in ___pcpu_freelist_pop()
116 return node; in ___pcpu_freelist_pop()
[all …]
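Lines 31-32 are the entire push: link the node in front of head->first, then publish the new head (the kernel publishes with WRITE_ONCE under a per-list lock). A plain single-threaded sketch of the push/pop pair:

struct freelist_node {
        struct freelist_node *next;
};

struct freelist_head {
        struct freelist_node *first;
};

static void freelist_push(struct freelist_head *head, struct freelist_node *node)
{
        node->next = head->first;       /* kernel: WRITE_ONCE for the publish */
        head->first = node;
}

static struct freelist_node *freelist_pop(struct freelist_head *head)
{
        struct freelist_node *node = head->first;

        if (node)
                head->first = node->next;
        return node;                    /* null when the list is empty */
}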
crypto.c
62 struct bpf_crypto_type_list *node; in bpf_crypto_register_type() local
71 node = kmalloc(sizeof(*node), GFP_KERNEL); in bpf_crypto_register_type()
73 if (!node) in bpf_crypto_register_type()
76 node->type = type; in bpf_crypto_register_type()
89 struct bpf_crypto_type_list *node; in bpf_crypto_unregister_type() local
97 list_del(&node->list); in bpf_crypto_unregister_type()
98 kfree(node); in bpf_crypto_unregister_type()
111 struct bpf_crypto_type_list *node; in bpf_crypto_get_type() local
115 if (strcmp(node->type->name, name)) in bpf_crypto_get_type()
118 if (try_module_get(node->type->owner)) in bpf_crypto_get_type()
[all …]
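bpf_crypto_register_type() allocates a small list node, records the type, and links it onto a global list; bpf_crypto_get_type() scans that list by name and pins the owning module. A userspace sketch of the same register/lookup shape with a plain singly linked list and hypothetical demo_* names (no locking or module refcounting):

#include <stdlib.h>
#include <string.h>

struct demo_type {
        const char *name;
};

struct demo_type_list {
        struct demo_type_list *next;
        const struct demo_type *type;
};

static struct demo_type_list *registry;

static int demo_register_type(const struct demo_type *type)
{
        struct demo_type_list *node = malloc(sizeof(*node));

        if (!node)
                return -1;              /* kernel: -ENOMEM */
        node->type = type;
        node->next = registry;          /* link at the head */
        registry = node;
        return 0;
}

static const struct demo_type *demo_get_type(const char *name)
{
        struct demo_type_list *node;

        for (node = registry; node; node = node->next)
                if (!strcmp(node->type->name, name))
                        return node->type;      /* kernel also takes a module ref */
        return NULL;
}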
stream.c
114 init_llist_node(&elem->node); in bpf_stream_elem_init()
195 llist_add(&elem->node, log); in __bpf_stream_push_str()
255 struct llist_node *node; in bpf_stream_backlog_pop() local
257 node = stream->backlog_head; in bpf_stream_backlog_pop()
262 return node; in bpf_stream_backlog_pop()
302 struct llist_node *node; in bpf_stream_read() local
311 if (!node) { in bpf_stream_read()
315 if (!node) in bpf_stream_read()
317 elem = container_of(node, typeof(*elem), node); in bpf_stream_read()
428 struct llist_node *node; in bpf_stream_stage_free() local
[all …]
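Line 317 recovers the stream element from its embedded llist_node with container_of(): subtract the member's offset from the member's address to get back to the enclosing struct. A minimal sketch using the standard offsetof-based definition:

#include <stddef.h>

#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct llist_node {
        struct llist_node *next;
};

struct stream_elem {
        struct llist_node node;         /* embedded list linkage */
        int len;
};

static struct stream_elem *elem_of(struct llist_node *node)
{
        /* mirrors: elem = container_of(node, typeof(*elem), node) */
        return container_of(node, struct stream_elem, node);
}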
rqspinlock.c
350 struct mcs_spinlock *prev, *next, *node; in resilient_queued_spin_lock_slowpath() local
459 node = this_cpu_ptr(&rqnodes[0].mcs); in resilient_queued_spin_lock_slowpath()
460 idx = node->count++; in resilient_queued_spin_lock_slowpath()
487 node = grab_mcs_node(node, idx); in resilient_queued_spin_lock_slowpath()
501 node->locked = 0; in resilient_queued_spin_lock_slowpath()
502 node->next = NULL; in resilient_queued_spin_lock_slowpath()
539 WRITE_ONCE(prev->next, node); in resilient_queued_spin_lock_slowpath()
541 val = arch_mcs_spin_lock_contended(&node->locked); in resilient_queued_spin_lock_slowpath()
553 next = READ_ONCE(node->next); in resilient_queued_spin_lock_slowpath()
605 next = smp_cond_load_relaxed(&node->next, VAL); in resilient_queued_spin_lock_slowpath()
[all …]
/kernel/trace/
rethook.c
23 struct llist_node *node; in rethook_flush_task() local
26 while (node) { in rethook_flush_task()
28 node = node->next; in rethook_flush_task()
148 objpool_push(node, &node->rethook->pool); in rethook_recycle()
211 if (!node) in __rethook_find_ret_addr()
214 node = node->next; in __rethook_find_ret_addr()
216 while (node) { in __rethook_find_ret_addr()
219 *cur = node; in __rethook_find_ret_addr()
222 node = node->next; in __rethook_find_ret_addr()
315 if (first == node) in rethook_trampoline_handler()
[all …]
trace_boot.c
301 struct xbc_node *node; in trace_boot_hist_add_handlers() local
308 p = xbc_node_get_data(node); in trace_boot_hist_add_handlers()
378 if (node) { in trace_boot_compose_hist_cmd()
397 if (node && trace_boot_hist_add_handlers(node, &buf, end, "var") < 0) in trace_boot_compose_hist_cmd()
400 if (node && trace_boot_hist_add_handlers(node, &buf, end, "var") < 0) in trace_boot_compose_hist_cmd()
403 if (node && trace_boot_hist_add_handlers(node, &buf, end, "event") < 0) in trace_boot_compose_hist_cmd()
422 struct xbc_node *node; in trace_boot_init_histograms() local
524 node = xbc_node_find_subkey(node, "event"); in trace_boot_init_events()
525 if (!node) in trace_boot_init_events()
627 node = xbc_node_find_subkey(node, "instance"); in trace_boot_init_instances()
[all …]
trace_stat.c
27 struct rb_node node; member
171 struct rb_node *node; in stat_seq_start() local
186 for (i = 0; node && i < n; i++) in stat_seq_start()
187 node = rb_next(node); in stat_seq_start()
189 return node; in stat_seq_start()
195 struct rb_node *node = p; in stat_seq_next() local
202 return rb_next(node); in stat_seq_next()
320 if (node->ts == trace) in register_stat_tracer()
347 struct stat_session *node, *tmp; in unregister_stat_tracer() local
351 if (node->ts == trace) { in unregister_stat_tracer()
[all …]
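stat_seq_start() (lines 186-189) positions the seq_file iterator by stepping n times from the first rbtree node and returns NULL once it runs off the end. A sketch against the rbtree iteration API from <linux/rbtree.h>; demo_seq_start is a hypothetical name:

#include <linux/rbtree.h>

static struct rb_node *demo_seq_start(struct rb_root *root, long n)
{
        struct rb_node *node = rb_first(root);  /* leftmost = smallest */
        long i;

        for (i = 0; node && i < n; i++)
                node = rb_next(node);           /* NULL past the last entry */
        return node;
}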
fprobe.c
62 if (node->addr == ip) in find_first_fprobe_node()
63 return node; in find_first_fprobe_node()
72 unsigned long ip = node->addr; in insert_fprobe_node()
94 WRITE_ONCE(node->fp, NULL); in delete_fprobe_node()
95 hlist_del_rcu(&node->hlist); in delete_fprobe_node()
269 if (node->addr != func) in fprobe_entry()
271 fp = READ_ONCE(node->fp); in fprobe_entry()
281 node = first; in fprobe_entry()
286 if (node->addr != func) in fprobe_entry()
306 if (node->addr != func) in fprobe_entry()
[all …]
/kernel/sched/
ext_idle.c
777 int node; in reset_idle_masks() local
834 if (node < 0 || node >= nr_node_ids) { in validate_node()
845 return node; in validate_node()
998 node = validate_node(node); in scx_bpf_get_idle_cpumask_node()
999 if (node < 0) in scx_bpf_get_idle_cpumask_node()
1037 node = validate_node(node); in scx_bpf_get_idle_smtmask_node()
1038 if (node < 0) in scx_bpf_get_idle_smtmask_node()
1129 node = validate_node(node); in scx_bpf_pick_idle_cpu_node()
1130 if (node < 0) in scx_bpf_pick_idle_cpu_node()
1131 return node; in scx_bpf_pick_idle_cpu_node()
[all …]
/kernel/locking/
osq_lock.c
34 return node->cpu - 1; in node_cpu()
81 if (node->next) { in osq_wait_next()
100 node->locked = 0; in osq_lock()
101 node->next = NULL; in osq_lock()
102 node->cpu = curr; in osq_lock()
115 node->prev = prev; in osq_lock()
129 WRITE_ONCE(prev->next, node); in osq_lock()
165 cmpxchg(&prev->next, node, NULL) == node) in osq_lock()
182 prev = READ_ONCE(node->prev); in osq_lock()
224 node = this_cpu_ptr(&osq_node); in osq_unlock()
[all …]
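node_cpu() at line 34 undoes OSQ's cpu-plus-one encoding: the lock tail stores cpu + 1 so that the value 0 can mean "unlocked". A two-function sketch of the encode/decode pair:

#define OSQ_UNLOCKED_VAL 0              /* tail value when nobody holds the lock */

static int encode_cpu(int cpu_nr)
{
        return cpu_nr + 1;              /* keep 0 reserved for "no owner" */
}

static int node_cpu_demo(int encoded)
{
        return encoded - 1;             /* mirrors: return node->cpu - 1 */
}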
qspinlock.c
91 struct mcs_spinlock *node) { } in __pv_kick_node() argument
93 struct mcs_spinlock *node) in __pv_wait_head_or_lock() argument
215 node = this_cpu_ptr(&qnodes[0].mcs); in queued_spin_lock_slowpath()
216 idx = node->count++; in queued_spin_lock_slowpath()
237 node = grab_mcs_node(node, idx); in queued_spin_lock_slowpath()
251 node->locked = 0; in queued_spin_lock_slowpath()
252 node->next = NULL; in queued_spin_lock_slowpath()
253 pv_init_node(node); in queued_spin_lock_slowpath()
288 WRITE_ONCE(prev->next, node); in queued_spin_lock_slowpath()
290 pv_wait_node(node, prev); in queued_spin_lock_slowpath()
[all …]
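Lines 215-216 and 237 grab an MCS node from a small per-CPU pool: slot 0's count field tracks the nesting level (task, softirq, hardirq, NMI), and idx = node->count++ claims the next slot. A sketch with _Thread_local standing in for per-CPU storage; the kernel additionally falls back to a trylock loop when idx overflows the pool:

#define MAX_NODES 4                     /* one slot per nesting context */

struct mcs_node {
        struct mcs_node *next;
        int locked;
        int count;                      /* nesting depth, kept in slot 0 */
};

static _Thread_local struct mcs_node qnodes[MAX_NODES];

static struct mcs_node *grab_node(int *idx)
{
        struct mcs_node *base = &qnodes[0];

        *idx = base->count++;           /* claim the next nesting level */
        return &base[*idx];             /* caller must handle *idx >= MAX_NODES */
}

static void release_node(void)
{
        qnodes[0].count--;              /* matching release on unlock */
}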
qspinlock_paravirt.h
169 struct pv_node *node; member
218 WRITE_ONCE(he->node, node); in pv_hash()
240 struct pv_node *node; in pv_unhash() local
244 node = READ_ONCE(he->node); in pv_unhash()
246 return node; in pv_unhash()
299 if (READ_ONCE(node->locked)) in pv_wait_node()
319 if (!READ_ONCE(node->locked)) { in pv_wait_node()
340 !READ_ONCE(node->locked)); in pv_wait_node()
501 struct pv_node *node; in __pv_queued_spin_unlock_slowpath() local
523 node = pv_unhash(lock); in __pv_queued_spin_unlock_slowpath()
[all …]
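pv_hash()/pv_unhash() map a lock address to the pv_node of its queue head so the unlocking CPU knows which vCPU to kick (lines 218, 240-246). A simplified open-addressing sketch; the table size, hash function, and plain stores are assumptions (the kernel sizes the table at boot and publishes entries with cmpxchg/WRITE_ONCE):

#include <stddef.h>

#define PV_HASH_SIZE 64                 /* illustrative; sized at boot in reality */

struct pv_entry {
        void *lock;
        void *node;
};

static struct pv_entry pv_table[PV_HASH_SIZE];

static size_t pv_hash_fn(const void *lock)
{
        return ((size_t)lock >> 4) % PV_HASH_SIZE;
}

static void pv_hash_demo(void *lock, void *node)
{
        size_t i = pv_hash_fn(lock);

        while (pv_table[i].lock)        /* linear-probe to a free slot */
                i = (i + 1) % PV_HASH_SIZE;
        pv_table[i].node = node;
        pv_table[i].lock = lock;
}

static void *pv_unhash_demo(void *lock)
{
        size_t i = pv_hash_fn(lock);
        void *node;

        while (pv_table[i].lock != lock)        /* entry is assumed to exist */
                i = (i + 1) % PV_HASH_SIZE;
        node = pv_table[i].node;
        pv_table[i].lock = NULL;                /* free the slot */
        return node;
}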
mcs_spinlock.h
57 void mcs_spin_lock(struct mcs_spinlock **lock, struct mcs_spinlock *node) in mcs_spin_lock() argument
62 node->locked = 0; in mcs_spin_lock()
63 node->next = NULL; in mcs_spin_lock()
71 prev = xchg(lock, node); in mcs_spin_lock()
83 WRITE_ONCE(prev->next, node); in mcs_spin_lock()
86 arch_mcs_spin_lock_contended(&node->locked); in mcs_spin_lock()
94 void mcs_spin_unlock(struct mcs_spinlock **lock, struct mcs_spinlock *node) in mcs_spin_unlock() argument
96 struct mcs_spinlock *next = READ_ONCE(node->next); in mcs_spin_unlock()
102 if (likely(cmpxchg_release(lock, node, NULL) == node)) in mcs_spin_unlock()
105 while (!(next = READ_ONCE(node->next))) in mcs_spin_unlock()
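The mcs_spin_lock()/mcs_spin_unlock() hits above contain the whole MCS protocol: enqueue with an atomic exchange on the tail pointer, spin on your own node's locked flag, and on unlock either swing the tail back to NULL or hand off to the successor. A self-contained C11 sketch of the same protocol (a sketch, not the kernel implementation; the kernel spins with cpu_relax() and uses acquire/release orderings):

#include <stdatomic.h>
#include <stddef.h>

struct mcs {
        _Atomic(struct mcs *) next;
        atomic_int locked;
};

static void mcs_lock(_Atomic(struct mcs *) *lock, struct mcs *node)
{
        struct mcs *prev;

        atomic_store(&node->locked, 0);
        atomic_store(&node->next, NULL);

        /* Atomically append ourselves; prev is the old tail. */
        prev = atomic_exchange(lock, node);
        if (!prev)
                return;                         /* lock was free: we own it */

        atomic_store(&prev->next, node);        /* let prev find us */
        while (!atomic_load(&node->locked))     /* spin on our own cache line */
                ;
}

static void mcs_unlock(_Atomic(struct mcs *) *lock, struct mcs *node)
{
        struct mcs *next = atomic_load(&node->next);

        if (!next) {
                struct mcs *expected = node;

                /* No known successor: try to swing the tail back to NULL. */
                if (atomic_compare_exchange_strong(lock, &expected, NULL))
                        return;
                /* A successor is mid-enqueue; wait for its prev->next store. */
                while (!(next = atomic_load(&node->next)))
                        ;
        }
        atomic_store(&next->locked, 1);         /* hand the lock off */
}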
/kernel/power/
wakelock.c
29 struct rb_node node; member
40 struct rb_node *node; in pm_show_wakelocks() local
46 for (node = rb_first(&wakelocks_tree); node; node = rb_next(node)) { in pm_show_wakelocks()
47 wl = rb_entry(node, struct wakelock, node); in pm_show_wakelocks()
154 struct rb_node *parent = *node; in wakelock_lookup_add()
157 while (*node) { in wakelock_lookup_add()
160 parent = *node; in wakelock_lookup_add()
161 wl = rb_entry(*node, struct wakelock, node); in wakelock_lookup_add()
170 node = &(*node)->rb_left; in wakelock_lookup_add()
172 node = &(*node)->rb_right; in wakelock_lookup_add()
[all …]
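wakelock_lookup_add() (lines 154-172) walks the rb_node child pointers by strcmp(), remembering where a new node would hang. A sketch of just the lookup walk against <linux/rbtree.h>; the insertion step (rb_link_node()/rb_insert_color()) and the parent bookkeeping are left out:

#include <linux/rbtree.h>
#include <linux/string.h>

struct demo_lock {
        char *name;
        struct rb_node node;
};

static struct demo_lock *demo_lookup(struct rb_root *root, const char *name)
{
        struct rb_node **node = &root->rb_node;

        while (*node) {
                struct demo_lock *wl = rb_entry(*node, struct demo_lock, node);
                int diff = strcmp(name, wl->name);

                if (!diff)
                        return wl;      /* existing entry found */
                node = diff < 0 ? &(*node)->rb_left : &(*node)->rb_right;
        }
        return NULL;    /* caller would allocate and rb_link_node() here */
}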
snapshot.c
449 if (!node) in alloc_rtree_node()
458 return node; in alloc_rtree_node()
488 if (!node) in add_rtree_block()
508 if (!node) { in add_rtree_block()
511 if (!node) in add_rtree_block()
513 *dst = node; in add_rtree_block()
519 node = *dst; in add_rtree_block()
784 node = bm->cur.node; in memory_bm_find_bit()
798 node = (struct rtree_node *)node->data[index]; in memory_bm_find_bit()
804 bm->cur.node = node; in memory_bm_find_bit()
[all …]
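memory_bm_find_bit() (lines 784-804) descends the swsusp radix tree one level per iteration, indexing node->data[] with a slice of the pfn, and caches the reached leaf in bm->cur for the next lookup. A sketch of that descent; the 9-bit index width and the level handling are illustrative assumptions:

#include <stdint.h>
#include <stddef.h>

#define RTREE_INDEX_BITS 9              /* hypothetical: 512 slots per node */
#define RTREE_INDEX_MASK ((1u << RTREE_INDEX_BITS) - 1)

struct rtree_node {
        void *data[1u << RTREE_INDEX_BITS];     /* children, or a leaf bitmap */
};

static void *rtree_find_leaf(struct rtree_node *root, uint64_t pfn, int levels)
{
        struct rtree_node *node = root;

        while (levels-- > 0) {
                unsigned int index =
                        (pfn >> (levels * RTREE_INDEX_BITS)) & RTREE_INDEX_MASK;

                node = node->data[index];       /* kernel caches this in bm->cur */
                if (!node)
                        return NULL;
        }
        return node;    /* leaf block holding this pfn's bit */
}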
/kernel/
smp.c
358 csd->node.u_flags |= CSD_FLAG_LOCK; in csd_lock()
393 csd = container_of(node, call_single_data_t, node.llist); in __smp_call_single_queue()
542 entry = &csd_next->node.llist; in __flush_smp_call_function_queue()
550 prev = &csd->node.llist; in __flush_smp_call_function_queue()
568 entry = &csd_next->node.llist; in __flush_smp_call_function_queue()
584 prev = &csd->node.llist; in __flush_smp_call_function_queue()
680 csd->node.src = smp_processor_id(); in smp_call_function_single()
681 csd->node.dst = cpu; in smp_call_function_single()
729 csd->node.u_flags = CSD_FLAG_LOCK; in smp_call_function_single_async()
835 csd->node.src = smp_processor_id(); in smp_call_function_many_cond()
[all …]
irq_work.c
61 oflags = atomic_fetch_or(IRQ_WORK_CLAIMED | CSD_TYPE_IRQ_WORK, &work->node.a_flags); in irq_work_claim()
95 work_flags = atomic_read(&work->node.a_flags); in __irq_work_queue_local()
107 if (!llist_add(&work->node.llist, list)) in __irq_work_queue_local()
163 !(atomic_read(&work->node.a_flags) & IRQ_WORK_HARD_IRQ)) { in irq_work_queue_on()
165 if (!llist_add(&work->node.llist, &per_cpu(lazy_list, cpu))) in irq_work_queue_on()
173 __smp_call_single_queue(cpu, &work->node.llist); in irq_work_queue_on()
211 flags = atomic_read(&work->node.a_flags); in irq_work_single()
213 atomic_set(&work->node.a_flags, flags); in irq_work_single()
228 (void)atomic_cmpxchg(&work->node.a_flags, flags, flags & ~IRQ_WORK_BUSY); in irq_work_single()
251 llist_for_each_entry_safe(work, tmp, llnode, node.llist) in irq_work_run_list()
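irq_work_claim() at line 61 claims a work item with one atomic_fetch_or(): the returned old flags show whether it was already pending, so claim-and-test is a single read-modify-write. A C11 sketch of the idiom; DEMO_CLAIMED is a stand-in for the kernel's flag bits:

#include <stdatomic.h>
#include <stdbool.h>

#define DEMO_CLAIMED 0x1

static bool demo_claim(atomic_int *flags)
{
        int oflags = atomic_fetch_or(flags, DEMO_CLAIMED);

        /* If the bit was already set, someone else owns the item. */
        return !(oflags & DEMO_CLAIMED);
}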
kthread.c
47 int node; member
59 unsigned int node; member
504 void *data, int node, in __kthread_create_on_node() argument
517 create->node = node; in __kthread_create_on_node()
579 void *data, int node, in kthread_create_on_node() argument
1000 struct kthread_work, node); in kthread_worker_fn()
1001 list_del_init(&work->node); in kthread_worker_fn()
1047 node, namefmt, args); in __kthread_create_worker_on_node()
1229 list_del_init(&work->node); in kthread_delayed_work_timer_fn()
1336 if (!list_empty(&work->node)) in kthread_flush_work()
[all …]
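kthread_worker_fn() (lines 1000-1001) pops the first kthread_work off the worker's list and detaches it with list_del_init() before running it, so an empty list_head later means "not queued" (compare the list_empty() test at line 1336). A sketch of that dequeue using the kernel list API:

#include <linux/list.h>

struct demo_work {
        struct list_head node;
        void (*func)(struct demo_work *work);
};

static void demo_run_one(struct list_head *worklist)
{
        struct demo_work *work;

        if (list_empty(worklist))
                return;
        work = list_first_entry(worklist, struct demo_work, node);
        list_del_init(&work->node);     /* re-inits node: "not queued" marker */
        work->func(work);
}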
/kernel/module/
tree_lookup.c
24 struct module_memory *mod_mem = container_of(n, struct module_memory, mtn.node); in __mod_tree_val()
31 struct module_memory *mod_mem = container_of(n, struct module_memory, mtn.node); in __mod_tree_size()
64 static noinline void __mod_tree_insert(struct mod_tree_node *node, struct mod_tree_root *tree) in __mod_tree_insert() argument
66 latch_tree_insert(&node->node, &tree->root, &mod_tree_ops); in __mod_tree_insert()
69 static void __mod_tree_remove(struct mod_tree_node *node, struct mod_tree_root *tree) in __mod_tree_remove() argument
71 latch_tree_erase(&node->node, &tree->root, &mod_tree_ops); in __mod_tree_remove()
111 return container_of(ltn, struct mod_tree_node, node)->mod; in mod_find()
/kernel/irq/
irqdesc.c
55 static int alloc_masks(struct irq_desc *desc, int node) in alloc_masks() argument
58 GFP_KERNEL, node)) in alloc_masks()
63 GFP_KERNEL, node)) { in alloc_masks()
92 desc->irq_common_data.node = node; in desc_smp_init()
139 desc_smp_init(desc, node, affinity); in desc_set_defaults()
219 if (alloc_masks(desc, node)) { in init_desc()
228 desc_set_defaults(irq, desc, node, affinity, owner); in init_desc()
522 node = cpu_to_node(cpumask_first(mask)); in alloc_descs()
551 int i, initcnt, node = first_online_node; in early_irq_init() local
571 desc = alloc_desc(i, node, 0, NULL, NULL); in early_irq_init()
[all …]
/kernel/futex/
core.c
340 int node = FUTEX_NO_NODE; in __futex_key_to_node() local
362 return node; in __futex_key_to_node()
367 int seq, node; in futex_key_to_node_opt() local
379 return node; in futex_key_to_node_opt()
384 int node; in futex_mpol() local
388 return node; in futex_mpol()
417 int node = key->both.node; in __futex_hash() local
579 node = FUTEX_NO_NODE; in get_futex_key()
588 ((unsigned int)node >= MAX_NUMNODES || !node_possible(node))) in get_futex_key()
601 node = numa_node_id(); in get_futex_key()
[all …]
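The range check at line 588 folds "node < 0 || node >= MAX_NUMNODES" into a single unsigned comparison: casting a negative int to unsigned int yields a huge value, so it fails the upper bound too. A standalone demonstration (the MAX_NUMNODES value is illustrative):

#include <stdio.h>

#define MAX_NUMNODES 64                 /* illustrative; config-dependent in reality */

static int node_in_range(int node)
{
        return (unsigned int)node < MAX_NUMNODES;       /* rejects negatives too */
}

int main(void)
{
        printf("%d %d %d\n", node_in_range(-1), node_in_range(3),
               node_in_range(64));      /* prints: 0 1 0 */
        return 0;
}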
/kernel/time/
alarmtimer.c
154 timerqueue_add(&base->timerqueue, &alarm->node); in alarmtimer_enqueue()
172 timerqueue_del(&base->timerqueue, &alarm->node); in alarmtimer_dequeue()
314 timerqueue_init(&alarm->node); in __alarm_init()
345 alarm->node.expires = start; in alarm_start()
426 delta = ktime_sub(now, alarm->node.expires); in alarm_forward()
436 alarm->node.expires = ktime_add_ns(alarm->node.expires, in alarm_forward()
439 if (alarm->node.expires > now) in alarm_forward()
448 alarm->node.expires = ktime_add_safe(alarm->node.expires, interval); in alarm_forward()
531 alarm_start(alarm, alarm->node.expires); in alarm_timer_rearm()
555 return ktime_sub(alarm->node.expires, now); in alarm_timer_remaining()
[all …]
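alarm_forward() (lines 426-448) computes how many whole intervals fit into the overshoot, advances expires by that many at once, and takes at most one more step so the timer lands past now; the return value is the overrun count. A sketch of the arithmetic with plain integers standing in for ktime_t:

#include <stdint.h>

static uint64_t alarm_forward_demo(int64_t *expires, int64_t now, int64_t interval)
{
        uint64_t overrun = 1;
        int64_t delta;

        if (*expires > now)
                return 0;                       /* not expired yet */

        delta = now - *expires;
        if (delta >= interval) {
                uint64_t incr = delta / interval;

                overrun += incr;
                *expires += incr * interval;    /* jump over whole intervals */
                if (*expires > now)
                        return overrun;
        }
        *expires += interval;                   /* final step past now */
        return overrun;
}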

Completed in 848 milliseconds
