/tools/net/sunrpc/xdrgen/generators/

typedef.py
    22: if isinstance(node, _XdrBasic):
    26: name=node.name,
    36: print(template.render(name=node.name, size=node.size))
    44: name=node.name,
    46: size=node.size,
    53: name=node.name,
    72: name=node.name,
    82: print(template.render(name=node.name, size=node.size))
    90: name=node.name,
    92: size=node.size,
    [all …]

union.py
    17: if node.name in public_apis:
    30: name=node.name,
    47: name=node.arm.name,
    61: for case in node.cases:
    64: if node.default is not None:
    77: print(template.render(name=node.name, type=node.spec.type_name))
    92: for case in node.values:
    143: for case in node.cases:
    162: print(template.render(name=node.name, type=node.spec.type_name))
    177: for case in node.values:
    [all …]

enum.py
    20: if node.name in public_apis:
    22: print(template.render(name=node.name))
    27: print(template.render(name=node.name))
    30: for enumerator in node.enumerators:
    33: if node.name in big_endian:
    37: print(template.render(name=node.name))
    39: def emit_decoder(self, node: _XdrEnum) -> None:
    41: if node.name in big_endian:
    45: print(template.render(name=node.name))
    49: if node.name in big_endian:
    [all …]

pointer.py
    20: if node.name in public_apis:
    22: print(template.render(name=node.name))
    84: print(template.render(name=node.name))
    86: for field in node.fields[0:-1]:
    90: print(template.render(name=node.name))
    164: print(template.render(name=node.name))
    166: for field in node.fields[0:-1]:
    241: print(template.render(name=node.name))
    243: for field in node.fields[0:-1]:
    257: width=" + ".join(node.symbolic_width()),
    [all …]

struct.py
    20: if node.name in public_apis:
    22: print(template.render(name=node.name))
    84: print(template.render(name=node.name))
    86: for field in node.fields:
    90: print(template.render(name=node.name))
    164: print(template.render(name=node.name))
    166: for field in node.fields:
    241: print(template.render(name=node.name))
    243: for field in node.fields:
    280: emit_struct_decoder(self.environment, node)
    [all …]

/tools/include/linux/

rbtree.h
    43: ((node)->__rb_parent_color == (unsigned long)(node))
    45: ((node)->__rb_parent_color = (unsigned long)(node))
    70: node->rb_left = node->rb_right = NULL;  (in rb_link_node)
    276: node = node->rb_left;  (in rb_find)
    278: node = node->rb_right;  (in rb_find)
    307: node = node->rb_left;  (in rb_find_first)
    309: node = node->rb_right;  (in rb_find_first)
    328: node = rb_next(node);  (in rb_next_match)
    329: if (node && cmp(key, node))  (in rb_next_match)
    331: return node;  (in rb_next_match)
    [all …]
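
These hits are the tools-side copy of the kernel red-black tree API. As a rough illustration of how the listed helpers fit together, here is a minimal userspace sketch built against tools/include (struct item, item_cmp() and the insert/lookup wrappers are invented for the example, not part of the header):

#include <stddef.h>
#include <linux/rbtree.h>

struct item {
	unsigned long key;
	struct rb_node node;
};

/* Comparison used by rb_find(): key vs. the node being visited. */
static int item_cmp(const void *key, const struct rb_node *n)
{
	unsigned long k = *(const unsigned long *)key;
	const struct item *it = rb_entry(n, struct item, node);

	if (k < it->key)
		return -1;
	if (k > it->key)
		return 1;
	return 0;
}

static void item_insert(struct rb_root *root, struct item *new)
{
	struct rb_node **link = &root->rb_node, *parent = NULL;

	while (*link) {
		struct item *cur = rb_entry(*link, struct item, node);

		parent = *link;
		if (new->key < cur->key)
			link = &parent->rb_left;
		else
			link = &parent->rb_right;
	}
	rb_link_node(&new->node, parent, link);	/* hook the node in ...   */
	rb_insert_color(&new->node, root);	/* ... then rebalance     */
}

static struct item *item_lookup(struct rb_root *root, unsigned long key)
{
	struct rb_node *n = rb_find(&key, root, item_cmp);

	return n ? rb_entry(n, struct item, node) : NULL;
}

rb_find() walks the tree with the caller-supplied comparison, exactly as the rb_find() hits above show; rb_find_first() and rb_next_match() are the duplicate-key variants of the same lookup.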

interval_tree_generic.h
    42: ITTYPE start = ITSTART(node), last = ITLAST(node); \
    59: node->ITSUBTREE = last; \
    88: if (node->ITRB.rb_left) { \
    100: node = left; \
    107: if (node->ITRB.rb_right) { \
    108: node = rb_entry(node->ITRB.rb_right, \
    122: ITSTRUCT *node, *leftmost; \
    141: if (node->ITSUBTREE < start) \
    176: prev = &node->ITRB; \
    178: rb = node->ITRB.rb_right; \
    [all …]
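
interval_tree_generic.h is a template: INTERVAL_TREE_DEFINE() stamps out an interval tree for a caller-supplied struct. A hedged sketch of one instantiation, with a made-up struct range (the generated names range_insert, range_remove, range_iter_first and range_iter_next follow from the final ITPREFIX argument):

#include <linux/rbtree.h>
#include <linux/interval_tree_generic.h>

struct range {
	struct rb_node rb;
	unsigned long start;		/* first value covered, inclusive   */
	unsigned long last;		/* last value covered, inclusive    */
	unsigned long __subtree_last;	/* maintained by the generated code */
};

#define RANGE_START(r) ((r)->start)
#define RANGE_LAST(r)  ((r)->last)

INTERVAL_TREE_DEFINE(struct range, rb, unsigned long, __subtree_last,
		     RANGE_START, RANGE_LAST, static, range)

/* Count how many stored ranges overlap [start, last]. */
static int count_overlaps(struct rb_root_cached *root,
			  unsigned long start, unsigned long last)
{
	struct range *r;
	int n = 0;

	for (r = range_iter_first(root, start, last); r;
	     r = range_iter_next(r, start, last))
		n++;
	return n;
}

The intervals are inclusive at both ends, which is why the field is "last" rather than "end".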

rbtree_augmented.h
    61: root->rb_leftmost = node;  (in rb_insert_augmented_cached)
    83: if (RBCOMPUTE(node, true)) \
    85: rb = rb_parent(&node->RBFIELD); \
    140: node->RBAUGMENTED = max; \
    203: pc = node->__rb_parent_color;  (in __rb_erase_augmented)
    236: augment->copy(node, successor);  (in __rb_erase_augmented)
    262: augment->copy(node, successor);  (in __rb_erase_augmented)
    266: tmp = node->rb_left;  (in __rb_erase_augmented)
    270: pc = node->__rb_parent_color;  (in __rb_erase_augmented)
    303: if (root->rb_leftmost == node)  (in rb_erase_augmented_cached)
    [all …]
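
The RBCOMPUTE/RBAUGMENTED hits come from RB_DECLARE_CALLBACKS_MAX(), which generates the propagate/copy/rotate callbacks for a tree that caches a per-subtree maximum. A hedged sketch of one way to use it (struct anode, anode_augment and anode_insert() are invented here; the ancestor-update loop mirrors what interval_tree_generic.h does on insert):

#include <linux/rbtree_augmented.h>

struct anode {
	struct rb_node rb;
	unsigned long value;		/* the node's own value              */
	unsigned long subtree_max;	/* max of ->value over this subtree  */
};

#define ANODE_VALUE(n) ((n)->value)

/* Defines anode_augment, a struct rb_augment_callbacks keeping subtree_max. */
RB_DECLARE_CALLBACKS_MAX(static, anode_augment, struct anode, rb,
			 unsigned long, subtree_max, ANODE_VALUE)

static void anode_insert(struct anode *node, struct rb_root *root)
{
	struct rb_node **link = &root->rb_node, *parent = NULL;
	unsigned long value = node->value;

	/* Walk down, keeping every ancestor's cached maximum up to date. */
	while (*link) {
		struct anode *cur = rb_entry(*link, struct anode, rb);

		parent = *link;
		if (cur->subtree_max < value)
			cur->subtree_max = value;
		if (value < cur->value)
			link = &parent->rb_left;
		else
			link = &parent->rb_right;
	}

	node->subtree_max = value;
	rb_link_node(&node->rb, parent, link);
	rb_insert_augmented(&node->rb, root, &anode_augment);
}

Removal goes through rb_erase_augmented(&node->rb, root, &anode_augment), which is where the __rb_erase_augmented() hits above come from.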

/tools/lib/

rbtree.c
    498: node = node->rb_right;  (in rb_next)
    500: node = node->rb_left;  (in rb_next)
    511: while ((parent = rb_parent(node)) && node == parent->rb_right)  (in rb_next)
    512: node = parent;  (in rb_next)
    529: node = node->rb_left;  (in rb_prev)
    531: node = node->rb_right;  (in rb_prev)
    539: while ((parent = rb_parent(node)) && node == parent->rb_left)  (in rb_prev)
    540: node = parent;  (in rb_prev)
    565: node = node->rb_left;  (in rb_left_deepest_node)
    567: node = node->rb_right;  (in rb_left_deepest_node)
    [all …]
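
rb_next()/rb_prev() provide in-order traversal and rb_left_deepest_node() backs the postorder iterator. A small sketch of the usual callers, reusing the invented struct item from the sketch above and assuming the tools header carries the same postorder helper as the kernel one:

#include <stdio.h>
#include <stdlib.h>
#include <linux/rbtree.h>

struct item {
	unsigned long key;
	struct rb_node node;
};

/* Print keys in ascending order: rb_first(), then repeated rb_next(). */
static void dump_tree(struct rb_root *root)
{
	struct rb_node *n;

	for (n = rb_first(root); n; n = rb_next(n))
		printf("%lu\n", rb_entry(n, struct item, node)->key);
}

/* Free everything; the postorder walk never revisits a freed node. */
static void drop_all(struct rb_root *root)
{
	struct item *it, *tmp;

	rbtree_postorder_for_each_entry_safe(it, tmp, root, node)
		free(it);
	*root = RB_ROOT;
}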

/tools/perf/tests/

hists_output.c
    185: node = rb_next(node);  (in test1)
    191: node = rb_next(node);  (in test1)
    197: node = rb_next(node);  (in test1)
    203: node = rb_next(node);  (in test1)
    209: node = rb_next(node);  (in test1)
    215: node = rb_next(node);  (in test1)
    221: node = rb_next(node);  (in test1)
    227: node = rb_next(node);  (in test1)
    284: node = rb_next(node);  (in test2)
    339: node = rb_next(node);  (in test3)
    [all …]

/tools/testing/selftests/sched_ext/

numa.bpf.c
    22: static bool is_cpu_idle(s32 cpu, int node)  (in is_cpu_idle, argument)
    37: int node = __COMPAT_scx_bpf_cpu_node(scx_bpf_task_cpu(p));  (in BPF_STRUCT_OPS, local)
    51: if (is_cpu_idle(cpu, node))  (in BPF_STRUCT_OPS)
    54: if (__COMPAT_scx_bpf_cpu_node(cpu) != node)  (in BPF_STRUCT_OPS)
    55: scx_bpf_error("CPU %d should be in node %d", cpu, node);  (in BPF_STRUCT_OPS)
    64: scx_bpf_dsq_insert(p, node, SCX_SLICE_DFL, enq_flags);  (in BPF_STRUCT_OPS)
    69: int node = __COMPAT_scx_bpf_cpu_node(cpu);  (in BPF_STRUCT_OPS, local)
    71: scx_bpf_dsq_move_to_local(node);  (in BPF_STRUCT_OPS)
    76: int node, err;  (in BPF_STRUCT_OPS_SLEEPABLE, local)
    78: bpf_for(node, 0, __COMPAT_scx_bpf_nr_node_ids()) {  (in BPF_STRUCT_OPS_SLEEPABLE)
    [all …]

/tools/perf/util/

strfilter.c
    21: if (node) {  (in strfilter_node__delete)
    22: if (node->p && !is_operator(*node->p))  (in strfilter_node__delete)
    26: free(node);  (in strfilter_node__delete)
    70: struct strfilter_node *node = zalloc(sizeof(*node));  (in strfilter_node__alloc, local)
    72: if (node) {  (in strfilter_node__alloc)
    73: node->p = op;  (in strfilter_node__alloc)
    74: node->l = l;  (in strfilter_node__alloc)
    75: node->r = r;  (in strfilter_node__alloc)
    78: return node;  (in strfilter_node__alloc)
    221: if (!node || !node->p)  (in strfilter_node__compare)
    [all …]
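
strfilter builds a small binary tree of glob patterns joined by the &, | and ! operators; the nodes above are that tree. A hedged sketch of the caller-side use, as perf's probe filtering does it (the rule string and want_symbol() wrapper are invented for the example):

/* Within the perf source tree, e.g. built alongside tools/perf/util/. */
#include <stdbool.h>
#include "util/strfilter.h"

static bool want_symbol(const char *name)
{
	const char *err = NULL;
	struct strfilter *filter;
	bool match = false;

	/* Glob patterns combined with & and !: sys_* but not *64. */
	filter = strfilter__new("sys_*&!*64", &err);
	if (filter) {
		match = strfilter__compare(filter, name);
		strfilter__delete(filter);
	}
	return match;
}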

intlist.c
    18: struct int_node *node = malloc(sizeof(*node));  (in intlist__node_new, local)
    20: if (node != NULL) {  (in intlist__node_new)
    21: node->i = i;  (in intlist__node_new)
    22: node->priv = NULL;  (in intlist__node_new)
    23: rc = &node->rb_node;  (in intlist__node_new)
    39: int_node__delete(node);  (in intlist__node_delete)
    47: if (node->i > i)  (in intlist__node_cmp)
    49: else if (node->i < i)  (in intlist__node_cmp)
    68: struct int_node *node = NULL;  (in __intlist__findnew, local)
    82: return node;  (in __intlist__findnew)
    [all …]

block-range.c
    54: node = *p;  (in rb_link_left_of_node)
    55: p = &node->rb_right;  (in rb_link_left_of_node)
    57: rb_link_node(left, node, p);  (in rb_link_left_of_node)
    64: node = *p;  (in rb_link_right_of_node)
    65: p = &node->rb_left;  (in rb_link_right_of_node)
    67: rb_link_node(right, node, p);  (in rb_link_right_of_node)
    127: rb_link_left_of_node(&head->node, &next->node);  (in block_range__create)
    181: rb_link_left_of_node(&head->node, &entry->node);  (in block_range__create)
    221: rb_link_right_of_node(&tail->node, &entry->node);  (in block_range__create)
    259: rb_link_right_of_node(&tail->node, &entry->node);  (in block_range__create)
    [all …]

callchain.c
    434: if (node->hit && node->hit >= min_hit)  (in __sort_chain_flat)
    753: node->ms.map, node->ms.sym->start);  (in match_chain)
    901: if (!node)  (in append_chain_children)
    958: if (!node)  (in append_chain)
    1086: node = calloc(1, sizeof(*node));  (in callchain_cursor_append)
    1087: if (!node)  (in callchain_cursor_append)
    1536: node->hit = (node->hit * 7) / 8;  (in decay_callchain_node)
    1597: for (node = cursor->first; node != NULL; node = next) {  (in callchain_cursor__delete)
    1644: node->branch, &node->branch_flags,  (in callchain_cursor__copy)
    1647: node->branch_from, node->srcline);  (in callchain_cursor__copy)
    [all …]

/tools/testing/selftests/dt/

test_unprobed_devices.sh
    32: for node in $(find ${PDT} -type d); do
    33: [ ! -f "${node}"/compatible ] && continue
    35: if [[ -e "${node}"/status ]]; then
    36: status=$(tr -d '\000' < "${node}"/status)
    39: disabled_nodes_regex="${disabled_nodes_regex}|${node}"
    41: disabled_nodes_regex="${node}"
    52: echo "${node}" | sed -e 's|\/proc\/device-tree||'
    70: for node in ${nodes_compatible}; do
    80: ktap_test_fail "${node}"
    85: ktap_test_skip "${node}"
    [all …]

/tools/verification/rvgen/rvgen/

ltl2ba.py
    192: node.new | ({n.op.left, n.op.right} - node.old),
    216: node.new | ({n.op.left} - node.old),
    220: node.new | ({n.op.right} - node.old),
    238: node.new | ({n.op.left} - node.old),
    242: node.new | ({n.op.right} - node.old),
    260: node.new | ({n.op.right} - node.old),
    264: node.new | ({n.op.left, n.op.right} - node.old),
    371: node.old |= {n}
    399: node.old |= {n}
    531: for node in rule:
    [all …]

ltl2k.py
    25: if not node.labels:
    75: for node in self.ba:
    111: for node in self.ltl:
    116: buf.append("\tbool %s = %s && %s;" % (node, node.op.left, node.op.right))
    117: required_values |= {str(node.op.left), str(node.op.right)}
    119: buf.append("\tbool %s = %s || %s;" % (node, node.op.left, node.op.right))
    120: required_values |= {str(node.op.left), str(node.op.right)}
    122: buf.append("\tbool %s = !%s;" % (node, node.op.child))
    145: for node in self.ba:
    155: for node in self.ba:
    [all …]

/tools/testing/selftests/bpf/progs/

rbtree_fail.c
    12: struct bpf_rb_node node;  (member)
    41: bpf_rbtree_add(&groot, &n->node, less);  (in rbtree_api_nolock_add)
    56: bpf_rbtree_add(&groot, &n->node, less);  (in rbtree_api_nolock_remove)
    59: bpf_rbtree_remove(&groot, &n->node);  (in rbtree_api_nolock_remove)
    89: bpf_rbtree_add(&groot, &n->node, less);  (in rbtree_api_remove_unadded_node)
    147: bpf_rbtree_add(&groot, &n->node, less);  (in rbtree_api_add_to_multiple_trees)
    150: bpf_rbtree_add(&groot2, &n->node, less);  (in rbtree_api_add_to_multiple_trees)
    192: bpf_rbtree_add(&groot, &n->node, less);  (in rbtree_api_add_release_unlock_escape)
    200: bpf_rbtree_remove(&groot, &n->node);  (in rbtree_api_add_release_unlock_escape)
    226: bpf_rbtree_remove(&groot, &n->node);  (in rbtree_api_first_release_unlock_escape)
    [all …]

rbtree.c
    13: struct bpf_rb_node node;  (member)
    65: bpf_rbtree_add(root, &n->node, less);  (in __add_three)
    66: bpf_rbtree_add(root, &m->node, less);  (in __add_three)
    75: bpf_rbtree_add(root, &n->node, less);  (in __add_three)
    109: bpf_rbtree_add(&groot, &n->node, less);  (in rbtree_add_and_remove)
    110: bpf_rbtree_add(&groot, &m->node, less);  (in rbtree_add_and_remove)
    111: res = bpf_rbtree_remove(&groot, &n->node);  (in rbtree_add_and_remove)
    214: bpf_rbtree_add(&groot, &n->node, less);  (in rbtree_first_and_remove)
    215: bpf_rbtree_add(&groot, &m->node, less);  (in rbtree_first_and_remove)
    216: bpf_rbtree_add(&groot, &o->node, less);  (in rbtree_first_and_remove)
    [all …]
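
Both programs exercise the BPF rbtree kfuncs: nodes come from bpf_obj_new(), every add/remove must run under the bpf_spin_lock guarding the root (the *_nolock_* cases in rbtree_fail.c check exactly that), and adding a node hands its ownership to the tree. A condensed sketch of the pattern, assuming the selftests' bpf_experimental.h helpers (node_data, groot and add_one are illustrative names):

#include <vmlinux.h>
#include <bpf/bpf_helpers.h>
#include "bpf_experimental.h"

#ifndef container_of
#define container_of(ptr, type, member)					\
	((type *)((void *)(ptr) - __builtin_offsetof(type, member)))
#endif

struct node_data {
	long key;
	struct bpf_rb_node node;	/* linkage used by the rbtree kfuncs */
};

#define private(name) SEC(".data." #name) __hidden __attribute__((aligned(8)))
private(A) struct bpf_spin_lock glock;
private(A) struct bpf_rb_root groot __contains(node_data, node);

/* Ordering callback passed to bpf_rbtree_add(). */
static bool less(struct bpf_rb_node *a, const struct bpf_rb_node *b)
{
	struct node_data *na = container_of(a, struct node_data, node);
	struct node_data *nb = container_of(b, struct node_data, node);

	return na->key < nb->key;
}

SEC("tc")
long add_one(void *ctx)
{
	struct node_data *n = bpf_obj_new(typeof(*n));

	if (!n)
		return 1;
	n->key = 5;

	bpf_spin_lock(&glock);
	bpf_rbtree_add(&groot, &n->node, less);	/* the tree now owns n */
	bpf_spin_unlock(&glock);
	return 0;
}

char _license[] SEC("license") = "GPL";

bpf_rbtree_remove() hands ownership back to the program, so a removed node has to be re-added or released with bpf_obj_drop(), which is what the selftest's add_and_remove/first_and_remove cases verify.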

bpf_qdisc_fifo.c
    11: struct bpf_list_node node;  (member)
    15: private(A) struct bpf_list_head q_fifo __contains(skb_node, node);
    41: bpf_list_push_back(&q_fifo, &skbn->node);  (in BPF_PROG)
    54: struct bpf_list_node *node;  (in BPF_PROG, local)
    59: node = bpf_list_pop_front(&q_fifo);  (in BPF_PROG)
    61: if (!node)  (in BPF_PROG)
    64: skbn = container_of(node, struct skb_node, node);  (in BPF_PROG)
    89: struct bpf_list_node *node;  (in BPF_PROG, local)
    97: node = bpf_list_pop_front(&q_fifo);  (in BPF_PROG)
    100: if (!node)  (in BPF_PROG)
    [all …]
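
The FIFO qdisc keeps its queue in a BPF linked list: push_back on enqueue, pop_front on dequeue, then container_of() to get from the embedded bpf_list_node back to the surrounding struct. Stripped of the qdisc plumbing, the list pattern looks roughly like this (val_node, fifo and fifo_push_pop are invented for the sketch; the includes and private() macro are the same as in the rbtree sketch above):

struct val_node {
	__u64 val;
	struct bpf_list_node node;
};

private(B) struct bpf_spin_lock fifo_lock;
private(B) struct bpf_list_head fifo __contains(val_node, node);

SEC("tc")
long fifo_push_pop(void *ctx)
{
	struct bpf_list_node *popped;
	struct val_node *v = bpf_obj_new(typeof(*v));

	if (!v)
		return 1;
	v->val = 42;

	bpf_spin_lock(&fifo_lock);
	bpf_list_push_back(&fifo, &v->node);	/* enqueue: the list owns v now */
	popped = bpf_list_pop_front(&fifo);	/* dequeue the oldest element   */
	bpf_spin_unlock(&fifo_lock);

	if (!popped)
		return 0;
	v = container_of(popped, struct val_node, node);
	bpf_obj_drop(v);			/* we own the popped node again */
	return 0;
}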

/tools/net/sunrpc/xdrgen/subcmds/

source.py
    32: if isinstance(node, _XdrEnum):
    34: elif isinstance(node, _XdrPointer):
    36: elif isinstance(node, _XdrTypedef):
    38: elif isinstance(node, _XdrStruct):
    40: elif isinstance(node, _XdrUnion):
    42: elif isinstance(node, _RpcProgram):
    46: gen.emit_decoder(node)
    51: if isinstance(node, _XdrEnum):
    57: elif isinstance(node, _XdrStruct):
    59: elif isinstance(node, _XdrUnion):
    [all …]

/tools/testing/selftests/kvm/lib/

lru_gen_util.c
    183: int node, gen;  (in print_memcg_stats, local)
    186: for (node = 0; node < stats->nr_nodes; ++node) {  (in print_memcg_stats)
    187: pr_debug("\tnode %d\n", stats->nodes[node].node);  (in print_memcg_stats)
    190: &stats->nodes[node].gens[gen];  (in print_memcg_stats)
    248: int node, gen;  (in lru_gen_sum_memcg_stats_for_gen, local)
    251: for (node = 0; node < stats->nr_nodes; ++node) {  (in lru_gen_sum_memcg_stats_for_gen)
    304: int node, gen;  (in lru_gen_do_aging, local)
    311: for (node = 0; node < stats->nr_nodes; ++node) {  (in lru_gen_do_aging)
    320: run_aging_impl(stats->memcg_id, stats->nodes[node].node,  (in lru_gen_do_aging)
    337: for (node = 0; node < stats->nr_nodes; ++node)  (in lru_gen_find_generation)
    [all …]

/tools/testing/selftests/rseq/

basic_percpu_ops_test.c
    183: newval = (intptr_t)node;  (in this_cpu_list_push)
    237: node = list->c[cpu].head;  (in __percpu_list_pop)
    238: if (!node)  (in __percpu_list_pop)
    241: return node;  (in __percpu_list_pop)
    260: if (node)  (in test_percpu_list_thread)
    294: node = malloc(sizeof(*node));  (in test_percpu_list)
    295: assert(node);  (in test_percpu_list)
    296: node->data = j;  (in test_percpu_list)
    298: list.c[i].head = node;  (in test_percpu_list)
    316: sum += node->data;  (in test_percpu_list)
    [all …]

/tools/testing/selftests/net/

reuseport_bpf_numa.c
    197: int epfd, node;  (in test, local)
    205: for (node = 0; node < len; ++node) {  (in test)
    207: ev.data.fd = rcv_fd[node];  (in test)
    213: for (node = 0; node < len; ++node) {  (in test)
    216: send_from_node(node, family, proto);  (in test)
    217: receive_on_node(rcv_fd, len, epfd, node, proto);  (in test)
    221: for (node = len - 1; node >= 0; --node) {  (in test)
    224: send_from_node(node, family, proto);  (in test)
    225: receive_on_node(rcv_fd, len, epfd, node, proto);  (in test)
    229: for (node = 0; node < len; ++node)  (in test)
    [all …]
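
The reuseport test registers one receiving socket per NUMA node in an epoll set and then checks that traffic sent from a given node lands on that node's socket. The epoll side of that loop, taken in isolation, looks roughly like this (wait_any_readable() and rcv_fd[] are illustrative; error handling trimmed):

#include <sys/epoll.h>
#include <unistd.h>

static int wait_any_readable(int *rcv_fd, int len)
{
	struct epoll_event ev = {}, ready;
	int epfd, node;

	epfd = epoll_create1(0);
	for (node = 0; node < len; ++node) {
		ev.events = EPOLLIN;
		ev.data.fd = rcv_fd[node];	/* remember which socket this is */
		epoll_ctl(epfd, EPOLL_CTL_ADD, rcv_fd[node], &ev);
	}

	epoll_wait(epfd, &ready, 1, -1);	/* block until some node's socket has data */
	close(epfd);
	return ready.data.fd;
}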