| /tools/testing/selftests/kvm/x86/ |
| fastops_test.c |
| 115  guest_test_fastop_2("add" suffix, type_t, vals[i], vals[j]); \ |
| 116  guest_test_fastop_2("adc" suffix, type_t, vals[i], vals[j]); \ |
| 117  guest_test_fastop_2("and" suffix, type_t, vals[i], vals[j]); \ |
| 118  guest_test_fastop_2("bsf" suffix, type_t, vals[i], vals[j]); \ |
| 119  guest_test_fastop_2("bsr" suffix, type_t, vals[i], vals[j]); \ |
| 120  guest_test_fastop_2("bt" suffix, type_t, vals[i], vals[j]); \ |
| 121  guest_test_fastop_2("btc" suffix, type_t, vals[i], vals[j]); \ |
| 122  guest_test_fastop_2("btr" suffix, type_t, vals[i], vals[j]); \ |
| 123  guest_test_fastop_2("bts" suffix, type_t, vals[i], vals[j]); \ |
| 124  guest_test_fastop_2("cmp" suffix, type_t, vals[i], vals[j]); \ |
| [all …] |
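The fastops selftest stamps out the same two-operand check for a whole table of x86 ALU/bit instructions via a macro. Purely as an illustration of that macro pattern, here is a minimal stand-alone sketch; the `test_fastop_2()` name is made up, x86-64 with GCC/Clang inline asm is assumed, and this is not the selftest's own `guest_test_fastop_2()`.

```c
#include <stdint.h>
#include <stdio.h>

/*
 * Hypothetical stand-in for the selftest macro: run a two-operand ALU
 * instruction over a pair of values via inline asm and print the result.
 * "insn" must be a string literal so it concatenates into the asm template.
 */
#define test_fastop_2(insn, type_t, a, b)                               \
do {                                                                    \
        type_t dst = (a), src = (b);                                    \
        asm volatile(insn " %1, %0" : "+r"(dst) : "r"(src) : "cc");     \
        printf(insn ": %#llx\n", (unsigned long long)dst);              \
} while (0)

int main(void)
{
        test_fastop_2("add", uint32_t, 1, 2);
        test_fastop_2("and", uint64_t, 0xf0f0, 0xff00);
        return 0;
}
```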
|
| /tools/perf/tests/ |
| parse-metric.c |
| 40  count = find_value(evsel->name, vals);  in load_runtime_stat() |
| 103  load_runtime_stat(evlist, vals);  in __compute_metric() |
| 134  struct value vals[] = {  in test_ipc() local |
| 141  compute_metric("IPC", vals, &ratio) == 0);  in test_ipc() |
| 151  struct value vals[] = {  in test_frontend() local |
| 170  struct value vals[] = {  in test_cache_miss_cycles() local |
| 210  struct value vals[] = {  in test_dcache_l2() local |
| 237  struct value vals[] = {  in test_recursion_fail() local |
| 254  struct value vals[] = {  in test_memory_bandwidth() local |
| 271  struct value vals[] = {  in test_metric_group() local |
| [all …] |
|
| /tools/testing/selftests/bpf/progs/ |
| verifier_subprog_precision.c |
| 73  : __imm_ptr(vals)  in subprog_result_precise() |
| 125  : __imm_ptr(vals)  in fp_precise_subprog_result() |
| 161  : __imm_ptr(vals)  in sneaky_fp_precise_subprog_result() |
| 201  : __imm_ptr(vals),  in global_subprog_result_precise() |
| 320  : __imm_ptr(vals),  in callback_result_precise() |
| 358  : __imm_ptr(vals)  in parent_callee_saved_reg_precise() |
| 392  : __imm_ptr(vals)  in parent_callee_saved_reg_precise_global() |
| 502  : __imm_ptr(vals)  in parent_stack_slot_precise() |
| 546  : __imm_ptr(vals)  in parent_stack_slot_precise_global() |
| 726  : __imm_ptr(vals)  in subprog_spill_into_parent_stack_slot_precise() |
| [all …] |
|
| /tools/testing/selftests/gpio/ |
| gpio-mockup-cdev.c |
| 48  struct gpio_v2_line_values vals;  in get_value_v2() local |
| 51  memset(&vals, 0, sizeof(vals));  in get_value_v2() |
| 52  vals.mask = 1;  in get_value_v2() |
| 53  ret = ioctl(lfd, GPIO_V2_LINE_GET_VALUES_IOCTL, &vals);  in get_value_v2() |
| 56  return vals.bits & 0x1;  in get_value_v2() |
| 81  struct gpiohandle_data vals;  in get_value_v1() local |
| 84  memset(&vals, 0, sizeof(vals));  in get_value_v1() |
| 85  ret = ioctl(lfd, GPIOHANDLE_GET_LINE_VALUES_IOCTL, &vals);  in get_value_v1() |
| 88  return vals.values[0];  in get_value_v1() |
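get_value_v2() above reads one line through the GPIO v2 character-device ABI: zero the structure, set a bit in `mask` for the line of interest, issue `GPIO_V2_LINE_GET_VALUES_IOCTL`, then read the matching bit out of `bits`. A condensed sketch of that call, assuming `lfd` is a line-request file descriptor obtained earlier via `GPIO_V2_GET_LINE_IOCTL`:

```c
#include <linux/gpio.h>
#include <string.h>
#include <sys/ioctl.h>

/* Read bit 0 of a v2 line request; returns 0/1, or -1 if the ioctl fails. */
static int read_first_line(int lfd)
{
        struct gpio_v2_line_values vals;

        memset(&vals, 0, sizeof(vals));
        vals.mask = 1;  /* only the first requested line */
        if (ioctl(lfd, GPIO_V2_LINE_GET_VALUES_IOCTL, &vals) < 0)
                return -1;
        return vals.bits & 0x1;
}
```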
|
| /tools/testing/selftests/bpf/prog_tests/ |
| queue_stack_map.c |
| 13  __u32 vals[MAP_SIZE], val;  in test_queue_stack_map_by_type() local |
| 28  vals[i] = rand();  in test_queue_stack_map_by_type() |
| 51  err = bpf_map_update_elem(map_in_fd, NULL, &vals[i], 0);  in test_queue_stack_map_by_type() |
| 61  val = vals[i];  in test_queue_stack_map_by_type() |
| 62  pkt_v4.iph.saddr = vals[i] * 5;  in test_queue_stack_map_by_type() |
| 64  val = vals[MAP_SIZE - 1 - i];  in test_queue_stack_map_by_type() |
| 65  pkt_v4.iph.saddr = vals[MAP_SIZE - 1 - i] * 5;  in test_queue_stack_map_by_type() |
| 97  ASSERT_EQ(val, vals[i] * 5, "bpf_map_push_elem val");  in test_queue_stack_map_by_type() |
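Queue and stack maps have no keys, so the test pushes with `bpf_map_update_elem()` passing a NULL key and pops with `bpf_map_lookup_and_delete_elem()`. A minimal user-space sketch of that API using libbpf (requires BPF privileges; the map name and sizes are illustrative, not taken from the selftest):

```c
#include <bpf/bpf.h>
#include <linux/bpf.h>
#include <stdio.h>

int main(void)
{
        __u32 vals[] = { 1, 2, 3 }, val;
        int fd, i;

        /* Queue maps take key_size == 0 and pop in FIFO order. */
        fd = bpf_map_create(BPF_MAP_TYPE_QUEUE, "demo_queue", 0,
                            sizeof(__u32), 16, NULL);
        if (fd < 0)
                return 1;

        for (i = 0; i < 3; i++)
                bpf_map_update_elem(fd, NULL, &vals[i], 0);      /* push */

        while (!bpf_map_lookup_and_delete_elem(fd, NULL, &val))  /* pop */
                printf("%u\n", val);                             /* 1, 2, 3 */

        return 0;
}
```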
|
| unpriv_bpf_disabled.c |
| 105  __u32 key = 0, vals[nr_cpus], lookup_vals[nr_cpus];  in test_unpriv_bpf_disabled_positive() local |
| 114  vals[j] = expected_val;  in test_unpriv_bpf_disabled_positive() |
| 118  vals[0] = prog_fd;  in test_unpriv_bpf_disabled_positive() |
| 122  ASSERT_OK(bpf_map_update_elem(map_fds[i], &key, vals, 0), "map_update_elem");  in test_unpriv_bpf_disabled_positive() |
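The `vals[nr_cpus]` array above is sized per possible CPU, which matches how per-CPU maps are updated from user space: the value passed to `bpf_map_update_elem()` is an array with one element per possible CPU. A sketch of that convention using libbpf's CPU-count helper; the function name here is made up for illustration:

```c
#include <bpf/bpf.h>
#include <bpf/libbpf.h>
#include <errno.h>
#include <stdlib.h>

/* Write the same value into every CPU's slot of a per-CPU array map. */
static int set_all_cpus(int map_fd, __u32 key, __u64 value)
{
        int nr_cpus = libbpf_num_possible_cpus();
        __u64 *vals;
        int err, i;

        if (nr_cpus < 0)
                return nr_cpus;

        vals = calloc(nr_cpus, sizeof(*vals));
        if (!vals)
                return -ENOMEM;

        for (i = 0; i < nr_cpus; i++)
                vals[i] = value;

        err = bpf_map_update_elem(map_fd, &key, vals, 0);
        free(vals);
        return err;
}
```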
|
| reg_bounds.c |
| 1769  const u64 *vals;  in validate_gen_range_vs_const_64() local |
| 1778  vals = init_t == U64 ? ctx.uvals : (const u64 *)ctx.svals;  in validate_gen_range_vs_const_64() |
| 1788  rconst = range(init_t, vals[i], vals[i]);  in validate_gen_range_vs_const_64() |
| 1808  const u32 *vals;  in validate_gen_range_vs_const_32() local |
| 1817  vals = init_t == U32 ? ctx.usubvals : (const u32 *)ctx.ssubvals;  in validate_gen_range_vs_const_32() |
| 1827  rconst = range(init_t, vals[i], vals[i]);  in validate_gen_range_vs_const_32() |
|
| /tools/testing/selftests/kvm/ |
| guest_print_test.c |
| 24  static struct guest_vals vals;  variable |
| 59  vals = (struct guest_vals){ (uint64_t)a, (uint64_t)b, TYPE_##ext }; \ |
| 60  sync_global_to_guest(vcpu->vm, vals); \ |
| 72  switch (vals.type) {  in guest_code() |
| 75  GUEST_PRINTF(PRINTF_FMT_##ext, vals.a, vals.b); \  in guest_code() |
| 76  __GUEST_ASSERT(vals.a == vals.b, \  in guest_code() |
| 77  ASSERT_FMT_##ext, vals.a, vals.b); \  in guest_code() |
| 82  GUEST_SYNC(vals.type);  in guest_code() |
|
| /tools/testing/selftests/arm64/pauth/ |
| pac.c |
| 72  size_t vals[nkeys];  in n_same_single_set() local |
| 75  vals[0] = sign->keyia & PAC_MASK;  in n_same_single_set() |
| 76  vals[1] = sign->keyib & PAC_MASK;  in n_same_single_set() |
| 77  vals[2] = sign->keyda & PAC_MASK;  in n_same_single_set() |
| 78  vals[3] = sign->keydb & PAC_MASK;  in n_same_single_set() |
| 81  vals[4] = sign->keyg & PAC_MASK;  in n_same_single_set() |
| 85  if (vals[i] == vals[j])  in n_same_single_set() |
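n_same_single_set() masks each PAC key into `vals[]` and then counts pairwise collisions with a double loop. The comparison itself is a generic O(n²) duplicate count; a small sketch of just that part (not the selftest's code):

```c
#include <stddef.h>

/* Count how many pairs of entries in vals[] are equal. */
static int count_equal_pairs(const size_t *vals, size_t nvals)
{
        int same = 0;

        for (size_t i = 0; i < nvals; i++)
                for (size_t j = i + 1; j < nvals; j++)
                        if (vals[i] == vals[j])
                                same++;

        return same;
}
```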
|
| /tools/perf/arch/x86/tests/ |
| insn-x86.c |
| 42  } vals[] = {  in get_op() local |
| 63  for (val = vals; val->name; val++) {  in get_op() |
| 78  } vals[] = {  in get_branch() local |
| 90  for (val = vals; val->name; val++) {  in get_branch() |
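Both get_op() and get_branch() walk a local `vals[]` table until they hit an entry whose name is NULL, i.e. a sentinel-terminated lookup table. The generic shape of that pattern; the table contents below are made up:

```c
#include <stddef.h>
#include <string.h>

/* Look up an opcode by name in a sentinel-terminated table; -1 if absent. */
static int lookup_op(const char *name)
{
        static const struct {
                const char *name;
                int op;
        } vals[] = {
                { "call", 1 },
                { "jmp",  2 },
                { "ret",  3 },
                { NULL,   0 },  /* sentinel ends the scan */
        };

        for (int i = 0; vals[i].name; i++)
                if (!strcmp(vals[i].name, name))
                        return vals[i].op;

        return -1;
}
```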
|
| /tools/net/ynl/pyynl/lib/ |
| ynl.py |
| 854  search_attrs = SpaceAttrs(op.attr_set, vals) |
| 907  def _encode_struct(self, name, vals):  argument |
| 911  value = vals.pop(m.name) if m.name in vals else None |
| 1035  search_attrs = SpaceAttrs(op.attr_set, vals) |
| 1036  for name, value in vals.items(): |
| 1045  for (method, vals, flags) in ops: |
| 1115  ops = [(method, vals, req_flags)] |
| 1118  def do(self, method, vals, flags=None):  argument |
| 1119  return self._op(method, vals, flags) |
| 1121  def dump(self, method, vals):  argument |
| [all …] |
|
| /tools/objtool/include/objtool/ |
| cfi.h |
| 30  struct cfi_reg vals[CFI_NUM_REGS];  member |
|
| /tools/testing/selftests/bpf/ |
| test_maps.c |
| 538  __u32 vals[MAP_SIZE + MAP_SIZE/2], val = 0;  in test_queuemap() local |
| 543  vals[i] = rand();  in test_queuemap() |
| 562  assert(bpf_map_update_elem(fd, NULL, &vals[i], 0) == 0);  in test_queuemap() |
| 569  assert(bpf_map_lookup_elem(fd, NULL, &val) == 0 && val == vals[0]);  in test_queuemap() |
| 573  assert(bpf_map_update_elem(fd, NULL, &vals[i], BPF_EXIST) == 0);  in test_queuemap() |
| 578  val == vals[i]);  in test_queuemap() |
| 594  __u32 vals[MAP_SIZE + MAP_SIZE/2], val = 0;  in test_stackmap() local |
| 599  vals[i] = rand();  in test_stackmap() |
| 618  assert(bpf_map_update_elem(fd, NULL, &vals[i], 0) == 0);  in test_stackmap() |
| 629  assert(bpf_map_update_elem(fd, NULL, &vals[i], BPF_EXIST) == 0);  in test_stackmap() |
| [all …] |
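For these keyless maps, `bpf_map_lookup_elem()` with a NULL key peeks at the next element without removing it, while `bpf_map_lookup_and_delete_elem()` pops it; a stack map hands elements back in LIFO order. A small sketch of that distinction (libbpf, privileges required, map name and values illustrative):

```c
#include <assert.h>
#include <bpf/bpf.h>
#include <linux/bpf.h>

static void stack_map_demo(void)
{
        __u32 vals[] = { 10, 20, 30 }, val;
        int fd, i;

        fd = bpf_map_create(BPF_MAP_TYPE_STACK, "demo_stack", 0,
                            sizeof(__u32), 8, NULL);
        assert(fd >= 0);

        for (i = 0; i < 3; i++)
                assert(bpf_map_update_elem(fd, NULL, &vals[i], 0) == 0);  /* push */

        assert(bpf_map_lookup_elem(fd, NULL, &val) == 0 && val == 30);    /* peek */
        assert(bpf_map_lookup_and_delete_elem(fd, NULL, &val) == 0);      /* pop */
        assert(val == 30);
}
```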
|
| /tools/perf/util/ |
| stat-display.c |
| 501  char buf[64], *vals, *ends;  in print_metric_csv() local |
| 508  ends = vals = skip_spaces(buf);  in print_metric_csv() |
| 512  fprintf(out, "%s%s%s%s", config->csv_sep, vals, config->csv_sep, skip_spaces(unit));  in print_metric_csv() |
| 650  char buf[64], *vals, *ends;  in print_metric_only_csv() local |
| 657  ends = vals = skip_spaces(buf);  in print_metric_only_csv() |
| 661  fprintf(out, "%s%s", vals, config->csv_sep);  in print_metric_only_csv() |
| 674  const char *vals;  in print_metric_only_json() local |
| 682  vals = ends = skip_spaces(buf);  in print_metric_only_json() |
| 686  if (!vals[0])  in print_metric_only_json() |
| 687  vals = "none";  in print_metric_only_json() |
| [all …] |
|
| stat.c |
| 304  static int check_per_pkg(struct evsel *counter, struct perf_counts_values *vals,  in check_per_pkg() argument |
| 337  if (!(vals->run && vals->ena))  in check_per_pkg() |
|
| python.c |
| 674  PyObject *vals = PyList_New(5);  in pyrf_counts_values_get_values() local |
| 676  if (!vals)  in pyrf_counts_values_get_values() |
| 679  PyList_SetItem(vals, i, PyLong_FromLong(self->values.values[i]));  in pyrf_counts_values_get_values() |
| 681  return vals;  in pyrf_counts_values_get_values() |
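pyrf_counts_values_get_values() builds a Python list from the raw counter values via the CPython C API. A self-contained sketch of the same three calls, with error handling kept minimal and a plain C array standing in for perf's data:

```c
#include <Python.h>

/* Build a Python list of five integers from a C array. */
static PyObject *values_to_pylist(const long *values)
{
        PyObject *vals = PyList_New(5);

        if (!vals)
                return NULL;

        for (int i = 0; i < 5; i++)
                /* PyList_SetItem steals the reference to the new PyLong. */
                PyList_SetItem(vals, i, PyLong_FromLong(values[i]));

        return vals;
}
```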
|
| /tools/lib/bpf/ |
| btf_relocate.c |
| 103  struct btf_name_info *vals,  in search_btf_name_size() argument |
| 112  struct btf_name_info *val = &vals[mid];  in search_btf_name_size() |
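search_btf_name_size() is a binary search over an array of entries sorted by name and size. A generic sketch of that search with a hypothetical key type; the struct layout and comparison order are assumptions for illustration, not libbpf's `btf_name_info`:

```c
#include <stddef.h>
#include <string.h>

struct name_info {
        const char *name;
        size_t size;
};

/* Order by name, then by size; must match how the array was sorted. */
static int cmp_name_size(const struct name_info *a, const struct name_info *b)
{
        int ret = strcmp(a->name, b->name);

        return ret ? ret : (a->size > b->size) - (a->size < b->size);
}

/* Classic binary search; returns the matching entry or NULL. */
static struct name_info *search_name_size(const struct name_info *key,
                                          struct name_info *vals, size_t nelems)
{
        size_t lo = 0, hi = nelems;

        while (lo < hi) {
                size_t mid = lo + (hi - lo) / 2;
                struct name_info *val = &vals[mid];
                int cmp = cmp_name_size(key, val);

                if (!cmp)
                        return val;
                if (cmp < 0)
                        hi = mid;
                else
                        lo = mid + 1;
        }

        return NULL;
}
```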
|
| /tools/objtool/ |
| check.c |
| 334  cfi->vals[i].base = CFI_UNDEFINED;  in init_cfi_state() |
| 2819  cfi->vals[op->dest.reg].base = CFI_CFA;  in update_cfi_state() |
| 2820  cfi->vals[op->dest.reg].offset = -cfi->stack_size;  in update_cfi_state() |
| 2838  cfi->vals[op->src.reg].base == CFI_CFA) {  in update_cfi_state() |
| 2848  cfa->offset = -cfi->vals[op->src.reg].offset;  in update_cfi_state() |
| 2890  cfi->vals[op->src.reg].base == CFI_SP_INDIRECT &&  in update_cfi_state() |
| 2891  cfi->vals[op->src.reg].offset == cfa->offset) {  in update_cfi_state() |
| 2954  cfi->vals[op->dest.reg].base = CFI_CFA;  in update_cfi_state() |
| 2955  cfi->vals[op->dest.reg].offset = \  in update_cfi_state() |
| 3159  cfi->vals[op->dest.reg].base = CFI_SP_INDIRECT;  in update_cfi_state() |
| [all …] |
|
| /tools/testing/radix-tree/ |
| maple.c |
| 34986  mtree_store_range(mt, vals->index, vals->last, vals->entry2,  in run_check_rcu() |
| 35087  (index % 2 ? vals->entry2 : vals->entry3),  in run_check_rcu_slowread() |
| 35276  vals.count = 1000;  in check_rcu_threaded() |
| 35277  vals.mt = mt;  in check_rcu_threaded() |
| 35278  vals.index = 8650;  in check_rcu_threaded() |
| 35279  vals.last = 8666;  in check_rcu_threaded() |
| 35300  vals.mt = mt;  in check_rcu_threaded() |
| 35301  vals.index = 4390;  in check_rcu_threaded() |
| 35302  vals.last = 4398;  in check_rcu_threaded() |
| 35337  vals.mt = mt;  in check_rcu_threaded() |
| [all …] |
|
| /tools/perf/ |
| builtin-diff.c |
| 1476  static int all_zero(unsigned long *vals, int len)  in all_zero() argument |
| 1481  if (vals[i] != 0)  in all_zero() |
|
| /tools/power/pm-graph/ |
| sleepgraph.py |
| 1247  vals = [dev] |
| 1250  vals.append(prop.split('=')[-1]) |
| 1251  return ':'.join(vals) |
|