/tools/testing/selftests/bpf/prog_tests/

arg_parsing.c
    9  set->tests = NULL;    in init_test_filter_set()
   17  for (j = 0; j < set->tests[i].subtest_cnt; j++)    in free_test_filter_set()
   18  free((void *)set->tests[i].subtests[j]);    in free_test_filter_set()
   19  free(set->tests[i].subtests);    in free_test_filter_set()
   20  free(set->tests[i].name);    in free_test_filter_set()
   23  free(set->tests);    in free_test_filter_set()
   36  if (!ASSERT_OK_PTR(set.tests, "test filters initialized"))    in test_parse_test_list()
   46  if (!ASSERT_OK_PTR(set.tests, "test filters initialized"))    in test_parse_test_list()
   60  if (!ASSERT_OK_PTR(set.tests, "test filters initialized"))    in test_parse_test_list()
   66  ASSERT_OK(strcmp("arg_parsing", set.tests[0].subtests[0]),    in test_parse_test_list()
  [all …]
|
obj_name.c
   10  } tests[] = {    in test_obj_name() local
   23  for (i = 0; i < ARRAY_SIZE(tests); i++) {    in test_obj_name()
   24  size_t name_len = strlen(tests[i].name) + 1;    in test_obj_name()
   40  CHECK((tests[i].success && fd < 0) ||    in test_obj_name()
   41  (!tests[i].success && fd >= 0) ||    in test_obj_name()
   42  (!tests[i].success && errno != tests[i].expected_errno),    in test_obj_name()
   45  fd, tests[i].success, errno, tests[i].expected_errno);    in test_obj_name()
   61  CHECK((tests[i].success && fd < 0) ||    in test_obj_name()
   62  (!tests[i].success && fd >= 0) ||    in test_obj_name()
   63  (!tests[i].success && errno != tests[i].expected_errno),    in test_obj_name()
  [all …]
|
global_data.c
   18  } tests[] = {    in test_global_data_number() local
   32  for (i = 0; i < ARRAY_SIZE(tests); i++) {    in test_global_data_number()
   34  CHECK(err || num != tests[i].num, tests[i].name,    in test_global_data_number()
   36  err, num, tests[i].num);    in test_global_data_number()
   53  } tests[] = {    in test_global_data_string() local
   61  for (i = 0; i < ARRAY_SIZE(tests); i++) {    in test_global_data_string()
   63  CHECK(err || memcmp(str, tests[i].str, sizeof(str)),    in test_global_data_string()
   65  err, str, tests[i].str);    in test_global_data_string()
   88  } tests[] = {    in test_global_data_struct() local
   95  for (i = 0; i < ARRAY_SIZE(tests); i++) {    in test_global_data_struct()
  [all …]
|
empty_skb.c
   29  } tests[] = {    in test_empty_skb() local
  115  for (i = 0; i < ARRAY_SIZE(tests); i++) {    in test_empty_skb()
  122  expected_ret = at_egress && !at_tc ? tests[i].lwt_egress_ret : tests[i].ret;    in test_empty_skb()
  124  tattr.data_in = tests[i].data_in;    in test_empty_skb()
  125  tattr.data_size_in = tests[i].data_size_in;    in test_empty_skb()
  128  bpf_obj->bss->ifindex = *tests[i].ifindex;    in test_empty_skb()
  131  sprintf(buf, "err: %s [%s]", tests[i].msg, bpf_program__name(prog));    in test_empty_skb()
  133  if (at_tc && tests[i].success_on_tc)    in test_empty_skb()
  136  ASSERT_EQ(err, tests[i].err, buf);    in test_empty_skb()
  137  sprintf(buf, "ret: %s [%s]", tests[i].msg, bpf_program__name(prog));    in test_empty_skb()
  [all …]
|
test_csum_diff.c
  365  static void test_csum_diff(struct testcase *tests, int num_tests)    in test_csum_diff() argument
  376  skel->rodata->to_buff_len = tests[i].to_buff_len;    in test_csum_diff()
  377  skel->rodata->from_buff_len = tests[i].from_buff_len;    in test_csum_diff()
  383  memcpy(skel->bss->to_buff, tests[i].to_buff, tests[i].to_buff_len);    in test_csum_diff()
  384  memcpy(skel->bss->from_buff, tests[i].from_buff, tests[i].from_buff_len);    in test_csum_diff()
  385  skel->bss->seed = tests[i].seed;    in test_csum_diff()
  388  ASSERT_EQ(got, tests[i].result, "csum_diff result");    in test_csum_diff()
|
global_func_args.c
   20  } tests[] = {    in test_global_func_args0() local
   30  for (i = 0; i < ARRAY_SIZE(tests); ++i) {    in test_global_func_args0()
   31  const int expected_value = tests[i].expected_value;    in test_global_func_args0()
   35  CHECK(err || actual_value != expected_value, tests[i].descr,    in test_global_func_args0()
|
fib_lookup.c
   57  static const struct fib_lookup_test tests[] = {    variable
  333  for (i = 0; i < ARRAY_SIZE(tests); i++) {    in test_fib_lookup()
  334  printf("Testing %s ", tests[i].desc);    in test_fib_lookup()
  340  skel->bss->lookup_flags = tests[i].lookup_flags;    in test_fib_lookup()
  349  if (tests[i].expected_src)    in test_fib_lookup()
  350  assert_src_ip(fib_params, tests[i].expected_src);    in test_fib_lookup()
  352  if (tests[i].expected_dst)    in test_fib_lookup()
  353  assert_dst_ip(fib_params, tests[i].expected_dst);    in test_fib_lookup()
  355  ret = memcmp(tests[i].dmac, fib_params->dmac, sizeof(tests[i].dmac));    in test_fib_lookup()
  359  mac_str(expected, tests[i].dmac);    in test_fib_lookup()
  [all …]
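
Most of the bpf prog_tests entries above share the same table-driven shape: a tests[] array of cases walked with ARRAY_SIZE(), one CHECK()/ASSERT_*() per case. The following is a minimal, self-contained sketch of that pattern only; the struct fields, case values, and the check_value() helper are hypothetical stand-ins rather than code from any file listed here, and plain stdio replaces the selftest assertion macros.

/*
 * Hypothetical table-driven test sketch: a tests[] table iterated with
 * ARRAY_SIZE(), mirroring the idiom seen in obj_name.c, global_data.c,
 * empty_skb.c, and fib_lookup.c above.
 */
#include <stdio.h>

#define ARRAY_SIZE(arr) (sizeof(arr) / sizeof((arr)[0]))

struct test_case {
        const char *name;
        int input;
        int expected;
};

static const struct test_case tests[] = {
        { "zero",     0,  0 },
        { "positive", 2,  4 },
        { "negative", -3, -6 },
};

/* Hypothetical stand-in for the functionality under test. */
static int check_value(int input)
{
        return input * 2;
}

int main(void)
{
        int failed = 0;

        for (size_t i = 0; i < ARRAY_SIZE(tests); i++) {
                int got = check_value(tests[i].input);

                if (got != tests[i].expected) {
                        fprintf(stderr, "FAIL %s: got %d, expected %d\n",
                                tests[i].name, got, tests[i].expected);
                        failed++;
                } else {
                        printf("PASS %s\n", tests[i].name);
                }
        }

        return failed ? 1 : 0;
}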
|
/tools/lib/perf/tests/

Build
    1  tests-y += main.o
    2  tests-y += test-evsel.o
    3  tests-y += test-evlist.o
    4  tests-y += test-cpumap.o
    5  tests-y += test-threadmap.o
|
/tools/testing/selftests/clone3/

clone3.c
  173  static const struct test tests[] = {    variable
  319  ksft_set_plan(ARRAY_SIZE(tests));    in main()
  322  for (i = 0; i < ARRAY_SIZE(tests); i++) {    in main()
  323  if (tests[i].filter && tests[i].filter()) {    in main()
  324  ksft_test_result_skip("%s\n", tests[i].name);    in main()
  328  if (tests[i].size_function)    in main()
  329  size = tests[i].size_function();    in main()
  331  size = tests[i].size;    in main()
  336  tests[i].expected,    in main()
  337  tests[i].test_mode),    in main()
  [all …]
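
The clone3.c entry above (and test_pids.c in the cgroup directory further down) drives its tests[] table through the kselftest TAP helpers: ksft_set_plan() is sized from ARRAY_SIZE(tests), each case emits one ksft_test_result_*() line, and an optional filter() hook produces a SKIP. The sketch below is a hedged illustration of that flow, assuming it is built from within tools/testing/selftests/ so that kselftest.h is available; the run/filter callbacks and case names are hypothetical.

/*
 * Hedged kselftest table sketch; only the ksft_* reporting flow follows
 * the entries above, the test bodies are placeholders.  The include path
 * assumes the file sits one directory below tools/testing/selftests/.
 */
#include <stdbool.h>
#include <stddef.h>
#include "../kselftest.h"

#ifndef ARRAY_SIZE
#define ARRAY_SIZE(arr) (sizeof(arr) / sizeof((arr)[0]))
#endif

struct test {
        const char *name;
        bool (*run)(void);     /* hypothetical per-case body */
        bool (*filter)(void);  /* returns true when the case should be skipped */
};

static bool always_passes(void)
{
        return true;
}

static bool not_supported_here(void)
{
        return true;    /* pretend the feature is missing, forcing a SKIP */
}

static const struct test tests[] = {
        { "basic",       always_passes, NULL },
        { "unsupported", always_passes, not_supported_here },
};

int main(void)
{
        ksft_print_header();
        ksft_set_plan(ARRAY_SIZE(tests));

        for (size_t i = 0; i < ARRAY_SIZE(tests); i++) {
                if (tests[i].filter && tests[i].filter()) {
                        ksft_test_result_skip("%s\n", tests[i].name);
                        continue;
                }
                if (tests[i].run())
                        ksft_test_result_pass("%s\n", tests[i].name);
                else
                        ksft_test_result_fail("%s\n", tests[i].name);
        }

        /* Exits with a pass/fail status derived from the recorded results. */
        ksft_finished();
}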
|
/tools/perf/Documentation/

perf-test.txt
    6  perf-test - Runs sanity tests.
   15  This command does assorted sanity tests, initially through linked routines but
   16  also will look for a directory with more tests in the form of scripts.
   18  To get a list of available tests use 'perf test list', specifying a test name
   19  fragment will show all tests that have it.
   21  To run just specific tests, inform test name fragments or the numbers obtained
   39  Run all tests one after the other. By default "exclusive"
   40  tests are run sequentially, but other tests are run in
   50  Do not fork child for each test, run all tests within single process, this
   61  Used with the shell script regression tests.
|
/tools/testing/selftests/riscv/hwprobe/

cbo.c
  203  } tests[] = {    variable
  226  tests[TEST_NO_ZICBOZ].enabled = true;    in main()
  227  tests[TEST_NO_ZICBOM].enabled = true;    in main()
  243  tests[TEST_ZICBOZ].enabled = true;    in main()
  244  tests[TEST_NO_ZICBOZ].enabled = false;    in main()
  250  tests[TEST_ZICBOM].enabled = true;    in main()
  251  tests[TEST_NO_ZICBOM].enabled = false;    in main()
  256  for (i = 0; i < ARRAY_SIZE(tests); ++i)    in main()
  257  plan += tests[i].enabled ? tests[i].nr_tests : 0;    in main()
  265  if (tests[i].enabled)    in main()
  [all …]
|
/tools/testing/kunit/test_data/

test_skip_all_tests.log
    5  ok 1 - string_stream_test_empty_on_creation # SKIP all tests skipped
    6  ok 2 - string_stream_test_not_empty_after_add # SKIP all tests skipped
    7  ok 3 - string_stream_test_get_string # SKIP all tests skipped
   12  ok 1 - example_simple_test # SKIP all tests skipped
|
/tools/testing/selftests/cgroup/

test_pids.c
  137  } tests[] = {    variable
  148  ksft_set_plan(ARRAY_SIZE(tests));    in main()
  163  for (int i = 0; i < ARRAY_SIZE(tests); i++) {    in main()
  164  switch (tests[i].fn(root)) {    in main()
  166  ksft_test_result_pass("%s\n", tests[i].name);    in main()
  169  ksft_test_result_skip("%s\n", tests[i].name);    in main()
  172  ksft_test_result_fail("%s\n", tests[i].name);    in main()
|
/tools/testing/selftests/kvm/s390/

tprot.c
   77  } tests[] = {    variable
  141  enum stage stage = tests[*i].stage;    in perform_next_stage()
  145  for (; tests[*i].stage == stage; (*i)++) {    in perform_next_stage()
  153  skip = tests[*i].addr < (void *)PAGE_SIZE &&    in perform_next_stage()
  154  tests[*i].expected != TRANSL_UNAVAIL &&    in perform_next_stage()
  157  result = test_protection(tests[*i].addr, tests[*i].key);    in perform_next_stage()
  158  __GUEST_ASSERT(result == tests[*i].expected,    in perform_next_stage()
  160  tests[*i].expected, result, *i);    in perform_next_stage()
|
/tools/testing/memblock/

Makefile
    9  TEST_OFILES = tests/alloc_nid_api.o tests/alloc_helpers_api.o tests/alloc_api.o \
   10  tests/basic_api.o tests/common.o tests/alloc_exact_nid_api.o
|
/tools/testing/selftests/livepatch/

README
    5  This is a small set of sanity tests for the kernel livepatching.
    9  buffer and parsed for expected messages. (Note: the tests will compare
   19  Building the tests
   22  To only build the tests without running them, run:
   29  Running the tests
   35  tested on systems with different kABI, ensuring they the tests are backwards
   47  Adding tests
   53  "livepatch:" and "test_klp" strings, so tests be sure to include one of
|
/tools/testing/selftests/lkdtm/

Makefile
    7  TEST_FILES := tests.txt
    9  TEST_GEN_PROGS = $(patsubst %,$(OUTPUT)/%.sh,$(shell awk '{print $$1}' tests.txt | sed -e 's/\#//'))
   12  $(OUTPUT)/%: run.sh tests.txt
|
/tools/perf/tests/

tests-scripts.c
  176  struct test_case *tests;    in append_script() local
  189  tests = calloc(2, sizeof(*tests));    in append_script()
  190  if (!tests) {    in append_script()
  194  tests[0].name = strdup_check(name);    in append_script()
  197  tests[0].exclusive = true;    in append_script()
  200  tests[0].desc = strdup_check(desc);    in append_script()
  201  tests[0].run_case = shell_test__run;    in append_script()
  205  free(tests);    in append_script()
  209  test_suite->test_cases = tests;    in append_script()
  216  free(tests);    in append_script()
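
append_script() above allocates room for two test_case entries, fills index 0 with the shell script's name, description, and run callback, and leaves index 1 zeroed, which appears to serve as the terminating sentinel of the case list hung off the suite. The following is a simplified, self-contained sketch of that allocation idiom only; the struct layouts, field set, and helper names here are stand-ins, not perf's actual tools/perf/tests/tests.h definitions.

/*
 * Simplified sketch of the "one case plus zeroed sentinel" idiom seen in
 * append_script().  shell_test_run() merely marks where shell_test__run
 * would go; these types are stand-ins for perf's internal definitions.
 */
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct test_case {
        const char *name;
        const char *desc;
        int (*run_case)(void);          /* simplified callback signature */
        bool exclusive;
};

struct test_suite {
        const char *desc;
        struct test_case *test_cases;   /* terminated by a zeroed entry */
};

static int shell_test_run(void)
{
        return 0;                       /* placeholder test body */
}

static struct test_suite *append_script_sketch(const char *name, const char *desc)
{
        struct test_suite *suite = calloc(1, sizeof(*suite));
        struct test_case *tests = calloc(2, sizeof(*tests));    /* case + sentinel */

        if (!suite || !tests) {
                free(suite);
                free(tests);
                return NULL;
        }

        tests[0].name = strdup(name);
        tests[0].desc = strdup(desc);
        tests[0].run_case = shell_test_run;

        suite->desc = tests[0].desc;
        suite->test_cases = tests;
        return suite;
}

int main(void)
{
        struct test_suite *suite = append_script_sketch("example_script.sh",
                                                        "Example shell test");

        if (!suite)
                return 1;

        printf("suite \"%s\", first case \"%s\"\n",
               suite->desc, suite->test_cases[0].name);
        return 0;
}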
|
/tools/testing/selftests/mm/

run_vmtests.sh
   22  -d: run destructive tests
   25  will run all tests.
   31  tests for mmap(2)
   33  tests for gup
   35  tests for userfaultfd(2)
   39  tests for mlock(2)
   41  tests for mremap(2)
   45  vmalloc smoke tests
   47  hmm smoke tests
   61  memory protection key tests
  [all …]
|
/tools/testing/selftests/drivers/net/

README.rst
    3  Running driver tests
    6  Networking driver tests are executed within kselftest framework like any
    7  other tests. They support testing both real device drivers and emulated /
   13  By default, when no extra parameters are set or exported, tests execute
   16  In this mode the tests are indistinguishable from other selftests and
   22  Executing tests against a real device requires external preparation.
   23  The netdevice against which tests will be run must exist, be running
   27  the tests against a real device.
   33  and a real device. SW-only tests should instead be placed in net/ or
   34  drivers/net/netdevsim, HW-only tests in drivers/net/hw.
  [all …]
|
/tools/memory-model/

README
   12  the state space of small litmus tests.
   69  tests is available in tools/memory-model/Documentation/litmus-tests.txt.
   73  tools/memory-model/litmus-tests/
   74  Documentation/litmus-tests/
   76  Several thousand more example litmus tests are available here:
   85  tools/memory-model/Documentation/litmus-tests.txt
  158  running LKMM litmus tests.
  194  litmus-tests
  199  By "representative", it means the one in the litmus-tests
  205  2) orthogonal, there should be no two litmus tests
  [all …]
|
/tools/perf/tests/shell/coresight/asm_pure_loop/

Makefile
   21  install-tests: all
   25  $(call QUIET_INSTALL, tests) \
   34  .PHONY: all clean install-tests
|
/tools/perf/tests/shell/coresight/memcpy_thread/

Makefile
   20  install-tests: all
   24  $(call QUIET_INSTALL, tests) \
   33  .PHONY: all clean install-tests
|
/tools/perf/tests/shell/coresight/thread_loop/

Makefile
   20  install-tests: all
   24  $(call QUIET_INSTALL, tests) \
   33  .PHONY: all clean install-tests
|
/tools/perf/tests/shell/coresight/unroll_loop_thread/

Makefile
   20  install-tests: all
   24  $(call QUIET_INSTALL, tests) \
   33  .PHONY: all clean install-tests
|