/linux/tools/lib/perf/include/perf/

cpumap.h
     51  LIBPERF_API struct perf_cpu perf_cpu_map__cpu(const struct perf_cpu_map *cpus, int idx);
     90  for ((idx) = 0, (cpu) = perf_cpu_map__cpu(cpus, idx); \
     92  (idx)++, (cpu) = perf_cpu_map__cpu(cpus, idx))
     95  for ((idx) = 0, (_cpu) = perf_cpu_map__cpu(cpus, idx); \
     97  (idx)++, (_cpu) = perf_cpu_map__cpu(cpus, idx)) \
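As the prototype above shows, perf_cpu_map__cpu() translates a map index into a struct perf_cpu whose .cpu field is the logical CPU number, and the for(...) fragments at lines 90-97 belong to the header's for-each iteration macros built on top of it. Below is a minimal, stand-alone sketch of the same index-based walk; the "0-3" CPU list is an arbitrary example, and perf_cpu_map__new()/perf_cpu_map__nr()/perf_cpu_map__put() are the companion libperf calls assumed to be available.

/* Sketch: walk a CPU map by index (assumes libperf headers and library are installed). */
#include <stdio.h>
#include <perf/cpumap.h>

int main(void)
{
	/* "0-3" is an example CPU list string; adjust to the CPUs of interest. */
	struct perf_cpu_map *cpus = perf_cpu_map__new("0-3");

	if (!cpus)
		return 1;

	for (int idx = 0; idx < perf_cpu_map__nr(cpus); idx++) {
		struct perf_cpu cpu = perf_cpu_map__cpu(cpus, idx);

		printf("index %d -> cpu %d\n", idx, cpu.cpu);
	}

	perf_cpu_map__put(cpus);	/* drop the reference taken by perf_cpu_map__new() */
	return 0;
}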
/linux/tools/perf/tests/

cpumap.c
     42  TEST_ASSERT_VAL("wrong cpu", perf_cpu_map__cpu(map, 0).cpu == 0);  in process_event_mask()
     44  TEST_ASSERT_VAL("wrong cpu", perf_cpu_map__cpu(map, i - 1).cpu == i);  in process_event_mask()
     69  TEST_ASSERT_VAL("wrong cpu", perf_cpu_map__cpu(map, 0).cpu == 1);  in process_event_cpus()
     70  TEST_ASSERT_VAL("wrong cpu", perf_cpu_map__cpu(map, 1).cpu == 256);  in process_event_cpus()
     95  TEST_ASSERT_VAL("wrong cpu", perf_cpu_map__cpu(map, 0).cpu == 1);  in process_event_range_cpus()

event_update.c
     73  TEST_ASSERT_VAL("wrong cpus", perf_cpu_map__cpu(map, 0).cpu == 1);  in process_event_cpus()
     74  TEST_ASSERT_VAL("wrong cpus", perf_cpu_map__cpu(map, 1).cpu == 2);  in process_event_cpus()
     75  TEST_ASSERT_VAL("wrong cpus", perf_cpu_map__cpu(map, 2).cpu == 3);  in process_event_cpus()

mmap-basic.c
     62  CPU_SET(perf_cpu_map__cpu(cpus, 0).cpu, &cpu_set);  in test__basic_mmap()
     66  perf_cpu_map__cpu(cpus, 0).cpu,  in test__basic_mmap()

topology.c
    110  if (cpu__get_socket_id(perf_cpu_map__cpu(map, 0)) == -1)  in check_cpu_topology()
/linux/tools/perf/util/

record.c
    102  if (perf_cpu_map__cpu(evlist->core.user_requested_cpus, 0).cpu < 0)  in evlist__config()
    244  cpu = perf_cpu_map__cpu(cpus, 0);  in evlist__can_select_event()
    248  cpu = perf_cpu_map__cpu(evlist->core.user_requested_cpus, 0);  in evlist__can_select_event()

cpumap.c
    613  cpu = perf_cpu_map__cpu(map, i);  in cpu_map__snprint()
    620  perf_cpu_map__cpu(map, i).cpu);  in cpu_map__snprint()
    622  } else if (((i - start) != (cpu.cpu - perf_cpu_map__cpu(map, start).cpu)) || last) {  in cpu_map__snprint()
    628  perf_cpu_map__cpu(map, start).cpu);  in cpu_map__snprint()
    632  perf_cpu_map__cpu(map, start).cpu, perf_cpu_map__cpu(map, end).cpu);  in cpu_map__snprint()

perf_api_probe.c
     70  cpu = perf_cpu_map__cpu(cpus, 0);  in perf_probe_api()
    147  cpu = perf_cpu_map__cpu(cpus, 0);  in perf_can_record_cpu_wide()

cpumap.h
     71  return perf_cpu_map__nr(cpus) == 1 && perf_cpu_map__cpu(cpus, 0).cpu == -1;  in cpu_map__is_dummy()

bpf_counter.c
    551  key = perf_cpu_map__cpu(evsel->core.cpus, i).cpu;  in bperf__load()
    595  cpu = perf_cpu_map__cpu(all_cpu_map, i).cpu;  in bperf_sync_counters()
    645  cpu = perf_cpu_map__cpu(evsel__cpus(evsel), i).cpu;  in bperf__read()

bpf_ftrace.c
     69  cpu = perf_cpu_map__cpu(ftrace->evlist->core.user_requested_cpus, i).cpu;  in perf_ftrace__latency_prepare_bpf()

stat.c
    309  struct perf_cpu cpu = perf_cpu_map__cpu(cpus, cpu_map_idx);  in check_per_pkg()
    425  struct perf_cpu cpu = perf_cpu_map__cpu(evsel->core.cpus, cpu_map_idx);  in process_counter_values()

bpf_off_cpu.c
    219  cpu = perf_cpu_map__cpu(evlist->core.user_requested_cpus, i).cpu;  in off_cpu_prepare()

cputopo.c
    413  if (load_numa_node(&tp->nodes[i], perf_cpu_map__cpu(node_map, i).cpu)) {  in numa_topology__new()

mmap.c
    255  cpu = perf_cpu_map__cpu(cpu_map, idx); /* map c index to online cpu index */  in build_node_mask()

evsel.c
   1817  struct perf_cpu cpu = perf_cpu_map__cpu(evsel->core.cpus,  in evsel__read_tool()
   1887  cpu = perf_cpu_map__cpu(evsel->core.cpus, cpu_map_idx);  in evsel__match_other_cpu()
   2338  cpu = perf_cpu_map__cpu(evsel->core.cpus, idx);  in evsel__open_cpu()
   2359  pid, perf_cpu_map__cpu(cpus, idx).cpu, group_fd, evsel->open_flags);  in evsel__open_cpu()
   2362  perf_cpu_map__cpu(cpus, idx).cpu,  in evsel__open_cpu()
   2379  perf_cpu_map__cpu(cpus, idx),  in evsel__open_cpu()
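The evsel__open_cpu() hits above show the common translation step in tools/perf: a cpu-map index (idx, cpu_map_idx, i) is turned into a kernel CPU number via perf_cpu_map__cpu(...).cpu before it reaches a syscall, a BPF map key, or an affinity mask. The sketch below imitates that step with a plain perf_event_open(2) call rather than perf's evsel code; the hardware-cycles event, pid = -1 (system-wide counting on each CPU), and the "0-3" map are illustrative assumptions, and system-wide events need CAP_PERFMON or a permissive perf_event_paranoid setting.

/* Sketch: open one counting event per CPU in a perf_cpu_map (not perf's internal code path). */
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/syscall.h>
#include <linux/perf_event.h>
#include <perf/cpumap.h>

static int open_cycles_on(int cpu)
{
	struct perf_event_attr attr;

	memset(&attr, 0, sizeof(attr));
	attr.size = sizeof(attr);
	attr.type = PERF_TYPE_HARDWARE;
	attr.config = PERF_COUNT_HW_CPU_CYCLES;
	attr.disabled = 1;

	/* pid = -1, cpu = N: count all tasks on that CPU (needs sufficient privileges). */
	return syscall(SYS_perf_event_open, &attr, -1, cpu, -1, 0);
}

int main(void)
{
	struct perf_cpu_map *cpus = perf_cpu_map__new("0-3");	/* example CPU list */

	if (!cpus)
		return 1;

	for (int idx = 0; idx < perf_cpu_map__nr(cpus); idx++) {
		/* Same translation as in evsel__open_cpu(): map index -> kernel CPU number. */
		int cpu = perf_cpu_map__cpu(cpus, idx).cpu;
		int fd = open_cycles_on(cpu);

		if (fd < 0)
			perror("perf_event_open");
		else
			close(fd);
	}

	perf_cpu_map__put(cpus);
	return 0;
}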
/linux/tools/lib/perf/

libperf.map
     11  perf_cpu_map__cpu;

evlist.c
    411  sid->cpu = perf_cpu_map__cpu(evsel->cpus, cpu);  in perf_evsel__set_sid_idx()
    459  struct perf_cpu evlist_cpu = perf_cpu_map__cpu(evlist->all_cpus, cpu_idx);  in mmap_per_evsel()
/linux/tools/perf/bench/

futex-wake.c
    116  CPU_SET_S(perf_cpu_map__cpu(cpu, i % perf_cpu_map__nr(cpu)).cpu, size, cpuset);  in block_threads()

futex-hash.c
    190  CPU_SET_S(perf_cpu_map__cpu(cpu, i % perf_cpu_map__nr(cpu)).cpu, size, cpuset);  in bench_futex_hash()

futex-lock-pi.c
    148  CPU_SET_S(perf_cpu_map__cpu(cpu, i % perf_cpu_map__nr(cpu)).cpu, size, cpuset);  in create_threads()

futex-requeue.c
    143  CPU_SET_S(perf_cpu_map__cpu(cpu, i % perf_cpu_map__nr(cpu)).cpu, size, cpuset);  in block_threads()

futex-wake-parallel.c
    167  CPU_SET_S(perf_cpu_map__cpu(cpu, i % perf_cpu_map__nr(cpu)).cpu, size, cpuset);  in block_threads()

epoll-ctl.c
    264  CPU_SET_S(perf_cpu_map__cpu(cpu, i % perf_cpu_map__nr(cpu)).cpu,  in do_threads()
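Every bench call site above uses the same round-robin idiom: worker i is pinned to the CPU found at map index i % perf_cpu_map__nr(cpu). Below is a self-contained sketch of that pinning with glibc's dynamic CPU-set macros; the thread count, the empty worker body, and building the map with perf_cpu_map__new(NULL) (online CPUs) are illustrative assumptions rather than the benchmarks' real setup code.

/* Sketch: pin worker threads round-robin over a CPU map, mirroring the bench pattern above. */
#define _GNU_SOURCE
#include <sched.h>
#include <pthread.h>
#include <unistd.h>
#include <perf/cpumap.h>

#define NTHREADS 8	/* example worker count */

static void *worker(void *arg)
{
	(void)arg;
	return NULL;	/* the real benchmarks do futex/epoll work here */
}

int main(void)
{
	struct perf_cpu_map *cpu = perf_cpu_map__new(NULL);	/* NULL: online CPUs */
	long nrcpus = sysconf(_SC_NPROCESSORS_CONF);
	size_t size = CPU_ALLOC_SIZE(nrcpus);
	cpu_set_t *cpuset = CPU_ALLOC(nrcpus);
	pthread_t tids[NTHREADS];

	if (!cpu || !cpuset)
		return 1;

	for (int i = 0; i < NTHREADS; i++) {
		pthread_attr_t attr;

		pthread_attr_init(&attr);
		CPU_ZERO_S(size, cpuset);
		/* i % nr picks CPUs round-robin, exactly as in block_threads()/do_threads(). */
		CPU_SET_S(perf_cpu_map__cpu(cpu, i % perf_cpu_map__nr(cpu)).cpu, size, cpuset);
		pthread_attr_setaffinity_np(&attr, size, cpuset);
		pthread_create(&tids[i], &attr, worker, NULL);
		pthread_attr_destroy(&attr);
	}

	for (int i = 0; i < NTHREADS; i++)
		pthread_join(tids[i], NULL);

	CPU_FREE(cpuset);
	perf_cpu_map__put(cpu);
	return 0;
}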
/linux/tools/lib/perf/Documentation/

libperf.txt
     47  int perf_cpu_map__cpu(const struct perf_cpu_map *cpus, int idx);