| /tools/perf/util/ |
| A D | block-range.c | 88 if (start < entry->start) in block_range__create() 121 .start = start, in block_range__create() 131 iter.start = head; in block_range__create() 144 .start = start, in block_range__create() 162 if (entry->start < start) { /* split: [e->start...][start...] */ in block_range__create() 168 .start = entry->start, in block_range__create() 177 entry->start = start; in block_range__create() 185 } else if (entry->start == start) in block_range__create() 188 iter.start = entry; in block_range__create() 195 entry = iter.start; in block_range__create() [all …]
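
The snippets show block_range__create() splitting an existing entry when a new range begins inside it ("split: [e->start...][start...]" and "entry->start = start"). A minimal sketch of just that split step, with a hypothetical `struct range` standing in for perf's rbtree-backed block_range (the real code also relinks tree nodes, which is omitted here):

```c
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for perf's struct block_range: an inclusive range. */
struct range { uint64_t start, end; };

/*
 * Split an entry that a new range begins inside of:
 * [e->start ... start-1][start ... e->end].  The head part is returned in
 * *head and *e is rewritten to begin at the new start, mirroring the
 * "entry->start = start" assignment in the snippet above.
 */
static void split_at(struct range *e, uint64_t start, struct range *head)
{
	head->start = e->start;
	head->end   = start - 1;
	e->start    = start;
}

int main(void)
{
	struct range e = { .start = 0x1000, .end = 0x1fff }, head;

	split_at(&e, 0x1800, &head);
	printf("head: %#lx-%#lx, tail: %#lx-%#lx\n",
	       (unsigned long)head.start, (unsigned long)head.end,
	       (unsigned long)e.start, (unsigned long)e.end);
	return 0;
}
```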
|
| A D | svghelper.h | 9 void open_svg(const char *filename, int cpus, int rows, u64 start, u64 end); 13 void svg_box(int Yslot, u64 start, u64 end, const char *type); 14 void svg_blocked(int Yslot, int cpu, u64 start, u64 end, const char *backtrace); 15 void svg_running(int Yslot, int cpu, u64 start, u64 end, const char *backtrace); 16 void svg_waiting(int Yslot, int cpu, u64 start, u64 end, const char *backtrace); 20 void svg_process(int cpu, u64 start, u64 end, int pid, const char *name, const char *backtrace); 21 void svg_cstate(int cpu, u64 start, u64 end, int type); 22 void svg_pstate(int cpu, u64 start, u64 end, u64 freq); 28 void svg_wakeline(u64 start, int row1, int row2, const char *backtrace); 30 void svg_interrupt(u64 start, int row, const char *backtrace); [all …]
|
| A D | svghelper.c | 95 first_time = start; in open_svg() 165 time2pixels(start), in svg_ubox() 184 time2pixels(start), in svg_lbox() 203 time2pixels(start), in svg_fbox() 228 time_to_string(end - start)); in svg_blocked() 254 time2pixels(start), time2pixels(end)-time2pixels(start), Yslot * SLOT_MULT, SLOT_HEIGHT, in svg_running() 434 time2pixels(start), time2pixels(end)-time2pixels(start), in svg_cstate() 548 time2pixels(start), height); in svg_partial_wakeline() 568 time2pixels(start), row1 * SLOT_MULT + SLOT_HEIGHT, time2pixels(start), row2 * SLOT_MULT); in svg_wakeline() 571 time2pixels(start), row2 * SLOT_MULT + SLOT_HEIGHT, time2pixels(start), row1 * SLOT_MULT); in svg_wakeline() [all …]
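
The recurring pattern `time2pixels(start), time2pixels(end) - time2pixels(start)` converts a time interval into an x offset and a width for an SVG box. A sketch of that linear timestamp-to-pixel mapping under assumed globals (`first_time`, `last_time`, `total_width` are placeholders; perf timechart derives its own values and constants):

```c
#include <stdint.h>
#include <stdio.h>

/* Assumed trace window and drawing area for the example only. */
static uint64_t first_time  = 1000000000ULL;	/* ns of first event */
static uint64_t last_time   = 2000000000ULL;	/* ns of last event  */
static double   total_width = 1000.0;		/* drawing width, px */

/* Linear mapping of a timestamp to an x coordinate. */
static double time2pixels(uint64_t ts)
{
	return (double)(ts - first_time) * total_width /
	       (double)(last_time - first_time);
}

int main(void)
{
	uint64_t start = 1200000000ULL, end = 1500000000ULL;

	/* x offset and width of a box, as in the svg_running() snippet */
	printf("x=%.1f width=%.1f\n",
	       time2pixels(start), time2pixels(end) - time2pixels(start));
	return 0;
}
```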
|
| A D | block-range.h | 27 u64 start; member 47 struct block_range *start; member 53 return iter->start; in block_range_iter() 58 if (iter->start == iter->end) in block_range_iter__next() 61 iter->start = block_range__next(iter->start); in block_range_iter__next() 67 if (!iter->start || !iter->end) in block_range_iter__valid() 73 extern struct block_range_iter block_range__create(u64 start, u64 end);
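
The header defines an iterator that walks from iter->start to iter->end via block_range__next(), stopping when the two pointers meet. A hedged sketch of the same begin/end iteration shape over a plain linked list; the types and helper names below are illustrative, not perf's:

```c
#include <stddef.h>
#include <stdio.h>

struct node { int val; struct node *next; };

/* Iterator holding the current and last element, like the start/end pair
 * in struct block_range_iter. */
struct iter { struct node *start, *end; };

static int iter_valid(struct iter *it)
{
	return it->start && it->end;	/* mirrors block_range_iter__valid() */
}

static int iter_next(struct iter *it)
{
	if (it->start == it->end)	/* reached the last element */
		return 0;
	it->start = it->start->next;	/* mirrors block_range_iter__next() */
	return 1;
}

int main(void)
{
	struct node c = { 3, NULL }, b = { 2, &c }, a = { 1, &b };
	struct iter it = { .start = &a, .end = &c };

	if (iter_valid(&it)) {
		do {
			printf("%d\n", it.start->val);
		} while (iter_next(&it));
	}
	return 0;
}
```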
|
| A D | time-utils.c | 88 *start = start_str; in split_start_end() 103 ptime->start = 0; in perf_time__parse_str() 167 if (ptime[i].end >= ptime[i + 1].start) { in perf_time__parse_strs() 200 double end_pcnt, u64 start, u64 end) in set_percent_time() argument 202 u64 total = end - start; in set_percent_time() 209 ptime->start = start + round(start_pcnt * total); in set_percent_time() 219 u64 start, u64 end) in percent_slash_split() argument 254 u64 start, u64 end) in percent_dash_split() argument 280 u64 start, u64 end); 426 if ((ptime->start && timestamp < ptime->start) || in perf_time__skip_sample() [all …]
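
set_percent_time() maps a percentage window onto an absolute time range: total = end - start, then ptime->start = start + round(start_pcnt * total). A small sketch of that arithmetic in isolation (the real helper also validates the percentages and handles multiple intervals):

```c
#include <math.h>
#include <stdint.h>
#include <stdio.h>

struct perf_time_interval { uint64_t start, end; };

/* Map a [start_pcnt, end_pcnt] fraction of a trace onto absolute
 * timestamps, following the arithmetic visible in set_percent_time(). */
static void set_percent_time(struct perf_time_interval *ptime,
			     double start_pcnt, double end_pcnt,
			     uint64_t start, uint64_t end)
{
	uint64_t total = end - start;

	ptime->start = start + (uint64_t)round(start_pcnt * total);
	ptime->end   = start + (uint64_t)round(end_pcnt * total);
}

int main(void)
{
	struct perf_time_interval t;

	/* select the 10%..30% window of a 100-second trace */
	set_percent_time(&t, 0.10, 0.30, 0, 100000000000ULL);
	printf("%llu-%llu\n",
	       (unsigned long long)t.start, (unsigned long long)t.end);
	return 0;	/* link with -lm */
}
```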
|
| A D | mem2node.c | 13 u64 start; member 28 if (entry->start < e->start) in phys_entry__insert() 39 phys_entry__init(struct phys_entry *entry, u64 start, u64 bsize, u64 node) in phys_entry__init() argument 41 entry->start = start; in phys_entry__init() 42 entry->end = start + bsize; in phys_entry__init() 72 u64 start; in mem2node__init() local 77 start = bit * bsize; in mem2node__init() 86 if ((prev->end == start) && in mem2node__init() 93 phys_entry__init(&entries[j++], start, bsize, n->node); in mem2node__init() 105 entries[i].node, entries[i].start, entries[i].end); in mem2node__init() [all …]
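
The snippets show contiguous physical ranges on the same NUMA node being coalesced ((prev->end == start) plus a node match) rather than appended as new entries. A sketch of that merge step; the structure mirrors the snippet, but this is not the full mem2node setup (no sysfs bitmap walk):

```c
#include <stdint.h>
#include <stdio.h>

struct phys_entry { uint64_t start, end; uint64_t node; };

/*
 * Append [start, start + bsize) for @node, extending the previous entry
 * when it is physically contiguous and on the same node, as
 * mem2node__init() does while walking the memory-block bitmap.
 */
static unsigned int add_range(struct phys_entry *entries, unsigned int cnt,
			      uint64_t start, uint64_t bsize, uint64_t node)
{
	if (cnt && entries[cnt - 1].end == start &&
	    entries[cnt - 1].node == node) {
		entries[cnt - 1].end += bsize;	/* coalesce */
		return cnt;
	}
	entries[cnt].start = start;
	entries[cnt].end   = start + bsize;
	entries[cnt].node  = node;
	return cnt + 1;
}

int main(void)
{
	struct phys_entry e[4];
	unsigned int n = 0;

	n = add_range(e, n, 0x0000, 0x1000, 0);
	n = add_range(e, n, 0x1000, 0x1000, 0);	/* merged into e[0] */
	n = add_range(e, n, 0x2000, 0x1000, 1);	/* new entry, other node */

	for (unsigned int i = 0; i < n; i++)
		printf("node %llu: %#llx-%#llx\n",
		       (unsigned long long)e[i].node,
		       (unsigned long long)e[i].start,
		       (unsigned long long)e[i].end);
	return 0;
}
```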
|
| /tools/include/linux/ |
| A D | bitmap.h | 27 #define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) & (BITS_PER_LONG - 1))) argument 162 __set_bit(start, map); in bitmap_set() 163 else if (small_const_nbits(start + nbits)) in bitmap_set() 164 *map |= GENMASK(start + nbits - 1, start); in bitmap_set() 166 IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) && in bitmap_set() 171 __bitmap_set(map, start, nbits); in bitmap_set() 178 __clear_bit(start, map); in bitmap_clear() 179 else if (small_const_nbits(start + nbits)) in bitmap_clear() 180 *map &= ~GENMASK(start + nbits - 1, start); in bitmap_clear() 182 IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) && in bitmap_clear() [all …]
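
For compile-time-constant small bitmaps, bitmap_set() folds to a single word operation, `*map |= GENMASK(start + nbits - 1, start)`; anything larger falls through to __bitmap_set(). A sketch of what that single-word mask evaluates to, with GENMASK() defined locally just for the example:

```c
#include <stdio.h>

#define BITS_PER_LONG	(8 * sizeof(unsigned long))

/* Local copy of the usual GENMASK(high, low) shape, for illustration. */
#define GENMASK(h, l) \
	(((~0UL) << (l)) & (~0UL >> (BITS_PER_LONG - 1 - (h))))

int main(void)
{
	unsigned long map = 0;
	unsigned int start = 4, nbits = 6;

	/* single-word fast path: set bits [start, start + nbits) */
	map |= GENMASK(start + nbits - 1, start);
	printf("%#lx\n", map);	/* 0x3f0: bits 4..9 set */
	return 0;
}
```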
|
| A D | interval_tree_generic.h | 42 ITTYPE start = ITSTART(node), last = ITLAST(node); \ 51 if (start < ITSTART(parent)) \ 91 if (start <= left->ITSUBTREE) { \ 105 if (start <= ITLAST(node)) /* Cond2 */ \ 110 if (start <= node->ITSUBTREE) \ 120 ITTYPE start, ITTYPE last) \ 141 if (node->ITSUBTREE < start) \ 148 return ITPREFIX ## _subtree_search(node, start, last); \ 166 if (start <= right->ITSUBTREE) \ 168 start, last); \ [all …]
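
The search conditions in the generic interval tree reduce to the classic closed-interval overlap test (start <= ITLAST(node) and ITSTART(node) <= last), with ITSUBTREE, the maximum last value in a subtree, used to prune whole branches. A minimal sketch of the overlap test itself, without the tree:

```c
#include <stdint.h>
#include <stdio.h>

/*
 * Two closed intervals [s1, l1] and [s2, l2] overlap iff each one starts
 * no later than the other one ends -- the Cond1/Cond2 pair that the
 * generic interval-tree search checks at every node it visits.
 */
static int intervals_overlap(uint64_t s1, uint64_t l1,
			     uint64_t s2, uint64_t l2)
{
	return s1 <= l2 && s2 <= l1;
}

int main(void)
{
	printf("%d\n", intervals_overlap(10, 20, 15, 30));	/* 1 */
	printf("%d\n", intervals_overlap(10, 20, 21, 30));	/* 0 */
	return 0;
}
```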
|
| /tools/testing/nvdimm/test/ |
| A D | iomap.c | 68 - nfit_res->res.start \ 80 - nfit_res->res.start; in __wrap_devm_ioremap() 206 return res->start == start; in match() 222 &start); in nfit_test_release_region() 228 if (req->res.start == start) { in nfit_test_release_region() 258 if (start + n > nfit_res->res.start in nfit_test_request_region() 261 __func__, start, n, in nfit_test_request_region() 268 if (start == req->res.start) { in nfit_test_request_region() 285 res->start = start; in nfit_test_request_region() 286 res->end = start + n - 1; in nfit_test_request_region() [all …]
|
| /tools/lib/ |
| A D | string.c | 188 if (*start != value) in check_bytes8() 189 return (void *)start; in check_bytes8() 190 start++; in check_bytes8() 212 return check_bytes8(start, value, bytes); in memchr_inv() 219 prefix = (unsigned long)start % 8; in memchr_inv() 224 r = check_bytes8(start, value, prefix); in memchr_inv() 227 start += prefix; in memchr_inv() 234 if (*(u64 *)start != value64) in memchr_inv() 235 return check_bytes8(start, value, 8); in memchr_inv() 236 start += 8; in memchr_inv() [all …]
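
memchr_inv() looks for the first byte that is *not* a given value, checking stray bytes one at a time (check_bytes8) and the aligned middle eight bytes at a time against the value replicated across a u64 ("*(u64 *)start != value64"). A simplified sketch of that idea; it uses memcpy instead of handling the unaligned prefix the way the real code does:

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Byte-by-byte prefix/tail check, as in check_bytes8(). */
static const void *check_bytes8(const uint8_t *start, uint8_t value,
				size_t bytes)
{
	while (bytes) {
		if (*start != value)
			return start;
		start++;
		bytes--;
	}
	return NULL;
}

/* Simplified memchr_inv(): first byte differing from @value, or NULL. */
static const void *memchr_inv_sketch(const void *buf, uint8_t value, size_t n)
{
	const uint8_t *start = buf;
	uint64_t value64 = value * 0x0101010101010101ULL;	/* replicate */

	while (n >= 8) {
		uint64_t word;

		memcpy(&word, start, 8);	/* avoid strict-aliasing issues */
		if (word != value64)
			return check_bytes8(start, value, 8);
		start += 8;
		n -= 8;
	}
	return check_bytes8(start, value, n);
}

int main(void)
{
	uint8_t buf[32];

	memset(buf, 0xaa, sizeof(buf));
	buf[19] = 0x55;
	printf("offset %td\n",
	       (const uint8_t *)memchr_inv_sketch(buf, 0xaa, sizeof(buf)) - buf);
	return 0;
}
```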
|
| A D | find_bit.c | 49 #define FIND_NEXT_BIT(FETCH, MUNGE, size, start) \ argument 51 unsigned long mask, idx, tmp, sz = (size), __start = (start); \ 103 unsigned long _find_next_bit(const unsigned long *addr, unsigned long nbits, unsigned long start) in _find_next_bit() argument 105 return FIND_NEXT_BIT(addr[idx], /* nop */, nbits, start); in _find_next_bit() 111 unsigned long nbits, unsigned long start) in _find_next_and_bit() argument 113 return FIND_NEXT_BIT(addr1[idx] & addr2[idx], /* nop */, nbits, start); in _find_next_and_bit() 119 unsigned long start) in _find_next_zero_bit() argument 121 return FIND_NEXT_BIT(~addr[idx], /* nop */, nbits, start); in _find_next_zero_bit()
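
FIND_NEXT_BIT() fetches one word at a time, masks off the bits below `start` in the first word, and returns the index of the first set bit it finds. A plain-loop sketch of the same behaviour, without the macro's FETCH/MUNGE hooks:

```c
#include <stdio.h>

#define BITS_PER_LONG	(8 * sizeof(unsigned long))
#define BIT_WORD(nr)	((nr) / BITS_PER_LONG)

/* First-word mask clearing bits below @start, like BITMAP_FIRST_WORD_MASK. */
#define FIRST_WORD_MASK(start)	(~0UL << ((start) % BITS_PER_LONG))

/* Return the index of the first set bit >= start, or nbits if none. */
static unsigned long find_next_bit_sketch(const unsigned long *addr,
					  unsigned long nbits,
					  unsigned long start)
{
	unsigned long idx, word;

	if (start >= nbits)
		return nbits;

	for (idx = BIT_WORD(start); idx * BITS_PER_LONG < nbits; idx++) {
		word = addr[idx];
		if (idx == BIT_WORD(start))
			word &= FIRST_WORD_MASK(start);	/* munge first word */
		if (word) {
			unsigned long bit = idx * BITS_PER_LONG +
					    __builtin_ctzl(word);
			return bit < nbits ? bit : nbits;
		}
	}
	return nbits;
}

int main(void)
{
	unsigned long map[2] = { 0 };

	map[1] |= 1UL << 5;	/* bit 69, assuming 64-bit unsigned long */
	printf("%lu\n", find_next_bit_sketch(map, 128, 10));
	return 0;
}
```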
|
| A D | bitmap.c | 104 void __bitmap_set(unsigned long *map, unsigned int start, int len) in __bitmap_set() argument 106 unsigned long *p = map + BIT_WORD(start); in __bitmap_set() 107 const unsigned int size = start + len; in __bitmap_set() 108 int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG); in __bitmap_set() 109 unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start); in __bitmap_set() 124 void __bitmap_clear(unsigned long *map, unsigned int start, int len) in __bitmap_clear() argument 126 unsigned long *p = map + BIT_WORD(start); in __bitmap_clear() 127 const unsigned int size = start + len; in __bitmap_clear() 128 int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG); in __bitmap_clear() 129 unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start); in __bitmap_clear()
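
__bitmap_set() sets a bit range word by word: a partial mask for the first word, full words in the middle, and a final partial mask bounded by the last-word mask for `size = start + len`. A standalone sketch of that loop structure, with the mask macros defined locally rather than pulled from the kernel headers:

```c
#include <stdio.h>

#define BITS_PER_LONG		(8 * sizeof(unsigned long))
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
#define FIRST_WORD_MASK(start)	(~0UL << ((start) % BITS_PER_LONG))
#define LAST_WORD_MASK(nbits)	(~0UL >> (-(nbits) % BITS_PER_LONG))

/* Set @len bits starting at @start, mirroring __bitmap_set()'s structure. */
static void bitmap_set_sketch(unsigned long *map, unsigned int start, int len)
{
	unsigned long *p = map + BIT_WORD(start);
	const unsigned int size = start + len;
	int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_set = FIRST_WORD_MASK(start);

	while (len - bits_to_set >= 0) {
		*p |= mask_to_set;		/* whole rest of this word */
		len -= bits_to_set;
		bits_to_set = BITS_PER_LONG;
		mask_to_set = ~0UL;
		p++;
	}
	if (len) {				/* partial final word */
		mask_to_set &= LAST_WORD_MASK(size);
		*p |= mask_to_set;
	}
}

int main(void)
{
	unsigned long map[2] = { 0, 0 };

	bitmap_set_sketch(map, 60, 8);		/* straddles a word boundary */
	printf("%#lx %#lx\n", map[0], map[1]);
	return 0;
}
```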
|
| /tools/testing/radix-tree/ |
| A D | benchmark.c | 18 struct timespec start, finish; in benchmark_iter() local 26 clock_gettime(CLOCK_MONOTONIC, &start); in benchmark_iter() 39 (finish.tv_nsec - start.tv_nsec); in benchmark_iter() 55 struct timespec start, finish; in benchmark_insert() local 59 clock_gettime(CLOCK_MONOTONIC, &start); in benchmark_insert() 67 (finish.tv_nsec - start.tv_nsec); in benchmark_insert() 76 struct timespec start, finish; in benchmark_tagging() local 80 clock_gettime(CLOCK_MONOTONIC, &start); in benchmark_tagging() 88 (finish.tv_nsec - start.tv_nsec); in benchmark_tagging() 97 struct timespec start, finish; in benchmark_delete() local [all …]
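
Each benchmark takes a CLOCK_MONOTONIC reading before and after the work and derives elapsed nanoseconds from the tv_sec and tv_nsec differences, as in the "(finish.tv_nsec - start.tv_nsec)" snippets. A sketch of that measurement pattern around a throwaway workload:

```c
#include <stdio.h>
#include <time.h>

/* Elapsed nanoseconds between two CLOCK_MONOTONIC readings. */
static long long elapsed_ns(const struct timespec *start,
			    const struct timespec *finish)
{
	return (finish->tv_sec - start->tv_sec) * 1000000000LL +
	       (finish->tv_nsec - start->tv_nsec);
}

int main(void)
{
	struct timespec start, finish;
	volatile unsigned long sink = 0;

	clock_gettime(CLOCK_MONOTONIC, &start);
	for (unsigned long i = 0; i < 10000000UL; i++)
		sink += i;			/* workload being timed */
	clock_gettime(CLOCK_MONOTONIC, &finish);

	printf("%lld ns\n", elapsed_ns(&start, &finish));
	return 0;
}
```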
|
| A D | main.c | 50 unsigned long start; in __big_gang_check() local 53 start = 0; in __big_gang_check() 60 old_start = start; in __big_gang_check() 61 start += rand() % 1000000; in __big_gang_check() 62 start %= 1ULL << 33; in __big_gang_check() 63 if (start < old_start) in __big_gang_check() 164 start = rand(); in copy_tag_check() 167 cur = start; in copy_tag_check() 168 start = end; in copy_tag_check() 177 if (start <= end) in copy_tag_check() [all …]
|
| /tools/testing/selftests/net/netfilter/ |
| A D | nft_concat_range.sh | 1317 for i in $(seq "${start}" $((start + count))); do 1528 for i in $(seq "$start" $((start + count))); do 1551 for i in $(seq "$start" $((start + count))); do 1566 for i in $(seq "$start" $((start + count))); do 1582 for i in $(seq "$start" $((start + count))); do 1595 for i in $(seq "$start" $((start + count))); do 1630 for i in $(seq "$start" $((start + count))); do 1648 for i in $(seq "$start" $((start + count))); do 1678 for i in $(seq "$start" $((start + perf_entries))); do 1763 for i in $(seq "${start}" $((start + count))); do [all …]
|
| /tools/perf/ |
| A D | perf-read-vdso.c | 15 void *start, *end; in main() local 18 if (find_map(&start, &end, VDSO__MAP_NAME)) in main() 21 size = end - start; in main() 24 written = fwrite(start, 1, size, stdout); in main() 27 start += written; in main()
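
perf-read-vdso locates the vDSO mapping, then fwrite()s the byte range [start, end) to stdout, advancing `start` by whatever was written. A sketch of that copy loop over an arbitrary in-memory buffer; find_map() and VDSO__MAP_NAME are perf internals and are not reproduced here:

```c
#include <stdio.h>

/* Write the byte range [start, end) to stdout, retrying on short writes,
 * in the same style as perf-read-vdso's main(). */
static int dump_range(const char *start, const char *end)
{
	size_t size = end - start;

	while (size) {
		size_t written = fwrite(start, 1, size, stdout);

		if (!written)
			return -1;
		start += written;
		size -= written;
	}
	return 0;
}

int main(void)
{
	static const char buf[] = "not a real vDSO, just example bytes\n";

	return dump_range(buf, buf + sizeof(buf) - 1) ? 1 : 0;
}
```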
|
| /tools/perf/arch/s390/util/ |
| A D | machine.c | 12 int arch__fix_module_text_start(u64 *start, u64 *size, const char *name) in arch__fix_module_text_start() argument 14 u64 m_start = *start; in arch__fix_module_text_start() 19 if (sysfs__read_ull(path, (unsigned long long *)start) < 0) { in arch__fix_module_text_start() 21 *start = m_start; in arch__fix_module_text_start() 33 *size -= (*start - m_start); in arch__fix_module_text_start()
|
| /tools/testing/selftests/bpf/ |
| A D | trace_helpers.c | 142 while (start < end) { in ksym_search_local() 143 size_t mid = start + (end - start) / 2; in ksym_search_local() 149 start = mid + 1; in ksym_search_local() 154 if (start >= 1 && ksyms->syms[start - 1].addr < key && in ksym_search_local() 157 return &ksyms->syms[start - 1]; in ksym_search_local() 170 while (start < end) { in search_kallsyms_custom_local() 171 mid = start + (end - start) / 2; in search_kallsyms_custom_local() 177 start = mid + 1; in search_kallsyms_custom_local() 290 *start = q.vma_start; in procmap_query() 308 size_t start, base, end; in get_uprobe_offset() local [all …]
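
ksym_search_local() is a binary search over a sorted symbol table that then steps back one entry (syms[start - 1].addr < key) so an address resolves to the symbol that contains it. A self-contained sketch of that lookup shape; the comparison details differ slightly from the BPF selftest helper:

```c
#include <stdio.h>

struct ksym { unsigned long addr; const char *name; };

/*
 * Find the symbol whose address range contains @key using a binary
 * search followed by a one-step back, as in ksym_search_local().
 * Returns NULL when @key is below the first symbol.
 */
static const struct ksym *ksym_search(const struct ksym *syms, int count,
				      unsigned long key)
{
	int start = 0, end = count;

	while (start < end) {
		int mid = start + (end - start) / 2;

		if (syms[mid].addr <= key)
			start = mid + 1;
		else
			end = mid;
	}
	return start >= 1 ? &syms[start - 1] : NULL;
}

int main(void)
{
	static const struct ksym syms[] = {
		{ 0x1000, "sym_a" },
		{ 0x2000, "sym_b" },
		{ 0x3000, "sym_c" },
	};

	printf("%s\n", ksym_search(syms, 3, 0x2abc)->name);	/* sym_b */
	return 0;
}
```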
|
| /tools/tracing/rtla/src/ |
| A D | utils.c | 366 start = strstr(start, ":"); in get_long_ns_after_colon() 367 if (!start) in get_long_ns_after_colon() 371 start++; in get_long_ns_after_colon() 382 start = strstr(start, ":"); in get_long_after_colon() 383 if (!start) in get_long_after_colon() 387 start++; in get_long_after_colon() 739 start = strstr(start, ":"); in get_self_cgroup() 740 if (!start) in get_self_cgroup() 744 start++; in get_self_cgroup() 746 start = strstr(start, ":"); in get_self_cgroup() [all …]
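
The get_*_after_colon() helpers find the next ':' with strstr(), step past it, and parse the number that follows. A sketch of that parsing step; strtol stands in for rtla's own conversion and error handling:

```c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/*
 * Return the long value following the first ':' in @str, or -1 if there
 * is no colon -- the same find-colon, skip-it, parse pattern used by
 * get_long_after_colon() and friends.
 */
static long get_long_after_colon(const char *str)
{
	const char *start = strstr(str, ":");

	if (!start)
		return -1;
	start++;			/* skip the ':' itself */
	return strtol(start, NULL, 10);
}

int main(void)
{
	printf("%ld\n", get_long_after_colon("irq:42"));	/* 42 */
	printf("%ld\n", get_long_after_colon("no colon"));	/* -1 */
	return 0;
}
```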
|
| /tools/testing/selftests/timens/ |
| A D | gettime_perf.c | 49 struct timespec tp, start; in test() local 53 vdso_clock_gettime(clockid, &start); in test() 54 tp = start; in test() 55 for (tp = start; start.tv_sec + timeout > tp.tv_sec || in test() 56 (start.tv_sec + timeout == tp.tv_sec && in test() 57 start.tv_nsec > tp.tv_nsec); i++) { in test()
|
| /tools/perf/scripts/python/ |
| A D | net_dropmonitor.py | 38 start, end = -1, len(kallsyms) 39 while end != start + 1: 40 pivot = (start + end) // 2 44 start = pivot 48 if start >= 0: 49 symloc, name = kallsyms[start]
|
| /tools/testing/selftests/timers/ |
| A D | raw_skew.c | 58 long long diff_timespec(struct timespec start, struct timespec end) in diff_timespec() argument 62 start_ns = ts_to_nsec(start); in diff_timespec() 69 struct timespec start, mid, end; in get_monotonic_and_raw() local 76 clock_gettime(CLOCK_MONOTONIC, &start); in get_monotonic_and_raw() 80 newdiff = diff_timespec(start, end); in get_monotonic_and_raw() 84 tmp = (ts_to_nsec(start) + ts_to_nsec(end))/2; in get_monotonic_and_raw() 92 struct timespec mon, raw, start, end; in main() local 106 start = mon; in main() 122 interval = diff_timespec(start, end); in main()
|
| /tools/testing/selftests/mm/ |
| A D | vm_util.c | 23 uint64_t pagemap_get_entry(int fd, char *start) in pagemap_get_entry() argument 39 arg.start = (uintptr_t)start; in __pagemap_scan_get_categories() 40 arg.end = (uintptr_t)(start + psize()); in __pagemap_scan_get_categories() 93 if (pagemap_scan_supported(fd, start)) { in page_entry_is() 105 bool pagemap_is_softdirty(int fd, char *start) in pagemap_is_softdirty() argument 107 return page_entry_is(fd, start, "soft-dirty", in pagemap_is_softdirty() 111 bool pagemap_is_swapped(int fd, char *start) in pagemap_is_swapped() argument 116 bool pagemap_is_populated(int fd, char *start) in pagemap_is_populated() argument 118 return page_entry_is(fd, start, "populated", in pagemap_is_populated() 123 unsigned long pagemap_get_pfn(int fd, char *start) in pagemap_get_pfn() argument [all …]
|
| A D | mlock2.h | 7 static int mlock2_(void *start, size_t len, int flags) in mlock2_() argument 9 int ret = syscall(__NR_mlock2, start, len, flags); in mlock2_() 23 unsigned long start, end; in seek_to_smaps_entry() local 36 &start, &end, perms, &offset, dev, &inode, path) < 6) in seek_to_smaps_entry() 39 if (start <= addr && addr < end) in seek_to_smaps_entry()
|
| /tools/perf/arch/x86/util/ |
| A D | machine.c | 25 static int add_extra_kernel_map(struct extra_kernel_map_info *mi, u64 start, in add_extra_kernel_map() argument 40 mi->maps[mi->cnt].start = start; in add_extra_kernel_map() 51 u64 start) in find_extra_kernel_maps() argument 57 mi->entry_trampoline = start; in find_extra_kernel_maps() 62 u64 end = start + page_size; in find_extra_kernel_maps() 64 return add_extra_kernel_map(mi, start, end, 0, name); in find_extra_kernel_maps()
|