/lib/math/

div64.c
    223  int shift = __builtin_ctzll(c);  in mul_u64_u64_div_u64() (local)
    226  if ((n_hi >> shift) == 0) {  in mul_u64_u64_div_u64()
    227  u64 n = shift ? (n_lo >> shift) | (n_hi << (64 - shift)) : n_lo;  in mul_u64_u64_div_u64()
    244  shift = __builtin_clzll(c);  in mul_u64_u64_div_u64()
    245  c <<= shift;  in mul_u64_u64_div_u64()
    247  int p = 64 + shift;  in mul_u64_u64_div_u64()
    254  if (p < shift)  in mul_u64_u64_div_u64()
    256  p -= shift;  in mul_u64_u64_div_u64()
    257  n_hi <<= shift;  in mul_u64_u64_div_u64()
    258  n_hi |= n_lo >> (64 - shift);  in mul_u64_u64_div_u64()
    [all …]
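
The excerpts above show two complementary shift tricks inside mul_u64_u64_div_u64(): __builtin_ctzll() strips the divisor's trailing zero bits, so that when the shifted high word vanishes a 128/64 division collapses into a native 64/64 one, and __builtin_clzll() later normalizes the divisor (top bit set) for the long-division slow path. A minimal sketch of the fast path, assuming a nonzero divisor (ctz of zero is undefined):

    #include <stdint.h>

    /* Sketch only, not the kernel implementation: n/c == (n >> k)/(c >> k)
     * for k = ctz(c), which can shrink a 128-bit dividend to 64 bits. */
    static uint64_t div128_fast_path(uint64_t n_hi, uint64_t n_lo, uint64_t c)
    {
        int shift = __builtin_ctzll(c);         /* c != 0 assumed */

        if ((n_hi >> shift) == 0) {
            /* Shifted dividend fits in 64 bits: fold hi into lo.
             * The ?: guards the undefined shift by 64 when shift == 0. */
            uint64_t n = shift ? (n_lo >> shift) | (n_hi << (64 - shift))
                               : n_lo;

            return n / (c >> shift);            /* native 64/64 divide */
        }
        return 0;   /* placeholder: real code does normalized long division */
    }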
|
/lib/

xarray.c
     158  unsigned int shift = xas->xa_node->shift;  in xas_move_index() (local)
     393  node->shift = shift;  in xas_alloc()
     582  shift = node->shift + XA_CHUNK_SHIFT;  in xas_expand()
     653  int shift;  in xas_create() (local)
     673  shift = node->shift;  in xas_create()
     677  shift = 0;  in xas_create()
     734  if (node->shift >= shift)  in xas_create_range()
    1106  child->shift = node->shift - XA_CHUNK_SHIFT;  in xas_split()
    1226  child->shift = node->shift - XA_CHUNK_SHIFT;  in xas_try_split()
    2452  index + (i << node->shift), node->shift);  in xa_dump_entry()
    [all …]
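
The recurring pattern is that every xa_node stores a shift: the number of index bits consumed by the subtree below it, always a multiple of XA_CHUNK_SHIFT. Growing the tree adds XA_CHUNK_SHIFT (xas_expand), descending subtracts it (xas_split, xas_try_split), and a node's slot for a given index falls out of a shift and a mask. A simplified sketch of that indexing; the chunk size is an assumption (the XArray uses 6, i.e. 64 slots per node, in common configs):

    /* Illustrative radix-style indexing, not the XArray's real types. */
    #define CHUNK_SHIFT 6
    #define CHUNK_MASK  ((1UL << CHUNK_SHIFT) - 1)

    struct node_sketch {
        unsigned char shift;                 /* index bits consumed below */
        void *slots[1UL << CHUNK_SHIFT];
    };

    /* Slot of @index in @node: peel this level's slice of index bits. */
    static unsigned int node_offset(unsigned long index,
                                    const struct node_sketch *node)
    {
        return (index >> node->shift) & CHUNK_MASK;
    }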
|
ts_bm.c
     85  int shift = bm->patlen - 1;  in bm_find() (local)
     92  while (shift < text_len) {  in bm_find()
     94  shift, text[shift]);  in bm_find()
     97  &text[shift], icase);  in bm_find()
    101  return consumed + (shift-(bm->patlen-1));  in bm_find()
    104  bs = bm->bad_shift[text[shift-i]];  in bm_find()
    107  shift = max_t(int, shift-i+bs, shift+bm->good_shift[i]);  in bm_find()
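
bm_find() is Boyer-Moore: shift is the text position aligned with the last pattern character, and after a mismatch at pattern offset i the window advances by the larger of the bad-character and good-suffix heuristics (line 107). A sketch of a bad-character table in the spirit of bm->bad_shift, using the textbook construction (the kernel's may differ in detail):

    #include <limits.h>

    /* bad_shift[c]: how far the pattern may slide when text byte c
     * mismatches against the pattern's last position. */
    static void compute_bad_shift(const unsigned char *pat, int patlen,
                                  int bad_shift[UCHAR_MAX + 1])
    {
        int i;

        for (i = 0; i <= UCHAR_MAX; i++)
            bad_shift[i] = patlen;              /* byte absent: skip all */
        for (i = 0; i < patlen - 1; i++)
            bad_shift[pat[i]] = patlen - 1 - i; /* align last occurrence */
    }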
|
radix-tree.c
     280  ret->shift = shift;  in radix_tree_node_alloc()
     416  maxshift = shift;  in radix_tree_extend()
     426  root, shift, 0, 1, 0);  in radix_tree_extend()
     459  } while (shift <= maxshift);  in radix_tree_extend()
     616  shift = error;  in __radix_tree_create()
     620  while (shift > 0) {  in __radix_tree_create()
     770  if (parent->shift == 0)  in __radix_tree_lookup()
    1496  shift = error;  in idr_get_free()
    1499  if (start == 0 && shift == 0)  in idr_get_free()
    1502  while (shift) {  in idr_get_free()
    [all …]
|
iommu-helper.c
    11  unsigned long shift, unsigned long boundary_size,  in iommu_area_alloc() (argument)
    21  if (iommu_is_span_boundary(index, nr, shift, boundary_size)) {  in iommu_area_alloc()
    22  start = ALIGN(shift + index, boundary_size) - shift;  in iommu_area_alloc()
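
Here shift is not a bit count but the offset of bitmap slot 0 in bus-address units: the span-boundary test has to see the real bus address, and the fixup on line 22 aligns in bus space before translating back to a bitmap index. The same arithmetic as a standalone sketch (ALIGN reimplemented; boundary_size assumed a power of two):

    /* Round x up to a multiple of the power-of-two a. */
    #define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((unsigned long)(a) - 1))

    /* First bitmap index whose bus address (shift + index) starts a
     * fresh boundary segment. */
    static unsigned long start_past_boundary(unsigned long index,
                                             unsigned long shift,
                                             unsigned long boundary_size)
    {
        return ALIGN_UP(shift + index, boundary_size) - shift;
    }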
|
rhashtable.c
      78  const unsigned int len = 1 << shift;  in nested_table_free()
      86  size >>= shift;  in nested_table_free()
     156  if (nbuckets < (1 << (shift + 1)))  in nested_bucket_table_alloc()
    1201  while (ntbl && size > (1 << shift)) {  in __rht_bucket_nested()
    1205  size >>= shift;  in __rht_bucket_nested()
    1206  subhash >>= shift;  in __rht_bucket_nested()
    1239  size <= (1 << shift));  in rht_bucket_nested_insert()
    1242  index = hash & ((1 << shift) - 1);  in rht_bucket_nested_insert()
    1243  size >>= shift;  in rht_bucket_nested_insert()
    1244  hash >>= shift;  in rht_bucket_nested_insert()
    [all …]
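
The nested bucket tables consume a hash the way a radix tree consumes an index: each level holds 1 << shift entries, the low shift bits pick a child, and both the hash and the outstanding size are shifted down (lines 1205-1206, 1243-1244). A loose sketch of such a lookup, with placeholder types standing in for the RCU-protected ones the real code uses:

    union nested_sketch {
        union nested_sketch *child;
        void *bucket;
    };

    static void *nested_lookup(union nested_sketch *ntbl, unsigned int hash,
                               unsigned int size, unsigned int shift)
    {
        /* Descend while more entries remain than one table level holds. */
        while (ntbl && size > (1U << shift)) {
            ntbl = ntbl[hash & ((1U << shift) - 1)].child;
            size >>= shift;     /* entries left to distinguish below */
            hash >>= shift;     /* low bits consumed by this level */
        }
        return ntbl ? ntbl[hash].bucket : NULL;
    }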
|
sbitmap.c
    101  int sbitmap_init_node(struct sbitmap *sb, unsigned int depth, int shift,  in sbitmap_init_node() (argument)
    108  if (shift < 0)  in sbitmap_init_node()
    109  shift = sbitmap_calculate_shift(depth);  in sbitmap_init_node()
    111  bits_per_word = 1U << shift;  in sbitmap_init_node()
    115  sb->shift = shift;  in sbitmap_init_node()
    147  unsigned int bits_per_word = 1U << sb->shift;  in sbitmap_resize()
    248  nr += index << sb->shift;  in sbitmap_find_bit()
    385  seq_printf(m, "bits_per_word=%u\n", 1U << sb->shift);  in sbitmap_show()
    449  int shift, bool round_robin, gfp_t flags, int node)  in sbitmap_queue_init_node() (argument)
    454  ret = sbitmap_init_node(&sbq->sb, depth, shift, flags, node,  in sbitmap_queue_init_node()
    [all …]
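
sb->shift is log2 of the bits per word (computed from depth when the caller passes a negative shift), so a flat bit number splits into a word index and a bit within that word, and line 248 shows the reverse composition. Both directions as a sketch with an assumed struct:

    struct sbitmap_sketch {
        unsigned int shift;     /* log2(bits per word) */
    };

    static unsigned int word_of(const struct sbitmap_sketch *sb, unsigned int nr)
    {
        return nr >> sb->shift;
    }

    static unsigned int bit_in_word(const struct sbitmap_sketch *sb, unsigned int nr)
    {
        return nr & ((1U << sb->shift) - 1);
    }

    /* Recombine, as sbitmap_find_bit() does: nr += index << sb->shift. */
    static unsigned int flat_nr(const struct sbitmap_sketch *sb,
                                unsigned int index, unsigned int bit)
    {
        return (index << sb->shift) + bit;
    }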
|
sort.c
    200  size_t shift = 0;  in __sort_r() (local)
    229  a -= size << shift;  in __sort_r()
    233  shift = do_cmp(base + size, base + 2 * size, cmp_func, priv) <= 0;  in __sort_r()
    234  a = size << shift;  in __sort_r()
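
Lines 233-234 are branch avoidance rather than bit extraction: the boolean result of the comparison becomes the shift count, so size << shift evaluates to size or 2 * size, i.e. the offset of the greater of the two elements, without a conditional jump. Isolated as a sketch:

    #include <stddef.h>

    /* cmp is the comparator's result for the elements at offsets size
     * and 2 * size; (cmp <= 0) is 0 or 1, so the shift selects the
     * offset of the greater element branchlessly. */
    static size_t greater_child_offset(int cmp, size_t size)
    {
        size_t shift = cmp <= 0;

        return size << shift;       /* size, or 2 * size */
    }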
|
bitmap.c
     93  unsigned shift, unsigned nbits)  in __bitmap_shift_right() (argument)
     96  unsigned off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;  in __bitmap_shift_right()
    138  unsigned int shift, unsigned int nbits)  in __bitmap_shift_left() (argument)
    142  unsigned int off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;  in __bitmap_shift_left()
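
Both helpers decompose an arbitrary shift into whole words (off) plus a sub-word remainder (rem), then stitch adjacent source words together. A simplified right-shift in that spirit, assuming nbits is a multiple of the word size and skipping the tail masking the real code performs:

    #include <limits.h>

    #define WORD_BITS (sizeof(unsigned long) * CHAR_BIT)

    static void bitmap_shr(unsigned long *dst, const unsigned long *src,
                           unsigned int shift, unsigned int nbits)
    {
        unsigned int words = nbits / WORD_BITS;
        unsigned int off = shift / WORD_BITS, rem = shift % WORD_BITS;
        unsigned int k;

        for (k = 0; k + off < words; k++) {
            unsigned long lo = src[k + off] >> rem;
            unsigned long hi = 0;

            /* Pull in the bits that spill over from the next word up. */
            if (rem && k + off + 1 < words)
                hi = src[k + off + 1] << (WORD_BITS - rem);
            dst[k] = lo | hi;
        }
        for (; k < words; k++)
            dst[k] = 0;             /* vacated high words become zero */
    }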
|
idr.c
    625  extern void xa_dump_index(unsigned long index, unsigned int shift);
    637  unsigned int shift = node->shift + IDA_CHUNK_SHIFT +  in ida_dump_entry() (local)
    640  xa_dump_index(index * IDA_BITMAP_BITS, shift);  in ida_dump_entry()
    644  index | (i << node->shift));  in ida_dump_entry()
|
Kconfig.ubsan
    90  bool "Perform checking for bit-shift overflows"
    91  depends on $(cc-option,-fsanitize=shift)
    93  This option enables -fsanitize=shift which checks for bit-shift
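
For context, these are the kinds of shifts -fsanitize=shift instruments; both compile quietly but are undefined behaviour when executed with the noted values:

    /* Each statement is flagged at runtime by -fsanitize=shift. */
    int shift_ub_demo(int x, int n)
    {
        int too_far  = x << n;  /* undefined if n < 0 or n >= bits of int */
        int overflow = x << 1;  /* undefined if the signed result
                                   overflows, e.g. x = INT_MAX */

        return too_far + overflow;
    }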
|
vsprintf.c
     510  int shift = 3;  in number() (local)
     513  shift = 4;  in number()
     516  num >>= shift;  in number()
    2055  int shift;  (member)
    2109  buf = number(buf, end, (flags >> pff[i].shift) & pff[i].mask,  in format_page_flags()
    2807  unsigned int shift = 32 - size*8;  in convert_num_spec() (local)
    2809  val <<= shift;  in convert_num_spec()
    2811  return val >> shift;  in convert_num_spec()
    2812  return (int)val >> shift;  in convert_num_spec()
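
In number(), a power-of-two base lets digit extraction avoid division entirely: shift is 3 for octal or 4 for hex, each digit is the low shift bits of num, and num >>= shift advances (lines 510-516). A simplified sketch of that loop, emitting digits only (none of number()'s width, padding, or sign handling):

    static char *emit_pow2_digits(char *buf, unsigned long long num, int hex)
    {
        static const char digits[] = "0123456789abcdef";
        int shift = hex ? 4 : 3;            /* log2 of the base */
        unsigned int mask = (1U << shift) - 1;
        char tmp[24];                       /* 64-bit octal needs 22 */
        int i = 0;

        do {
            tmp[i++] = digits[num & mask];  /* lowest digit first */
            num >>= shift;
        } while (num);
        while (i)                           /* reverse into the output */
            *buf++ = tmp[--i];
        return buf;
    }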
|
maple_tree.c
     504  unsigned long shift;  in mas_set_parent() (local)
     514  shift = MAPLE_PARENT_SLOT_SHIFT;  in mas_set_parent()
     520  shift = type = 0;  in mas_set_parent()
     525  val |= (slot << shift) | type;  in mas_set_parent()
    1803  unsigned char shift)  in mab_shift_right() (argument)
    2974  unsigned char shift, b_end = ++b_node->b_end;  in mas_rebalance() (local)
    3003  shift = mas_data_end(&l_mas) + 1;  in mas_rebalance()
    3004  mab_shift_right(b_node, shift);  in mas_rebalance()
    3005  mas->offset += shift;  in mas_rebalance()
    3006  mas_mab_cp(&l_mas, 0, shift - 1, b_node, 0);  in mas_rebalance()
    [all …]
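
mas_set_parent() (lines 504-525) packs a slot number and a node-type tag into the low bits of an aligned parent pointer: val |= (slot << shift) | type. A generic tagged-pointer sketch of the idea; the widths below are illustrative assumptions, not the maple tree's actual encoding:

    #include <assert.h>
    #include <stdint.h>

    /* Nodes aligned to 256 bytes leave 8 low bits free: here a 2-bit
     * type tag with a 6-bit slot number above it. */
    #define TYPE_BITS  2
    #define SLOT_BITS  6
    #define SLOT_SHIFT TYPE_BITS
    #define LOW_MASK   ((1UL << (TYPE_BITS + SLOT_BITS)) - 1)

    static uintptr_t pack_parent(uintptr_t node_addr, unsigned int slot,
                                 unsigned int type)
    {
        assert((node_addr & LOW_MASK) == 0);    /* alignment frees bits */
        return node_addr | ((uintptr_t)slot << SLOT_SHIFT) | type;
    }

    static unsigned int unpack_slot(uintptr_t val)
    {
        return (val >> SLOT_SHIFT) & ((1U << SLOT_BITS) - 1);
    }

    static uintptr_t unpack_addr(uintptr_t val)
    {
        return val & ~(uintptr_t)LOW_MASK;
    }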
|
assoc_array.c
    260  int shift = shortcut->skip_to_level & ASSOC_ARRAY_KEY_CHUNK_MASK;  in assoc_array_walk() (local)
    261  dissimilarity &= ~(ULONG_MAX << shift);  in assoc_array_walk()
|
test_maple_tree.c
    2990  int loop, shift;  in check_empty_area_fill() (local)
    2994  for (shift = 12; shift <= 16; shift++) {  in check_empty_area_fill()
    2996  size = 1 << shift;  in check_empty_area_fill()
|
/lib/vdso/

gettimeofday.c
    33  static __always_inline u64 vdso_shift_ns(u64 ns, u32 shift)  in vdso_shift_ns() (argument)
    35  return ns >> shift;  in vdso_shift_ns()
    48  return vdso_shift_ns((delta * vc->mult) + base, vc->shift);  in vdso_calc_ns()
    50  return mul_u64_u32_add_u64_shr(delta, vc->mult, base, vc->shift);  in vdso_calc_ns()
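
vdso_calc_ns() is the classic clocksource mult/shift conversion: mult / 2^shift is a fixed-point approximation of nanoseconds per cycle, chosen once, so every reading costs one multiply and one right shift; line 50 is the variant that routes through a 128-bit helper when delta * mult could overflow 64 bits. A sketch of picking and applying the pair (base carries the pre-scaled sub-nanosecond remainder, as on line 48):

    #include <stdint.h>

    #define NSEC_PER_SEC 1000000000ULL

    /* Choose mult for a given shift: mult / 2^shift ~= ns per cycle. */
    static uint32_t pick_mult(uint64_t freq_hz, uint32_t shift)
    {
        return (uint32_t)((NSEC_PER_SEC << shift) / freq_hz);
    }

    static uint64_t cycles_to_ns(uint64_t delta, uint32_t mult,
                                 uint32_t shift, uint64_t base)
    {
        /* Valid only while delta * mult + base fits in 64 bits. */
        return ((delta * mult) + base) >> shift;
    }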
|
/lib/zstd/compress/

zstd_compress_literals.c
    123  { int const shift = MIN(9-(int)strategy, 3);  in ZSTD_minLiteralsToCompress() (local)
    124  size_t const mintc = (huf_repeat == HUF_repeat_valid) ? 6 : (size_t)8 << shift;  in ZSTD_minLiteralsToCompress()
|
zstd_compress_sequences.c
    143  unsigned const shift = 8 - accuracyLog;  in ZSTD_crossEntropyCost() (local)
    149  unsigned const norm256 = normAcc << shift;  in ZSTD_crossEntropyCost()
|
zstd_opt.c
    106  ZSTD_downscaleStats(unsigned* table, U32 lastEltIndex, U32 shift, base_directive_e base1)  in ZSTD_downscaleStats() (argument)
    110  (unsigned)lastEltIndex+1, (unsigned)shift );  in ZSTD_downscaleStats()
    111  assert(shift < 30);  in ZSTD_downscaleStats()
    114  unsigned const newStat = base + (table[s] >> shift);  in ZSTD_downscaleStats()
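
ZSTD_downscaleStats() ages the optimal parser's frequency statistics: every counter decays by a right shift, while the base (selected by base1) keeps entries nonzero so no symbol becomes uncodable. The core loop as a sketch:

    /* Decay all counters by 2^shift; base > 0 keeps symbols reachable. */
    static void downscale_stats(unsigned *table, unsigned last_elt_index,
                                unsigned shift, unsigned base)
    {
        unsigned s;

        for (s = 0; s <= last_elt_index; s++)
            table[s] = base + (table[s] >> shift);
    }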
|
/lib/reed_solomon/

test_rslib.c
     87  int shift;  (member)
    463  int pad = (pad_coef[i].mult * max_pad) >> pad_coef[i].shift;  in run_exercise()
|
/lib/tests/

printf_kunit.c
    576  int shift;  (member)
    606  flags |= (values[i] & pft[i].mask) << pft[i].shift;  in page_flags_test()
|
/lib/crypto/x86/

chacha-ssse3-x86_64.S
    238  # 7/12-bit word rotation uses traditional shift+OR.
|
chacha-avx2-x86_64.S
    549  # 7/12-bit word rotation uses traditional shift+OR.
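
The comment shared by both files marks the fallback rotation: SSSE3/AVX2 can rotate words by 8 or 16 bits with a single byte shuffle (pshufb/vpshufb), but the ChaCha quarter-round's rotations by 7 and 12 bits need the classic two-shifts-plus-OR. The same operation in plain C:

    #include <stdint.h>

    /* Rotate left by n bits, 0 < n < 32. */
    static inline uint32_t rotl32(uint32_t x, unsigned int n)
    {
        return (x << n) | (x >> (32 - n));
    }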
|