/lib/
kfifo.c
      97  off &= fifo->mask;  in kfifo_copy_in()
      99  off *= esize;  in kfifo_copy_in()
     136  off &= fifo->mask;  in kfifo_copy_out()
     138  off *= esize;  in kfifo_copy_out()
     174  *tail = off;  in __kfifo_out_linear()
     198  off &= fifo->mask;  in kfifo_copy_from_user()
     200  off *= esize;  in kfifo_copy_from_user()
     258  off &= fifo->mask;  in kfifo_copy_to_user()
     260  off *= esize;  in kfifo_copy_to_user()
     336  off &= fifo->mask;  in setup_sgl()
     [all …]
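The kfifo hits above all revolve around one idiom: the read/write counters
run free, and because the capacity is a power of two, wrapping is a single
AND with mask = capacity - 1; the `off *= esize` lines then scale that
element index into a byte offset when the element size is larger than one
byte. A minimal sketch of the idiom for a byte buffer; `struct ring` and
`ring_copy_in()` are illustrative names, not the kfifo API:

    #include <string.h>

    struct ring {
            unsigned char *data;
            unsigned int in;    /* free-running write counter */
            unsigned int out;   /* free-running read counter  */
            unsigned int mask;  /* capacity - 1; capacity is a power of two */
    };

    /* Caller must ensure len <= free space, as kfifo's wrappers do. */
    static void ring_copy_in(struct ring *r, const void *src, unsigned int len)
    {
            unsigned int off = r->in & r->mask; /* wrap, as in kfifo_copy_in() */
            unsigned int cap = r->mask + 1;
            unsigned int l = len < cap - off ? len : cap - off;

            memcpy(r->data + off, src, l);                            /* run to the end */
            memcpy(r->data, (const unsigned char *)src + l, len - l); /* wrapped remainder */
            r->in += len;  /* the counter only grows; the AND does the wrap */
    }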
bitmap-str.c
      80  int nmaskbits, loff_t off, size_t count)  in bitmap_print_to_buf() argument
      90  size = memory_read_from_buffer(buf, count, &off, data, strlen(data) + 1);  in bitmap_print_to_buf()
     181  int nmaskbits, loff_t off, size_t count)  in bitmap_print_bitmask_to_buf() argument
     183  return bitmap_print_to_buf(false, buf, maskp, nmaskbits, off, count);  in bitmap_print_bitmask_to_buf()
     199  int nmaskbits, loff_t off, size_t count)  in bitmap_print_list_to_buf() argument
     201  return bitmap_print_to_buf(true, buf, maskp, nmaskbits, off, count);  in bitmap_print_list_to_buf()
     214  unsigned int off;  member
     225  bitmap_set(bitmap, start, min(r->end - start + 1, r->off));  in bitmap_set_region()
     230  if (r->start > r->end || r->group_len == 0 || r->off > r->group_len)  in bitmap_check_region()
     328  str = bitmap_getnum(str + 1, &r->off, lastbit);  in bitmap_parse_region()
     [all …]
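The line 90 hit pages an already-formatted string out through repeated
reads with memory_read_from_buffer(). A userspace sketch of that helper's
contract as I understand it (copy at most `count` bytes starting at `*ppos`
out of `available` bytes, advance the position, return 0 at end of data);
this is not the kernel implementation:

    #include <string.h>
    #include <sys/types.h>

    static ssize_t read_from_buffer(void *to, size_t count, off_t *ppos,
                                    const void *from, size_t available)
    {
            off_t pos = *ppos;

            if (pos < 0)
                    return -1;  /* the kernel helper returns -EINVAL here */
            if ((size_t)pos >= available)
                    return 0;   /* offset past the data: EOF */
            if (count > available - (size_t)pos)
                    count = available - (size_t)pos;

            memcpy(to, (const char *)from + pos, count);
            *ppos = pos + count;  /* the next read resumes here */
            return (ssize_t)count;
    }

Calling it in a loop with a small `count` walks through the buffer chunk by
chunk, which is exactly what a paged sysfs-style read needs.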
bitmap.c
      98  for (k = 0; off + k < lim; ++k) {  in __bitmap_shift_right()
     105  if (!rem || off + k + 1 >= lim)  in __bitmap_shift_right()
     108  upper = src[off + k + 1];  in __bitmap_shift_right()
     109  if (off + k + 1 == lim - 1)  in __bitmap_shift_right()
     113  lower = src[off + k];  in __bitmap_shift_right()
     114  if (off + k == lim - 1)  in __bitmap_shift_right()
     119  if (off)  in __bitmap_shift_right()
     120  memset(&dst[lim - off], 0, off*sizeof(unsigned long));  in __bitmap_shift_right()
     143  for (k = lim - off - 1; k >= 0; --k) {  in __bitmap_shift_left()
     155  dst[k + off] = lower | upper;  in __bitmap_shift_left()
     [all …]
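The __bitmap_shift_right() hits show the classic multiword shift: each
destination word combines the low bits of src[off + k] with the bits that
spill down from src[off + k + 1], and the `off` words vacated at the top
are zeroed. A simplified sketch that assumes the bitmap is a whole number
of words (the kernel version additionally masks a partial last word):

    #include <string.h>

    #define BITS_PER_LONG (8 * sizeof(unsigned long))

    static void bitmap_shr(unsigned long *dst, const unsigned long *src,
                           unsigned int shift, unsigned int lim /* words */)
    {
            unsigned int off = shift / BITS_PER_LONG;  /* whole-word part */
            unsigned int rem = shift % BITS_PER_LONG;  /* sub-word part   */
            unsigned int k;

            for (k = 0; off + k < lim; ++k) {
                    unsigned long lower = src[off + k] >> rem;
                    unsigned long upper = 0;

                    /* bits falling down from the next-higher word */
                    if (rem && off + k + 1 < lim)
                            upper = src[off + k + 1] << (BITS_PER_LONG - rem);
                    dst[k] = lower | upper;
            }
            if (off)  /* nothing is left to fill the top words */
                    memset(&dst[lim - off], 0, off * sizeof(unsigned long));
    }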
scatterlist.c
    1099  size_t len, off;  in extract_user_to_sg() local
    1108  extraction_flags, &off);  in extract_user_to_sg()
    1123  sg_set_page(sg, page, seg, off);  in extract_user_to_sg()
    1127  off = 0;  in extract_user_to_sg()
    1156  size_t off, len;  in extract_bvec_to_sg() local
    1165  off = bv[i].bv_offset + start;  in extract_bvec_to_sg()
    1167  sg_set_page(sg, bv[i].bv_page, len, off);  in extract_bvec_to_sg()
    1204  size_t off, len, seg;  in extract_kvec_to_sg() local
    1213  off = kaddr & ~PAGE_MASK;  in extract_kvec_to_sg()
    1226  sg_set_page(sg, page, len, off);  in extract_kvec_to_sg()
    [all …]
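The extract_kvec_to_sg() hits split a kernel virtual range at page
boundaries: `off = kaddr & ~PAGE_MASK` is the in-page offset, and each
scatterlist entry then covers at most one page. A plain C sketch of that
decomposition with an assumed 4 KiB page; `split_into_pages()` is an
illustrative stand-in, not a kernel helper:

    #include <stdio.h>
    #include <stdint.h>

    #define PAGE_SIZE 4096UL
    #define PAGE_MASK (~(PAGE_SIZE - 1))

    static void split_into_pages(uintptr_t kaddr, size_t len)
    {
            while (len) {
                    size_t off = kaddr & ~PAGE_MASK;  /* offset within the page */
                    size_t seg = PAGE_SIZE - off;     /* room left in this page */

                    if (seg > len)
                            seg = len;
                    /* a real caller would do sg_set_page(sg, page, seg, off) here */
                    printf("page %#lx  off %zu  len %zu\n",
                           (unsigned long)(kaddr & PAGE_MASK), off, seg);
                    kaddr += seg;
                    len -= seg;
            }
    }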
Kconfig.ubsan
      28  trade-off.
vsprintf.c
    1536  u8 off = 0;  in ip6_addr_string_sa() local
    1558  off = 1;  in ip6_addr_string_sa()
    1562  p = ip6_compressed_string(ip6_addr + off, addr);  in ip6_addr_string_sa()
    1564  p = ip6_string(ip6_addr + off, addr, fmt6);  in ip6_addr_string_sa()
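Here `off` reserves ip6_addr[0] for an opening '[': when a port, flowinfo,
or scope id follows, the address text is formatted one byte into the buffer
so the bracket can be placed without moving the string afterwards. A hedged
userspace sketch of the same trick; fmt_sockaddr6() and its parameters are
invented for illustration:

    #include <stdio.h>
    #include <string.h>

    static void fmt_sockaddr6(char *buf, size_t size,
                              const char *addr_str, int port, int have_port)
    {
            char tmp[64];
            size_t off = 0;

            if (have_port) {
                    tmp[0] = '[';
                    off = 1;  /* leave room for the bracket */
            }
            snprintf(tmp + off, sizeof(tmp) - off, "%s", addr_str);
            if (have_port) {
                    size_t n = strlen(tmp);

                    snprintf(tmp + n, sizeof(tmp) - n, "]:%d", port);
            }
            snprintf(buf, size, "%s", tmp);
    }

For example, fmt_sockaddr6(buf, sizeof(buf), "::1", 80, 1) yields "[::1]:80".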
test_bpf.c
    2725  int off, ind;  in __bpf_fill_staggered_jumps() local
    2738  for (ind = 0, off = size; ind <= size; ind++, off -= 2) {  in __bpf_fill_staggered_jumps()
    2742  if (off == 0)  in __bpf_fill_staggered_jumps()
    2743  off--;  in __bpf_fill_staggered_jumps()
    2745  loc = abs(off);  in __bpf_fill_staggered_jumps()
    2750  ins[2].off = 3 * (off - 1);  in __bpf_fill_staggered_jumps()
   15512  if (insn->off == TAIL_CALL_NULL)  in prepare_tail_call_tests()
   15514  else if (insn->off == TAIL_CALL_INVALID)  in prepare_tail_call_tests()
   15517  insn->imm = which + insn->off;  in prepare_tail_call_tests()
   15518  insn->off = 0;  in prepare_tail_call_tests()
Kconfig
     398  bool "Force CPU masks off stack" if DEBUG_PER_CPU_MAPS
Kconfig.debug
     682  debugfs=[on,no-mount,off]. The restrictions apply for API access
     701  Access is off. Clients get -PERM when trying to create nodes in
     703  Client can then back-off or continue without debugfs access.
    3357  compilation on or off. This can be used to enable extra debugging
maple_tree.c
    1570  unsigned char *off)  in ma_max_gap() argument
    1583  *off = offset;  in ma_max_gap()
/lib/crypto/powerpc/
sha256-spe-asm.S
      90  #define LOAD_DATA(reg, off) \  argument
      91  lwz reg,off(rWP); /* load data */
      95  #define LOAD_DATA(reg, off) \  argument
     101  #define R_LOAD_W(a, b, c, d, e, f, g, h, w, off) \  argument
     102  LOAD_DATA(w, off) /* 1: W */ \
     110  lwz rT2,off(rKP); /* 1: K */ \
     126  LOAD_DATA(w, off+4) /* 2: W */ \
     137  lwz rT2,off+4(rKP); /* 2: K */ \
     156  #define R_CALC_W(a, b, c, d, e, f, g, h, w0, w1, w4, w5, w7, k, off) \  argument
     182  evldw rT1,off(rKP); /* k */ \
sha1-spe-asm.S
      95  #define LOAD_DATA(reg, off) \  argument
      96  lwz reg,off(rWP); /* load data */
     100  #define LOAD_DATA(reg, off) \  argument
     106  #define R_00_15(a, b, c, d, e, w0, w1, k, off) \  argument
     107  LOAD_DATA(w0, off) /* 1: W */ \
     116  LOAD_DATA(w1, off+4) /* 2: W */ \
/lib/zstd/compress/
zstd_opt.c
     751  matches[mnum].off = OFFSET_TO_OFFBASE(curr - matchIndex);  in ZSTD_insertBtAndGetAllMatches()
    1017  matches[*nbMatches].off = candidateOffBase;  in ZSTD_optLdm_maybeAddMatch()
    1157  U32 const maxOffBase = matches[nbMatches-1].off;  in ZSTD_compressBlock_opt_generic()
    1164  lastStretch.off = maxOffBase;  in ZSTD_compressBlock_opt_generic()
    1182  U32 const offBase = matches[matchNb].off;  in ZSTD_compressBlock_opt_generic()
    1190  opt[pos].off = offBase;  in ZSTD_compressBlock_opt_generic()
    1299  lastStretch.off = matches[nbMatches-1].off;  in ZSTD_compressBlock_opt_generic()
    1307  U32 const offset = matches[matchNb].off;  in ZSTD_compressBlock_opt_generic()
    1329  opt[pos].off = offset;  in ZSTD_compressBlock_opt_generic()
    1356  assert(lastStretch.off > 0);  in ZSTD_compressBlock_opt_generic()
    [all …]
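The `.off` fields carry zstd's offBase sumtype. Per my reading of the
convention behind OFFSET_TO_OFFBASE(): values 1..ZSTD_REP_NUM name the
repeat-offset codes, while anything larger is a literal offset shifted up
by ZSTD_REP_NUM, so a single U32 distinguishes both cases. A sketch under
those assumptions:

    #include <assert.h>

    #define REP_NUM 3  /* zstd's ZSTD_REP_NUM */

    static unsigned offset_to_offbase(unsigned offset)
    {
            assert(offset > 0);
            return offset + REP_NUM;  /* literal offsets sit above the repcodes */
    }

    static unsigned repcode_to_offbase(unsigned rep)
    {
            assert(rep >= 1 && rep <= REP_NUM);
            return rep;               /* repcodes keep the low values */
    }

    static int offbase_is_repcode(unsigned offbase)
    {
            return offbase >= 1 && offbase <= REP_NUM;
    }

    static unsigned offbase_to_offset(unsigned offbase)
    {
            assert(offbase > REP_NUM);
            return offbase - REP_NUM; /* invert the encoding */
    }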
zstd_compress_internal.h
     193  U32 off; /* Offset sumtype code for the match, using ZSTD_storeSeq() format */  member
     216  U32 off; /* offset of previous match */  member
/lib/crypto/arm/
sha1-armv4-large.S
      23  @ Size/performance trade-off