/lib/tests/
usercopy_kunit.c
    38   size_t size;    member
    53   size_t size = priv->size;    in usercopy_test_check_nonzero_user()  local
    63   size = 1024;    in usercopy_test_check_nonzero_user()
    112  size_t size = priv->size;    in usercopy_test_copy_struct_from_user()  local
    127  ksize = size;    in usercopy_test_copy_struct_from_user()
    128  usize = size;    in usercopy_test_copy_struct_from_user()
    139  ksize = size;    in usercopy_test_copy_struct_from_user()
    194  size val_##size = (check); \    in usercopy_test_valid()
    196  put_user(val_##size, (size __user *)usermem), \    in usercopy_test_valid()
    200  get_user(val_##size, (size __user *)usermem), \    in usercopy_test_valid()
    [all …]
kunit_iov_iter.c
    79   size_t size = 0;    in iov_kunit_load_kvec()  local
    121  size = iter.count;    in iov_kunit_copy_to_kvec()
    225  size_t size = 0;    in iov_kunit_load_bvec()  local
    452  i += size;    in iov_kunit_copy_to_folioq()
    514  i += size;    in iov_kunit_copy_from_folioq()
    613  i += size;    in iov_kunit_copy_to_xarray()
    671  i += size;    in iov_kunit_copy_from_xarray()
    736  size -= len;    in iov_kunit_extract_pages_kvec()
    814  size -= len;    in iov_kunit_extract_pages_bvec()
    898  size -= len;    in iov_kunit_extract_pages_folioq()
    [all …]
/lib/
find_bit.c
    31   unsigned long idx, val, sz = (size); \
    113  unsigned long size)    in _find_first_and_bit()  argument
    125  unsigned long size)    in _find_first_andnot_bit()  argument
    137  unsigned long size)    in _find_first_and_and_bit()  argument
    230  if (size) {    in _find_last_bit()
    242  return size;    in _find_last_bit()
    251  if (offset == size)    in find_next_clump8()
    252  return size;    in find_next_clump8()
    305  int w = bitmap_weight(addr, size);    in find_random_bit()
    309  return size;    in find_random_bit()
    [all …]
test_meminit.c
    46   WARN_ON(skip > size);    in fill_with_garbage_skip()
    47   size -= skip;    in fill_with_garbage_skip()
    52   size -= sizeof(*p);    in fill_with_garbage_skip()
    54   if (size)    in fill_with_garbage_skip()
    133  buf = vmalloc(size);    in do_vmalloc_size()
    156  int i, size;    in test_kvmalloc()  local
    159  size = 1 << i;    in test_kvmalloc()
    355  c = kmem_cache_create("test_cache", size, size, 0, NULL);    in do_kmem_cache_size_bulk()
    385  size = 8 << i;    in test_kmemcache()
    406  int i, size;    in test_rcu_persistent()  local
    [all …]
sort.c
    184  i -= size;    in parent()
    198  size_t n = num * size, a = (num/2) * size;    in __sort_r()
    199  const unsigned int lsbit = size & -size; /* Used to find parent */    in __sort_r()
    231  n -= size;    in __sort_r()
    233  shift = do_cmp(base + size, base + 2 * size, cmp_func, priv) <= 0;    in __sort_r()
    234  a = size << shift;    in __sort_r()
    235  n -= size;    in __sort_r()
    253  for (b = a; c = 2*b + size, (d = c + size) < n;)    in __sort_r()
    271  n -= size;    in __sort_r()
    273  if (n == size * 2 && do_cmp(base, base + size, cmp_func, priv) > 0)    in __sort_r()
    [all …]
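The `__sort_r()` entries above hint at why `lsbit = size & -size` exists: the heap is addressed by byte offsets, the children of the element at offset p sit at `2*p + size` and `2*p + 2*size`, and recovering the parent means halving and rounding down to a multiple of `size` without dividing by `size`. A standalone sketch of that offset arithmetic, assuming the shape suggested by the snippets rather than quoting the kernel source:

```c
/* Sketch of the parent-offset trick suggested by sort.c's parent()/lsbit
 * lines; byte offsets instead of indices, element size need not be a
 * power of two.  Illustrative, not the kernel code verbatim. */
#include <stdio.h>
#include <stddef.h>

static size_t parent(size_t i, unsigned int lsbit, size_t size)
{
	i -= size;                      /* children live at 2p + size and 2p + 2*size */
	i -= size & -(i & lsbit);       /* round down to an even multiple of size */
	return i / 2;
}

int main(void)
{
	size_t size = 24;                        /* 24-byte elements */
	unsigned int lsbit = size & -size;       /* lowest set bit of the element size */

	/* element 3 sits at byte offset 72; its heap parent is element 1 at offset 24 */
	printf("parent of offset 72: %zu\n", parent(72, lsbit, size));
	return 0;
}
```

Because `i - size` is either `2p` (an even multiple of `size`) or `2p + size` (an odd multiple), checking the single bit `lsbit` is enough to tell the two cases apart.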
genalloc.c
    292  if (size == 0)    in gen_pool_alloc_algo_owner()
    317  size = nbits << order;    in gen_pool_alloc_algo_owner()
    446  memset(vaddr, 0, size);    in gen_pool_dma_zalloc_algo()
    509  size = nbits << order;    in gen_pool_free_owner()
    554  size_t size)    in gen_pool_has_addr()  argument
    602  size_t size = 0;    in gen_pool_size()  local
    606  size += chunk_size(chunk);    in gen_pool_size()
    608  return size;    in gen_pool_size()
    707  return size;    in gen_pool_fixed_alloc()
    712  start_bit = size;    in gen_pool_fixed_alloc()
    [all …]
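The recurring `size = nbits << order` lines reflect how genalloc accounts for memory: a pool tracks each chunk with a bitmap in which one bit stands for 2^order bytes (the pool's minimum allocation order), so byte counts are converted to bit counts and back. A small illustrative sketch of that arithmetic; the round-up rule for the request is an assumption here, not quoted from the file:

```c
/* Unit arithmetic behind "size = nbits << order": one bitmap bit covers
 * 2^order bytes, so a byte request becomes a bit count (rounded up) and
 * the bytes actually reserved are nbits << order.  Illustrative only. */
#include <stdio.h>

int main(void)
{
	unsigned int order = 5;                 /* assumed: 32-byte allocation granularity */
	unsigned long request = 100;            /* bytes asked for */

	unsigned long nbits = (request + (1UL << order) - 1) >> order;  /* 4 bits */
	unsigned long size = nbits << order;                            /* 128 bytes reserved */

	printf("request %lu -> %lu bits -> %lu bytes\n", request, nbits, size);
	return 0;
}
```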
devres.c
    30   resource_size_t size,    in __devm_ioremap()  argument
    42   addr = ioremap(offset, size);    in __devm_ioremap()
    73   resource_size_t size)    in devm_ioremap()  argument
    88   resource_size_t size)    in devm_ioremap_uc()  argument
    103  resource_size_t size)    in devm_ioremap_wc()  argument
    127  resource_size_t size;    in __devm_ioremap_resource()  local
    142  size = resource_size(res);    in __devm_ioremap_resource()
    242  resource_size_t *size)    in devm_of_iomap()  argument
    248  if (size)    in devm_of_iomap()
    356  resource_size_t size;    member
    [all …]
decompress_unlz4.c
    43   long size = in_len;    in unlz4()  local
    84   size = fill(inp, 4);    in unlz4()
    85   if (size < 4) {    in unlz4()
    95   size -= 4;    in unlz4()
    108  size = fill(inp, 4);    in unlz4()
    109  if (size == 0)    in unlz4()
    111  if (size < 4) {    in unlz4()
    124  size -= 4;    in unlz4()
    141  size -= 4;    in unlz4()
    182  size -= chunksize;    in unlz4()
    [all …]
logic_iomem.c
    75   (unsigned long long)offset, size);    in real_ioremap()
    112  size, &mapped_areas[i].ops,    in ioremap()
    131  return real_ioremap(offset, size);    in ioremap()
    202  size_t size)    in real_memcpy_fromio()  argument
    207  memset(buffer, 0xff, size);    in real_memcpy_fromio()
    211  size_t size)    in real_memcpy_toio()  argument
    260  real_memset_io(addr, value, size);    in memset_io()
    271  for (offs = 0; offs < size; offs++)    in memset_io()
    277  size_t size)    in memcpy_fromio()  argument
    295  for (offs = 0; offs < size; offs++)    in memcpy_fromio()
    [all …]
usercopy.c
    45   int check_zeroed_user(const void __user *from, size_t size)    in check_zeroed_user()  argument
    50   if (unlikely(size == 0))    in check_zeroed_user()
    54   size += align;    in check_zeroed_user()
    56   if (!user_read_access_begin(from, size))    in check_zeroed_user()
    63   while (size > sizeof(unsigned long)) {    in check_zeroed_user()
    68   size -= sizeof(unsigned long);    in check_zeroed_user()
    73   if (size < sizeof(unsigned long))    in check_zeroed_user()
    74   val &= aligned_byte_mask(size);    in check_zeroed_user()
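Taken together, these lines outline check_zeroed_user(): widen the range to an aligned start, read the buffer in unsigned-long-sized chunks inside user_read_access_begin(), and mask off the bytes of the final word that fall outside the caller's range. Below is a userspace analogue of the same word-at-a-time zero check, a sketch that works on a plain byte buffer and handles the tail byte by byte instead of with aligned_byte_mask():

```c
/* Userspace sketch of a word-at-a-time "is this buffer all zero" check,
 * in the spirit of the check_zeroed_user() lines above.  Assumes an
 * ordinary byte buffer rather than a __user pointer. */
#include <stdbool.h>
#include <string.h>
#include <stdio.h>

static bool buffer_is_zeroed(const unsigned char *p, size_t size)
{
	while (size >= sizeof(unsigned long)) {
		unsigned long val;

		memcpy(&val, p, sizeof(val));   /* alignment-safe load */
		if (val)
			return false;
		p += sizeof(val);
		size -= sizeof(val);
	}
	while (size--) {                        /* trailing partial word, byte by byte */
		if (*p++)
			return false;
	}
	return true;
}

int main(void)
{
	unsigned char buf[13] = { 0 };

	printf("%d\n", buffer_is_zeroed(buf, sizeof(buf)));     /* 1 */
	buf[12] = 0x80;
	printf("%d\n", buffer_is_zeroed(buf, sizeof(buf)));     /* 0 */
	return 0;
}
```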
decompress_unxz.c
    164  #define kmalloc(size, flags) malloc(size)    argument
    166  #define vmalloc(size) malloc(size)    argument
    190  for (i = 0; i < size; ++i)    in memeq()
    199  static void memzero(void *buf, size_t size)    in memzero()  argument
    202  uint8_t *e = b + size;    in memzero()
    218  for (i = 0; i < size; ++i)    in memmove()
    221  i = size;    in memmove()
    259  long (*fill)(void *dest, unsigned long size),    in unxz()  argument
    260  long (*flush)(void *src, unsigned long size),    in unxz()
    406  long (*fill)(void *dest, unsigned long size),    in __decompress()  argument
    [all …]
iov_iter.c
    101   size -= count;    in fault_in_iov_iter_readable()
    144   size -= count;    in fault_in_iov_iter_writeable()
    798   len = size;    in iov_iter_aligned_iovec()
    805   size -= len;    in iov_iter_aligned_iovec()
    823   len = size;    in iov_iter_aligned_bvec()
    830   size -= len;    in iov_iter_aligned_bvec()
    915   len = size;    in iov_iter_alignment_bvec()
    929   if (size)    in iov_iter_alignment()
    1750  return size;    in iov_iter_extract_bvec_pages()
    1772  if (size)    in iov_iter_extract_kvec_pages()
    [all …]
seq_buf.c
    40   return s->len + len <= s->size;    in seq_buf_can_fit()
    71   WARN_ON(s->size == 0);    in seq_buf_vprintf()
    73   if (s->len < s->size) {    in seq_buf_vprintf()
    159  WARN_ON(s->size == 0);    in seq_buf_bprintf()
    161  if (s->len < s->size) {    in seq_buf_bprintf()
    186  WARN_ON(s->size == 0);    in seq_buf_puts()
    213  WARN_ON(s->size == 0);    in seq_buf_putc()
    238  WARN_ON(s->size == 0);    in seq_buf_putmem()
    272  WARN_ON(s->size == 0);
    318  WARN_ON(s->size == 0);
    [all …]
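The seq_buf_can_fit() line is essentially the whole overflow policy for a seq_buf: an append of len more bytes is accepted only while s->len + len stays within s->size, and the WARN_ON(s->size == 0) checks guard against using a buffer that was never initialized. A minimal sketch of that check with a stand-in struct (same field names, not the kernel type):

```c
/* Stand-in for the seq_buf capacity check shown above: appending is
 * allowed only while the used length plus the new bytes stays within
 * the buffer size.  Illustrative only. */
#include <stddef.h>
#include <stdio.h>

struct seq_buf_sketch {
	size_t size;    /* capacity of the backing buffer */
	size_t len;     /* bytes already written */
};

static int can_fit(const struct seq_buf_sketch *s, size_t len)
{
	return s->len + len <= s->size;
}

int main(void)
{
	struct seq_buf_sketch s = { .size = 16, .len = 10 };

	printf("%d %d\n", can_fit(&s, 6), can_fit(&s, 7));      /* 1 0 */
	return 0;
}
```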
kfifo.c
    32   size = roundup_pow_of_two(size);    in __kfifo_alloc()
    38   if (size < 2) {    in __kfifo_alloc()
    50   fifo->mask = size - 1;    in __kfifo_alloc()
    70   size /= esize;    in __kfifo_init()
    73   size = rounddown_pow_of_two(size);    in __kfifo_init()
    80   if (size < 2) {    in __kfifo_init()
    100  size *= esize;    in kfifo_copy_in()
    139  size *= esize;    in kfifo_copy_out()
    201  size *= esize;    in kfifo_copy_from_user()
    261  size *= esize;    in kfifo_copy_to_user()
    [all …]
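The __kfifo_alloc()/__kfifo_init() lines show why kfifo forces the element count to a power of two and keeps mask = size - 1: the in/out counters can then run freely and be reduced to a slot index with a single AND instead of a modulo. A small sketch of that indexing idea (the round-up helper here is a local stand-in, not the kernel's roundup_pow_of_two()):

```c
/* Power-of-two ring indexing in the spirit of the kfifo lines above:
 * round the requested size up to a power of two, keep mask = size - 1,
 * and wrap a free-running counter with AND.  Illustrative only. */
#include <stdio.h>

static unsigned int roundup_pow2(unsigned int v)        /* stand-in helper, v > 0 */
{
	unsigned int p = 1;

	while (p < v)
		p <<= 1;
	return p;
}

int main(void)
{
	unsigned int size = roundup_pow2(100);  /* 128 slots */
	unsigned int mask = size - 1;           /* 0x7f */
	unsigned int in = 130;                  /* free-running write counter */

	printf("slot %u of %u\n", in & mask, size);     /* slot 2 */
	return 0;
}
```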
logic_pio.c
    47   end = new_range->hw_start + new_range->size;    in logic_pio_register_range()
    59   if (start >= range->hw_start + range->size ||    in logic_pio_register_range()
    61   mmio_end = range->io_start + range->size;    in logic_pio_register_range()
    68   iio_sz += range->size;    in logic_pio_register_range()
    80   new_range->size = SZ_64K;    in logic_pio_register_range()
    85   if (iio_sz + new_range->size - 1 > IO_SPACE_LIMIT) {    in logic_pio_register_range()
    148  if (in_range(pio, range->io_start, range->size)) {    in find_io_range()
    190  resource_size_t addr, resource_size_t size)    in logic_pio_trans_hwaddr()  argument
    199  if (range->size < size) {    in logic_pio_trans_hwaddr()
    201  &size, &range->size);    in logic_pio_trans_hwaddr()
    [all …]
bucket_locks.c
    19   unsigned int i, size;    in __alloc_bucket_spinlocks()  local
    28   size = min_t(unsigned int, nr_pcpus * cpu_mult, max_size);    in __alloc_bucket_spinlocks()
    30   size = max_size;    in __alloc_bucket_spinlocks()
    34   tlocks = kvmalloc_array(size, sizeof(spinlock_t), gfp);    in __alloc_bucket_spinlocks()
    37   for (i = 0; i < size; i++) {    in __alloc_bucket_spinlocks()
    44   *locks_mask = size - 1;    in __alloc_bucket_spinlocks()
rhashtable.c
    86    size >>= shift;    in nested_table_free()
    96    unsigned int size = tbl->size >> tbl->nest;    in nested_bucket_table_free()  local
    154   size_t size;    in nested_bucket_table_alloc()  local
    182   size_t size;    in bucket_table_alloc()  local
    202   tbl->size = size;    in bucket_table_alloc()
    405   if (old_tbl->size <= size)    in rhashtable_shrink()
    456   size = tbl->size;    in rhashtable_insert_rehash()
    461   size *= 2;    in rhashtable_insert_rehash()
    1031  size_t size;    in rhashtable_init_noprof()  local
    1193  unsigned int size = tbl->size >> tbl->nest;    in __rht_bucket_nested()  local
    [all …]
test_static_keys.c
    55   static void invert_keys(struct test_key *keys, int size)    in invert_keys()  argument
    60   for (i = 0; i < size; i++) {    in invert_keys()
    68   static int verify_keys(struct test_key *keys, int size, bool invert)    in verify_keys()  argument
    73   for (i = 0; i < size; i++) {    in verify_keys()
    112  int size;    in test_key_func()  local
    211  size = ARRAY_SIZE(static_key_tests);    in test_key_func()
    213  ret = verify_keys(static_key_tests, size, false);    in test_key_func()
    217  invert_keys(static_key_tests, size);    in test_key_func()
    218  ret = verify_keys(static_key_tests, size, true);    in test_key_func()
    222  invert_keys(static_key_tests, size);    in test_key_func()
    [all …]
string_helpers.c
    66   size = 0;    in string_get_size()
    67   if (size == 0)    in string_get_size()
    86   while (size >> 32) {    in string_get_size()
    93   size *= blk_size;    in string_get_size()
    103  sf_cap = size;    in string_get_size()
    120  size += 1;    in string_get_size()
    327  if (!size)    in string_unescape()
    328  size = SIZE_MAX;    in string_unescape()
    333  size--;    in string_unescape()
    863  size_t size;    in strim()  local
    [all …]
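In string_get_size(), `size *= blk_size` converts a block count into bytes and the `while (size >> 32)` loop belongs to a workaround for doing the scaling with 32-bit divisions. The sketch below keeps only the basic unit-scaling idea, with an assumed suffix table and no rounding of the remainder:

```c
/* Simplified unit scaling in the spirit of string_get_size(): multiply the
 * block count by the block size, then divide by the base until the value
 * fits a short suffix.  Suffix table and base are illustrative choices. */
#include <stdio.h>

int main(void)
{
	static const char *suffix[] = { "B", "KiB", "MiB", "GiB", "TiB" };
	unsigned long long blk_count = 6ULL * 1024 * 1024;      /* blocks */
	unsigned long long blk_size = 512;                      /* bytes per block */
	unsigned long long size = blk_count * blk_size;         /* 3 GiB in bytes */
	unsigned int base = 1024, i = 0;

	while (size >= base && i < 4) {
		size /= base;
		i++;
	}
	printf("%llu %s\n", size, suffix[i]);   /* prints "3 GiB" */
	return 0;
}
```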
/lib/crypto/mpi/
mpih-mul.c
    73   prodp[size] = cy;    in mul_n_basecase()
    87   prodp[size] = cy;    in mul_n_basecase()
    98   if (size & 1) {    in mul_n()
    167  tspace + size);    in mul_n()
    171  cy = mpihelp_add_n(prodp + size, prodp + size,    in mul_n()
    178  size);    in mul_n()
    182  size);    in mul_n()
    203  mpihelp_add_1(prodp + size, prodp + size, size, 1);    in mul_n()
    247  if (size & 1) {    in mpih_sqr_n()
    293  cy = mpihelp_add_n(prodp + size, prodp + size,    in mpih_sqr_n()
    [all …]
/lib/xz/
xz_dec_bcj.c
    61   size_t size;    member
    104  if (size <= 4)    in bcj_x86()
    107  size -= 4;    in bcj_x86()
    166  size &= ~(size_t)3;    in bcj_powerpc()
    300  if (size < 4)    in bcj_armthumb()
    303  size -= 4;    in bcj_armthumb()
    404  if (size < 8)    in bcj_riscv()
    407  size -= 8;    in bcj_riscv()
    499  size -= *pos;    in bcj_apply()
    606  if (s->temp.size < b->out_size - b->out_pos || s->temp.size == 0) {    in xz_dec_bcj_run()
    [all …]
/lib/crc/s390/
crc32le-vx.c
    92   static u32 crc32_le_vgfm_generic(u32 crc, unsigned char const *buf, size_t size, unsigned long *con…    in crc32_le_vgfm_generic()  argument
    116  size -= 64;    in crc32_le_vgfm_generic()
    118  while (size >= 64) {    in crc32_le_vgfm_generic()
    136  size -= 64;    in crc32_le_vgfm_generic()
    148  while (size >= 16) {    in crc32_le_vgfm_generic()
    153  size -= 16;    in crc32_le_vgfm_generic()
    232  u32 crc32_le_vgfm_16(u32 crc, unsigned char const *buf, size_t size)    in crc32_le_vgfm_16()  argument
    234  return crc32_le_vgfm_generic(crc, buf, size, &constants_CRC_32_LE[0]);    in crc32_le_vgfm_16()
    237  u32 crc32c_le_vgfm_16(u32 crc, unsigned char const *buf, size_t size)    in crc32c_le_vgfm_16()  argument
    239  return crc32_le_vgfm_generic(crc, buf, size, &constants_CRC_32C_LE[0]);    in crc32c_le_vgfm_16()
crc32be-vx.c
    80   u32 crc32_be_vgfm_16(u32 crc, unsigned char const *buf, size_t size)    in crc32_be_vgfm_16()  argument
    93   size -= 64;    in crc32_be_vgfm_16()
    95   while (size >= 64) {    in crc32_be_vgfm_16()
    111  size -= 64;    in crc32_be_vgfm_16()
    119  while (size >= 16) {    in crc32_be_vgfm_16()
    123  size -= 16;    in crc32_be_vgfm_16()
/lib/zstd/common/
allocations.h
    27   MEM_STATIC void* ZSTD_customMalloc(size_t size, ZSTD_customMem customMem)    in ZSTD_customMalloc()  argument
    30   return customMem.customAlloc(customMem.opaque, size);    in ZSTD_customMalloc()
    31   return ZSTD_malloc(size);    in ZSTD_customMalloc()
    34   MEM_STATIC void* ZSTD_customCalloc(size_t size, ZSTD_customMem customMem)    in ZSTD_customCalloc()  argument
    39   void* const ptr = customMem.customAlloc(customMem.opaque, size);    in ZSTD_customCalloc()
    40   ZSTD_memset(ptr, 0, size);    in ZSTD_customCalloc()
    43   return ZSTD_calloc(1, size);    in ZSTD_customCalloc()
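ZSTD_customMalloc()/ZSTD_customCalloc() implement a simple fallback: call the caller-registered allocator from ZSTD_customMem when one is set, otherwise the library default, and zero the result manually on the custom path since a custom allocator is not assumed to. A standalone sketch of that pattern with stand-in names (not the zstd types themselves):

```c
/* Stand-in version of the "custom allocator with default fallback" pattern
 * visible in the allocations.h lines above.  Names are illustrative. */
#include <stdlib.h>
#include <string.h>

typedef struct {
	void *(*customAlloc)(void *opaque, size_t size);
	void (*customFree)(void *opaque, void *address);
	void *opaque;
} custom_mem_sketch;

static void *custom_calloc_sketch(size_t size, custom_mem_sketch mem)
{
	if (mem.customAlloc) {
		void *ptr = mem.customAlloc(mem.opaque, size);

		if (ptr)
			memset(ptr, 0, size);   /* custom allocators are not assumed to zero */
		return ptr;
	}
	return calloc(1, size);                 /* default path */
}

int main(void)
{
	custom_mem_sketch none = { 0 };         /* no custom allocator registered */
	void *p = custom_calloc_sketch(64, none);

	free(p);
	return 0;
}
```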
/lib/crypto/
memneq.c
    69   __crypto_memneq_generic(const void *a, const void *b, size_t size)    in __crypto_memneq_generic()  argument
    74   while (size >= sizeof(unsigned long)) {    in __crypto_memneq_generic()
    80   size -= sizeof(unsigned long);    in __crypto_memneq_generic()
    83   while (size > 0) {    in __crypto_memneq_generic()
    88   size -= 1;    in __crypto_memneq_generic()
    165  size_t size)    in __crypto_memneq()  argument
    167  switch (size) {    in __crypto_memneq()
    171  return __crypto_memneq_generic(a, b, size);    in __crypto_memneq()
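__crypto_memneq_generic() compares buffers in constant time: differences are OR-accumulated word by word and then byte by byte, with no data-dependent early exit, and the result is zero only when the buffers match. A userspace sketch of the same shape (it uses memcpy() loads rather than the kernel's unaligned-access helpers):

```c
/* Constant-time comparison sketch matching the shape of the
 * __crypto_memneq_generic() lines above: accumulate XOR differences,
 * never branch on the data.  Userspace analogue, not the kernel code. */
#include <stdio.h>
#include <string.h>

static unsigned long memneq_sketch(const void *a, const void *b, size_t size)
{
	const unsigned char *pa = a, *pb = b;
	unsigned long neq = 0;

	while (size >= sizeof(unsigned long)) {
		unsigned long va, vb;

		memcpy(&va, pa, sizeof(va));
		memcpy(&vb, pb, sizeof(vb));
		neq |= va ^ vb;                 /* accumulate, do not early-exit */
		pa += sizeof(va);
		pb += sizeof(vb);
		size -= sizeof(va);
	}
	while (size > 0) {
		neq |= *pa++ ^ *pb++;           /* trailing bytes */
		size -= 1;
	}
	return neq;                             /* zero iff the buffers are equal */
}

int main(void)
{
	printf("%lu\n", memneq_sketch("abcdefgh1", "abcdefgh1", 9));    /* 0 */
	return 0;
}
```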