/linux-6.3-rc2/drivers/android/

binder_alloc.c
      64  return alloc->buffer + alloc->buffer_size - buffer->user_data;   in binder_alloc_buffer_size()
     204  page = &alloc->pages[(page_addr - alloc->buffer) / PAGE_SIZE];   in binder_update_page_range()
     256  page->alloc = alloc;   in binder_update_page_range()
     318  vma = vma_lookup(alloc->mm, alloc->vma_addr);   in binder_alloc_get_vma()
     524  alloc->pid, size, alloc->free_async_space);   in binder_alloc_new_buf_locked()
     525  if (alloc->free_async_space < alloc->buffer_size / 10) {   in binder_alloc_new_buf_locked()
     663  alloc->pid, size, alloc->free_async_space);   in binder_free_buf_locked()
     760  alloc->pages = kcalloc(alloc->buffer_size / PAGE_SIZE,   in binder_alloc_mmap_handler()
     780  alloc->free_async_space = alloc->buffer_size / 2;   in binder_alloc_mmap_handler()
     812  vma_lookup(alloc->mm, alloc->vma_addr));   in binder_alloc_deferred_release()
    [all …]
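The binder_alloc.c matches circle around two pieces of bookkeeping: translating a buffer address into an index into alloc->pages (one slot per PAGE_SIZE), and reserving half of the mapped area for asynchronous transactions at mmap time, with a warning once less than a tenth of that reserve remains. A minimal sketch of that arithmetic, using a hypothetical toy_alloc struct rather than the real struct binder_alloc:

    struct toy_alloc {
        void *buffer;                   /* start of the mapped region */
        size_t buffer_size;             /* size of the mapped region */
        size_t free_async_space;        /* bytes still usable for async work */
        struct page **pages;            /* one slot per PAGE_SIZE chunk */
    };

    /* Index of the page backing a given address, as in binder_update_page_range(). */
    static inline size_t toy_page_index(struct toy_alloc *a, void *page_addr)
    {
        return ((char *)page_addr - (char *)a->buffer) / PAGE_SIZE;
    }

    /* mmap-time setup: size the page array and give half of the space to
     * asynchronous transactions, mirroring binder_alloc_mmap_handler(). */
    static int toy_mmap_init(struct toy_alloc *a)
    {
        a->pages = kcalloc(a->buffer_size / PAGE_SIZE, sizeof(*a->pages),
                           GFP_KERNEL);
        if (!a->pages)
            return -ENOMEM;
        a->free_async_space = a->buffer_size / 2;
        return 0;
    }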
binder_alloc_selftest.c
     104  if (!alloc->pages[page_index].page_ptr ||   in check_buffer_pages_allocated()
     105  !list_empty(&alloc->pages[page_index].lru)) {   in check_buffer_pages_allocated()
     107  alloc->pages[page_index].page_ptr ?   in check_buffer_pages_allocated()
     139  binder_alloc_free_buf(alloc, buffers[seq[i]]);   in binder_selftest_free_buf()
     147  if (list_empty(&alloc->pages[i].lru)) {   in binder_selftest_free_buf()
     167  if (alloc->pages[i].page_ptr) {   in binder_selftest_free_page()
     169  list_empty(&alloc->pages[i].lru) ?   in binder_selftest_free_page()
     190  binder_selftest_free_page(alloc);   in binder_selftest_alloc_free()
     246  binder_selftest_free_seq(alloc, back_sizes, seq, 0, alloc->buffer_size);   in binder_selftest_alloc_size()
     256  binder_selftest_alloc_size(alloc, end_offset);   in binder_selftest_alloc_offset()
    [all …]
binder_alloc.h
      72  struct binder_alloc *alloc;   member
     117  void binder_selftest_alloc(struct binder_alloc *alloc);
     130  extern void binder_alloc_init(struct binder_alloc *alloc);
     134  binder_alloc_prepare_to_free(struct binder_alloc *alloc,
     143  struct binder_alloc *alloc);
     145  struct binder_alloc *alloc);
     158  mutex_lock(&alloc->mutex);   in binder_alloc_get_free_async_space()
     159  free_async_space = alloc->free_async_space;   in binder_alloc_get_free_async_space()
     160  mutex_unlock(&alloc->mutex);   in binder_alloc_get_free_async_space()
     165  binder_alloc_copy_user_to_buffer(struct binder_alloc *alloc,
    [all …]
binder_trace.h
     329  __entry->proc = alloc->pid;
     341  TP_ARGS(alloc, page_index),
     347  __entry->proc = alloc->pid;
     356  TP_ARGS(alloc, page_index));
     360  TP_ARGS(alloc, page_index));
     364  TP_ARGS(alloc, page_index));
     368  TP_ARGS(alloc, page_index));
     372  TP_ARGS(alloc, page_index));
     376  TP_ARGS(alloc, page_index));
     380  TP_ARGS(alloc, page_index));
    [all …]
/linux-6.3-rc2/drivers/infiniband/hw/cxgb4/

id_table.c
      51  obj = find_next_zero_bit(alloc->table, alloc->max, alloc->last);   in c4iw_id_alloc()
      53  obj = find_first_zero_bit(alloc->table, alloc->max);   in c4iw_id_alloc()
      60  if (alloc->last >= alloc->max)   in c4iw_id_alloc()
      61  alloc->last = 0;   in c4iw_id_alloc()
      63  obj += alloc->start;   in c4iw_id_alloc()
      75  obj -= alloc->start;   in c4iw_id_free()
      85  alloc->start = start;   in c4iw_id_table_alloc()
      86  alloc->flags = flags;   in c4iw_id_table_alloc()
      90  alloc->last = 0;   in c4iw_id_table_alloc()
      91  alloc->max = num;   in c4iw_id_table_alloc()
    [all …]
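The c4iw_id_alloc()/c4iw_id_free() matches outline a round-robin bitmap ID allocator: look for a free bit starting just past the last allocation, wrap to the beginning when that fails, and convert between bitmap index and external id with a fixed start offset. A condensed sketch of that scheme with hypothetical toy_* names (the real code also takes a spinlock and can randomize the starting point):

    struct toy_id_table {
        u32 start;              /* offset added to the bitmap index */
        u32 max;                /* number of ids in the table */
        u32 last;               /* position to resume scanning from */
        unsigned long *table;   /* one bit per id, set = in use */
    };

    static int toy_id_alloc(struct toy_id_table *t)
    {
        u32 obj;

        obj = find_next_zero_bit(t->table, t->max, t->last);
        if (obj >= t->max)
            obj = find_first_zero_bit(t->table, t->max);   /* wrap around */
        if (obj >= t->max)
            return -1;                                     /* table full */

        set_bit(obj, t->table);
        t->last = obj + 1;
        if (t->last >= t->max)
            t->last = 0;
        return obj + t->start;          /* external id includes the base */
    }

    static void toy_id_free(struct toy_id_table *t, int id)
    {
        clear_bit(id - t->start, t->table);
    }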
/linux-6.3-rc2/sound/isa/gus/

gus_mem.c
      43  pblock = alloc->first;   in snd_gf1_mem_xalloc()
      50  alloc->first = nblock;   in snd_gf1_mem_xalloc()
      59  if (alloc->last == NULL) {   in snd_gf1_mem_xalloc()
      61  alloc->first = alloc->last = nblock;   in snd_gf1_mem_xalloc()
      63  nblock->prev = alloc->last;   in snd_gf1_mem_xalloc()
      65  alloc->last = nblock;   in snd_gf1_mem_xalloc()
      86  if (alloc->last == block) {   in snd_gf1_mem_xfree()
      87  alloc->last = block->prev;   in snd_gf1_mem_xfree()
     132  struct snd_gf1_bank_info *info = w_16 ? alloc->banks_16 : alloc->banks_8;   in snd_gf1_mem_find()
     236  alloc->first = alloc->last = NULL;   in snd_gf1_mem_init()
    [all …]
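snd_gf1_mem_xalloc() keeps allocated blocks on a doubly linked list ordered by address and tracked through first/last pointers; the branch visible at lines 59-65 is the tail-append case (empty list vs. linking after alloc->last). That reduces to the usual doubly linked tail insert, sketched here with generic names:

    struct toy_block {
        struct toy_block *prev, *next;
    };

    struct toy_list {
        struct toy_block *first, *last;
    };

    /* Append nblock at the tail, mirroring the empty-list and
     * non-empty-list branches of snd_gf1_mem_xalloc(). */
    static void toy_append(struct toy_list *l, struct toy_block *nblock)
    {
        nblock->next = NULL;
        if (l->last == NULL) {              /* first block on the list */
            nblock->prev = NULL;
            l->first = l->last = nblock;
        } else {
            nblock->prev = l->last;
            l->last->next = nblock;
            l->last = nblock;
        }
    }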
/linux-6.3-rc2/drivers/infiniband/hw/mthca/

mthca_allocator.c
      47  obj = find_next_zero_bit(alloc->table, alloc->max, alloc->last);   in mthca_alloc()
      49  alloc->top = (alloc->top + alloc->max) & alloc->mask;   in mthca_alloc()
      50  obj = find_first_zero_bit(alloc->table, alloc->max);   in mthca_alloc()
      55  obj |= alloc->top;   in mthca_alloc()
      73  alloc->last = min(alloc->last, obj);   in mthca_free()
      74  alloc->top = (alloc->top + alloc->max) & alloc->mask;   in mthca_free()
      86  alloc->last = 0;   in mthca_alloc_init()
      87  alloc->top = 0;   in mthca_alloc_init()
      88  alloc->max = num;   in mthca_alloc_init()
      89  alloc->mask = mask;   in mthca_alloc_init()
    [all …]
mthca_uar.c
      40  uar->index = mthca_alloc(&dev->uar_table.alloc);   in mthca_uar_alloc()
      51  mthca_free(&dev->uar_table.alloc, uar->index);   in mthca_uar_free()
      58  ret = mthca_alloc_init(&dev->uar_table.alloc,   in mthca_init_uar_table()
      67  mthca_alloc_cleanup(&dev->uar_table.alloc);   in mthca_init_uar_table()
      77  mthca_alloc_cleanup(&dev->uar_table.alloc);   in mthca_cleanup_uar_table()
mthca_pd.c
      46  pd->pd_num = mthca_alloc(&dev->pd_table.alloc);   in mthca_pd_alloc()
      56  mthca_free(&dev->pd_table.alloc, pd->pd_num);   in mthca_pd_alloc()
      66  mthca_free(&dev->pd_table.alloc, pd->pd_num);   in mthca_pd_free()
      71  return mthca_alloc_init(&dev->pd_table.alloc,   in mthca_init_pd_table()
      80  mthca_alloc_cleanup(&dev->pd_table.alloc);   in mthca_cleanup_pd_table()
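mthca_uar.c and mthca_pd.c show the consumer side of the bitmap allocator in mthca_allocator.c: each object pulls a number from its table at creation, gives it back on teardown, and also gives it back on any later failure inside the creation path. A hedged sketch of that call pattern, with toy_* stand-ins rather than the real mthca structures and helpers:

    struct toy_id_table;    /* a bitmap allocator like the one in mthca_allocator.c */

    int  toy_table_alloc(struct toy_id_table *t);          /* id, or -1 if exhausted */
    void toy_table_free(struct toy_id_table *t, int id);
    int  toy_hw_setup(int pd_num);                         /* stand-in for the HW step */

    struct toy_pd {
        int pd_num;
    };

    static int toy_pd_create(struct toy_id_table *pd_table, struct toy_pd *pd)
    {
        pd->pd_num = toy_table_alloc(pd_table);
        if (pd->pd_num == -1)
            return -ENOMEM;

        if (toy_hw_setup(pd->pd_num)) {
            /* any later failure hands the number back, as the error
             * path in mthca_pd_alloc() does */
            toy_table_free(pd_table, pd->pd_num);
            return -EIO;
        }
        return 0;
    }

    static void toy_pd_destroy(struct toy_id_table *pd_table, struct toy_pd *pd)
    {
        toy_table_free(pd_table, pd->pd_num);
    }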
/linux-6.3-rc2/fs/ocfs2/

localalloc.c
      38  struct ocfs2_dinode *alloc,
      46  struct ocfs2_dinode *alloc,
     306  la = OCFS2_LOCAL_ALLOC(alloc);   in ocfs2_load_local_alloc()
     483  struct ocfs2_dinode *alloc;   in ocfs2_begin_local_alloc_recovery()   local
     619  struct ocfs2_dinode *alloc;   in ocfs2_reserve_local_alloc_bits()   local
     723  struct ocfs2_dinode *alloc;   in ocfs2_claim_local_alloc_bits()   local
     730  la = OCFS2_LOCAL_ALLOC(alloc);   in ocfs2_claim_local_alloc_bits()
     779  struct ocfs2_dinode *alloc;   in ocfs2_free_local_alloc_bits()   local
     786  la = OCFS2_LOCAL_ALLOC(alloc);   in ocfs2_free_local_alloc_bits()
     916  alloc->id1.bitmap1.i_used = 0;   in ocfs2_clear_local_alloc()
    [all …]
/linux-6.3-rc2/rust/alloc/

raw_vec.rs
       5  use core::alloc::LayoutError;
      14  use crate::alloc::handle_alloc_error;
      15  use crate::alloc::{Allocator, Global, Layout};
      57  alloc: A,   field
     181  Self::new_in(alloc)   in allocate_in()
     208  alloc,   in allocate_in()
     233  alloc,   in try_allocate_in()
     270  &self.alloc   in allocator()
     473  self.alloc   in shrink()
     490  alloc: &mut A,   in finish_grow()
    [all …]
boxed.rs
     361  pub const fn new_in(x: T, alloc: A) -> Self   in new_in()
     365  let mut boxed = Self::new_uninit_in(alloc);   in new_in()
     434  match Box::try_new_uninit_in(alloc) {   in new_uninit_in()
     470  let ptr = alloc.allocate(layout)?.cast();   in try_new_uninit_in()
     506  match Box::try_new_zeroed_in(alloc) {   in new_zeroed_in()
     557  Self::into_pin(Self::new_in(x, alloc))   in pin_in()
    1085  (leaked.as_ptr(), alloc)   in into_raw_with_allocator()
    1102  let alloc = unsafe { ptr::read(&b.1) };   in into_unique()
    1103  (Unique::from(Box::leak(b)), alloc)   in into_unique()
    1912  let alloc = Box::allocator(self).clone();   in clone()
    [all …]
README.md
       1  # `alloc`
      16  On one hand, kernel folks wanted to keep `alloc` in-tree to have more
      20  On the other hand, Rust folks wanted to keep `alloc` as close as
      24  We agreed on a middle-ground: we would keep a subset of `alloc`
      26  Then, upstream can start adding the functions that we add to `alloc`
      28  what it needs in `alloc` and all the new methods are merged into
      29  upstream, so that we can drop `alloc` from the kernel tree and go back
/linux-6.3-rc2/fs/xfs/libxfs/

xfs_alloc_btree.c
     129  len = rec->alloc.ar_blockcount;   in xfs_allocbt_update_lastrec()
     135  len = rec->alloc.ar_blockcount;   in xfs_allocbt_update_lastrec()
     185  key->alloc.ar_startblock = rec->alloc.ar_startblock;   in xfs_allocbt_init_key_from_rec()
     186  key->alloc.ar_blockcount = rec->alloc.ar_blockcount;   in xfs_allocbt_init_key_from_rec()
     199  key->alloc.ar_blockcount = 0;   in xfs_bnobt_init_high_key_from_rec()
     207  key->alloc.ar_blockcount = rec->alloc.ar_blockcount;   in xfs_cntbt_init_high_key_from_rec()
     208  key->alloc.ar_startblock = 0;   in xfs_cntbt_init_high_key_from_rec()
     278  be32_to_cpu(k2->alloc.ar_blockcount);   in xfs_cntbt_diff_two_keys()
     283  be32_to_cpu(k2->alloc.ar_startblock);   in xfs_cntbt_diff_two_keys()
     408  (k1->alloc.ar_blockcount == k2->alloc.ar_blockcount &&   in xfs_cntbt_keys_inorder()
    [all …]
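The cntbt ("by count") matches compare keys on ar_blockcount first and fall back to ar_startblock, with both fields stored big-endian on disk. A reduced sketch of that two-level comparison, flattening the real btree key union into a plain struct:

    struct toy_alloc_key {
        __be32 ar_startblock;   /* on-disk fields are big-endian */
        __be32 ar_blockcount;
    };

    /* Order free extents by length first, then by start block, in the
     * spirit of xfs_cntbt_diff_two_keys()/xfs_cntbt_keys_inorder().
     * Returns <0, 0 or >0 like a comparator. */
    static s64 toy_cntbt_cmp(const struct toy_alloc_key *k1,
                             const struct toy_alloc_key *k2)
    {
        s64 diff;

        diff = (s64)be32_to_cpu(k1->ar_blockcount) -
               (s64)be32_to_cpu(k2->ar_blockcount);
        if (diff)
            return diff;

        return (s64)be32_to_cpu(k1->ar_startblock) -
               (s64)be32_to_cpu(k2->ar_startblock);
    }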
/linux-6.3-rc2/lib/zstd/compress/

zstd_cwksp.h
     252  assert(alloc >= bottom);   in ZSTD_cwksp_reserve_internal_buffer_space()
     253  if (alloc < bottom) {   in ZSTD_cwksp_reserve_internal_buffer_space()
     263  ws->allocStart = alloc;   in ZSTD_cwksp_reserve_internal_buffer_space()
     264  return alloc;   in ZSTD_cwksp_reserve_internal_buffer_space()
     326  void* alloc;   in ZSTD_cwksp_reserve_internal()   local
     335  return alloc;   in ZSTD_cwksp_reserve_internal()
     365  void* alloc;   in ZSTD_cwksp_reserve_table()   local
     372  alloc = ws->tableEnd;   in ZSTD_cwksp_reserve_table()
     373  end = (BYTE *)alloc + bytes;   in ZSTD_cwksp_reserve_table()
     391  return alloc;   in ZSTD_cwksp_reserve_table()
    [all …]
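These matches show the two halves of the zstd compression workspace: ZSTD_cwksp_reserve_table() grows the table region upward from tableEnd, while ZSTD_cwksp_reserve_internal_buffer_space() hands out buffer space by moving allocStart downward and refusing to cross the current bottom. A bare-bones sketch of the downward half, with toy names and no alignment or allocation-phase tracking:

    typedef struct {
        void *tableEnd;      /* end of the upward-growing table region */
        void *allocStart;    /* start of the downward-growing buffer region */
    } toy_wksp;

    /* Reserve `bytes` from the top of the workspace: move allocStart down
     * and fail instead of colliding with the table region, as the
     * reserve_internal_buffer_space() matches above suggest. */
    static void *toy_reserve_buffer(toy_wksp *ws, size_t bytes)
    {
        char *const bottom = (char *)ws->tableEnd;
        char *const alloc  = (char *)ws->allocStart - bytes;

        if (alloc < bottom)
            return NULL;

        ws->allocStart = alloc;
        return alloc;
    }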
/linux-6.3-rc2/tools/perf/util/

strbuf.c
      22  sb->alloc = sb->len = 0;   in strbuf_init()
      31  if (sb->alloc) {   in strbuf_release()
      39  char *res = sb->alloc ? sb->buf : NULL;   in strbuf_detach()
      51  if (nr < sb->alloc)   in strbuf_grow()
      57  if (alloc_nr(sb->alloc) > nr)   in strbuf_grow()
      58  nr = alloc_nr(sb->alloc);   in strbuf_grow()
      64  buf = realloc(sb->alloc ? sb->buf : NULL, nr * sizeof(*buf));   in strbuf_grow()
      69  sb->alloc = nr;   in strbuf_grow()
     106  len = vsnprintf(sb->buf + sb->len, sb->alloc - sb->len, fmt, ap);   in strbuf_addv()
     142  size_t oldalloc = sb->alloc;   in strbuf_read()
    [all …]
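strbuf_grow() only reallocates when the requested room no longer fits, and then grows geometrically through alloc_nr() so repeated appends stay amortized O(1); the extra byte covers the terminating NUL that strbuf always keeps. A simplified, standalone sketch of that sizing decision (toy_* names, plain realloc/assert instead of the perf wrappers, and a growth macro that only approximates the real alloc_nr()):

    #include <assert.h>
    #include <stdlib.h>

    struct toy_strbuf {
        size_t alloc;   /* bytes allocated, 0 = nothing allocated yet */
        size_t len;     /* bytes in use, excluding the trailing NUL */
        char *buf;
    };

    /* Grow by roughly 1.5x so a long series of appends does not
     * reallocate on every call. */
    #define toy_alloc_nr(x) (((x) + 16) * 3 / 2)

    static void toy_strbuf_grow(struct toy_strbuf *sb, size_t extra)
    {
        size_t nr = sb->len + extra + 1;    /* +1 for the NUL */

        if (nr < sb->alloc)
            return;                         /* already enough room */
        if (toy_alloc_nr(sb->alloc) > nr)
            nr = toy_alloc_nr(sb->alloc);

        sb->buf = realloc(sb->alloc ? sb->buf : NULL, nr);
        assert(sb->buf);
        sb->alloc = nr;
    }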
strbuf.h
      51  size_t alloc;   member
      65  return sb->alloc ? sb->alloc - sb->len - 1 : 0;   in strbuf_avail()
      71  if (!sb->alloc) {   in strbuf_setlen()
      76  assert(len < sb->alloc);   in strbuf_setlen()
/linux-6.3-rc2/tools/lib/subcmd/

subcmd-util.h
      39  #define ALLOC_GROW(x, nr, alloc) \   argument
      41  if ((nr) > alloc) { \
      42  if (alloc_nr(alloc) < (nr)) \
      43  alloc = (nr); \
      45  alloc = alloc_nr(alloc); \
      46  x = xrealloc((x), alloc * sizeof(*(x))); \
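ALLOC_GROW(x, nr, alloc) is the usual "make sure the array can hold nr elements" helper: when nr exceeds the current allocation it bumps alloc to nr or to alloc_nr(alloc), whichever is larger, and xrealloc()s the array in place. The typical call-site shape, sketched with a hypothetical record type (assumes subcmd-util.h is included):

    struct record {
        int id;
    };

    static struct record *records;   /* growable array */
    static size_t nr_records;        /* elements in use */
    static size_t alloc_records;     /* elements allocated */

    static void add_record(int id)
    {
        /* Make room for one more element; the macro reallocates and
         * updates alloc_records only when needed. */
        ALLOC_GROW(records, nr_records + 1, alloc_records);
        records[nr_records++].id = id;
    }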
/linux-6.3-rc2/net/core/

page_pool.c
     255  pool->alloc.cache[pool->alloc.count++] = page;   in page_pool_refill_alloc_cache()
     271  page = pool->alloc.cache[--pool->alloc.count];   in page_pool_refill_alloc_cache()
     286  page = pool->alloc.cache[--pool->alloc.count];   in __page_pool_get_cached()
     387  return pool->alloc.cache[--pool->alloc.count];   in __page_pool_alloc_pages_slow()
     409  pool->alloc.cache[pool->alloc.count++] = page;   in __page_pool_alloc_pages_slow()
     418  page = pool->alloc.cache[--pool->alloc.count];   in __page_pool_alloc_pages_slow()
     542  pool->alloc.cache[pool->alloc.count++] = page;   in page_pool_recycle_in_cache()
     780  while (pool->alloc.count) {   in page_pool_empty_alloc_cache_once()
     781  page = pool->alloc.cache[--pool->alloc.count];   in page_pool_empty_alloc_cache_once()
     870  while (pool->alloc.count) {   in page_pool_update_nid()
    [all …]
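Every page_pool match above touches the same structure: pool->alloc.cache is a small array used as a LIFO stack of free pages, with alloc.count as the stack pointer (cache[count++] = page stashes one, cache[--count] takes the most recent one back). The idea in isolation, with a toy type instead of struct page_pool and an assumed capacity constant:

    #define TOY_CACHE_SIZE 128      /* assumed capacity for the sketch */

    struct toy_pool {
        struct {
            unsigned int count;                     /* cached entries */
            struct page *cache[TOY_CACHE_SIZE];     /* LIFO stack of free pages */
        } alloc;
    };

    /* Push a page into the per-pool cache; the caller falls back to a
     * slower path (ring or page allocator) when the cache is full. */
    static bool toy_cache_put(struct toy_pool *pool, struct page *page)
    {
        if (pool->alloc.count >= TOY_CACHE_SIZE)
            return false;
        pool->alloc.cache[pool->alloc.count++] = page;
        return true;
    }

    /* Pop the most recently cached page, or NULL when the cache is empty. */
    static struct page *toy_cache_get(struct toy_pool *pool)
    {
        if (!pool->alloc.count)
            return NULL;
        return pool->alloc.cache[--pool->alloc.count];
    }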
/linux-6.3-rc2/arch/xtensa/variants/dc233c/include/variant/

tie-asm.h
      77  .macro xchal_ncp_store ptr at1 at2 at3 at4 continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
      85  .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_GLOB) & ~(\alloc)) == 0
      97  .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
     115  .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
     141  .macro xchal_ncp_load ptr at1 at2 at3 at4 continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
     149  .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_GLOB) & ~(\alloc)) == 0
     161  .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
     179  .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
/linux-6.3-rc2/arch/xtensa/variants/csp/include/variant/

tie-asm.h
      76  .macro xchal_ncp_store ptr at1 at2 at3 at4 continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
      84  .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_GLOB) & ~(\alloc)) == 0
      96  .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
     116  .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
     142  .macro xchal_ncp_load ptr at1 at2 at3 at4 continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
     150  .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_GLOB) & ~(\alloc)) == 0
     162  .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
     182  .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
/linux-6.3-rc2/arch/s390/mm/

pgalloc.c
     489  unsigned long end, int alloc)   in base_page_walk()   argument
     493  if (!alloc)   in base_page_walk()
     505  unsigned long end, int alloc)   in base_segment_walk()   argument
     515  if (!alloc)   in base_segment_walk()
     526  if (!alloc)   in base_segment_walk()
     544  if (!alloc)   in base_region3_walk()
     555  if (!alloc)   in base_region3_walk()
     572  if (!alloc)   in base_region2_walk()
     583  if (!alloc)   in base_region2_walk()
     600  if (!alloc)   in base_region1_walk()
    [all …]
/linux-6.3-rc2/rust/

Makefile
      15  obj-$(CONFIG_RUST) += alloc.o bindings.o kernel.o
      49  alloc-cfgs = \
      81  rustdoc-alloc rustdoc-kernel
     109  rustdoc-alloc: private rustc_target_flags = $(alloc-cfgs) \
     111  rustdoc-alloc: $(src)/alloc/lib.rs rustdoc-core rustdoc-compiler_builtins FORCE
     202  cp -r $(srctree)/$(src)/alloc/* \
     315  $(obj)/exports_alloc_generated.h: $(obj)/alloc.o FORCE
     376  $(obj)/alloc.o: private skip_clippy = 1
     377  $(obj)/alloc.o: private skip_flags = -Dunreachable_pub
     378  $(obj)/alloc.o: private rustc_target_flags = $(alloc-cfgs)
    [all …]
/linux-6.3-rc2/fs/nfs/

nfs3acl.c
     261  struct posix_acl *orig = acl, *dfacl = NULL, *alloc;   in nfs3_set_acl()   local
     268  alloc = get_inode_acl(inode, ACL_TYPE_DEFAULT);   in nfs3_set_acl()
     269  if (IS_ERR(alloc))   in nfs3_set_acl()
     271  dfacl = alloc;   in nfs3_set_acl()
     275  alloc = get_inode_acl(inode, ACL_TYPE_ACCESS);   in nfs3_set_acl()
     276  if (IS_ERR(alloc))   in nfs3_set_acl()
     279  acl = alloc;   in nfs3_set_acl()
     285  alloc = posix_acl_from_mode(inode->i_mode, GFP_KERNEL);   in nfs3_set_acl()
     286  if (IS_ERR(alloc))   in nfs3_set_acl()
     288  acl = alloc;   in nfs3_set_acl()
    [all …]
/linux-6.3-rc2/drivers/char/agp/

compat_ioctl.c
     155  struct agp_allocate32 alloc;   in compat_agpioc_allocate_wrap()   local
     158  if (copy_from_user(&alloc, arg, sizeof(alloc)))   in compat_agpioc_allocate_wrap()
     161  memory = agp_allocate_memory_wrap(alloc.pg_count, alloc.type);   in compat_agpioc_allocate_wrap()
     166  alloc.key = memory->key;   in compat_agpioc_allocate_wrap()
     167  alloc.physical = memory->physical;   in compat_agpioc_allocate_wrap()
     169  if (copy_to_user(arg, &alloc, sizeof(alloc))) {   in compat_agpioc_allocate_wrap()
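compat_agpioc_allocate_wrap() follows the standard compat-ioctl recipe: copy the 32-bit layout of the argument struct in from user space, do the work with the native helpers, write the results (key, physical address) back into the 32-bit struct, and copy it out, cleaning up if that final copy fails. A generic sketch of that in/out shape, with a toy request struct and assumed toy_* helpers rather than the AGP code itself:

    struct toy_req32 {
        __u32 pg_count;
        __u32 key;          /* filled in on the way back to user space */
    };

    int  toy_do_allocate(__u32 pg_count);   /* assumed native helper, returns key or <0 */
    void toy_do_free(int key);

    static int toy_compat_ioctl(void __user *arg)
    {
        struct toy_req32 req;
        int key;

        if (copy_from_user(&req, arg, sizeof(req)))
            return -EFAULT;

        key = toy_do_allocate(req.pg_count);
        if (key < 0)
            return key;

        req.key = key;
        if (copy_to_user(arg, &req, sizeof(req))) {
            toy_do_free(key);       /* undo the allocation on a failed copy-out */
            return -EFAULT;
        }
        return 0;
    }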