| /mm/ |
| A D | maccess.c |
     31  unsigned long align = 0;                          in copy_from_kernel_nofault()  local
     34  align = (unsigned long)dst | (unsigned long)src;  in copy_from_kernel_nofault()
     40  if (!(align & 7))                                 in copy_from_kernel_nofault()
     42  if (!(align & 3))                                 in copy_from_kernel_nofault()
     44  if (!(align & 1))                                 in copy_from_kernel_nofault()
     66  unsigned long align = 0;                          in copy_to_kernel_nofault()  local
     69  align = (unsigned long)dst | (unsigned long)src;  in copy_to_kernel_nofault()
     72  if (!(align & 7))                                 in copy_to_kernel_nofault()
     74  if (!(align & 3))                                 in copy_to_kernel_nofault()
     76  if (!(align & 1))                                 in copy_to_kernel_nofault()
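
The hits above are the width-probing idiom in copy_from/to_kernel_nofault(): OR-ing the two addresses makes the low bits of the result reflect the worse-aligned of the two pointers, so a single test per width decides whether 8-, 4-, or 2-byte accesses are safe. A minimal userspace sketch of the idiom (copy_aligned() is a hypothetical helper; the real kernel functions also wrap every access in fault guards, omitted here):

    #include <stdint.h>
    #include <stdio.h>

    /* Copy in the widest chunks both pointers allow: OR-ing the
     * addresses makes the low bits reflect the worst alignment of
     * the two, so one test covers both dst and src. */
    static void copy_aligned(void *dst, const void *src, size_t size)
    {
        unsigned long align = (unsigned long)dst | (unsigned long)src;

        while (size >= 8 && !(align & 7)) {
            *(uint64_t *)dst = *(const uint64_t *)src;
            dst = (char *)dst + 8; src = (const char *)src + 8; size -= 8;
        }
        while (size >= 4 && !(align & 3)) {
            *(uint32_t *)dst = *(const uint32_t *)src;
            dst = (char *)dst + 4; src = (const char *)src + 4; size -= 4;
        }
        while (size >= 2 && !(align & 1)) {
            *(uint16_t *)dst = *(const uint16_t *)src;
            dst = (char *)dst + 2; src = (const char *)src + 2; size -= 2;
        }
        while (size--) {
            *(char *)dst = *(const char *)src;
            dst = (char *)dst + 1; src = (const char *)src + 1;
        }
    }

    int main(void)
    {
        char src[32] = "alignment-probe example";
        char dst[32];

        copy_aligned(dst, src, sizeof(src));
        puts(dst);
        return 0;
    }

Advancing by a multiple of the tested width preserves the alignment the initial OR established, which is why the bits only need to be computed once.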
|
| A D | dmapool_test.c |
     18  size_t align;  member
     23  { .size = 16,   .align = 16,   .boundary = 0 },
     24  { .size = 64,   .align = 64,   .boundary = 0 },
     25  { .size = 256,  .align = 256,  .boundary = 0 },
     26  { .size = 1024, .align = 1024, .boundary = 0 },
     27  { .size = 4096, .align = 4096, .boundary = 0 },
     28  { .size = 68,   .align = 32,   .boundary = 4096 },
     74  parms->align, parms->boundary);  in dmapool_test_block()
     91  parms->size, parms->align, blocks,  in dmapool_test_block()
|
| A D | memblock.c |
    245  cand = round_up(this_start, align);  in __memblock_find_range_bottom_up()
    344  phys_addr_t align)  in memblock_find_in_range()  argument
   1549  if (!align) {  in memblock_alloc_range_nid()
   1552  align = SMP_CACHE_BYTES;  in memblock_alloc_range_nid()
   1617  phys_addr_t align,  in memblock_phys_alloc_range()  argument
   2628  phys_addr_t align)  in reserve_mem_kho_revive()  argument
   2658  if (*p_start & (align - 1)) {  in reserve_mem_kho_revive()
   2677  phys_addr_t align)  in reserve_mem_kho_revive()  argument
   2708  align = memparse(p+1, &p);  in reserve_mem()
   2716  if (align < SMP_CACHE_BYTES)  in reserve_mem()
  [all …]
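
Two recurring patterns show up in these hits: candidate bases are rounded up to the alignment with round_up(), and an align of 0 is promoted to SMP_CACHE_BYTES in memblock_alloc_range_nid(). A small sketch of both, assuming a 64-byte cache line and a power-of-two alignment (round_up_pow2() is a hypothetical stand-in for the kernel's round_up()):

    #include <stdio.h>

    typedef unsigned long long phys_addr_t;

    #define SMP_CACHE_BYTES 64ULL    /* assumed cache-line size */

    /* round_up() for a power-of-two align, as applied to each
     * candidate base in __memblock_find_range_bottom_up(). */
    static phys_addr_t round_up_pow2(phys_addr_t addr, phys_addr_t align)
    {
        return (addr + align - 1) & ~(align - 1);
    }

    int main(void)
    {
        phys_addr_t align = 0;

        /* memblock_alloc_range_nid() treats align == 0 as a caller
         * bug and falls back to cache-line alignment. */
        if (!align)
            align = SMP_CACHE_BYTES;

        printf("0x%llx\n", round_up_pow2(0x1234, align)); /* 0x1240 */
        return 0;
    }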
|
| A D | cma.c |
    574  align = max_t(phys_addr_t, align, CMA_MIN_ALIGNMENT_BYTES);  in cma_declare_contiguous_multi()
    582  start = ALIGN(start, align);  in cma_declare_contiguous_multi()
    586  end = ALIGN_DOWN(end, align);  in cma_declare_contiguous_multi()
    780  unsigned long count, unsigned int align,  in cma_range_alloc()  argument
    790  mask = cma_bitmap_aligned_mask(cma, align);  in cma_range_alloc()
    841  count, align);  in cma_range_alloc()
    851  unsigned int align, gfp_t gfp)  in __cma_alloc()  argument
    862  (void *)cma, cma->name, count, align);  in __cma_alloc()
    867  trace_cma_alloc_start(name, count, align);  in __cma_alloc()
    896  page, count, align, ret);  in __cma_alloc()
  [all …]
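
Lines 574-586 trim a caller-supplied physical window inward: the alignment is first raised to at least CMA_MIN_ALIGNMENT_BYTES, then start is rounded up and end rounded down so nothing unaligned can be handed out. A sketch with made-up addresses and an assumed 4 MiB minimum alignment (in the kernel, CMA_MIN_ALIGNMENT_BYTES is derived from the pageblock size):

    #include <stdio.h>

    typedef unsigned long long phys_addr_t;

    #define ALIGN(x, a)      (((x) + (a) - 1) & ~((phys_addr_t)(a) - 1))
    #define ALIGN_DOWN(x, a) ((x) & ~((phys_addr_t)(a) - 1))

    int main(void)
    {
        phys_addr_t align = 4ULL << 20;    /* assume a 4 MiB minimum */
        phys_addr_t start = 0x80123000ULL, end = 0x9fedc000ULL;

        /* Shrink the window inward: rounding start up and end down
         * can only drop memory at the edges, never yield an
         * unaligned reservation. */
        start = ALIGN(start, align);
        end = ALIGN_DOWN(end, align);

        printf("[0x%llx, 0x%llx)\n", start, end); /* [0x80400000, 0x9fc00000) */
        return 0;
    }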
|
| A D | vmalloc.c |
   1532  nva_start_addr = ALIGN(vstart, align);  in is_within_this_va()
   1798  nva_start_addr = ALIGN(vstart, align);  in va_alloc()
   1818  unsigned long size, unsigned long align,  in __alloc_vmap_area()  argument
   1834  if (align <= PAGE_SIZE || (align > PAGE_SIZE && (vend - vstart) == size))  in __alloc_vmap_area()
   1942  if (IS_ALIGNED(va->va_start, align)) {  in node_pool_del_va()
   2010  unsigned long align,  in alloc_vmap_area()  argument
   2058  size, align, vstart, vend);  in alloc_vmap_area()
   3823  unsigned long original_align = align;  in __vmalloc_node_range_noprof()
   3927  align = original_align;  in __vmalloc_node_range_noprof()
   4694  size_t align)  in pcpu_get_vm_areas()  argument
  [all …]
|
| A D | dmapool.c |
    227  size_t size, size_t align, size_t boundary, int node)  in dma_pool_create_node()  argument
    236  if (align == 0)  in dma_pool_create_node()
    237  align = 1;  in dma_pool_create_node()
    238  else if (align & (align - 1))  in dma_pool_create_node()
    246  size = ALIGN(size, align);  in dma_pool_create_node()
    497  size_t size, size_t align, size_t allocation)  in dmam_pool_create()  argument
    505  pool = *ptr = dma_pool_create(name, dev, size, align, allocation);  in dmam_pool_create()
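
dma_pool_create_node() validates its alignment with the classic power-of-two test (align & (align - 1)) and then rounds the block size up so consecutive blocks stay aligned. A sketch of that validation, using the {size 68, align 32} case from dmapool_test.c above (pool_params_ok() is a hypothetical helper, not the kernel API):

    #include <stdbool.h>
    #include <stdio.h>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((size_t)(a) - 1))

    /* Mirror of the parameter checks in dma_pool_create_node():
     * alignment defaults to 1, must be a power of two, and the
     * block size is rounded up so every block starts aligned. */
    static bool pool_params_ok(size_t *size, size_t *align)
    {
        if (*align == 0)
            *align = 1;
        else if (*align & (*align - 1))    /* not a power of two */
            return false;

        *size = ALIGN(*size, *align);
        return true;
    }

    int main(void)
    {
        size_t size = 68, align = 32;

        if (pool_params_ok(&size, &align))
            printf("block size %zu, align %zu\n", size, align); /* 96, 32 */
        return 0;
    }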
|
| A D | percpu.c |
    318  size_t align)  in pcpu_check_block_hint()  argument
    421  int align, int *bit_off, int *bits)  in pcpu_next_fit_region()  argument
    460  align);  in pcpu_next_fit_region()
   1111  size_t align, bool pop_only)  in pcpu_find_block_fit()  argument
   1217  size_t align, int start)  in pcpu_alloc_area()  argument
   1220  size_t align_mask = (align) ? (align - 1) : 0;  in pcpu_alloc_area()
   1757  if (unlikely(align < PCPU_MIN_ALLOC_SIZE))  in pcpu_alloc_noprof()
   1758  align = PCPU_MIN_ALLOC_SIZE;  in pcpu_alloc_noprof()
   1765  !is_power_of_2(align))) {  in pcpu_alloc_noprof()
   1767  size, align);  in pcpu_alloc_noprof()
  [all …]
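
Line 1220 shows the mask form of alignment used throughout the per-cpu allocator: align is converted to align - 1 once, then offsets are rounded with add-and-mask; pcpu_alloc_noprof() additionally raises alignments below PCPU_MIN_ALLOC_SIZE and warns on non-power-of-two values. A sketch assuming the kernel's 4-byte minimum (apply_align_mask() is hypothetical):

    #include <stdio.h>

    #define PCPU_MIN_ALLOC_SIZE 4UL    /* assumed 4-byte minimum */

    /* The mask form from pcpu_alloc_area(): align - 1 (or 0 when
     * align is 0), then round offsets with add-and-mask. Only valid
     * for power-of-two alignments, which the allocator enforces. */
    static unsigned long apply_align_mask(unsigned long off, unsigned long align)
    {
        unsigned long align_mask = (align) ? (align - 1) : 0;

        return (off + align_mask) & ~align_mask;
    }

    int main(void)
    {
        unsigned long align = 2;

        /* pcpu_alloc_noprof() silently raises tiny alignments. */
        if (align < PCPU_MIN_ALLOC_SIZE)
            align = PCPU_MIN_ALLOC_SIZE;

        printf("%lu\n", apply_align_mask(10, align)); /* 12 */
        return 0;
    }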
|
| A D | slab_common.c |
    127  unsigned int align, unsigned int size)  in calculate_alignment()  argument
    142  align = max(align, ralign);  in calculate_alignment()
    145  align = max(align, arch_slab_minalign());  in calculate_alignment()
    147  return ALIGN(align, sizeof(void *));  in calculate_alignment()
    192  align = calculate_alignment(flags, align, size);  in find_mergeable()
    193  size = ALIGN(size, align);  in find_mergeable()
    208  if ((s->size & ~(align - 1)) != s->size)  in find_mergeable()
    336  args->align = calculate_alignment(flags, args->align, object_size);  in __kmem_cache_create_args()
    654  unsigned int align = ARCH_KMALLOC_MINALIGN;  in create_boot_cache()  local
    663  align = max(align, 1U << (ffs(size) - 1));  in create_boot_cache()
  [all …]
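
calculate_alignment() reconciles three constraints: for SLAB_HWCACHE_ALIGN caches the cache line is halved until an object no longer wastes more than half of it, the result is bounded below by the architectural minimum, and everything is rounded to pointer size. A sketch with assumed values for cache_line_size() and arch_slab_minalign() (calc_align() is a hypothetical mirror, not the kernel function):

    #include <stdio.h>

    /* Assumed values; the kernel queries cache_line_size() and
     * arch_slab_minalign() at runtime. */
    #define CACHE_LINE_SIZE    64u
    #define ARCH_SLAB_MINALIGN 8u

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((unsigned int)(a) - 1))

    /* Halve the cache line while an object would waste more than
     * half of it, bound the result below by the architectural
     * minimum, and round to pointer size. */
    static unsigned int calc_align(int hwcache_align, unsigned int align,
                                   unsigned int size)
    {
        if (hwcache_align) {
            unsigned int ralign = CACHE_LINE_SIZE;

            while (size <= ralign / 2)
                ralign /= 2;
            if (align < ralign)
                align = ralign;
        }
        if (align < ARCH_SLAB_MINALIGN)
            align = ARCH_SLAB_MINALIGN;

        return ALIGN(align, sizeof(void *));
    }

    int main(void)
    {
        printf("%u\n", calc_align(1, 0, 24)); /* 32: half of a 64-byte line */
        printf("%u\n", calc_align(0, 4, 24)); /* 8: raised to the minimum  */
        return 0;
    }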
|
| A D | sparse-vmemmap.c |
     53  unsigned long align,  in __earlyonly_bootmem_alloc()  argument
     56  return memmap_alloc(size, align, goal, node, false);  in __earlyonly_bootmem_alloc()
    104  + altmap->align;  in vmem_altmap_next_pfn()
    109  unsigned long allocated = altmap->alloc + altmap->align;  in vmem_altmap_nr_free()
    135  altmap->align += nr_align;  in altmap_alloc_block_buf()
    139  __func__, pfn, altmap->alloc, altmap->align, nr_pfns);  in altmap_alloc_block_buf()
|
| A D | execmem.c |
     33  unsigned int align = range->alignment;  in execmem_vmalloc()  local
     42  align = PMD_SIZE;  in execmem_vmalloc()
     44  p = __vmalloc_node_range(size, align, start, end, gfp_flags,  in execmem_vmalloc()
     50  p = __vmalloc_node_range(size, align, start, end, gfp_flags,  in execmem_vmalloc()
|
| A D | slab.h |
    259  unsigned int align; /* Alignment */  member
    416  struct kmem_cache *find_mergeable(unsigned size, unsigned align,
    419  __kmem_cache_alias(const char *name, unsigned int size, unsigned int align,
|
| A D | readahead.c |
    629  pgoff_t expected, start, end, aligned_end, align;  in page_cache_async_ra()  local
    682  align = 1UL << min(ra->order, ffs(max_pages) - 1);  in page_cache_async_ra()
    684  aligned_end = round_down(end, align);  in page_cache_async_ra()
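
Line 682 derives the readahead alignment from two limits: the desired folio order and ffs(max_pages) - 1, the exponent of the largest power of two that divides the remaining page budget; the window end is then rounded down to that alignment. A rough sketch of the arithmetic with made-up values:

    #include <stdio.h>
    #include <strings.h>   /* ffs() */

    typedef unsigned long pgoff_t;

    int main(void)
    {
        /* Hypothetical readahead state: the ramp-up wants order-4
         * folios (16 pages) but only 12 pages of budget remain. */
        unsigned int order = 4;
        unsigned long max_pages = 12;
        pgoff_t end = 1234;

        /* Alignment is capped by the largest power of two dividing
         * max_pages (its lowest set bit): ffs(12) - 1 == 2. */
        int shift = ffs((int)max_pages) - 1;
        if ((int)order < shift)
            shift = (int)order;
        pgoff_t align = 1UL << shift;

        /* round_down(end, align): clear the low bits. */
        printf("align %lu, aligned end %lu\n",
               align, end & ~(align - 1)); /* align 4, end 1232 */
        return 0;
    }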
|
| A D | sparse.c |
    251  unsigned long size, align;  in memblocks_present()  local
    254  align = 1 << (INTERNODE_CACHE_SHIFT);  in memblocks_present()
    255  mem_section = memblock_alloc_or_panic(size, align);  in memblocks_present()
|
| A D | mm_init.c |
   1623  void __init *memmap_alloc(phys_addr_t size, phys_addr_t align,  in memmap_alloc()  argument
   1633  ptr = memblock_alloc_exact_nid_raw(size, align, min_addr,  in memmap_alloc()
   1637  ptr = memblock_alloc_try_nid_raw(size, align, min_addr,  in memmap_alloc()
   2211  .align = PAGES_PER_SECTION,  in deferred_init_memmap()
|
| A D | internal.h |
    850  extern void *memmap_alloc(phys_addr_t size, phys_addr_t align,
   1393  unsigned long align, unsigned long shift,
|
| A D | slub.c |
   5821  s->red_left_pad = ALIGN(s->red_left_pad, s->align);  in calculate_sizes()
   5831  size = ALIGN(size, s->align);  in calculate_sizes()
   6370  __kmem_cache_alias(const char *name, unsigned int size, unsigned int align,  in __kmem_cache_alias()  argument
   6375  s = find_mergeable(size, align, flags, name, ctor);  in __kmem_cache_alias()
   6407  s->align = args->align;  in do_kmem_cache_create()
   6891  return sysfs_emit(buf, "%u\n", s->align);  in align_show()
   6893  SLAB_ATTR_RO(align);
|
| A D | nommu.c |
    127  void *__vmalloc_node_range_noprof(unsigned long size, unsigned long align,  in __vmalloc_node_range_noprof()  argument
    135  void *__vmalloc_node_noprof(unsigned long size, unsigned long align, gfp_t gfp_mask,  in __vmalloc_node_noprof()  argument
|
| A D | hugetlb.c |
   3473  .align = 1,  in gather_bootmem_prealloc()
   3586  .align = 1,  in hugetlb_pages_alloc_boot()
|
| /mm/kfence/ |
| A D | kfence_test.c |
    218  return kmalloc_caches[type][__kmalloc_index(size, false)]->align;  in kmalloc_cache_alignment()
    474  const size_t align = kmalloc_cache_alignment(size);  in test_kmalloc_aligned_oob_read()  local
    499  expect.addr = buf + size + align;  in test_kmalloc_aligned_oob_read()
|
| A D | core.c |
    472  meta->addr = ALIGN_DOWN(meta->addr, cache->align);  in kfence_guarded_alloc()
|