
Searched refs:ptr (Results 1 – 25 of 28) sorted by relevance


/mm/kasan/
kasan_test_c.c (see the sketch after this entry)
165 char *ptr; in kmalloc_oob_right() local
194 char *ptr; in kmalloc_oob_left() local
201 KUNIT_EXPECT_KASAN_FAIL(test, *ptr = *(ptr - 1)); in kmalloc_oob_left()
207 char *ptr; in kmalloc_node_oob_right() local
214 KUNIT_EXPECT_KASAN_FAIL(test, ptr[0] = ptr[size]); in kmalloc_node_oob_right()
220 char *ptr; in kmalloc_track_caller_oob_right() local
254 char *ptr; in kmalloc_big_oob_right() local
273 char *ptr; in kmalloc_large_oob_right() local
326 KUNIT_EXPECT_KASAN_FAIL(test, ptr[0] = ptr[size]); in page_alloc_oob_right()
1923 ptr = set_tag(ptr, KASAN_TAG_KERNEL); in match_all_ptr_tag()
[all …]
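The matches above all follow the same KUnit pattern: allocate a buffer, then perform an access just outside it inside KUNIT_EXPECT_KASAN_FAIL(), so the test passes only if KASAN reports the bad access. Below is a minimal sketch of that pattern; the function name and the 128-byte size are illustrative, and KUNIT_EXPECT_KASAN_FAIL() is a helper defined by the KASAN test suite itself, not a generic KUnit macro.

#include <kunit/test.h>
#include <linux/slab.h>

/* Illustrative sketch only: write one byte past a kmalloc()ed buffer and
 * expect KASAN to report the out-of-bounds access. */
static void kmalloc_oob_sketch(struct kunit *test)
{
	char *ptr;
	size_t size = 128;

	ptr = kmalloc(size, GFP_KERNEL);
	KUNIT_ASSERT_NOT_ERR_OR_NULL(test, ptr);

	/* The store below is out of bounds; KASAN must catch it. */
	KUNIT_EXPECT_KASAN_FAIL(test, ptr[size] = 'x');

	kfree(ptr);
}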
common.c
299 if (ptr != page_address(virt_to_head_page(ptr))) { in check_page_allocation()
304 if (!kasan_byte_accessible(ptr)) { in check_page_allocation()
314 check_page_allocation(ptr, ip); in __kasan_kfree_large()
428 redzone_end = (unsigned long)ptr + page_size(virt_to_page(ptr)); in poison_kmalloc_large_redzone()
439 if (unlikely(ptr == NULL)) in __kasan_kmalloc_large()
446 return (void *)ptr; in __kasan_kmalloc_large()
483 unsigned long *ptr; in __kasan_mempool_poison_pages() local
493 ptr = page_address(page); in __kasan_mempool_poison_pages()
542 slab = virt_to_slab(ptr); in __kasan_mempool_unpoison_object()
549 kasan_unpoison(ptr, size, false); in __kasan_mempool_unpoison_object()
[all …]
kasan_test_rust.rs
7 use core::ptr::addr_of_mut;
18 let ptr: *mut u8 = addr_of_mut!(v[2048]); in kasan_test_rust_uaf()
21 unsafe { *ptr } in kasan_test_rust_uaf()
tags.c
120 old_ptr = READ_ONCE(entry->ptr); in save_stack_info()
123 if (!try_cmpxchg(&entry->ptr, &old_ptr, STACK_RING_BUSY_PTR)) in save_stack_info()
132 entry->ptr = object; in save_stack_info()
report_tags.c
61 if (kasan_reset_tag(entry->ptr) != info->object || in kasan_complete_mode_report_info()
62 get_tag(entry->ptr) != get_tag(info->access_addr) || in kasan_complete_mode_report_info()
report.c
529 void kasan_report_invalid_free(void *ptr, unsigned long ip, enum kasan_report_type type) in kasan_report_invalid_free() argument
550 info.access_addr = ptr; in kasan_report_invalid_free()
563 end_report(&flags, ptr, true); in kasan_report_invalid_free()
generic.c
226 void __asan_register_globals(void *ptr, ssize_t size) in __asan_register_globals() argument
229 struct kasan_global *globals = ptr; in __asan_register_globals()
236 void __asan_unregister_globals(void *ptr, ssize_t size) in __asan_unregister_globals() argument
init.c
82 void *ptr = memblock_alloc_try_nid(size, size, __pa(MAX_DMA_ADDRESS), in early_alloc() local
85 if (!ptr) in early_alloc()
89 return ptr; in early_alloc()
/mm/
kmemleak.c (see the sketch after this entry)
1084 if (kmemleak_enabled && ptr && !IS_ERR(ptr)) in kmemleak_alloc()
1103 if (kmemleak_enabled && ptr && !IS_ERR_PCPU(ptr)) in kmemleak_alloc_percpu()
1144 if (kmemleak_free_enabled && ptr && !IS_ERR(ptr)) in kmemleak_free()
1162 if (kmemleak_enabled && ptr && !IS_ERR(ptr)) in kmemleak_free_part()
1230 if (kmemleak_enabled && ptr && !IS_ERR(ptr)) in kmemleak_not_leak()
1247 if (kmemleak_enabled && ptr && !IS_ERR(ptr)) in kmemleak_transient_leak()
1261 if (kmemleak_enabled && ptr && !IS_ERR_PCPU(ptr)) in kmemleak_ignore_percpu()
1279 if (kmemleak_enabled && ptr && !IS_ERR(ptr)) in kmemleak_ignore()
1299 if (kmemleak_enabled && ptr && size && !IS_ERR(ptr)) in kmemleak_scan_area()
1317 if (kmemleak_enabled && ptr && !IS_ERR(ptr)) in kmemleak_no_scan()
[all …]
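The hooks above all guard on kmemleak_enabled (or kmemleak_free_enabled) plus a non-NULL, non-error pointer, so callers may pass NULL or error pointers without checking first. A hedged sketch of caller-side usage follows; the init function name is invented, and it shows kmemleak_not_leak(), one of the hooks matched above, marking an object that is intentionally never freed.

#include <linux/slab.h>
#include <linux/kmemleak.h>

/* Illustrative sketch: keep one object for the lifetime of the kernel and
 * tell kmemleak that the missing free is intentional. */
static int __init kmemleak_sketch_init(void)
{
	void *obj = kmalloc(64, GFP_KERNEL);

	if (!obj)
		return -ENOMEM;

	/* Without this, kmemleak would eventually flag obj as a leak. */
	kmemleak_not_leak(obj);
	return 0;
}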
usercopy.c
109 const unsigned long check_low = ptr; in overlaps()
147 ptr - textlow_linear, n); in check_kernel_text_object()
154 if (ptr + (n - 1) < ptr) in check_bogus_address()
158 if (ZERO_OR_NULL_PTR(ptr)) in check_bogus_address()
169 if (is_kmap_addr(ptr)) { in check_heap_object()
170 offset = offset_in_page(ptr); in check_heap_object()
189 if (!virt_addr_valid(ptr)) in check_heap_object()
192 folio = virt_to_folio(ptr); in check_heap_object()
198 offset = ptr - folio_address(folio); in check_heap_object()
225 switch (check_stack_object(ptr, n)) { in __check_object_size()
[all …]
execmem.c
143 unsigned long addr = (unsigned long)ptr; in execmem_force_rw()
156 unsigned long addr = (unsigned long)ptr; in execmem_restore_rox()
239 void *area, *ptr = NULL; in __execmem_cache_alloc() local
264 void *ptr = (void *)(addr + size); in __execmem_cache_alloc() local
277 ptr = (void *)addr; in __execmem_cache_alloc()
281 return ptr; in __execmem_cache_alloc()
362 err = execmem_force_rw(ptr, size); in __execmem_cache_free()
367 execmem_restore_rox(ptr, size); in __execmem_cache_free()
497 void execmem_free(void *ptr) in execmem_free() argument
505 if (!execmem_cache_free(ptr)) in execmem_free()
[all …]
percpu.c
129 #define __pcpu_ptr_to_addr(ptr) (void __force *)(ptr) argument
523 kvfree(ptr); in pcpu_mem_free()
1900 return ptr; in pcpu_alloc_noprof()
2230 if (!ptr) in free_percpu()
2401 void *ptr; in pcpu_alloc_alloc_info() local
2409 if (!ptr) in pcpu_alloc_alloc_info()
2411 ai = ptr; in pcpu_alloc_alloc_info()
2936 void *ptr; in pcpu_fc_alloc() local
2955 return ptr; in pcpu_fc_alloc()
3030 void *ptr; in pcpu_embed_first_chunk() local
[all …]
dmapool.c (see the sketch after this entry)
499 struct dma_pool **ptr, *pool; in dmam_pool_create() local
501 ptr = devres_alloc(dmam_pool_release, sizeof(*ptr), GFP_KERNEL); in dmam_pool_create()
502 if (!ptr) in dmam_pool_create()
505 pool = *ptr = dma_pool_create(name, dev, size, align, allocation); in dmam_pool_create()
507 devres_add(dev, ptr); in dmam_pool_create()
509 devres_free(ptr); in dmam_pool_create()
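dmam_pool_create() above is the device-managed wrapper around dma_pool_create(): the pool pointer is stored in a devres cell so the pool is destroyed automatically when the device is unbound. The same devres pattern can wrap other resources; the sketch below uses kzalloc()/kfree() purely for illustration, and its function names are invented.

#include <linux/device.h>
#include <linux/slab.h>

static void sketch_release(struct device *dev, void *res)
{
	kfree(*(void **)res);		/* release the wrapped object */
}

/* Illustrative: allocate an object whose lifetime is tied to @dev. */
static void *sketch_managed_alloc(struct device *dev, size_t size)
{
	void **cell, *obj;

	cell = devres_alloc(sketch_release, sizeof(*cell), GFP_KERNEL);
	if (!cell)
		return NULL;

	obj = kzalloc(size, GFP_KERNEL);
	if (obj) {
		*cell = obj;
		devres_add(dev, cell);	/* freed later in sketch_release() */
	} else {
		devres_free(cell);	/* nothing to manage */
	}
	return obj;
}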
slab_common.c
1285 kasan_record_aux_stack(ptr); in kvfree_call_rcu()
1293 kvfree(ptr); in kvfree_call_rcu()
1545 void *ptr = (void *) head->func; in kvfree_rcu_list() local
1546 unsigned long offset = (void *) head - ptr; in kvfree_rcu_list()
1553 kvfree(ptr); in kvfree_rcu_list()
1838 idx = !!is_vmalloc_addr(ptr); in add_ptr_to_bulk_krc_lock()
1953 if (debug_rcu_head_queue(ptr)) { in kvfree_call_rcu()
1962 kasan_record_aux_stack(ptr); in kvfree_call_rcu()
1971 head->func = ptr; in kvfree_call_rcu()
1987 kmemleak_ignore(ptr); in kvfree_call_rcu()
[all …]
sparse.c
473 void *ptr = NULL; in sparse_buffer_alloc() local
476 ptr = (void *) roundup((unsigned long)sparsemap_buf, size); in sparse_buffer_alloc()
477 if (ptr + size > sparsemap_buf_end) in sparse_buffer_alloc()
478 ptr = NULL; in sparse_buffer_alloc()
481 if ((unsigned long)(ptr - sparsemap_buf) > 0) in sparse_buffer_alloc()
482 sparse_buffer_free((unsigned long)(ptr - sparsemap_buf)); in sparse_buffer_alloc()
483 sparsemap_buf = ptr + size; in sparse_buffer_alloc()
486 return ptr; in sparse_buffer_alloc()
sparse-vmemmap.c
90 void *ptr; in vmemmap_alloc_block_buf() local
95 ptr = sparse_buffer_alloc(size); in vmemmap_alloc_block_buf()
96 if (!ptr) in vmemmap_alloc_block_buf()
97 ptr = vmemmap_alloc_block(size, node); in vmemmap_alloc_block_buf()
98 return ptr; in vmemmap_alloc_block_buf()
mm_slot.h (see the sketch after this entry)
21 #define mm_slot_entry(ptr, type, member) \ argument
22 container_of(ptr, type, member)
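mm_slot_entry() is just container_of(): given a pointer to a member embedded in a larger structure, it recovers the enclosing object. A small sketch under invented type and field names:

#include <linux/container_of.h>
#include <linux/list.h>
#include <linux/mm_types.h>

struct sketch_slot {
	struct hlist_node hash;		/* embedded member */
	struct mm_struct *mm;
};

/* Recover the sketch_slot that contains @node. */
static inline struct sketch_slot *sketch_slot_entry(struct hlist_node *node)
{
	return container_of(node, struct sketch_slot, hash);
}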
hugetlb_cgroup.c
262 struct hugetlb_cgroup **ptr, in __hugetlb_cgroup_charge_cgroup() argument
294 *ptr = h_cg; in __hugetlb_cgroup_charge_cgroup()
299 struct hugetlb_cgroup **ptr) in hugetlb_cgroup_charge_cgroup() argument
301 return __hugetlb_cgroup_charge_cgroup(idx, nr_pages, ptr, false); in hugetlb_cgroup_charge_cgroup()
305 struct hugetlb_cgroup **ptr) in hugetlb_cgroup_charge_cgroup_rsvd() argument
307 return __hugetlb_cgroup_charge_cgroup(idx, nr_pages, ptr, true); in hugetlb_cgroup_charge_cgroup_rsvd()
debug_vm_pgtable.c
83 unsigned long val = idx, *ptr = &val; in pte_basic_tests() local
85 pr_debug("Validating PTE basic (%pGv)\n", ptr); in pte_basic_tests()
170 unsigned long val = idx, *ptr = &val; in pmd_basic_tests() local
176 pr_debug("Validating PMD basic (%pGv)\n", ptr); in pmd_basic_tests()
289 unsigned long val = idx, *ptr = &val; in pud_basic_tests() local
295 pr_debug("Validating PUD basic (%pGv)\n", ptr); in pud_basic_tests()
memblock.c (see the sketch after this entry)
903 void __init_memblock memblock_free(void *ptr, size_t size) in memblock_free() argument
905 if (ptr) in memblock_free()
906 memblock_phys_free(__pa(ptr), size); in memblock_free()
1777 void *ptr; in memblock_alloc_try_nid() local
1782 ptr = memblock_alloc_internal(size, align, in memblock_alloc_try_nid()
1784 if (ptr) in memblock_alloc_try_nid()
1785 memset(ptr, 0, size); in memblock_alloc_try_nid()
1787 return ptr; in memblock_alloc_try_nid()
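As the matches show, memblock_free() takes a virtual address and tolerates NULL, and memblock_alloc_try_nid() zeroes the memory it returns (the memset() at line 1785 above). A minimal early-boot usage sketch follows; the function name and the 4K size/alignment are arbitrary, and memblock_alloc() is the common wrapper that ends up in memblock_alloc_try_nid().

#include <linux/kernel.h>
#include <linux/memblock.h>
#include <linux/sizes.h>

/* Illustrative early-boot allocation; real callers pick size/align to fit. */
static void __init memblock_sketch(void)
{
	void *buf = memblock_alloc(SZ_4K, SZ_4K);	/* zeroed on success */

	if (!buf)
		panic("%s: failed to allocate 4K\n", __func__);

	/* ... consume buf during early init; if it is no longer needed: */
	memblock_free(buf, SZ_4K);
}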
slub.c
497 encoded = (unsigned long)ptr; in freelist_ptr_encode()
510 decoded = (void *)ptr.v; in freelist_ptr_decode()
4297 void *ptr = NULL; in ___kmalloc_large_node() local
4311 ptr = folio_address(folio); in ___kmalloc_large_node()
4317 ptr = kasan_kmalloc_large(ptr, size, flags); in ___kmalloc_large_node()
4319 kmemleak_alloc(ptr, size, 1, flags); in ___kmalloc_large_node()
4320 kmsan_kmalloc_large(ptr, size, flags); in ___kmalloc_large_node()
4322 return ptr; in ___kmalloc_large_node()
6075 ptr = kasan_reset_tag(ptr); in __check_heap_object()
6081 if (ptr < slab_address(slab)) in __check_heap_object()
[all …]
mm_init.c
1626 void *ptr; in memmap_alloc() local
1633 ptr = memblock_alloc_exact_nid_raw(size, align, min_addr, in memmap_alloc()
1637 ptr = memblock_alloc_try_nid_raw(size, align, min_addr, in memmap_alloc()
1641 if (ptr && size > 0) in memmap_alloc()
1642 page_init_poison(ptr, size); in memmap_alloc()
1644 return ptr; in memmap_alloc()
/mm/kmsan/
kmsan_test.c (see the sketch after this entry)
168 int *ptr; in test_uninit_kmalloc() local
171 ptr = kmalloc(sizeof(*ptr), GFP_KERNEL); in test_uninit_kmalloc()
172 USE(*ptr); in test_uninit_kmalloc()
182 int *ptr; in test_init_kmalloc() local
185 ptr = kmalloc(sizeof(*ptr), GFP_KERNEL); in test_init_kmalloc()
186 memset(ptr, 0, sizeof(*ptr)); in test_init_kmalloc()
187 USE(*ptr); in test_init_kmalloc()
195 int *ptr; in test_init_kzalloc() local
198 ptr = kzalloc(sizeof(*ptr), GFP_KERNEL); in test_init_kzalloc()
199 USE(*ptr); in test_init_kzalloc()
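The three tests above contrast an uninitialised kmalloc() buffer, which KMSAN must report when used, with buffers initialised by memset() or kzalloc(), which must not be reported. USE() and the report-matching machinery are local to the test file; the sketch below shows the same distinction using the generic kmsan_check_memory() helper instead, with an invented function name.

#include <linux/slab.h>
#include <linux/kmsan-checks.h>

/* Illustrative only: contrast uninitialised and zero-initialised memory. */
static void kmsan_sketch(void)
{
	int *a = kmalloc(sizeof(*a), GFP_KERNEL);	/* uninitialised */
	int *b = kzalloc(sizeof(*b), GFP_KERNEL);	/* zero-initialised */

	if (a)
		kmsan_check_memory(a, sizeof(*a));	/* reported as uninit use */
	if (b)
		kmsan_check_memory(b, sizeof(*b));	/* no report expected */

	kfree(a);
	kfree(b);
}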
hooks.c
92 void kmsan_kmalloc_large(const void *ptr, size_t size, gfp_t flags) in kmsan_kmalloc_large() argument
94 if (unlikely(ptr == NULL)) in kmsan_kmalloc_large()
100 kmsan_internal_unpoison_memory((void *)ptr, size, in kmsan_kmalloc_large()
103 kmsan_internal_poison_memory((void *)ptr, size, flags, in kmsan_kmalloc_large()
108 void kmsan_kfree_large(const void *ptr) in kmsan_kfree_large() argument
115 page = virt_to_head_page((void *)ptr); in kmsan_kfree_large()
116 KMSAN_WARN_ON(ptr != page_address(page)); in kmsan_kfree_large()
117 kmsan_internal_poison_memory((void *)ptr, page_size(page), GFP_KERNEL, in kmsan_kfree_large()
/mm/kfence/
kfence_test.c
222 static __always_inline void test_free(void *ptr) in test_free() argument
225 kmem_cache_free(test_cache, ptr); in test_free()
227 kfree(ptr); in test_free()

Completed in 92 milliseconds
