Lines matching refs: ptr

294 static inline bool check_page_allocation(void *ptr, unsigned long ip)  in check_page_allocation()  argument
299 if (ptr != page_address(virt_to_head_page(ptr))) { in check_page_allocation()
300 kasan_report_invalid_free(ptr, ip, KASAN_REPORT_INVALID_FREE); in check_page_allocation()
304 if (!kasan_byte_accessible(ptr)) { in check_page_allocation()
305 kasan_report_invalid_free(ptr, ip, KASAN_REPORT_DOUBLE_FREE); in check_page_allocation()
312 void __kasan_kfree_large(void *ptr, unsigned long ip) in __kasan_kfree_large() argument
314 check_page_allocation(ptr, ip); in __kasan_kfree_large()
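
Taken together, the references above form the page-allocation sanity check that __kasan_kfree_large() runs on large, page_alloc-backed objects. The sketch below shows how the matched lines plausibly fit together; the return paths, comments, and any guard clauses between the listed lines are filled in from context (presumably mm/kasan/common.c) rather than quoted:

static inline bool check_page_allocation(void *ptr, unsigned long ip)
{
	/* ptr must point at the first byte of its (head) page. */
	if (ptr != page_address(virt_to_head_page(ptr))) {
		kasan_report_invalid_free(ptr, ip, KASAN_REPORT_INVALID_FREE);
		return true;
	}

	/* A poisoned first byte means the pages were already freed. */
	if (!kasan_byte_accessible(ptr)) {
		kasan_report_invalid_free(ptr, ip, KASAN_REPORT_DOUBLE_FREE);
		return true;
	}

	return false;
}

void __kasan_kfree_large(void *ptr, unsigned long ip)
{
	/* Report-only here; the pages are poisoned by the page allocator hook. */
	check_page_allocation(ptr, ip);
}

The two branches distinguish a free at the wrong address (KASAN_REPORT_INVALID_FREE) from a repeated free of already-poisoned pages (KASAN_REPORT_DOUBLE_FREE).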
412 static inline void poison_kmalloc_large_redzone(const void *ptr, size_t size, in poison_kmalloc_large_redzone() argument
424 kasan_poison_last_granule(ptr, size); in poison_kmalloc_large_redzone()
427 redzone_start = round_up((unsigned long)(ptr + size), KASAN_GRANULE_SIZE); in poison_kmalloc_large_redzone()
428 redzone_end = (unsigned long)ptr + page_size(virt_to_page(ptr)); in poison_kmalloc_large_redzone()
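
The three ptr references in poison_kmalloc_large_redzone() delimit the redzone of a large kmalloc object: everything from ptr + size up to the end of the backing page(s). In this sketch, the trailing gfp_t flags parameter is inferred from the call sites at 443 and 550, and the final kasan_poison() call on the computed range, including the KASAN_PAGE_REDZONE marker, is inferred from context rather than taken from the matched lines:

static inline void poison_kmalloc_large_redzone(const void *ptr, size_t size,
						gfp_t flags)
{
	unsigned long redzone_start, redzone_end;

	/* Byte-precise poisoning of the unaligned tail of the last granule. */
	kasan_poison_last_granule(ptr, size);

	/* Then poison the aligned remainder of the backing page(s). */
	redzone_start = round_up((unsigned long)(ptr + size), KASAN_GRANULE_SIZE);
	redzone_end = (unsigned long)ptr + page_size(virt_to_page(ptr));
	kasan_poison((void *)redzone_start, redzone_end - redzone_start,
		     KASAN_PAGE_REDZONE, false);	/* inferred, not in the listing */
}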
433 void * __must_check __kasan_kmalloc_large(const void *ptr, size_t size, in __kasan_kmalloc_large() argument
439 if (unlikely(ptr == NULL)) in __kasan_kmalloc_large()
443 poison_kmalloc_large_redzone(ptr, size, flags); in __kasan_kmalloc_large()
446 return (void *)ptr; in __kasan_kmalloc_large()
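
__kasan_kmalloc_large() is the thin wrapper that applies that redzone at allocation time. A sketch assuming only trivial glue between the matched lines (the gfp_t flags continuation of the signature and any other early returns are not in the listing):

void * __must_check __kasan_kmalloc_large(const void *ptr, size_t size,
					  gfp_t flags)
{
	if (unlikely(ptr == NULL))
		return NULL;

	/* The object itself is already unpoisoned; only the redzone is added. */
	poison_kmalloc_large_redzone(ptr, size, flags);

	/* The caller keeps using the original pointer. */
	return (void *)ptr;
}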
483 unsigned long *ptr; in __kasan_mempool_poison_pages() local
493 ptr = page_address(page); in __kasan_mempool_poison_pages()
495 if (check_page_allocation(ptr, ip)) in __kasan_mempool_poison_pages()
498 kasan_poison(ptr, PAGE_SIZE << order, KASAN_PAGE_FREE, false); in __kasan_mempool_poison_pages()
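
In __kasan_mempool_poison_pages(), ptr is a local rather than a parameter: it is derived from the page being parked in the mempool. The sketch below assumes a (struct page *page, unsigned int order, unsigned long ip) parameter list and a boolean return contract implied by the page_address(page) and PAGE_SIZE << order uses; early-return paths before the matched lines are omitted:

bool __kasan_mempool_poison_pages(struct page *page, unsigned int order,
				  unsigned long ip)
{
	unsigned long *ptr;

	/* (Early returns for pages KASAN does not track are omitted here.) */

	ptr = page_address(page);

	/* Refuse to poison on an invalid or double free. */
	if (check_page_allocation(ptr, ip))
		return false;

	/* Poison the whole 2^order page range while it sits in the mempool. */
	kasan_poison(ptr, PAGE_SIZE << order, KASAN_PAGE_FREE, false);
	return true;
}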
509 bool __kasan_mempool_poison_object(void *ptr, unsigned long ip) in __kasan_mempool_poison_object() argument
511 struct folio *folio = virt_to_folio(ptr); in __kasan_mempool_poison_object()
519 if (check_page_allocation(ptr, ip)) in __kasan_mempool_poison_object()
521 kasan_poison(ptr, folio_size(folio), KASAN_PAGE_FREE, false); in __kasan_mempool_poison_object()
525 if (is_kfence_address(ptr) || !kasan_arch_is_ready()) in __kasan_mempool_poison_object()
530 if (check_slab_allocation(slab->slab_cache, ptr, ip)) in __kasan_mempool_poison_object()
533 poison_slab_object(slab->slab_cache, ptr, false); in __kasan_mempool_poison_object()
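
__kasan_mempool_poison_object() has to handle both backing stores, which is why ptr is first resolved to a folio. A sketch; the folio_slab() lookup, the slab local, and the return values are filled in from context rather than quoted from the matched lines:

bool __kasan_mempool_poison_object(void *ptr, unsigned long ip)
{
	struct folio *folio = virt_to_folio(ptr);
	struct slab *slab;

	/* Large kmalloc objects come straight from page_alloc, not from slab. */
	if (unlikely(!folio_test_slab(folio))) {
		if (check_page_allocation(ptr, ip))
			return false;
		kasan_poison(ptr, folio_size(folio), KASAN_PAGE_FREE, false);
		return true;
	}

	/* KFENCE-managed objects are left alone, as is a not-yet-ready arch. */
	if (is_kfence_address(ptr) || !kasan_arch_is_ready())
		return true;

	/* Slab-backed object: run the usual slab free checks, then poison it. */
	slab = folio_slab(folio);	/* lookup inferred; not in the listing */
	if (check_slab_allocation(slab->slab_cache, ptr, ip))
		return false;

	poison_slab_object(slab->slab_cache, ptr, false);
	return true;
}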
537 void __kasan_mempool_unpoison_object(void *ptr, size_t size, unsigned long ip) in __kasan_mempool_unpoison_object() argument
542 slab = virt_to_slab(ptr); in __kasan_mempool_unpoison_object()
549 kasan_unpoison(ptr, size, false); in __kasan_mempool_unpoison_object()
550 poison_kmalloc_large_redzone(ptr, size, flags); in __kasan_mempool_unpoison_object()
554 if (is_kfence_address(ptr)) in __kasan_mempool_unpoison_object()
558 unpoison_slab_object(slab->slab_cache, ptr, flags, false); in __kasan_mempool_unpoison_object()
562 poison_kmalloc_redzone(slab->slab_cache, ptr, size, flags); in __kasan_mempool_unpoison_object()
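
__kasan_mempool_unpoison_object() is the mirror image, run when an object leaves the mempool again. The sketch assumes flags is a local gfp_t initialised to 0 (it is used at 550, 558 and 562 but its declaration does not match ptr and so is not listed), and it does not show whatever sits between the last two calls (the listing jumps from 558 to 562):

void __kasan_mempool_unpoison_object(void *ptr, size_t size, unsigned long ip)
{
	struct slab *slab;
	gfp_t flags = 0;	/* assumed: no gfp context available here */

	slab = virt_to_slab(ptr);

	/* No slab: a large, page_alloc-backed kmalloc object. */
	if (unlikely(!slab)) {
		kasan_unpoison(ptr, size, false);
		poison_kmalloc_large_redzone(ptr, size, flags);
		return;
	}

	if (is_kfence_address(ptr))
		return;

	/* Unpoison the slab object for reuse... */
	unpoison_slab_object(slab->slab_cache, ptr, flags, false);

	/* ...then restore the kmalloc redzone beyond the requested size. */
	poison_kmalloc_redzone(slab->slab_cache, ptr, size, flags);
}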