
Searched refs:pages (Results 1 – 11 of 11) sorted by relevance

/lib/
iov_iter.c
  1016  pages = *ppages;   in iter_folioq_get_pages()
  1030  get_page(*pages);   in iter_folioq_get_pages()
  1031  pages++;   in iter_folioq_get_pages()
  1189  p = *pages;   in __iov_iter_get_pages_alloc()
  1218  BUG_ON(!pages);   in iov_iter_get_pages2()
  1229  *pages = NULL;   in iov_iter_get_pages_alloc2()
  1233  kvfree(*pages);   in iov_iter_get_pages_alloc2()
  1234  *pages = NULL;   in iov_iter_get_pages_alloc2()
  1606  p = *pages;   in iov_iter_extract_folioq_pages()
  1662  p = *pages;   in iov_iter_extract_xarray_pages()
  [all …]
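For context on the iov_iter.c hits above, which all fall in the page-pinning helpers, here is a minimal, hedged sketch of how a caller typically consumes iov_iter_get_pages_alloc2(); the pin_iter_pages() wrapper and its error handling are illustrative, not taken from the file.

#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/uio.h>

/*
 * Illustrative caller of iov_iter_get_pages_alloc2(): the helper allocates
 * the page array itself, so on success the caller owns both the pinned
 * pages and the array.  Error handling is deliberately minimal.
 */
static ssize_t pin_iter_pages(struct iov_iter *iter, size_t maxsize)
{
        struct page **pages = NULL;
        size_t start;           /* byte offset into the first page */
        unsigned long i, npages;
        ssize_t bytes;

        /* The "2" variant advances the iterator by the bytes it pinned. */
        bytes = iov_iter_get_pages_alloc2(iter, &pages, maxsize, &start);
        if (bytes <= 0)
                return bytes;

        npages = DIV_ROUND_UP(start + bytes, PAGE_SIZE);

        /* ... hand (pages, start, bytes) to DMA or bio code here ... */

        for (i = 0; i < npages; i++)
                put_page(pages[i]);
        kvfree(pages);          /* the array comes from kv*alloc inside the helper */
        return bytes;
}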
test_lockup.c
  304  static void test_alloc_pages(struct list_head *pages)   in test_alloc_pages() argument
  315  list_add(&page->lru, pages);   in test_alloc_pages()
  319  static void test_free_pages(struct list_head *pages)   in test_free_pages() argument
  323  list_for_each_entry_safe(page, next, pages, lru)   in test_free_pages()
  325  INIT_LIST_HEAD(pages);   in test_free_pages()
  353  LIST_HEAD(pages);   in test_lockup()
  359  test_alloc_pages(&pages);   in test_lockup()
  372  test_free_pages(&pages);   in test_lockup()
  392  test_alloc_pages(&pages);   in test_lockup()
  398  test_free_pages(&pages);   in test_lockup()
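The test_lockup.c matches show the common pattern of chaining pages on a list_head through page->lru and freeing them with the _safe iterator; a hedged sketch of that pattern, with illustrative function names, follows.

#include <linux/gfp.h>
#include <linux/list.h>
#include <linux/mm.h>

/* Allocate up to @nr pages and chain them on @pages via page->lru. */
static void grab_pages(struct list_head *pages, unsigned int nr)
{
        while (nr--) {
                struct page *page = alloc_page(GFP_KERNEL);

                if (!page)
                        break;
                list_add(&page->lru, pages);
        }
}

/* Free every page on the list; the _safe variant caches the next entry. */
static void drop_pages(struct list_head *pages)
{
        struct page *page, *next;

        list_for_each_entry_safe(page, next, pages, lru)
                __free_page(page);
        INIT_LIST_HEAD(pages);  /* leave the list head reusable, as the test does */
}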
scatterlist.c
  462  if (page_to_pfn(pages[0]) == next_pfn) {   in sg_alloc_append_table_from_pages()
  468  last_pg = pages[0];   in sg_alloc_append_table_from_pages()
  469  pages++;   in sg_alloc_append_table_from_pages()
  483  !pages_are_mergeable(pages[i], pages[i - 1])) {   in sg_alloc_append_table_from_pages()
  499  !pages_are_mergeable(pages[j], pages[j - 1]))   in sg_alloc_append_table_from_pages()
  516  sg_set_page(s, pages[cur_page],   in sg_alloc_append_table_from_pages()
  1096  struct page **pages;   in extract_user_to_sg() local
  1102  pages = (void *)sgtable->sgl +   in extract_user_to_sg()
  1104  pages -= sg_max;   in extract_user_to_sg()
  1119  struct page *page = *pages;   in extract_user_to_sg()
  [all …]
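The scatterlist.c hits sit inside sg_alloc_append_table_from_pages(), which coalesces physically contiguous pages into fewer scatterlist entries. Callers often reach that logic through the simpler sg_alloc_table_from_pages() wrapper; a hedged sketch (the build_sgtable() wrapper is illustrative):

#include <linux/gfp.h>
#include <linux/scatterlist.h>

/*
 * Build an sg_table covering @size bytes starting at offset 0 of @pages[0];
 * physically contiguous pages are merged into a single sg entry.
 */
static int build_sgtable(struct sg_table *sgt, struct page **pages,
                         unsigned int n_pages, unsigned long size)
{
        return sg_alloc_table_from_pages(sgt, pages, n_pages, 0, size,
                                         GFP_KERNEL);
}

Once whatever consumed the table is finished, it is released with sg_free_table().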
test_vmalloc.c
  367  struct page **pages;   in vm_map_ram_test() local
  371  pages = kcalloc(map_nr_pages, sizeof(struct page *), GFP_KERNEL);   in vm_map_ram_test()
  372  if (!pages)   in vm_map_ram_test()
  375  nr_allocated = alloc_pages_bulk(GFP_KERNEL, map_nr_pages, pages);   in vm_map_ram_test()
  381  v_ptr = vm_map_ram(pages, map_nr_pages, NUMA_NO_NODE);   in vm_map_ram_test()
  388  __free_page(pages[i]);   in vm_map_ram_test()
  390  kfree(pages);   in vm_map_ram_test()
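The test_vmalloc.c hits cover the whole vm_map_ram() round trip; assembled into one hedged sketch (function name illustrative, error handling trimmed):

#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/mm.h>
#include <linux/numa.h>
#include <linux/slab.h>
#include <linux/vmalloc.h>

static int map_some_pages(unsigned int nr)
{
        struct page **pages;
        unsigned long got, i;
        void *vaddr;

        pages = kcalloc(nr, sizeof(struct page *), GFP_KERNEL);
        if (!pages)
                return -ENOMEM;

        /* Returns how many entries of the (zeroed) array were filled. */
        got = alloc_pages_bulk(GFP_KERNEL, nr, pages);
        if (got) {
                vaddr = vm_map_ram(pages, got, NUMA_NO_NODE);
                if (vaddr) {
                        /* ... read/write through the contiguous mapping ... */
                        vm_unmap_ram(vaddr, got);
                }
        }

        for (i = 0; i < got; i++)
                __free_page(pages[i]);
        kfree(pages);
        return 0;
}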
Kconfig.kfence
  45  pages are required; with one containing the object and two adjacent
  46  ones used as guard pages.
  80  pages, resulting in spurious use-after-frees. The main purpose of
alloc_tag.c
  420  struct page **next_page = vm_module_tags->pages + vm_module_tags->nr_pages;   in vm_module_tags_populate()
  668  vm_module_tags->pages = kmalloc_array(get_vm_area_size(vm_module_tags) >> PAGE_SHIFT,   in alloc_mod_tags_mem()
  670  if (!vm_module_tags->pages) {   in alloc_mod_tags_mem()
  689  __free_page(vm_module_tags->pages[i]);   in free_mod_tags_mem()
  690  kfree(vm_module_tags->pages);   in free_mod_tags_mem()
Kconfig.debug
  975  mechanism for non-highmem pages and on non-highmem systems.
  3175  tristate "Test freeing pages"
  3178  freeing a block of pages and a speculative page reference.
/lib/tests/
kunit_iov_iter.c
  52  struct page **pages;   in iov_kunit_create_buffer() local
  57  KUNIT_ASSERT_NOT_ERR_OR_NULL(test, pages);   in iov_kunit_create_buffer()
  58  *ppages = pages;   in iov_kunit_create_buffer()
  60  got = alloc_pages_bulk(GFP_KERNEL, npages, pages);   in iov_kunit_create_buffer()
  62  release_pages(pages, got);   in iov_kunit_create_buffer()
  220  struct page **pages, size_t npages,   in iov_kunit_load_bvec() argument
  237  page = pages[pr->page];   in iov_kunit_load_bvec()
  382  struct page **pages, size_t npages)   in iov_kunit_load_folioq() argument
  396  folioq_append(p, page_folio(pages[i]));   in iov_kunit_load_folioq()
  550  struct page **pages, size_t npages)   in iov_kunit_load_xarray() argument
  [all …]
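The kunit_iov_iter.c hits above belong to the test's buffer-creation helper. A hedged sketch of that helper pattern follows; kunit_kcalloc() is used for the array allocation as an assumption (it is not in the hits above), and create_test_pages() is an illustrative name.

#include <kunit/test.h>
#include <linux/gfp.h>
#include <linux/mm.h>

/* Allocate @npages pages for a test, failing the test on a shortfall. */
static struct page **create_test_pages(struct kunit *test, size_t npages)
{
        struct page **pages;
        unsigned long got;

        pages = kunit_kcalloc(test, npages, sizeof(struct page *), GFP_KERNEL);
        KUNIT_ASSERT_NOT_ERR_OR_NULL(test, pages);

        got = alloc_pages_bulk(GFP_KERNEL, npages, pages);
        if (got != npages) {
                release_pages(pages, got);      /* drop the partial allocation */
                KUNIT_FAIL(test, "could only allocate %lu of %zu pages",
                           got, npages);
                return NULL;
        }
        return pages;
}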
/lib/raid6/
s390vx.uc
  95  /* P/Q data pages */
int.uc
  117  /* P/Q data pages */
neon.uc
  105  /* P/Q data pages */
