/linux-6.3-rc2/drivers/crypto/

  omap-crypto.c
     41  sg_set_page(tmp, sg_page(*sg), len, (*sg)->offset);       in omap_crypto_copy_sg_lists()
    105  if (page_zonenum(sg_page(sg)) != ZONE_DMA)                 in omap_crypto_check_sg()
    181  srcb = kmap_atomic(sg_page(src)) + srco + src->offset;     in omap_crypto_copy_data()
    182  dstb = kmap_atomic(sg_page(dst)) + dsto + dst->offset;     in omap_crypto_copy_data()
    186  flush_dcache_page(sg_page(dst));                           in omap_crypto_copy_data()

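The omap-crypto hits above follow the usual map-copy-flush pattern for single-page sg entries. A minimal sketch of that pattern, assuming each entry's data fits within one page; the helper name is mine, not the driver's:

    #include <linux/highmem.h>
    #include <linux/scatterlist.h>
    #include <linux/string.h>

    /* Hypothetical helper: copy 'len' bytes between two sg entries whose
     * data is assumed not to cross a page boundary. */
    static void sg_copy_one_page(struct scatterlist *dst, struct scatterlist *src,
    			     unsigned int len)
    {
    	void *sbase = kmap_atomic(sg_page(src));
    	void *dbase = kmap_atomic(sg_page(dst));

    	memcpy(dbase + dst->offset, sbase + src->offset, len);
    	flush_dcache_page(sg_page(dst));	/* make the copy visible before DMA */

    	kunmap_atomic(dbase);
    	kunmap_atomic(sbase);
    }

Note that kunmap_atomic() is called in the reverse order of the kmap_atomic() calls, as the stacked atomic mappings require.
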
/linux-6.3-rc2/net/rds/

  message.c
    156  __free_page(sg_page(&rm->data.op_sg[i]));                        in rds_message_purge()
    158  put_page(sg_page(&rm->data.op_sg[i]));                           in rds_message_purge()
    400  put_page(sg_page(&rm->data.op_sg[i]));                           in rds_message_zcopy_from_user()
    437  if (!sg_page(sg)) {                                              in rds_message_copy_from_user()
    450  nbytes = copy_page_from_iter(sg_page(sg), sg->offset + sg_off,   in rds_message_copy_from_user()
    487  ret = copy_page_to_iter(sg_page(sg), sg->offset + vec_off,       in rds_message_inc_copy_to_user()

  page.c
    104  get_page(sg_page(scat));                                         in rds_page_remainder_alloc()
    147  ret ? NULL : sg_page(scat), ret ? 0 : scat->offset,              in rds_page_remainder_alloc()

  tcp_send.c
    119  sg_page(&rm->data.op_sg[sg]),                                    in rds_tcp_xmit()
    123  rdsdebug("tcp sendpage %p:%u:%u ret %d\n", (void *)sg_page(&rm->data.op_sg[sg]),   in rds_tcp_xmit()

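The rds_message_copy_from_user() hits copy user data into pages already attached to the message's sg list with copy_page_from_iter(). A simplified sketch of that fill loop, assuming the pages are preallocated and leaving the allocation branch (the '!sg_page(sg)' check above) and most error handling to the caller; the helper name is hypothetical:

    #include <linux/errno.h>
    #include <linux/minmax.h>
    #include <linux/scatterlist.h>
    #include <linux/uio.h>

    /* Copy 'total' bytes from an iov_iter into the pages behind an sg list. */
    static int fill_sg_from_iter(struct scatterlist *sg, struct iov_iter *from,
    			     size_t total)
    {
    	size_t sg_off = 0;

    	while (total) {
    		size_t n = copy_page_from_iter(sg_page(sg), sg->offset + sg_off,
    					       min_t(size_t, total, sg->length - sg_off),
    					       from);
    		if (!n)
    			return -EFAULT;

    		total -= n;
    		sg_off += n;
    		if (sg_off == sg->length) {	/* entry full, advance to the next one */
    			sg = sg_next(sg);
    			sg_off = 0;
    		}
    	}
    	return 0;
    }
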
/linux-6.3-rc2/samples/kfifo/

  dma-example.c
     77  i, sg_page(&sg[i]), sg[i].offset, sg[i].length);   in example_init()
    106  i, sg_page(&sg[i]), sg[i].offset, sg[i].length);   in example_init()

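The kfifo DMA sample only dumps each populated sg entry. A small sketch of the same idea using the for_each_sg() iterator (names assumed):

    #include <linux/printk.h>
    #include <linux/scatterlist.h>

    /* Dump page, offset and length of each entry in an sg array. */
    static void dump_sg(struct scatterlist *sgl, unsigned int nents)
    {
    	struct scatterlist *s;
    	unsigned int i;

    	for_each_sg(sgl, s, nents, i)
    		pr_info("sg[%u] -> page %p offset %u length %u\n",
    			i, sg_page(s), s->offset, s->length);
    }
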
/linux-6.3-rc2/drivers/target/tcm_fc/

  tfc_io.c
     84  page = sg_page(sg);   in ft_queue_data_in()
    102  page = sg_page(sg);   in ft_queue_data_in()
    279  page = sg_page(sg);   in ft_recv_write_data()
    287  page = sg_page(sg);   in ft_recv_write_data()

/linux-6.3-rc2/tools/virtio/linux/

  scatterlist.h
     67  static inline struct page *sg_page(struct scatterlist *sg)   in sg_page() function
    158  return page_to_phys(sg_page(sg)) + sg->offset;                in sg_phys()

/linux-6.3-rc2/include/crypto/

  scatterwalk.h
     51  return sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);         in scatterwalk_page()
     78  page = sg_page(walk->sg) + ((walk->offset - 1) >> PAGE_SHIFT);   in scatterwalk_pagedone()

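scatterwalk_page() resolves the walk's current position to a struct page by shifting the byte offset, which presumes the entry's pages have adjacent struct page entries (as the crypto layer arranges). A hedged restatement of that computation, as a standalone helper:

    #include <crypto/scatterwalk.h>
    #include <linux/scatterlist.h>

    /* The walk offset is measured from the start of the current sg entry's
     * first page, so the page index is simply offset >> PAGE_SHIFT. */
    static struct page *walk_current_page(struct scatter_walk *walk)
    {
    	return sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
    }
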
/linux-6.3-rc2/include/linux/

  scatterlist.h
    144  static inline struct page *sg_page(struct scatterlist *sg)   in sg_page() function
    329  return page_to_phys(sg_page(sg)) + sg->offset;                in sg_phys()
    344  return page_address(sg_page(sg)) + sg->offset;                in sg_virt()
    522  return nth_page(sg_page(piter->sg), piter->sg_pgoffset);      in sg_page_iter_page()

  devcoredump.h
     32  page = sg_page(iter);   in _devcd_free_sgtable()

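These are the canonical accessors: sg_virt() and sg_phys() are both thin wrappers around sg_page() plus the intra-page offset. An illustrative sketch of how they relate, assuming a lowmem, unchained entry:

    #include <linux/mm.h>
    #include <linux/printk.h>
    #include <linux/scatterlist.h>

    /* Derive the CPU virtual and physical addresses of an sg entry's data
     * from its backing page, the same values sg_virt() and sg_phys() return. */
    static void sg_addresses(struct scatterlist *sg)
    {
    	struct page *page = sg_page(sg);			/* backing page */
    	void *vaddr       = page_address(page) + sg->offset;	/* == sg_virt(sg) */
    	phys_addr_t paddr = page_to_phys(page) + sg->offset;	/* value of sg_phys(sg) */

    	pr_debug("sg page %p: virt %p phys %pa\n", page, vaddr, &paddr);
    }
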
/linux-6.3-rc2/drivers/target/

  target_core_sbc.c
    408  unsigned char *addr = kmap_atomic(sg_page(sg));          in compare_and_write_do_cmp()
   1172  paddr = kmap_atomic(sg_page(psg)) + psg->offset;         in sbc_dif_generate()
   1173  daddr = kmap_atomic(sg_page(dsg)) + dsg->offset;         in sbc_dif_generate()
   1188  daddr = kmap_atomic(sg_page(dsg)) + dsg->offset;         in sbc_dif_generate()
   1201  daddr = kmap_atomic(sg_page(dsg)) + dsg->offset;         in sbc_dif_generate()
   1286  paddr = kmap_atomic(sg_page(psg)) + psg->offset;         in sbc_dif_copy_prot()
   1290  addr = kmap_atomic(sg_page(sg)) + sg->offset + offset;   in sbc_dif_copy_prot()
   1329  paddr = kmap_atomic(sg_page(psg)) + psg->offset;         in sbc_dif_verify()
   1330  daddr = kmap_atomic(sg_page(dsg)) + dsg->offset;         in sbc_dif_verify()
   1346  daddr = kmap_atomic(sg_page(dsg)) + dsg->offset;         in sbc_dif_verify()
    [all …]

  target_core_iblock.c
    458  buf = kmap(sg_page(sg)) + sg->offset;                           in iblock_execute_zero_out()
    466  kunmap(sg_page(sg));                                            in iblock_execute_zero_out()
    535  while (bio_add_page(bio, sg_page(sg), sg->length, sg->offset)   in iblock_execute_write_same()
    786  while (bio_add_page(bio, sg_page(sg), sg->length, sg->offset)   in iblock_execute_rw()

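The target_core_iblock hits push sg pages straight into a bio with bio_add_page(). A simplified sketch of that loop, without the driver's chaining of additional bios when one fills up; the helper name is hypothetical:

    #include <linux/bio.h>
    #include <linux/errno.h>
    #include <linux/scatterlist.h>

    /* Add every sg segment's page to an already-allocated bio. */
    static int add_sgl_to_bio(struct bio *bio, struct scatterlist *sgl,
    			  unsigned int nents)
    {
    	struct scatterlist *sg;
    	unsigned int i;

    	for_each_sg(sgl, sg, nents, i) {
    		if (bio_add_page(bio, sg_page(sg), sg->length, sg->offset)
    				!= sg->length)
    			return -ENOMEM;	/* bio full: the driver would start a new one */
    	}
    	return 0;
    }
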
/linux-6.3-rc2/drivers/gpu/drm/i915/gem/

  i915_gem_phys.c
    104  void *vaddr = sg_page(pages->sgl);                          in i915_gem_object_put_pages_phys()
    147  void *vaddr = sg_page(obj->mm.pages->sgl) + args->offset;   in i915_gem_object_pwrite_phys()
    178  void *vaddr = sg_page(obj->mm.pages->sgl) + args->offset;   in i915_gem_object_pread_phys()

  i915_gem_internal.c
     25  if (sg_page(sg))                                            in internal_free_pages()
     26  __free_pages(sg_page(sg), get_order(sg->length));           in internal_free_pages()

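internal_free_pages() treats each sg entry as one higher-order allocation and recovers the order from the entry length. A sketch of that teardown, assuming every populated entry was allocated that way:

    #include <linux/gfp.h>
    #include <linux/scatterlist.h>

    /* Free the higher-order pages tracked by an sg list, skipping empty slots. */
    static void free_sg_pages(struct scatterlist *sgl)
    {
    	struct scatterlist *sg;

    	for (sg = sgl; sg; sg = sg_next(sg)) {
    		if (sg_page(sg))
    			__free_pages(sg_page(sg), get_order(sg->length));
    	}
    }
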
/linux-6.3-rc2/drivers/crypto/qce/

  dma.c
     57  if (!sg_page(sg))                                              in qce_sgtable_add()
     67  sg_set_page(sg, sg_page(new_sgl), new_len, new_sgl->offset);   in qce_sgtable_add()

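qce_sgtable_add() (and ccp_crypto_sg_table_add() further down) treat a NULL page pointer as an unused slot in a preallocated sg table. A sketch of that append idiom, with a hypothetical helper name:

    #include <linux/errno.h>
    #include <linux/scatterlist.h>

    /* Copy one entry into the first unused slot of a preallocated sg table. */
    static int sgtable_append_one(struct sg_table *tbl, struct scatterlist *src)
    {
    	struct scatterlist *sg;
    	unsigned int i;

    	for_each_sg(tbl->sgl, sg, tbl->orig_nents, i) {
    		if (!sg_page(sg)) {
    			sg_set_page(sg, sg_page(src), src->length, src->offset);
    			return 0;
    		}
    	}
    	return -ENOSPC;	/* no free slot left */
    }
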
/linux-6.3-rc2/drivers/dma-buf/heaps/

  system_heap.c
     77  sg_set_page(new_sg, sg_page(sg), sg->length, sg->offset);   in dup_sg_table()
    297  struct page *page = sg_page(sg);                            in system_heap_dma_buf_release()
    411  struct page *p = sg_page(sg);                               in system_heap_allocate()

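dup_sg_table() mirrors an existing table entry by entry without taking ownership of the pages. A sketch of the same approach; the helper name and gfp parameter are mine:

    #include <linux/scatterlist.h>

    /* Allocate a table with the same number of entries as 'src' and copy
     * each entry's page, length and offset into it. */
    static int clone_sg_table(struct sg_table *dst, struct sg_table *src, gfp_t gfp)
    {
    	struct scatterlist *s, *d;
    	unsigned int i;
    	int ret;

    	ret = sg_alloc_table(dst, src->orig_nents, gfp);
    	if (ret)
    		return ret;

    	d = dst->sgl;
    	for_each_sgtable_sg(src, s, i) {
    		sg_set_page(d, sg_page(s), s->length, s->offset);
    		d = sg_next(d);
    	}
    	return 0;
    }
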
/linux-6.3-rc2/drivers/infiniband/hw/mthca/

  mthca_memfree.c
     73  __free_pages(sg_page(&chunk->mem[i]),             in mthca_free_icm_pages()
     83  lowmem_page_address(sg_page(&chunk->mem[i])),     in mthca_free_icm_coherent()
    309  page = sg_page(&chunk->mem[i]);                   in mthca_table_find()
    495  unpin_user_page(sg_page(&db_tab->page[i].mem));   in mthca_map_user_db()
    562  unpin_user_page(sg_page(&db_tab->page[i].mem));   in mthca_cleanup_user_db_tab()

/linux-6.3-rc2/crypto/

  scatterwalk.c
     88  sg_set_page(dst, sg_page(src), src->length - len, src->offset + len);   in scatterwalk_ffwd()

  af_alg.c
    740  struct page *page = sg_page(sg + i);                   in af_alg_pull_tsgl()
    813  if (!sg_page(sg))                                      in af_alg_free_areq_sgls()
    815  put_page(sg_page(sg));                                 in af_alg_free_areq_sgls()
   1028  err = memcpy_from_msg(page_address(sg_page(sg)) +      in af_alg_sendmsg()
   1077  err = memcpy_from_msg(page_address(sg_page(sg + i)),   in af_alg_sendmsg()
   1080  __free_page(sg_page(sg + i));                          in af_alg_sendmsg()

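scatterwalk_ffwd() skips into an sg list by synthesizing a new head entry that points at the same page but starts later. A sketch of the simple case where the skip stays inside the first entry; the helper name is hypothetical and 'dst' must have room for two entries:

    #include <crypto/scatterwalk.h>
    #include <linux/scatterlist.h>

    /* Build a new head entry that references the same page as 'src' but
     * starts 'skip' bytes later, then chain it to the rest of the list. */
    static struct scatterlist *sg_skip_into_first(struct scatterlist *dst,
    					      struct scatterlist *src,
    					      unsigned int skip)
    {
    	sg_init_table(dst, 2);
    	sg_set_page(dst, sg_page(src), src->length - skip, src->offset + skip);
    	scatterwalk_crypto_chain(dst, sg_next(src), 2);
    	return dst;
    }
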
/linux-6.3-rc2/drivers/gpu/drm/armada/

  armada_gem.c
    443  if (sg_page(sg))         in armada_gem_prime_map_dma_buf()
    444  put_page(sg_page(sg));   in armada_gem_prime_map_dma_buf()
    466  put_page(sg_page(sg));   in armada_gem_prime_unmap_dma_buf()

/linux-6.3-rc2/drivers/gpu/drm/i915/selftests/

  scatterlist.c
     52  struct page *page = sg_page(sg);               in expect_pfn_sg()
    256  GEM_BUG_ON(page_to_pfn(sg_page(sg)) != pfn);   in alloc_table()

/linux-6.3-rc2/drivers/crypto/ccp/

  ccp-crypto-main.c
    305  if (!sg_page(sg))                                  in ccp_crypto_sg_table_add()
    311  sg_set_page(sg, sg_page(sg_add), sg_add->length,   in ccp_crypto_sg_table_add()

/linux-6.3-rc2/drivers/scsi/

  libiscsi_tcp.c
    131  if (!recv && sendpage_ok(sg_page(sg)))           in iscsi_tcp_segment_map()
    136  segment->sg_mapped = kmap_atomic(sg_page(sg));    in iscsi_tcp_segment_map()
    140  segment->sg_mapped = kmap(sg_page(sg));           in iscsi_tcp_segment_map()
    152  kunmap(sg_page(segment->sg));                     in iscsi_tcp_segment_unmap()
    209  sg_set_page(&sg, sg_page(segment->sg), copied,    in iscsi_tcp_segment_done()

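iscsi_tcp_segment_map() chooses between an atomic and a sleeping mapping of the segment's page. A stripped-down sketch of that choice, reduced to a boolean flag (an assumption of mine; the driver's actual condition involves the receive path and sendpage_ok()); the unmap side must mirror whichever call was used:

    #include <linux/highmem.h>
    #include <linux/scatterlist.h>
    #include <linux/types.h>

    /* Return a CPU pointer to the sg entry's data: an atomic mapping when
     * the data is consumed immediately in this context, a sleeping kmap()
     * when the mapping has to stay live longer. */
    static void *map_sg_data(struct scatterlist *sg, bool atomic)
    {
    	if (atomic)
    		return kmap_atomic(sg_page(sg)) + sg->offset;
    	return kmap(sg_page(sg)) + sg->offset;
    }
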
/linux-6.3-rc2/net/sunrpc/auth_gss/

  gss_krb5_crypto.c
    400  in_page = sg_page(sg);                                                in encryptor()
    404  sg_set_page(&desc->outfrags[desc->fragno], sg_page(sg), sg->length,   in encryptor()
    430  sg_set_page(&desc->outfrags[0], sg_page(sg), fraglen,                 in encryptor()
    492  sg_set_page(&desc->frags[desc->fragno], sg_page(sg), sg->length,      in decryptor()
    515  sg_set_page(&desc->frags[0], sg_page(sg), fraglen,                    in decryptor()

/linux-6.3-rc2/kernel/dma/

  debug.c
   1293  check_for_stack(dev, sg_page(s), s->offset);   in debug_dma_map_sg()
   1294  if (!PageHighMem(sg_page(s)))                  in debug_dma_map_sg()
   1305  entry->pfn = page_to_pfn(sg_page(s));          in debug_dma_map_sg()
   1352  .pfn = page_to_pfn(sg_page(s)),                in debug_dma_unmap_sg()
   1525  .pfn = page_to_pfn(sg_page(s)),                in debug_dma_sync_sg_for_cpu()
   1557  .pfn = page_to_pfn(sg_page(s)),                in debug_dma_sync_sg_for_device()

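dma-debug records each mapped segment by pfn and only derives a kernel virtual address when the page is not in highmem. A sketch of that bookkeeping, with a hypothetical helper name:

    #include <linux/highmem.h>
    #include <linux/mm.h>
    #include <linux/printk.h>
    #include <linux/scatterlist.h>

    /* Note the backing pfn of an sg segment, and its virtual address only
     * when the page is guaranteed to be directly mapped (lowmem). */
    static void record_sg_segment(struct scatterlist *s)
    {
    	unsigned long pfn = page_to_pfn(sg_page(s));
    	void *vaddr = NULL;

    	if (!PageHighMem(sg_page(s)))
    		vaddr = page_address(sg_page(s)) + s->offset;

    	pr_debug("segment pfn %lu vaddr %p\n", pfn, vaddr);
    }
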