Searched refs:base_pa (Results 1 – 10 of 10) sorted by relevance
/drivers/iommu/iommufd/
    viommu.c
        291  struct iommufd_viommu *viommu, phys_addr_t *base_pa)  in iommufd_hw_queue_alloc_phys() (argument)
        341  *base_pa = (page_to_pfn(pages[0]) << PAGE_SHIFT) + offset;  in iommufd_hw_queue_alloc_phys()
        363  phys_addr_t base_pa;  in iommufd_hw_queue_alloc_ioctl() (local)
        407  access = iommufd_hw_queue_alloc_phys(cmd, viommu, &base_pa);  in iommufd_hw_queue_alloc_ioctl()
        420  rc = viommu->ops->hw_queue_init_phys(hw_queue, cmd->index, base_pa);  in iommufd_hw_queue_alloc_ioctl()
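The match at viommu.c:341 is the heart of this result: iommufd derives the hardware queue's physical base address from the first pinned page plus the sub-page offset of the queue start. A minimal sketch of that conversion follows; the helper name hw_queue_base_pa() and its parameters are hypothetical, only the page_to_pfn()/PAGE_SHIFT arithmetic comes from the hit above.

#include <linux/mm.h>
#include <linux/types.h>

/*
 * Hypothetical helper, not from viommu.c: turn the first pinned page of a
 * hardware queue plus the queue's offset within that page into the
 * physical address reported back through *base_pa (cf. viommu.c:341).
 */
static phys_addr_t hw_queue_base_pa(struct page *first_page,
				    unsigned long offset)
{
	/* page frame number << PAGE_SHIFT is the page's physical address;
	 * adding the in-page offset yields the start of the queue memory. */
	return (page_to_pfn(first_page) << PAGE_SHIFT) + offset;
}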
/drivers/net/ethernet/amd/pds_core/
    core.c
        162  static void pdsc_q_map(struct pdsc_queue *q, void *base, dma_addr_t base_pa)  in pdsc_q_map() (argument)
        168  q->base_pa = base_pa;  in pdsc_q_map()
        176  static void pdsc_cq_map(struct pdsc_cq *cq, void *base, dma_addr_t base_pa)  in pdsc_cq_map() (argument)
        182  cq->base_pa = base_pa;  in pdsc_cq_map()
        351  cidi.notifyq_cq_base = cpu_to_le64(pdsc->notifyqcq.cq.base_pa);  in pdsc_core_init()
    core.h
         65  dma_addr_t base_pa;    /* must be page aligned */  (member)
        112  dma_addr_t base_pa;    /* must be page aligned */  (member)
    debugfs.c
        141  debugfs_create_x64("base_pa", 0400, cq_dentry, &cq->base_pa);  in pdsc_debugfs_add_qcq()
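Taken together, the pds_core hits show one convention: a ring's kernel virtual base and its DMA bus address are stored side by side (core.h:65/112), the map helpers only record them (core.c:162-182), and debugfs exposes base_pa as a read-only hex attribute (debugfs.c:141). A rough sketch of that shape, using a made-up struct example_queue in place of pdsc_queue/pdsc_cq and assuming a 64-bit dma_addr_t:

#include <linux/types.h>
#include <linux/debugfs.h>

/* Illustrative stand-in for pdsc_queue / pdsc_cq. */
struct example_queue {
	void *base;		/* kernel virtual address of the ring */
	dma_addr_t base_pa;	/* bus address; must be page aligned */
};

/* Mirrors pdsc_q_map()/pdsc_cq_map(): just remember both addresses. */
static void example_q_map(struct example_queue *q, void *base,
			  dma_addr_t base_pa)
{
	q->base = base;
	q->base_pa = base_pa;
}

/* Mirrors debugfs.c:141: expose the bus address read-only in debugfs
 * (this relies on dma_addr_t being 64 bits wide, as the driver does). */
static void example_q_debugfs(struct example_queue *q, struct dentry *parent)
{
	debugfs_create_x64("base_pa", 0400, parent, &q->base_pa);
}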
/drivers/virt/coco/efi_secret/
    efi_secret.c
        155  if (!secret_area->base_pa || secret_area->size < sizeof(struct secret_header)) {  in efi_secret_map_area()
        158  secret_area->base_pa, secret_area->size);  in efi_secret_map_area()
        163  s->secret_data = ioremap_encrypted(secret_area->base_pa, secret_area->size);  in efi_secret_map_area()
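In efi_secret.c, base_pa is the guest-physical address of the EFI-provided secret area: the driver rejects a zero address or an area smaller than its header, then maps the region with ioremap_encrypted() so it can be read under memory encryption. A condensed, hedged sketch of that flow; the struct names and field layouts below are invented placeholders, only ioremap_encrypted() and the shape of the checks come from the hits above.

#include <linux/io.h>
#include <linux/types.h>

/* Invented placeholder layout, only so the size check compiles; the real
 * struct secret_header is private to efi_secret.c. */
struct example_secret_header {
	u8 guid[16];
	u32 nr_secrets;
};

/* Simplified placeholder for the driver's secret-area descriptor. */
struct example_secret_area {
	phys_addr_t base_pa;	/* physical address handed over by EFI */
	size_t size;
};

/*
 * Condensed version of the efi_secret_map_area() checks: refuse an unset
 * base address or an area too small for a header, then map the region
 * with the memory-encryption-aware ioremap variant (cf. line 163).
 */
static void __iomem *example_map_secret(struct example_secret_area *area)
{
	if (!area->base_pa || area->size < sizeof(struct example_secret_header))
		return NULL;

	return ioremap_encrypted(area->base_pa, area->size);
}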
/drivers/net/ethernet/pensando/ionic/
    ionic_dev.h
        266  dma_addr_t base_pa;  (member)
        298  dma_addr_t base_pa;  (member)
        372  void ionic_cq_map(struct ionic_cq *cq, void *base, dma_addr_t base_pa);
    ionic_lif.c
         650  new->q.base_pa = ALIGN(new->q_base_pa, PAGE_SIZE);  in ionic_qcq_alloc()
         654  new->cq.base_pa = ALIGN(new->q_base_pa + q_size, PAGE_SIZE);  in ionic_qcq_alloc()
         667  new->q.base_pa = ALIGN(new->q_base_pa, PAGE_SIZE);  in ionic_qcq_alloc()
         706  new->cq.base_pa = ALIGN(new->cq_base_pa, PAGE_SIZE);  in ionic_qcq_alloc()
         848  .ring_base = cpu_to_le64(q->base_pa),  in ionic_lif_txq_init()
         849  .cq_ring_base = cpu_to_le64(cq->base_pa),  in ionic_lif_txq_init()
         912  .ring_base = cpu_to_le64(q->base_pa),  in ionic_lif_rxq_init()
         913  .cq_ring_base = cpu_to_le64(cq->base_pa),  in ionic_lif_rxq_init()
        2907  swap(a->q.base_pa, b->q.base_pa);  in ionic_swap_queues()
        2926  swap(a->cq.base_pa, b->cq.base_pa);  in ionic_swap_queues()
        [all …]
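The ionic_lif.c hits at 650/654 (and 667/706) show how one coherent allocation is carved up: the descriptor ring's base_pa is the allocation's bus address rounded up to a page boundary, and the completion ring's base_pa is placed after q_size bytes and page-aligned again; those values are later packed little-endian into the queue-init commands (848-849, 912-913). A hedged sketch of the same arithmetic, with an illustrative struct example_qcq standing in for the driver's qcq:

#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <asm/byteorder.h>

/* Illustrative combined queue + completion-queue allocation. */
struct example_qcq {
	dma_addr_t q_base_pa;	/* bus address of the whole allocation */
	dma_addr_t q_pa;	/* page-aligned start of the descriptor ring */
	dma_addr_t cq_pa;	/* page-aligned start of the completion ring */
};

/*
 * Mirrors the ALIGN() arithmetic at ionic_lif.c:650/654: the descriptor
 * ring starts at the first page boundary of the allocation, the
 * completion ring at the first page boundary after q_size bytes of ring.
 */
static void example_qcq_layout(struct example_qcq *qcq, size_t q_size)
{
	qcq->q_pa = ALIGN(qcq->q_base_pa, PAGE_SIZE);
	qcq->cq_pa = ALIGN(qcq->q_base_pa + q_size, PAGE_SIZE);
}

/* As at lines 848-849: ring addresses go to the device as __le64. */
static __le64 example_ring_base(dma_addr_t pa)
{
	return cpu_to_le64(pa);
}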
    ionic_dev.c
        694  .q_init.ring_base = cpu_to_le64(q->base_pa),  in ionic_dev_cmd_adminq_init()
        695  .q_init.cq_ring_base = cpu_to_le64(cq->base_pa),  in ionic_dev_cmd_adminq_init()
    ionic_debugfs.c
        169  debugfs_create_x64("base_pa", 0400, cq_dentry, &cq->base_pa);  in ionic_debugfs_add_qcq()
/drivers/net/ethernet/microsoft/mana/
    hw_channel.c
        448  u8 *base_pa;  in mana_hwc_alloc_dma_buf() (local)
        469  base_pa = (u8 *)dma_buf->mem_info.dma_handle;  in mana_hwc_alloc_dma_buf()
        475  hwc_wr->buf_sge_addr = base_pa + i * max_msg_size;  in mana_hwc_alloc_dma_buf()
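The mana hw_channel.c hits show the last variant: one coherent DMA buffer is divided into max_msg_size-sized message slots, and each work request's SGE address is the buffer's dma_handle plus the slot offset (the driver does the addition through a u8 * view of the handle, lines 469 and 475). A small sketch of the same offset calculation; the function name is hypothetical, and plain dma_addr_t arithmetic is used instead of the pointer cast:

#include <linux/types.h>

/*
 * Hypothetical helper mirroring hw_channel.c:469/475: compute the bus
 * address of message slot 'slot' inside one coherent DMA buffer that
 * was split into equally sized max_msg_size chunks.
 */
static dma_addr_t example_msg_slot_addr(dma_addr_t dma_handle,
					u32 slot, u32 max_msg_size)
{
	return dma_handle + (dma_addr_t)slot * max_msg_size;
}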
Completed in 31 milliseconds