/drivers/pci/controller/cadence/
  pcie-cadence.c
     31  u64 cpu_addr, u64 pci_addr, size_t size)  in cdns_pcie_set_outbound_region() argument
     96  cpu_addr = pcie->ops->cpu_addr_fixup(pcie, cpu_addr);  in cdns_pcie_set_outbound_region()
     99  (lower_32_bits(cpu_addr) & GENMASK(31, 8));  in cdns_pcie_set_outbound_region()
    100  addr1 = upper_32_bits(cpu_addr);  in cdns_pcie_set_outbound_region()
    109  u32 r, u64 cpu_addr)  in cdns_pcie_set_outbound_region_for_normal_msg() argument
    127  cpu_addr = pcie->ops->cpu_addr_fixup(pcie, cpu_addr);  in cdns_pcie_set_outbound_region_for_normal_msg()
    130  (lower_32_bits(cpu_addr) & GENMASK(31, 8));  in cdns_pcie_set_outbound_region_for_normal_msg()
    131  addr1 = upper_32_bits(cpu_addr);  in cdns_pcie_set_outbound_region_for_normal_msg()
  pcie-cadence-host.c
    250  u64 cpu_addr, u64 size,  in cdns_pcie_host_bar_ib_config() argument
    264  addr1 = upper_32_bits(cpu_addr);  in cdns_pcie_host_bar_ib_config()
    277  if (size + cpu_addr >= SZ_4G) {  in cdns_pcie_host_bar_ib_config()
    344  u64 cpu_addr, pci_addr, size, winsize;  in cdns_pcie_host_bar_config() local
    351  cpu_addr = entry->res->start;  in cdns_pcie_host_bar_config()
    358  pci_addr, cpu_addr);  in cdns_pcie_host_bar_config()
    394  cpu_addr);  in cdns_pcie_host_bar_config()
    407  cpu_addr += winsize;  in cdns_pcie_host_bar_config()
    516  u64 cpu_addr = cfg_res->start;  in cdns_pcie_host_init_address_translation() local
    535  cpu_addr = pcie->ops->cpu_addr_fixup(pcie, cpu_addr);  in cdns_pcie_host_init_address_translation()
    [all …]
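Both Cadence files program a single 64-bit CPU address into a pair of 32-bit window registers, with the window size encoded into the low bits of the first register. A minimal sketch of that idiom; the register offsets here are hypothetical stand-ins, not the real Cadence layout:

#include <linux/bits.h>		/* GENMASK() */
#include <linux/io.h>		/* writel() */
#include <linux/kernel.h>	/* lower_32_bits(), upper_32_bits() */

/* Hypothetical per-region register offsets, for illustration only. */
#define OB_REGION_ADDR0(r)	(0x000 + (r) * 0x20)
#define OB_REGION_ADDR1(r)	(0x004 + (r) * 0x20)

static void set_outbound_window(void __iomem *base, u32 r,
				u64 cpu_addr, u8 nbits)
{
	/* ADDR0: window size (number of address bits - 1) in the low
	 * bits, address bits 31:8 above them; ADDR1: bits 63:32. */
	u32 addr0 = ((nbits - 1) & 0x3f) |
		    (lower_32_bits(cpu_addr) & GENMASK(31, 8));
	u32 addr1 = upper_32_bits(cpu_addr);

	writel(addr0, base + OB_REGION_ADDR0(r));
	writel(addr1, base + OB_REGION_ADDR1(r));
}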
/drivers/gpu/drm/amd/amdgpu/
  amdgpu_doorbell_mgr.c
     42  return readl(adev->doorbell.cpu_addr + index);  in amdgpu_mm_rdoorbell()
     65  writel(v, adev->doorbell.cpu_addr + index);  in amdgpu_mm_wdoorbell()
     86  return atomic64_read((atomic64_t *)(adev->doorbell.cpu_addr + index));  in amdgpu_mm_rdoorbell64()
    109  atomic64_set((atomic64_t *)(adev->doorbell.cpu_addr + index), v);  in amdgpu_mm_wdoorbell64()
    171  (void **)&adev->doorbell.cpu_addr);  in amdgpu_doorbell_create_kernel_doorbells()
    243  (void **)&adev->doorbell.cpu_addr);  in amdgpu_doorbell_fini()
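The doorbell helpers treat cpu_addr as the base of one mapped doorbell page and index it in u32 (or u64) units. A condensed sketch of the bounds-checked accessor pair; struct doorbell_page is a stand-in for the adev->doorbell fields, not the amdgpu definition:

#include <linux/io.h>		/* readl(), writel() */
#include <linux/types.h>

struct doorbell_page {
	u32 *cpu_addr;		/* kernel mapping of the doorbell BAR */
	u32 num_doorbells;	/* valid u32 slots in the page */
};

static u32 doorbell_read(struct doorbell_page *db, u32 index)
{
	if (index >= db->num_doorbells)
		return 0;
	/* +index advances in sizeof(u32) steps, matching line 42. */
	return readl(db->cpu_addr + index);
}

static void doorbell_write(struct doorbell_page *db, u32 index, u32 v)
{
	if (index < db->num_doorbells)
		writel(v, db->cpu_addr + index);
}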
  amdgpu_isp.c
    251  void **buf_obj, u64 *gpu_addr, void **cpu_addr)  in isp_kernel_buffer_alloc() argument
    269  if (WARN_ON(!cpu_addr))  in isp_kernel_buffer_alloc()
    289  cpu_addr);  in isp_kernel_buffer_alloc()
    290  if (!cpu_addr || ret) {  in isp_kernel_buffer_alloc()
    311  void isp_kernel_buffer_free(void **buf_obj, u64 *gpu_addr, void **cpu_addr)  in isp_kernel_buffer_free() argument
    315  amdgpu_bo_free_kernel(bo, gpu_addr, cpu_addr);  in isp_kernel_buffer_free()
  amdgpu_fence.c
     72  if (drv->cpu_addr)  in amdgpu_fence_write()
     73  *drv->cpu_addr = cpu_to_le32(seq);  in amdgpu_fence_write()
     89  if (drv->cpu_addr)  in amdgpu_fence_read()
     90  seq = le32_to_cpu(*drv->cpu_addr);  in amdgpu_fence_read()
    451  ring->fence_drv.cpu_addr = ring->fence_cpu_addr;  in amdgpu_fence_driver_start_ring()
    456  ring->fence_drv.cpu_addr = adev->uvd.inst[ring->me].cpu_addr + index;  in amdgpu_fence_driver_start_ring()
    489  ring->fence_drv.cpu_addr = NULL;  in amdgpu_fence_driver_init_ring()
    980  le32_to_cpu(*(ring->fence_drv.cpu_addr + 2)));  in amdgpu_debugfs_fence_info_show()
    983  le32_to_cpu(*(ring->fence_drv.cpu_addr + 4)));  in amdgpu_debugfs_fence_info_show()
    986  le32_to_cpu(*(ring->fence_drv.cpu_addr + 6)));  in amdgpu_debugfs_fence_info_show()
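amdgpu_fence_write()/amdgpu_fence_read() guard a possibly-NULL CPU pointer and keep the fence sequence number little-endian in memory regardless of host byte order. A self-contained sketch of that pair (struct fence_slot is illustrative; the real driver has a fallback path for the NULL case that is not shown in the excerpt):

#include <linux/types.h>
#include <asm/byteorder.h>	/* cpu_to_le32(), le32_to_cpu() */

struct fence_slot {
	__le32 *cpu_addr;	/* CPU view of the fence location, may be NULL */
};

static void fence_write(struct fence_slot *drv, u32 seq)
{
	if (drv->cpu_addr)
		*drv->cpu_addr = cpu_to_le32(seq);
}

static u32 fence_read(struct fence_slot *drv)
{
	/* 0 when no CPU mapping exists; see the driver for its fallback. */
	return drv->cpu_addr ? le32_to_cpu(*drv->cpu_addr) : 0;
}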
  amdgpu_object.c
    241  u64 *gpu_addr, void **cpu_addr)  in amdgpu_bo_create_reserved() argument
    294  if (cpu_addr) {  in amdgpu_bo_create_reserved()
    340  u64 *gpu_addr, void **cpu_addr)  in amdgpu_bo_create_kernel() argument
    345  gpu_addr, cpu_addr);  in amdgpu_bo_create_kernel()
    451  cpu_addr);  in amdgpu_bo_create_kernel_at()
    462  if (cpu_addr)  in amdgpu_bo_create_kernel_at()
    476  if (cpu_addr) {  in amdgpu_bo_create_kernel_at()
    504  void **cpu_addr)  in amdgpu_bo_free_kernel() argument
    512  if (cpu_addr)  in amdgpu_bo_free_kernel()
    523  if (cpu_addr)  in amdgpu_bo_free_kernel()
    [all …]
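amdgpu_bo_create_kernel() hands back three views of one allocation: the BO handle, its GPU address, and a CPU mapping through the cpu_addr out-parameter; amdgpu_bo_free_kernel() clears all three. A typical caller, as a sketch (amdgpu-internal headers assumed; size and domain values are illustrative):

static int example_alloc(struct amdgpu_device *adev)
{
	struct amdgpu_bo *bo;
	u64 gpu_addr;
	void *cpu_addr;
	int r;

	r = amdgpu_bo_create_kernel(adev, PAGE_SIZE, PAGE_SIZE,
				    AMDGPU_GEM_DOMAIN_GTT,
				    &bo, &gpu_addr, &cpu_addr);
	if (r)
		return r;

	memset(cpu_addr, 0, PAGE_SIZE);	/* CPU writes land in the BO */

	/* ... hand gpu_addr to the hardware, use cpu_addr from the CPU ... */

	amdgpu_bo_free_kernel(&bo, &gpu_addr, &cpu_addr);
	return 0;
}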
/drivers/remoteproc/
  ti_k3_common.c
    337  if (!kproc->rmem[0].cpu_addr) {  in k3_get_loaded_rsc_table()
    350  return (__force struct resource_table *)kproc->rmem[0].cpu_addr;  in k3_get_loaded_rsc_table()
    383  va = kproc->mem[i].cpu_addr + offset;  in k3_rproc_da_to_va()
    390  va = kproc->mem[i].cpu_addr + offset;  in k3_rproc_da_to_va()
    402  va = kproc->rmem[i].cpu_addr + offset;  in k3_rproc_da_to_va()
    442  kproc->mem[i].cpu_addr = devm_ioremap_wc(dev, res->start,  in k3_rproc_of_get_memories()
    444  if (!kproc->mem[i].cpu_addr) {  in k3_rproc_of_get_memories()
    455  kproc->mem[i].size, kproc->mem[i].cpu_addr,  in k3_rproc_of_get_memories()
    524  kproc->rmem[i].cpu_addr = devm_ioremap_wc(dev, rmem->base, rmem->size);  in k3_reserved_mem_init()
    525  if (!kproc->rmem[i].cpu_addr) {  in k3_reserved_mem_init()
    [all …]
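k3_rproc_da_to_va() resolves a remote-processor device address by scanning the region table for a match and returning the ioremapped CPU alias at the same offset. A generic sketch of that walk, with a stand-in struct mirroring the driver's fields:

#include <linux/io.h>
#include <linux/types.h>

struct rproc_mem {
	void __iomem *cpu_addr;	/* from devm_ioremap_wc() */
	u32 dev_addr;		/* address as the remote core sees it */
	size_t size;
};

static void *da_to_va(struct rproc_mem *mem, int num_mems, u64 da, size_t len)
{
	int i;

	for (i = 0; i < num_mems; i++) {
		if (da >= mem[i].dev_addr &&
		    da + len <= mem[i].dev_addr + mem[i].size) {
			u64 offset = da - mem[i].dev_addr;

			/* CPU alias of the same byte inside the region */
			return (__force void *)(mem[i].cpu_addr + offset);
		}
	}
	return NULL;	/* address not covered by any region */
}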
  keystone_remoteproc.c
     35  void __iomem *cpu_addr;  member
    271  va = ksproc->mem[i].cpu_addr + offset;  in keystone_rproc_da_to_va()
    279  va = ksproc->mem[i].cpu_addr + offset;  in keystone_rproc_da_to_va()
    313  ksproc->mem[i].cpu_addr = devm_ioremap_resource(dev, res);  in keystone_rproc_of_get_memories()
    314  if (IS_ERR(ksproc->mem[i].cpu_addr)) {  in keystone_rproc_of_get_memories()
    317  return PTR_ERR(ksproc->mem[i].cpu_addr);  in keystone_rproc_of_get_memories()
    325  memset((__force void *)ksproc->mem[i].cpu_addr, 0,  in keystone_rproc_of_get_memories()
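Unlike the devm_ioremap_wc() calls in ti_k3_common.c, devm_ioremap_resource() reports failure through ERR_PTR(), so these drivers test the returned cpu_addr with IS_ERR()/PTR_ERR() rather than a NULL check. A minimal sketch of that probe-time idiom (the function and resource name are hypothetical):

#include <linux/err.h>
#include <linux/io.h>
#include <linux/ioport.h>
#include <linux/platform_device.h>

static int map_named_region(struct platform_device *pdev, const char *name,
			    void __iomem **cpu_addr)
{
	struct resource *res;

	res = platform_get_resource_byname(pdev, IORESOURCE_MEM, name);
	*cpu_addr = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(*cpu_addr))	/* ERR_PTR(), not NULL, on failure */
		return PTR_ERR(*cpu_addr);

	return 0;
}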
  wkup_m3_rproc.c
     36  void __iomem *cpu_addr;  member
    107  va = (__force void *)(wkupm3->mem[i].cpu_addr + offset);  in wkup_m3_rproc_da_to_va()
    187  wkupm3->mem[i].cpu_addr = devm_ioremap_resource(dev, res);  in wkup_m3_rproc_probe()
    188  if (IS_ERR(wkupm3->mem[i].cpu_addr)) {  in wkup_m3_rproc_probe()
    191  ret = PTR_ERR(wkupm3->mem[i].cpu_addr);  in wkup_m3_rproc_probe()
  da8xx_remoteproc.c
     48  void __iomem *cpu_addr;  member
    215  drproc->mem[i].cpu_addr = devm_ioremap_resource(dev, res);  in da8xx_rproc_get_internal_memories()
    216  if (IS_ERR(drproc->mem[i].cpu_addr)) {  in da8xx_rproc_get_internal_memories()
    219  return PTR_ERR(drproc->mem[i].cpu_addr);  in da8xx_rproc_get_internal_memories()
    228  drproc->mem[i].size, drproc->mem[i].cpu_addr,  in da8xx_rproc_get_internal_memories()
  st_slim_rproc.c
    143  fw_rev = readl(slim_rproc->mem[ST_SLIM_DMEM].cpu_addr +  in slim_rproc_start()
    188  va = (__force void *)slim_rproc->mem[i].cpu_addr;  in slim_rproc_da_to_va()
    252  slim_rproc->mem[i].cpu_addr = devm_ioremap_resource(dev, res);  in st_slim_rproc_alloc()
    253  if (IS_ERR(slim_rproc->mem[i].cpu_addr)) {  in st_slim_rproc_alloc()
    255  err = PTR_ERR(slim_rproc->mem[i].cpu_addr);  in st_slim_rproc_alloc()
/drivers/media/common/saa7146/
  saa7146_core.c
    403  dev->d_rps0.cpu_addr = dma_alloc_coherent(&pci->dev, SAA7146_RPS_MEM,  in saa7146_init_one()
    406  if (!dev->d_rps0.cpu_addr)  in saa7146_init_one()
    412  if (!dev->d_rps1.cpu_addr)  in saa7146_init_one()
    415  dev->d_i2c.cpu_addr = dma_alloc_coherent(&pci->dev, SAA7146_RPS_MEM,  in saa7146_init_one()
    417  if (!dev->d_i2c.cpu_addr)  in saa7146_init_one()
    464  dma_free_coherent(&pci->dev, SAA7146_RPS_MEM, dev->d_i2c.cpu_addr,  in saa7146_init_one()
    467  dma_free_coherent(&pci->dev, SAA7146_RPS_MEM, dev->d_rps1.cpu_addr,  in saa7146_init_one()
    470  dma_free_coherent(&pci->dev, SAA7146_RPS_MEM, dev->d_rps0.cpu_addr,  in saa7146_init_one()
    493  { dev->d_i2c.cpu_addr, dev->d_i2c.dma_handle },  in saa7146_remove_one()
    494  { dev->d_rps1.cpu_addr, dev->d_rps1.dma_handle },  in saa7146_remove_one()
    [all …]
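saa7146_init_one() allocates several coherent buffers and unwinds them in reverse order on failure; each dma_alloc_coherent() call yields both the cpu_addr the driver dereferences and the dma_handle the device sees, and the two must be freed together. A reduced two-buffer sketch of that ladder (struct coherent_buf stands in for the d_rps0/d_rps1/d_i2c fields):

#include <linux/dma-mapping.h>
#include <linux/pci.h>

struct coherent_buf {
	void *cpu_addr;
	dma_addr_t dma_handle;
};

static int alloc_two(struct pci_dev *pci, size_t size,
		     struct coherent_buf *a, struct coherent_buf *b)
{
	a->cpu_addr = dma_alloc_coherent(&pci->dev, size, &a->dma_handle,
					 GFP_KERNEL);
	if (!a->cpu_addr)
		return -ENOMEM;

	b->cpu_addr = dma_alloc_coherent(&pci->dev, size, &b->dma_handle,
					 GFP_KERNEL);
	if (!b->cpu_addr)
		goto err_free_a;

	return 0;

err_free_a:	/* unwind in reverse allocation order */
	dma_free_coherent(&pci->dev, size, a->cpu_addr, a->dma_handle);
	return -ENOMEM;
}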
/drivers/pci/controller/mobiveil/
  pcie-mobiveil.c
    137  u64 cpu_addr, u64 pci_addr, u32 type, u64 size)  in program_ib_windows() argument
    157  mobiveil_csr_writel(pcie, lower_32_bits(cpu_addr),  in program_ib_windows()
    159  mobiveil_csr_writel(pcie, upper_32_bits(cpu_addr),  in program_ib_windows()
    174  u64 cpu_addr, u64 pci_addr, u32 type, u64 size)  in program_ob_windows() argument
    203  lower_32_bits(cpu_addr) & (~AXI_WINDOW_ALIGN_MASK),  in program_ob_windows()
    205  mobiveil_csr_writel(pcie, upper_32_bits(cpu_addr),  in program_ob_windows()
/drivers/of/
  address.c
    234  port = pci_address_to_pio(range->cpu_addr);  in of_pci_range_to_resource()
    241  start = range->cpu_addr;  in of_pci_range_to_resource()
    826  u64 bus_addr, cpu_addr, size;  in of_pci_range_parser_one() local
    834  cpu_addr = of_translate_address(parser->node,  in of_pci_range_parser_one()
    841  cpu_addr != range->cpu_addr + range->size)  in of_pci_range_parser_one()
    925  if (range.cpu_addr == OF_BAD_ADDR) {  in of_dma_get_range()
    948  range.bus_addr, range.cpu_addr, range.size);  in of_dma_get_range()
    949  if (range.cpu_addr == OF_BAD_ADDR)  in of_dma_get_range()
    951  r->cpu_start = range.cpu_addr;  in of_dma_get_range()
    986  if (range.cpu_addr + range.size > cpu_end)  in of_dma_get_max_cpu_address()
    [all …]
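of_pci_range_parser_one() is the workhorse behind the range-parser API: each parsed entry carries matching bus_addr/cpu_addr/size views of one "ranges" (or "dma-ranges") row. A minimal consumer of that API, as a sketch (the function name and message are illustrative; error handling trimmed):

#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/printk.h>

static void dump_ranges(struct device_node *np)
{
	struct of_pci_range_parser parser;
	struct of_pci_range range;

	if (of_pci_range_parser_init(&parser, np))
		return;	/* node has no usable "ranges" property */

	for_each_of_pci_range(&parser, &range)
		pr_info("bus %#llx -> cpu %#llx, size %#llx\n",
			(unsigned long long)range.bus_addr,
			(unsigned long long)range.cpu_addr,
			(unsigned long long)range.size);
}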
/drivers/dma/
  st_fdma.h
    185  readl((fchan)->fdev->slim_rproc->mem[ST_SLIM_DMEM].cpu_addr \
    190  writel((val), (fchan)->fdev->slim_rproc->mem[ST_SLIM_DMEM].cpu_addr \
    197  writel((val), (fchan)->fdev->slim_rproc->mem[ST_SLIM_DMEM].cpu_addr \
    207  readl((fchan)->fdev->slim_rproc->mem[ST_SLIM_DMEM].cpu_addr \
    212  writel((val), (fchan)->fdev->slim_rproc->mem[ST_SLIM_DMEM].cpu_addr \
/drivers/accel/ivpu/
  ivpu_gem.h
     88  static inline u32 cpu_to_vpu_addr(struct ivpu_bo *bo, void *cpu_addr)  in cpu_to_vpu_addr() argument
     90  if (cpu_addr < ivpu_bo_vaddr(bo))  in cpu_to_vpu_addr()
     93  if (cpu_addr >= (ivpu_bo_vaddr(bo) + ivpu_bo_size(bo)))  in cpu_to_vpu_addr()
     96  return bo->vpu_addr + (cpu_addr - ivpu_bo_vaddr(bo));  in cpu_to_vpu_addr()
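cpu_to_vpu_addr() is plain bounds-checked pointer arithmetic: reject CPU pointers outside the BO's mapping, otherwise rebase the offset onto the device address. A generalized sketch; the excerpt does not show the driver's out-of-range return value, so 0 is assumed here, and the parameters stand in for the ivpu_bo accessors:

#include <linux/types.h>

static u32 cpu_to_dev_addr(void *vaddr, size_t size, u32 dev_base,
			   void *cpu_addr)
{
	/* Reject pointers outside [vaddr, vaddr + size). */
	if (cpu_addr < vaddr || cpu_addr >= vaddr + size)
		return 0;	/* assumed failure value */

	/* Same byte offset, device's view of the buffer. */
	return dev_base + (u32)(cpu_addr - vaddr);
}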
/drivers/iommu/
  dma-iommu.c
    1574  if (is_vmalloc_addr(cpu_addr)) {  in __iommu_dma_free()
    1585  page = virt_to_page(cpu_addr);  in __iommu_dma_free()
    1608  void *cpu_addr;  in iommu_dma_alloc_pages() local
    1621  if (!cpu_addr)  in iommu_dma_alloc_pages()
    1627  cpu_addr = page_address(page);  in iommu_dma_alloc_pages()
    1631  memset(cpu_addr, 0, alloc_size);  in iommu_dma_alloc_pages()
    1632  return cpu_addr;  in iommu_dma_alloc_pages()
    1644  void *cpu_addr;  in iommu_dma_alloc() local
    1659  if (!cpu_addr)  in iommu_dma_alloc()
    1669  return cpu_addr;  in iommu_dma_alloc()
    [all …]
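The branch at lines 1574 and 1585 keys the teardown on how the buffer was mapped: a cpu_addr in vmalloc space came from a vmap of a page array and needs an explicit page lookup, while a linear-map address converts straight back to its struct page. The page-recovery half of that logic, reduced to a sketch:

#include <linux/mm.h>		/* is_vmalloc_addr(), virt_to_page() */
#include <linux/vmalloc.h>	/* vmalloc_to_page() */

static struct page *cpu_addr_to_page(void *cpu_addr)
{
	if (is_vmalloc_addr(cpu_addr))
		return vmalloc_to_page(cpu_addr);  /* first page of the vmap */

	return virt_to_page(cpu_addr);	/* linear-map address */
}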
/drivers/pci/controller/dwc/
  pcie-tegra194-acpi.c
     49  int index, int type, u64 cpu_addr,  in program_outbound_atu() argument
     52  atu_reg_write(pcie_ecam, index, lower_32_bits(cpu_addr),  in program_outbound_atu()
     54  atu_reg_write(pcie_ecam, index, upper_32_bits(cpu_addr),  in program_outbound_atu()
     58  atu_reg_write(pcie_ecam, index, lower_32_bits(cpu_addr + size - 1),  in program_outbound_atu()
/drivers/pci/controller/
  pci-xgene.c
    363  u64 cpu_addr, u64 pci_addr)  in xgene_pcie_setup_ob_reg() argument
    385  xgene_pcie_writel(port, offset, lower_32_bits(cpu_addr));  in xgene_pcie_setup_ob_reg()
    386  xgene_pcie_writel(port, offset + 0x04, upper_32_bits(cpu_addr));  in xgene_pcie_setup_ob_reg()
    484  u64 cpu_addr = range->cpu_addr;  in xgene_pcie_setup_ib_reg() local
    501  bar_low = pcie_bar_low_val((u32)cpu_addr, flags);  in xgene_pcie_setup_ib_reg()
    507  writel(upper_32_bits(cpu_addr), bar_addr + 0x4);  in xgene_pcie_setup_ib_reg()
    517  xgene_pcie_writel(port, IBAR3L + 0x4, upper_32_bits(cpu_addr));  in xgene_pcie_setup_ib_reg()
    542  u64 end = range.cpu_addr + range.size - 1;  in xgene_pcie_parse_map_dma_ranges()
    545  range.flags, range.cpu_addr, end, range.pci_addr);  in xgene_pcie_parse_map_dma_ranges()
  pcie-rcar.c
    102  void rcar_pcie_set_inbound(struct rcar_pcie *pcie, u64 cpu_addr,  in rcar_pcie_set_inbound() argument
    112  rcar_pci_write_reg(pcie, lower_32_bits(cpu_addr), PCIELAR(idx));  in rcar_pcie_set_inbound()
    118  rcar_pci_write_reg(pcie, upper_32_bits(cpu_addr), PCIELAR(idx + 1));  in rcar_pcie_set_inbound()
/drivers/gpu/drm/radeon/
  radeon_fence.c
     72  if (drv->cpu_addr)  in radeon_fence_write()
     73  *drv->cpu_addr = cpu_to_le32(seq);  in radeon_fence_write()
     94  if (drv->cpu_addr)  in radeon_fence_read()
     95  seq = le32_to_cpu(*drv->cpu_addr);  in radeon_fence_read()
    776  rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4];  in radeon_fence_driver_start_ring()
    783  rdev->fence_drv[ring].cpu_addr = rdev->uvd.cpu_addr + index;  in radeon_fence_driver_start_ring()
    796  rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4];  in radeon_fence_driver_start_ring()
    821  rdev->fence_drv[ring].cpu_addr = NULL;  in radeon_fence_driver_init_ring()
/drivers/net/wireless/ath/wcn36xx/
  dxe.c
    178  wcn_ch->cpu_addr = dma_alloc_coherent(dev, size, &wcn_ch->dma_addr,  in wcn36xx_dxe_init_descs()
    180  if (!wcn_ch->cpu_addr)  in wcn36xx_dxe_init_descs()
    183  cur_dxe = wcn_ch->cpu_addr;  in wcn36xx_dxe_init_descs()
    233  dma_free_coherent(dev, size, wcn_ch->cpu_addr, wcn_ch->dma_addr);  in wcn36xx_dxe_deinit_descs()
    689  void *cpu_addr;  in wcn36xx_dxe_allocate_mem_pools() local
    698  cpu_addr = dma_alloc_coherent(wcn->dev, s,  in wcn36xx_dxe_allocate_mem_pools()
    701  if (!cpu_addr)  in wcn36xx_dxe_allocate_mem_pools()
    704  wcn->mgmt_mem_pool.virt_addr = cpu_addr;  in wcn36xx_dxe_allocate_mem_pools()
    713  cpu_addr = dma_alloc_coherent(wcn->dev, s,  in wcn36xx_dxe_allocate_mem_pools()
    716  if (!cpu_addr)  in wcn36xx_dxe_allocate_mem_pools()
    [all …]
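wcn36xx_dxe_init_descs() makes one coherent allocation hold a whole descriptor ring: the CPU initializes entries through cpu_addr while the hardware follows dma_addr-based links between them. A sketch of that setup under simplified assumptions (struct dxe_desc is a stand-in for the driver's descriptor, and 32-bit DMA addresses are assumed, as on this hardware):

#include <linux/dma-mapping.h>

struct dxe_desc {
	__le32 next;	/* DMA address of the next descriptor */
	/* ... control/payload words elided ... */
};

static struct dxe_desc *alloc_ring(struct device *dev, int n,
				   dma_addr_t *dma_addr)
{
	struct dxe_desc *ring;
	int i;

	ring = dma_alloc_coherent(dev, n * sizeof(*ring), dma_addr,
				  GFP_KERNEL);
	if (!ring)
		return NULL;

	/* Link each entry to its successor, wrapping at the end. */
	for (i = 0; i < n; i++)
		ring[i].next = cpu_to_le32(*dma_addr +
					   ((i + 1) % n) * sizeof(*ring));
	return ring;
}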
/drivers/net/ethernet/toshiba/
  ps3_gelic_net.c
    164  card->rx_chain.head->link.cpu_addr, 0);  in gelic_card_enable_rxdmac()
    229  = cpu_to_be32(descr->next->link.cpu_addr);  in gelic_card_reset_chain()
    291  for (descr = descr_in; descr && descr->link.cpu_addr;  in gelic_card_free_chain()
    295  descr->link.cpu_addr = 0;  in gelic_card_free_chain()
    333  descr->link.cpu_addr, descr->link.size,  in gelic_card_init_chain()
    350  cpu_to_be32(descr->next->link.cpu_addr);  in gelic_card_init_chain()
    381  dma_addr_t cpu_addr;  in gelic_descr_prepare_rx() local
    408  if (dma_mapping_error(ctodev(card), cpu_addr)) {  in gelic_descr_prepare_rx()
    830  descr->link.cpu_addr, 0);  in gelic_card_kick_txdma()
    884  cpu_to_be32(descr->link.cpu_addr);  in gelic_net_xmit()
    [all …]
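Note the naming quirk at line 381: gelic's cpu_addr local actually holds a dma_addr_t produced by a streaming mapping. The non-negotiable part of that pattern is the dma_mapping_error() check before the handle is used, sketched here (function name and error code are illustrative):

#include <linux/dma-mapping.h>

static int map_rx_buf(struct device *dev, void *buf, size_t len,
		      dma_addr_t *handle)
{
	dma_addr_t cpu_addr = dma_map_single(dev, buf, len, DMA_FROM_DEVICE);

	/* The returned handle must be validated before the device sees it. */
	if (dma_mapping_error(dev, cpu_addr))
		return -EIO;

	*handle = cpu_addr;
	return 0;
}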
/drivers/net/ethernet/broadcom/
  bcm4908_enet.c
     71  void *cpu_addr;  member
    175  ring->cpu_addr = dma_alloc_coherent(dev, size, &ring->dma_addr, GFP_KERNEL);  in bcm4908_dma_alloc_buf_descs()
    176  if (!ring->cpu_addr)  in bcm4908_dma_alloc_buf_descs()
    179  if (((uintptr_t)ring->cpu_addr) & (0x40 - 1)) {  in bcm4908_dma_alloc_buf_descs()
    191  dma_free_coherent(dev, size, ring->cpu_addr, ring->dma_addr);  in bcm4908_dma_alloc_buf_descs()
    192  ring->cpu_addr = NULL;  in bcm4908_dma_alloc_buf_descs()
    204  if (rx_ring->cpu_addr)  in bcm4908_enet_dma_free()
    205  dma_free_coherent(dev, size, rx_ring->cpu_addr, rx_ring->dma_addr);  in bcm4908_enet_dma_free()
    209  if (tx_ring->cpu_addr)  in bcm4908_enet_dma_free()
    210  dma_free_coherent(dev, size, tx_ring->cpu_addr, tx_ring->dma_addr);  in bcm4908_enet_dma_free()
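Lines 175-192 allocate the descriptor block and then verify the hardware's 0x40-byte alignment requirement, freeing and failing if it is not met (the DMA API guarantees only natural alignment of the allocation size). A condensed sketch of that check:

#include <linux/device.h>
#include <linux/dma-mapping.h>

static void *alloc_descs_aligned(struct device *dev, size_t size,
				 dma_addr_t *dma_addr)
{
	void *cpu_addr = dma_alloc_coherent(dev, size, dma_addr, GFP_KERNEL);

	if (!cpu_addr)
		return NULL;

	if ((uintptr_t)cpu_addr & (0x40 - 1)) {	/* not 64-byte aligned */
		dev_err(dev, "invalid descriptor ring alignment\n");
		dma_free_coherent(dev, size, cpu_addr, *dma_addr);
		return NULL;
	}
	return cpu_addr;
}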
/drivers/rpmsg/
  virtio_rpmsg_bus.c
    170  rpmsg_sg_init(struct scatterlist *sg, void *cpu_addr, unsigned int len)  in rpmsg_sg_init() argument
    172  if (is_vmalloc_addr(cpu_addr)) {  in rpmsg_sg_init()
    174  sg_set_page(sg, vmalloc_to_page(cpu_addr), len,  in rpmsg_sg_init()
    175  offset_in_page(cpu_addr));  in rpmsg_sg_init()
    177  WARN_ON(!virt_addr_valid(cpu_addr));  in rpmsg_sg_init()
    178  sg_init_one(sg, cpu_addr, len);  in rpmsg_sg_init()
    916  void *cpu_addr = vrp->rbufs + i * vrp->buf_size;  in rpmsg_probe() local
    918  rpmsg_sg_init(&sg, cpu_addr, vrp->buf_size);  in rpmsg_probe()
    920  err = virtqueue_add_inbuf(vrp->rvq, &sg, 1, cpu_addr,  in rpmsg_probe()
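rpmsg_sg_init() picks the scatterlist initializer by address kind: vmalloc buffers sit outside the linear map, so their backing page must be looked up explicitly, while linear-map addresses can use sg_init_one() directly. The helper, reconstructed from the lines above as a sketch (like the driver, it assumes the buffer does not cross a page boundary in the vmalloc case):

#include <linux/mm.h>		/* is_vmalloc_addr(), offset_in_page() */
#include <linux/scatterlist.h>
#include <linux/vmalloc.h>	/* vmalloc_to_page() */

static void sg_init_any(struct scatterlist *sg, void *cpu_addr,
			unsigned int len)
{
	if (is_vmalloc_addr(cpu_addr)) {
		sg_init_table(sg, 1);
		sg_set_page(sg, vmalloc_to_page(cpu_addr), len,
			    offset_in_page(cpu_addr));
	} else {
		WARN_ON(!virt_addr_valid(cpu_addr));
		sg_init_one(sg, cpu_addr, len);
	}
}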