
Searched refs:dma_address (Results 1 – 25 of 63) sorted by relevance

/drivers/gpu/drm/ttm/
ttm_tt.c
127 sizeof(*ttm->dma_address), GFP_KERNEL); in ttm_dma_tt_alloc_page_directory()
131 ttm->dma_address = (void *)(ttm->pages + ttm->num_pages); in ttm_dma_tt_alloc_page_directory()
137 ttm->dma_address = kvcalloc(ttm->num_pages, sizeof(*ttm->dma_address), in ttm_sg_tt_alloc_page_directory()
139 if (!ttm->dma_address) in ttm_sg_tt_alloc_page_directory()
159 ttm->dma_address = NULL; in ttm_tt_init_fields()
199 kvfree(ttm->dma_address); in ttm_tt_fini()
201 ttm->dma_address = NULL; in ttm_tt_fini()
ttm_pool.c
672 dma_addr_t *dma_addr = tt->dma_address ? in ttm_pool_free_range()
673 tt->dma_address + i : NULL; in ttm_pool_free_range()
685 alloc->dma_addr = tt->dma_address; in ttm_pool_alloc_state_init()
914 dma_addr_t *dma_addr = tt->dma_address ? &restore->first_dma : NULL; in ttm_pool_drop_backed_up()
989 if (tt->dma_address || flags->purge) { in ttm_pool_backup()
1001 if (tt->dma_address) in ttm_pool_backup()
1002 ttm_pool_unmap(pool, tt->dma_address[i], in ttm_pool_backup()
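
The TTM hits above all revolve around the optional per-page dma_address array in struct ttm_tt, allocated next to the pages array and freed again in ttm_tt_fini(). A minimal sketch of walking that array, assuming a populated ttm_tt; example_dump_tt_dma is a hypothetical helper, not kernel code:

#include <linux/printk.h>
#include <drm/ttm/ttm_tt.h>

/* Hypothetical helper: dump the per-page DMA addresses of a ttm_tt.
 * dma_address is NULL for SG-backed or unpopulated TTs, so check first.
 */
static void example_dump_tt_dma(struct ttm_tt *tt)
{
	unsigned long i;

	if (!tt->dma_address)
		return;

	for (i = 0; i < tt->num_pages; i++)
		pr_debug("page %lu -> dma %pad\n", i, &tt->dma_address[i]);
}
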
/drivers/xen/
swiotlb-xen.c
340 xen_swiotlb_unmap_page(hwdev, sg->dma_address, sg_dma_len(sg), in xen_swiotlb_unmap_sg()
355 sg->dma_address = xen_swiotlb_map_page(dev, sg_page(sg), in xen_swiotlb_map_sg()
357 if (sg->dma_address == DMA_MAPPING_ERROR) in xen_swiotlb_map_sg()
377 xen_swiotlb_sync_single_for_cpu(dev, sg->dma_address, in xen_swiotlb_sync_sg_for_cpu()
390 xen_swiotlb_sync_single_for_device(dev, sg->dma_address, in xen_swiotlb_sync_sg_for_device()
grant-dma-ops.c
245 xen_grant_dma_unmap_page(dev, s->dma_address, sg_dma_len(s), dir, in xen_grant_dma_unmap_sg()
260 s->dma_address = xen_grant_dma_map_page(dev, sg_page(s), s->offset, in xen_grant_dma_map_sg()
262 if (s->dma_address == DMA_MAPPING_ERROR) in xen_grant_dma_map_sg()
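
Both Xen implementations above follow the usual .map_sg contract: map each entry, store the handle in sg->dma_address, and bail out on DMA_MAPPING_ERROR. A hedged sketch of that pattern using the generic dma_map_page() API rather than the Xen-specific helpers; example_map_sg is not driver code and error unwinding is simplified:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Sketch of the map_sg pattern above. On error, real code unmaps the
 * entries that were already mapped before returning.
 */
static int example_map_sg(struct device *dev, struct scatterlist *sgl,
			  int nents, enum dma_data_direction dir)
{
	struct scatterlist *sg;
	int i;

	for_each_sg(sgl, sg, nents, i) {
		sg->dma_address = dma_map_page(dev, sg_page(sg), sg->offset,
					       sg->length, dir);
		if (sg->dma_address == DMA_MAPPING_ERROR)
			return -EIO;
		sg_dma_len(sg) = sg->length;
	}

	return nents;
}
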
/drivers/gpu/drm/vmwgfx/
vmwgfx_ttm_buffer.c
112 viter->dma_address = &__vmw_piter_dma_addr; in vmw_piter_start()
118 viter->dma_address = &__vmw_piter_sg_addr; in vmw_piter_start()
185 vsgt->addrs = vmw_tt->dma_ttm.dma_address; in vmw_ttm_map_dma()
372 ttm->dma_address, in vmw_ttm_populate()
/drivers/gpu/drm/amd/amdgpu/
amdgpu_ttm.c
253 dma_addr = &bo->ttm->dma_address[mm_cur->start >> PAGE_SHIFT]; in amdgpu_ttm_map_buffer()
256 dma_addr_t dma_address; in amdgpu_ttm_map_buffer() local
258 dma_address = mm_cur->start; in amdgpu_ttm_map_buffer()
259 dma_address += adev->vm_manager.vram_base_offset; in amdgpu_ttm_map_buffer()
262 amdgpu_gart_map(adev, i << PAGE_SHIFT, 1, &dma_address, in amdgpu_ttm_map_buffer()
264 dma_address += PAGE_SIZE; in amdgpu_ttm_map_buffer()
823 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in amdgpu_ttm_tt_pin_userptr()
881 1, &gtt->ttm.dma_address[page_idx], flags); in amdgpu_ttm_gart_bind_gfx9_mqd()
889 &gtt->ttm.dma_address[page_idx + 1], in amdgpu_ttm_gart_bind_gfx9_mqd()
909 gtt->ttm.dma_address, flags); in amdgpu_ttm_gart_bind()
[all …]
amdgpu_vram_mgr.c
737 dma_unmap_resource(dev, sg->dma_address, in amdgpu_vram_mgr_alloc_sgt()
765 dma_unmap_resource(dev, sg->dma_address, in amdgpu_vram_mgr_free_sgt()
amdgpu_gmc.c
118 *addr = bo->tbo.ttm->dma_address[0]; in amdgpu_gmc_get_pde_for_bo()
197 if (bo->ttm->dma_address[0] + PAGE_SIZE >= adev->gmc.agp_size) in amdgpu_gmc_agp_addr()
200 return adev->gmc.agp_start + bo->ttm->dma_address[0]; in amdgpu_gmc_agp_addr()
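
amdgpu_ttm.c and amdgpu_gmc.c index the per-page array by byte offset (offset >> PAGE_SHIFT). A tiny illustration of that indexing; example_dma_for_offset is a made-up helper and assumes the ttm_tt is populated with a system-memory mapping:

#include <linux/mm.h>
#include <linux/types.h>
#include <drm/ttm/ttm_tt.h>

/* Made-up helper: translate a byte offset within a buffer into the DMA
 * address of the backing page plus the offset inside that page.
 */
static dma_addr_t example_dma_for_offset(struct ttm_tt *tt, u64 byte_offset)
{
	return tt->dma_address[byte_offset >> PAGE_SHIFT] +
	       (byte_offset & ~PAGE_MASK);
}
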
/drivers/gpu/drm/ttm/tests/
ttm_tt_test.c
63 KUNIT_ASSERT_NULL(test, tt->dma_address); in ttm_tt_init_basic()
124 KUNIT_ASSERT_NOT_NULL(test, tt->dma_address); in ttm_tt_fini_sg()
127 KUNIT_ASSERT_NULL(test, tt->dma_address); in ttm_tt_fini_sg()
ttm_pool_test.c
227 dma1 = tt->dma_address[0]; in ttm_pool_alloc_basic_dma_addr()
228 dma2 = tt->dma_address[tt->num_pages - 1]; in ttm_pool_alloc_basic_dma_addr()
/drivers/hid/amd-sfh-hid/
amd_sfh_client.c
171 info.dma_address = cl_data->sensor_dma_addr[i]; in amd_sfh_resume()
252 info.dma_address = cl_data->sensor_dma_addr[i]; in amd_sfh_hid_client_init()
293 info.dma_address = cl_data->sensor_dma_addr[i]; in amd_sfh_hid_client_init()
amd_sfh_common.h
40 dma_addr_t dma_address; member
amd_sfh_pcie.c
71 writeq(info.dma_address, privdata->mmio + AMD_C2P_MSG1); in amd_start_sensor_v2()
162 writeq(info.dma_address, privdata->mmio + AMD_C2P_MSG2); in amd_start_sensor()
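
The amd-sfh hits program a sensor buffer's DMA address straight into an MMIO doorbell register with writeq(). A hedged sketch of that pattern with a coherent buffer; EXAMPLE_DOORBELL_OFF and the buffer size are invented for illustration, and freeing the buffer is omitted:

#include <linux/dma-mapping.h>
#include <linux/io.h>

#define EXAMPLE_DOORBELL_OFF	0x10808	/* invented register offset */

/* Hypothetical helper: allocate a coherent buffer and tell the device
 * where it lives by writing the bus address into an MMIO register.
 * 32-bit builds would need lo_hi_writeq() instead of writeq().
 */
static int example_arm_sensor(struct device *dev, void __iomem *mmio)
{
	dma_addr_t dma_address;
	void *cpu_addr;

	cpu_addr = dma_alloc_coherent(dev, PAGE_SIZE, &dma_address, GFP_KERNEL);
	if (!cpu_addr)
		return -ENOMEM;

	writeq(dma_address, mmio + EXAMPLE_DOORBELL_OFF);
	return 0;
}
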
/drivers/mmc/host/
wmt-sdmmc.c
572 u32 dma_address; in wmt_mci_request() local
632 dma_address = priv->dma_desc_device_addr + 16; in wmt_mci_request()
640 dma_address, 0); in wmt_mci_request()
644 dma_address += 16; in wmt_mci_request()
/drivers/dma/
imx-dma.c
271 imx_dmav1_writel(imxdma, sg->dma_address, in imxdma_sg_next()
274 imx_dmav1_writel(imxdma, sg->dma_address, in imxdma_sg_next()
817 if (sg_dma_len(sgl) & 3 || sgl->dma_address & 3) in imxdma_prep_slave_sg()
821 if (sg_dma_len(sgl) & 1 || sgl->dma_address & 1) in imxdma_prep_slave_sg()
878 imxdmac->sg_list[i].dma_address = dma_addr; in imxdma_prep_dma_cyclic()
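
imxdma_prep_slave_sg() above rejects scatterlist entries whose bus address or length is not aligned to the configured bus width. A small sketch of that check; example_sg_aligned is hypothetical and assumes a power-of-two bus width in bytes:

#include <linux/scatterlist.h>

/* Hypothetical check mirroring imxdma_prep_slave_sg(): both the bus
 * address and the length must be multiples of the bus width, e.g.
 * 4 bytes for 32-bit transfers.
 */
static bool example_sg_aligned(struct scatterlist *sg,
			       unsigned int bus_width_bytes)
{
	return !(sg->dma_address & (bus_width_bytes - 1)) &&
	       !(sg_dma_len(sg) & (bus_width_bytes - 1));
}
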
/drivers/atm/
nicstar.h
309 u32 dma_address; member
346 u32 dma_address; member
/drivers/scsi/lpfc/
lpfc_hw4.h
71 struct dma_address { struct
1233 struct dma_address page[LPFC_MAX_EQ_PAGE];
1306 struct dma_address lwpd;
1360 struct dma_address page[LPFC_MAX_CQ_PAGE];
1473 struct dma_address page[1];
1523 struct dma_address page[LPFC_MAX_WQ_PAGE_V0];
1557 struct dma_address page[LPFC_MAX_WQ_PAGE-1];
1681 struct dma_address page[LPFC_MAX_RQ_PAGE];
1733 struct dma_address page[1];
1799 struct dma_address page[LPFC_MAX_MQ_PAGE];
[all …]
/drivers/net/fddi/skfp/
skfddi.c
1114 dma_addr_t dma_address; in send_queued_packets() local
1179 dma_address = dma_map_single(&(&bp->pdev)->dev, skb->data, in send_queued_packets()
1183 txd->txd_os.dma_addr = dma_address; // save dma mapping in send_queued_packets()
1185 hwm_tx_frag(smc, skb->data, dma_address, skb->len, in send_queued_packets()
1189 dma_unmap_single(&(&bp->pdev)->dev, dma_address, in send_queued_packets()
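
send_queued_packets() maps the skb data with dma_map_single(), hands the handle to the hardware, and unmaps it on the error path. A minimal sketch of the mapping side, with the dma_mapping_error() check that streaming mappings require; example_map_skb is not driver code:

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

/* Hypothetical helper: map an skb for device reads and return the bus
 * address through *dma_address; the caller saves it in its descriptor
 * and calls dma_unmap_single() once the hardware is done with it.
 */
static int example_map_skb(struct device *dev, struct sk_buff *skb,
			   dma_addr_t *dma_address)
{
	*dma_address = dma_map_single(dev, skb->data, skb->len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, *dma_address))
		return -ENOMEM;

	return 0;
}
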
/drivers/gpu/drm/nouveau/
nouveau_bo.c
698 if (!ttm_dma || !ttm_dma->dma_address) in nouveau_bo_sync_for_device()
721 ttm_dma->dma_address[i], in nouveau_bo_sync_for_device()
734 if (!ttm_dma || !ttm_dma->dma_address) in nouveau_bo_sync_for_cpu()
757 dma_sync_single_for_cpu(drm->dev->dev, ttm_dma->dma_address[i], in nouveau_bo_sync_for_cpu()
1430 drm_prime_sg_to_dma_addr_array(ttm->sg, ttm_dma->dma_address, in nouveau_ttm_tt_populate()
nouveau_mem.c
114 args.dma = tt->dma_address; in nouveau_mem_host()
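
nouveau_bo.c syncs each page's streaming mapping before and after device access. A sketch of the CPU-side half of that, assuming one dma_address entry per PAGE_SIZE page; example_sync_for_cpu is not nouveau code:

#include <linux/dma-mapping.h>
#include <drm/ttm/ttm_tt.h>

static void example_sync_for_cpu(struct device *dev, struct ttm_tt *tt)
{
	unsigned long i;

	if (!tt->dma_address)	/* nothing to sync for coherent or SG-only TTs */
		return;

	for (i = 0; i < tt->num_pages; i++)
		dma_sync_single_for_cpu(dev, tt->dma_address[i],
					PAGE_SIZE, DMA_BIDIRECTIONAL);
}
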
/drivers/gpu/drm/radeon/
radeon_ttm.c
371 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in radeon_ttm_tt_pin_userptr()
446 ttm->pages, gtt->ttm.dma_address, flags); in radeon_ttm_backend_bind()
545 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in radeon_ttm_tt_populate()
/drivers/gpu/drm/xe/
xe_ttm_vram_mgr.c
419 dma_unmap_resource(dev, sg->dma_address, in xe_ttm_vram_mgr_alloc_sgt()
437 dma_unmap_resource(dev, sg->dma_address, in xe_ttm_vram_mgr_free_sgt()
/drivers/usb/serial/
io_ti.c
91 u16 dma_address; member
545 status = read_ram(port->port->serial->dev, port->dma_address, in tx_active()
2566 edge_port->dma_address = UMPD_OEDB1_ADDRESS; in edge_port_probe()
2570 edge_port->dma_address = UMPD_OEDB2_ADDRESS; in edge_port_probe()
2581 edge_port->dma_address); in edge_port_probe()
/drivers/net/ethernet/xilinx/
xilinx_axienet.h
465 dma_addr_t dma_address; member
/drivers/crypto/
sahara.c
494 dev->hw_link[i]->p = sg->dma_address; in sahara_hw_descriptor_create()
510 dev->hw_link[j]->p = sg->dma_address; in sahara_hw_descriptor_create()
786 dev->hw_link[i]->p = sg->dma_address; in sahara_sha_hw_links_create()
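
sahara.c copies each mapped scatterlist entry's bus address into a hardware link descriptor. The generic shape of that loop is sketched below; struct example_hw_link is invented, and the 32-bit address field assumes hardware that only accepts 32-bit bus addresses:

#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <linux/types.h>

/* Invented descriptor layout for illustration only. */
struct example_hw_link {
	u32 len;	/* length of this chunk in bytes */
	u32 p;		/* 32-bit bus address of the chunk */
};

static void example_fill_links(struct example_hw_link *links,
			       struct scatterlist *sgl, int nents)
{
	struct scatterlist *sg;
	int i;

	for_each_sg(sgl, sg, nents, i) {
		links[i].len = sg_dma_len(sg);
		/* sg_dma_address(sg) is the usual accessor for sg->dma_address */
		links[i].p = lower_32_bits(sg->dma_address);
	}
}
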
