Lines Matching refs:dma_coherent

672 void *dma_coherent; in arcmsr_alloc_io_queue() local
679 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size, in arcmsr_alloc_io_queue()
681 if (!dma_coherent) { in arcmsr_alloc_io_queue()
686 acb->dma_coherent2 = dma_coherent; in arcmsr_alloc_io_queue()
687 acb->pmuB = (struct MessageUnit_B *)dma_coherent; in arcmsr_alloc_io_queue()
693 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size, in arcmsr_alloc_io_queue()
695 if (!dma_coherent) { in arcmsr_alloc_io_queue()
700 acb->dma_coherent2 = dma_coherent; in arcmsr_alloc_io_queue()
701 acb->pmuD = (struct MessageUnit_D *)dma_coherent; in arcmsr_alloc_io_queue()
709 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size, in arcmsr_alloc_io_queue()
711 if (!dma_coherent){ in arcmsr_alloc_io_queue()
716 acb->dma_coherent2 = dma_coherent; in arcmsr_alloc_io_queue()
717 acb->pCompletionQ = dma_coherent; in arcmsr_alloc_io_queue()
730 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size, in arcmsr_alloc_io_queue()
732 if (!dma_coherent) { in arcmsr_alloc_io_queue()
737 acb->dma_coherent2 = dma_coherent; in arcmsr_alloc_io_queue()
738 acb->pCompletionQ = dma_coherent; in arcmsr_alloc_io_queue()
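
The hits above repeat one pattern per adapter type: a single dma_alloc_coherent() call of acb->ioqueue_size bytes, a NULL check, and the returned CPU pointer stored both as dma_coherent2 and as the type-specific message-unit or completion-queue pointer (pmuB, pmuD, pCompletionQ). Below is a minimal sketch of that pattern using a stand-in context struct; the real fields live in struct AdapterControlBlock in arcmsr.h, and the dma_coherent_handle2 field name is an assumption not visible in these matches.

#include <linux/dma-mapping.h>
#include <linux/gfp.h>
#include <linux/pci.h>
#include <linux/types.h>

/* Stand-in for the AdapterControlBlock fields that the matches show. */
struct io_queue_ctx {
	u32 ioqueue_size;                /* size of the shared I/O queue area      */
	void *dma_coherent2;             /* CPU address, kept for the free path    */
	dma_addr_t dma_coherent_handle2; /* bus address (assumed field name)       */
	void *pCompletionQ;              /* same buffer, viewed as the HW queue    */
};

static int sketch_alloc_io_queue(struct pci_dev *pdev, struct io_queue_ctx *acb)
{
	dma_addr_t dma_coherent_handle;
	void *dma_coherent;

	/* One coherent allocation per adapter, sized earlier at probe time. */
	dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size,
					  &dma_coherent_handle, GFP_KERNEL);
	if (!dma_coherent)
		return -ENOMEM;

	/* Keep both views: the CPU pointer for freeing, the bus address for the HW. */
	acb->dma_coherent2 = dma_coherent;
	acb->dma_coherent_handle2 = dma_coherent_handle;
	acb->pCompletionQ = dma_coherent;
	return 0;
}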
753 void *dma_coherent; in arcmsr_alloc_ccb_pool() local
780 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->uncache_size, &dma_coherent_handle, GFP_KERNEL); in arcmsr_alloc_ccb_pool()
781 if(!dma_coherent){ in arcmsr_alloc_ccb_pool()
785 acb->dma_coherent = dma_coherent; in arcmsr_alloc_ccb_pool()
787 memset(dma_coherent, 0, acb->uncache_size); in arcmsr_alloc_ccb_pool()
789 ccb_tmp = dma_coherent; in arcmsr_alloc_ccb_pool()
791 acb->vir2phy_offset = (unsigned long)dma_coherent - (unsigned long)dma_coherent_handle; in arcmsr_alloc_ccb_pool()
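
The CCB pool in arcmsr_alloc_ccb_pool() is allocated the same way; the interesting match is line 791, which records the constant offset between the kernel virtual address and the bus address so that per-command pointers can later be translated with simple arithmetic. A hedged sketch of that step follows; field names beyond those shown in the matches are assumptions.

#include <linux/dma-mapping.h>
#include <linux/gfp.h>
#include <linux/pci.h>
#include <linux/string.h>
#include <linux/types.h>

/* Stand-in for the CCB-pool related fields of struct AdapterControlBlock. */
struct ccb_pool_ctx {
	u32 uncache_size;               /* total pool size computed earlier  */
	void *dma_coherent;             /* CPU address of the pool           */
	dma_addr_t dma_coherent_handle; /* bus address of the pool           */
	unsigned long vir2phy_offset;   /* CPU-to-bus translation offset     */
};

static int sketch_alloc_ccb_pool(struct pci_dev *pdev, struct ccb_pool_ctx *acb)
{
	dma_addr_t dma_coherent_handle;
	void *dma_coherent;

	dma_coherent = dma_alloc_coherent(&pdev->dev, acb->uncache_size,
					  &dma_coherent_handle, GFP_KERNEL);
	if (!dma_coherent)
		return -ENOMEM;

	acb->dma_coherent = dma_coherent;
	acb->dma_coherent_handle = dma_coherent_handle;
	memset(dma_coherent, 0, acb->uncache_size);

	/*
	 * Constant offset between the CPU view and the device view of the
	 * pool: adding it to a bus address (or subtracting it from a CPU
	 * pointer) converts between the two for any CCB inside the pool.
	 */
	acb->vir2phy_offset = (unsigned long)dma_coherent -
			      (unsigned long)dma_coherent_handle;
	return 0;
}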
2030 dma_free_coherent(&acb->pdev->dev, acb->uncache_size, acb->dma_coherent, acb->dma_coherent_handle); in arcmsr_free_ccb_pool()
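
The single match in arcmsr_free_ccb_pool() is the matching release: dma_free_coherent() must receive exactly the size, CPU address, and bus handle recorded at allocation time. A minimal sketch, reusing the stand-in struct from the previous example:

static void sketch_free_ccb_pool(struct pci_dev *pdev, struct ccb_pool_ctx *acb)
{
	/* Size, CPU pointer and bus handle must match the original allocation. */
	dma_free_coherent(&pdev->dev, acb->uncache_size,
			  acb->dma_coherent, acb->dma_coherent_handle);
	acb->dma_coherent = NULL;
}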