Lines referencing buf in the videobuf2 vmalloc memory backend (videobuf2-vmalloc.c). Each entry gives the source line number, the matching line, and the enclosing function; non-matching lines are omitted, and the lines that declare buf are marked "local".
40 struct vb2_vmalloc_buf *buf; in vb2_vmalloc_alloc() local
42 buf = kzalloc(sizeof(*buf), GFP_KERNEL | vb->vb2_queue->gfp_flags); in vb2_vmalloc_alloc()
43 if (!buf) in vb2_vmalloc_alloc()
46 buf->size = size; in vb2_vmalloc_alloc()
47 buf->vaddr = vmalloc_user(buf->size); in vb2_vmalloc_alloc()
48 if (!buf->vaddr) { in vb2_vmalloc_alloc()
49 pr_debug("vmalloc of size %ld failed\n", buf->size); in vb2_vmalloc_alloc()
50 kfree(buf); in vb2_vmalloc_alloc()
54 buf->dma_dir = vb->vb2_queue->dma_dir; in vb2_vmalloc_alloc()
55 buf->handler.refcount = &buf->refcount; in vb2_vmalloc_alloc()
56 buf->handler.put = vb2_vmalloc_put; in vb2_vmalloc_alloc()
57 buf->handler.arg = buf; in vb2_vmalloc_alloc()
59 refcount_set(&buf->refcount, 1); in vb2_vmalloc_alloc()
60 return buf; in vb2_vmalloc_alloc()
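
The references above already outline the per-buffer state. A minimal sketch of struct vb2_vmalloc_buf, reconstructed only from the members this listing touches (the in-tree definition may order or extend these fields differently):

/* Per-buffer bookkeeping implied by the references in this listing. */
struct vb2_vmalloc_buf {
        void                      *vaddr;    /* kernel mapping: vmalloc_user(), vm_map_ram() or ioremap() */
        struct frame_vector       *vec;      /* pinned user pages for USERPTR buffers */
        enum dma_data_direction   dma_dir;
        unsigned long             size;
        refcount_t                refcount;
        struct vb2_vmarea_handler handler;   /* refcount/put hook handed to the mmap vm_ops */
        struct dma_buf            *dbuf;     /* imported dma-buf, if any */
};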
65 struct vb2_vmalloc_buf *buf = buf_priv; in vb2_vmalloc_put() local
67 if (refcount_dec_and_test(&buf->refcount)) { in vb2_vmalloc_put()
68 vfree(buf->vaddr); in vb2_vmalloc_put()
69 kfree(buf); in vb2_vmalloc_put()
76 struct vb2_vmalloc_buf *buf; in vb2_vmalloc_get_userptr() local
81 buf = kzalloc(sizeof(*buf), GFP_KERNEL); in vb2_vmalloc_get_userptr()
82 if (!buf) in vb2_vmalloc_get_userptr()
85 buf->dma_dir = vb->vb2_queue->dma_dir; in vb2_vmalloc_get_userptr()
87 buf->size = size; in vb2_vmalloc_get_userptr()
89 buf->dma_dir == DMA_FROM_DEVICE || in vb2_vmalloc_get_userptr()
90 buf->dma_dir == DMA_BIDIRECTIONAL); in vb2_vmalloc_get_userptr()
95 buf->vec = vec; in vb2_vmalloc_get_userptr()
107 buf->vaddr = (__force void *) in vb2_vmalloc_get_userptr()
110 buf->vaddr = vm_map_ram(frame_vector_pages(vec), n_pages, -1); in vb2_vmalloc_get_userptr()
113 if (!buf->vaddr) in vb2_vmalloc_get_userptr()
115 buf->vaddr += offset; in vb2_vmalloc_get_userptr()
116 return buf; in vb2_vmalloc_get_userptr()
121 kfree(buf); in vb2_vmalloc_get_userptr()
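
The two mapping calls at listing lines 107 and 110 are the arms of a branch on whether the pinned frame vector holds raw PFNs or real struct pages. A sketch of that branch, assuming the frame_vector helpers and omitting the contiguity and error checks the real code performs:

        if (vec->is_pfns) {
                unsigned long *pfns = frame_vector_pfns(vec);

                /* No struct pages (e.g. a VM_PFNMAP mapping): map the physical range. */
                buf->vaddr = (__force void *)
                        ioremap(__pfn_to_phys(pfns[0]), size + offset);
        } else {
                /* Ordinary pinned pages: build one contiguous kernel mapping. */
                buf->vaddr = vm_map_ram(frame_vector_pages(vec), n_pages, -1);
        }

Either way buf->vaddr is then advanced by the sub-page offset of the user pointer (listing line 115), which is why the teardown path below masks it back with PAGE_MASK.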
128 struct vb2_vmalloc_buf *buf = buf_priv; in vb2_vmalloc_put_userptr() local
129 unsigned long vaddr = (unsigned long)buf->vaddr & PAGE_MASK; in vb2_vmalloc_put_userptr()
134 if (!buf->vec->is_pfns) { in vb2_vmalloc_put_userptr()
135 n_pages = frame_vector_count(buf->vec); in vb2_vmalloc_put_userptr()
138 if (buf->dma_dir == DMA_FROM_DEVICE || in vb2_vmalloc_put_userptr()
139 buf->dma_dir == DMA_BIDIRECTIONAL) { in vb2_vmalloc_put_userptr()
140 pages = frame_vector_pages(buf->vec); in vb2_vmalloc_put_userptr()
146 iounmap((__force void __iomem *)buf->vaddr); in vb2_vmalloc_put_userptr()
148 vb2_destroy_framevec(buf->vec); in vb2_vmalloc_put_userptr()
149 kfree(buf); in vb2_vmalloc_put_userptr()
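
Teardown mirrors the two mapping flavours. A sketch of the branch around listing lines 134–146, assuming the same frame_vector helpers; pages the device may have written to (DMA_FROM_DEVICE / DMA_BIDIRECTIONAL) are dirtied before the vector is released:

        if (!buf->vec->is_pfns) {
                unsigned int n_pages = frame_vector_count(buf->vec);

                if (vaddr)
                        vm_unmap_ram((void *)vaddr, n_pages);   /* undo vm_map_ram() */

                if (buf->dma_dir == DMA_FROM_DEVICE ||
                    buf->dma_dir == DMA_BIDIRECTIONAL) {
                        struct page **pages = frame_vector_pages(buf->vec);
                        unsigned int i;

                        for (i = 0; i < n_pages; i++)
                                set_page_dirty_lock(pages[i]);  /* device wrote into these pages */
                }
        } else {
                iounmap((__force void __iomem *)buf->vaddr);    /* undo ioremap() */
        }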
154 struct vb2_vmalloc_buf *buf = buf_priv; in vb2_vmalloc_vaddr() local
156 if (!buf->vaddr) { in vb2_vmalloc_vaddr()
161 return buf->vaddr; in vb2_vmalloc_vaddr()
166 struct vb2_vmalloc_buf *buf = buf_priv; in vb2_vmalloc_num_users() local
167 return refcount_read(&buf->refcount); in vb2_vmalloc_num_users()
172 struct vb2_vmalloc_buf *buf = buf_priv; in vb2_vmalloc_mmap() local
175 if (!buf) { in vb2_vmalloc_mmap()
180 ret = remap_vmalloc_range(vma, buf->vaddr, 0); in vb2_vmalloc_mmap()
194 vma->vm_private_data = &buf->handler; in vb2_vmalloc_mmap()
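
remap_vmalloc_range() at listing line 180 inserts the whole vmalloc area into the vma in one go; what follows is mostly refcount plumbing. A sketch of the tail of the function, assuming the shared vb2_common_vm_ops helpers from videobuf2-memops:

        ret = remap_vmalloc_range(vma, buf->vaddr, 0);
        if (ret) {
                pr_err("Remapping vmalloc memory, error: %d\n", ret);
                return ret;
        }

        /* Let the common vm_ops adjust the buffer refcount on fork and close. */
        vma->vm_private_data = &buf->handler;
        vma->vm_ops = &vb2_common_vm_ops;
        vma->vm_ops->open(vma);         /* takes the reference for this mapping */

        return 0;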
216 struct vb2_vmalloc_buf *buf = dbuf->priv; in vb2_vmalloc_dmabuf_ops_attach() local
217 int num_pages = PAGE_ALIGN(buf->size) / PAGE_SIZE; in vb2_vmalloc_dmabuf_ops_attach()
220 void *vaddr = buf->vaddr; in vb2_vmalloc_dmabuf_ops_attach()
313 struct vb2_vmalloc_buf *buf = dbuf->priv; in vb2_vmalloc_dmabuf_ops_vmap() local
315 iosys_map_set_vaddr(map, buf->vaddr); in vb2_vmalloc_dmabuf_ops_vmap()
340 struct vb2_vmalloc_buf *buf = buf_priv; in vb2_vmalloc_get_dmabuf() local
345 exp_info.size = buf->size; in vb2_vmalloc_get_dmabuf()
347 exp_info.priv = buf; in vb2_vmalloc_get_dmabuf()
349 if (WARN_ON(!buf->vaddr)) in vb2_vmalloc_get_dmabuf()
357 refcount_inc(&buf->refcount); in vb2_vmalloc_get_dmabuf()
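
The exporter side builds a dma_buf_export_info and only pins the buffer once the export has succeeded, so a failed export leaves the refcount untouched. A sketch of how the lines above fit together, assuming the file's vb2_vmalloc_dmabuf_ops table and the flags argument of the get_dmabuf memop:

        struct dma_buf *dbuf;
        DEFINE_DMA_BUF_EXPORT_INFO(exp_info);

        exp_info.ops = &vb2_vmalloc_dmabuf_ops;
        exp_info.size = buf->size;
        exp_info.flags = flags;
        exp_info.priv = buf;

        if (WARN_ON(!buf->vaddr))
                return NULL;

        dbuf = dma_buf_export(&exp_info);
        if (IS_ERR(dbuf))
                return NULL;

        /* The dma-buf now holds its own reference on the vmalloc buffer. */
        refcount_inc(&buf->refcount);

        return dbuf;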
370 struct vb2_vmalloc_buf *buf = mem_priv; in vb2_vmalloc_map_dmabuf() local
374 ret = dma_buf_vmap_unlocked(buf->dbuf, &map); in vb2_vmalloc_map_dmabuf()
377 buf->vaddr = map.vaddr; in vb2_vmalloc_map_dmabuf()
384 struct vb2_vmalloc_buf *buf = mem_priv; in vb2_vmalloc_unmap_dmabuf() local
385 struct iosys_map map = IOSYS_MAP_INIT_VADDR(buf->vaddr); in vb2_vmalloc_unmap_dmabuf()
387 dma_buf_vunmap_unlocked(buf->dbuf, &map); in vb2_vmalloc_unmap_dmabuf()
388 buf->vaddr = NULL; in vb2_vmalloc_unmap_dmabuf()
393 struct vb2_vmalloc_buf *buf = mem_priv; in vb2_vmalloc_detach_dmabuf() local
394 struct iosys_map map = IOSYS_MAP_INIT_VADDR(buf->vaddr); in vb2_vmalloc_detach_dmabuf()
396 if (buf->vaddr) in vb2_vmalloc_detach_dmabuf()
397 dma_buf_vunmap_unlocked(buf->dbuf, &map); in vb2_vmalloc_detach_dmabuf()
399 kfree(buf); in vb2_vmalloc_detach_dmabuf()
407 struct vb2_vmalloc_buf *buf; in vb2_vmalloc_attach_dmabuf() local
412 buf = kzalloc(sizeof(*buf), GFP_KERNEL); in vb2_vmalloc_attach_dmabuf()
413 if (!buf) in vb2_vmalloc_attach_dmabuf()
416 buf->dbuf = dbuf; in vb2_vmalloc_attach_dmabuf()
417 buf->dma_dir = vb->vb2_queue->dma_dir; in vb2_vmalloc_attach_dmabuf()
418 buf->size = size; in vb2_vmalloc_attach_dmabuf()
420 return buf; in vb2_vmalloc_attach_dmabuf()
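
Every function in this listing is wired into the vb2 core through a single vb2_mem_ops table. Roughly, the exported table looks like the sketch below (member names from struct vb2_mem_ops; the exact set can differ between kernel versions):

const struct vb2_mem_ops vb2_vmalloc_memops = {
        .alloc          = vb2_vmalloc_alloc,
        .put            = vb2_vmalloc_put,
        .get_userptr    = vb2_vmalloc_get_userptr,
        .put_userptr    = vb2_vmalloc_put_userptr,
        .get_dmabuf     = vb2_vmalloc_get_dmabuf,
        .map_dmabuf     = vb2_vmalloc_map_dmabuf,
        .unmap_dmabuf   = vb2_vmalloc_unmap_dmabuf,
        .attach_dmabuf  = vb2_vmalloc_attach_dmabuf,
        .detach_dmabuf  = vb2_vmalloc_detach_dmabuf,
        .vaddr          = vb2_vmalloc_vaddr,
        .mmap           = vb2_vmalloc_mmap,
        .num_users      = vb2_vmalloc_num_users,
};
EXPORT_SYMBOL_GPL(vb2_vmalloc_memops);

A driver opts into this backend by pointing its queue at the table (q->mem_ops = &vb2_vmalloc_memops) before calling vb2_queue_init().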