Lines matching refs: node

The listing below shows every reference to the local variable `node` in the gk20a instance-memory (instmem) code of the nouveau driver. Each entry gives the source line number, the code, and the enclosing function; a trailing `local` marks the declaration site.

176 struct gk20a_instobj *node = gk20a_instobj(memory); in gk20a_instobj_acquire_dma() local
177 struct gk20a_instmem *imem = node->imem; in gk20a_instobj_acquire_dma()
182 return node->vaddr; in gk20a_instobj_acquire_dma()
188 struct gk20a_instobj_iommu *node = gk20a_instobj_iommu(memory); in gk20a_instobj_acquire_iommu() local
189 struct gk20a_instmem *imem = node->base.imem; in gk20a_instobj_acquire_iommu()
197 if (node->base.vaddr) { in gk20a_instobj_acquire_iommu()
198 if (!node->use_cpt) { in gk20a_instobj_acquire_iommu()
200 list_del(&node->vaddr_node); in gk20a_instobj_acquire_iommu()
209 node->base.vaddr = vmap(node->pages, size >> PAGE_SHIFT, VM_MAP, in gk20a_instobj_acquire_iommu()
211 if (!node->base.vaddr) { in gk20a_instobj_acquire_iommu()
222 node->use_cpt++; in gk20a_instobj_acquire_iommu()
225 return node->base.vaddr; in gk20a_instobj_acquire_iommu()
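
The two acquire paths above differ in how a CPU mapping is produced: the DMA variant just returns the vaddr obtained at allocation time, while the IOMMU variant lazily vmap()s the object's pages, counts users in use_cpt, and pulls an idle cached mapping off an LRU list before reusing it. A minimal userspace sketch of that IOMMU-side logic follows, with malloc standing in for vmap and a hand-rolled doubly-linked list standing in for the kernel's list_head; all names except vaddr and use_cpt are illustrative.

    #include <stdlib.h>

    /* Illustrative stand-in for the kernel's list_head machinery. */
    struct lru_link { struct lru_link *prev, *next; };

    static void lru_del(struct lru_link *n)
    {
        n->prev->next = n->next;
        n->next->prev = n->prev;
    }

    struct obj {
        void *vaddr;          /* cached CPU mapping, NULL if none    */
        unsigned int use_cpt; /* users currently holding the mapping */
        struct lru_link lru;  /* linked into the LRU only when idle  */
        size_t size;          /* mapping size in bytes               */
    };

    /* Acquire a CPU mapping: reuse the cached one when present
     * (unlinking it from the LRU if it was idle), otherwise create
     * it, then bump the use count. */
    void *obj_acquire(struct obj *o)
    {
        if (o->vaddr) {
            if (!o->use_cpt)
                lru_del(&o->lru);
        } else {
            o->vaddr = malloc(o->size); /* vmap(node->pages, ...) */
            if (!o->vaddr)
                return NULL;
        }
        o->use_cpt++;
        return o->vaddr;
    }
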
231 struct gk20a_instobj *node = gk20a_instobj(memory); in gk20a_instobj_release_dma() local
232 struct gk20a_instmem *imem = node->imem; in gk20a_instobj_release_dma()
243 struct gk20a_instobj_iommu *node = gk20a_instobj_iommu(memory); in gk20a_instobj_release_iommu() local
244 struct gk20a_instmem *imem = node->base.imem; in gk20a_instobj_release_iommu()
250 if (WARN_ON(node->use_cpt == 0)) in gk20a_instobj_release_iommu()
254 if (--node->use_cpt == 0) in gk20a_instobj_release_iommu()
255 list_add_tail(&node->vaddr_node, &imem->vaddr_lru); in gk20a_instobj_release_iommu()
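
Release is the mirror image: the use count is decremented (the kernel WARNs if it would underflow), and when the last user drops the mapping it is parked at the tail of the LRU rather than unmapped, so the next acquire can reuse it cheaply. Continuing the sketch above, with the same illustrative types:

    /* Park an idle mapping at the LRU tail (list_add_tail analogue). */
    static void lru_add_tail(struct lru_link *n, struct lru_link *head)
    {
        n->prev = head->prev;
        n->next = head;
        head->prev->next = n;
        head->prev = n;
    }

    void obj_release(struct obj *o, struct lru_link *lru_head)
    {
        if (o->use_cpt == 0)   /* WARN_ON(node->use_cpt == 0) upstream */
            return;
        if (--o->use_cpt == 0)
            lru_add_tail(&o->lru, lru_head);
    }
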
267 struct gk20a_instobj *node = gk20a_instobj(memory); in gk20a_instobj_rd32() local
269 return node->vaddr[offset / 4]; in gk20a_instobj_rd32()
275 struct gk20a_instobj *node = gk20a_instobj(memory); in gk20a_instobj_wr32() local
277 node->vaddr[offset / 4] = data; in gk20a_instobj_wr32()
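
Both accessors treat the cached vaddr as an array of 32-bit words, so the byte offset handed in by the memory interface is divided by 4 before indexing. A self-contained sketch of that contract:

    #include <stdint.h>

    /* offset is in bytes; the mapping is addressed in 32-bit words. */
    static uint32_t obj_rd32(const uint32_t *vaddr, uint64_t offset)
    {
        return vaddr[offset / 4];
    }

    static void obj_wr32(uint32_t *vaddr, uint64_t offset, uint32_t data)
    {
        vaddr[offset / 4] = data;
    }
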
284 struct gk20a_instobj *node = gk20a_instobj(memory); in gk20a_instobj_map() local
286 .memory = &node->base.memory, in gk20a_instobj_map()
288 .mem = node->mn, in gk20a_instobj_map()
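
The map callback only bundles the object's memory handle and its allocator node (node->mn) into the argument block for the VMM mapping call; the real structure is nouveau's and is not reproduced here. A hedged sketch of the shape, with stand-in types:

    /* Stand-in opaque types; the real definitions live in nouveau. */
    struct memory;
    struct mm_node;

    struct vmm_map_args {
        struct memory  *memory; /* .memory = &node->base.memory */
        struct mm_node *mem;    /* .mem    = node->mn           */
    };

    static struct vmm_map_args make_args(struct memory *m, struct mm_node *mn)
    {
        struct vmm_map_args a = { .memory = m, .mem = mn };
        return a;
    }
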
297 struct gk20a_instobj_dma *node = gk20a_instobj_dma(memory); in gk20a_instobj_dtor_dma() local
298 struct gk20a_instmem *imem = node->base.imem; in gk20a_instobj_dtor_dma()
301 if (unlikely(!node->base.vaddr)) in gk20a_instobj_dtor_dma()
304 dma_free_attrs(dev, (u64)node->base.mn->length << PAGE_SHIFT, in gk20a_instobj_dtor_dma()
305 node->base.vaddr, node->handle, imem->attrs); in gk20a_instobj_dtor_dma()
308 return node; in gk20a_instobj_dtor_dma()
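
The DMA destructor follows a common guard-then-free shape: if vaddr was never set the allocation failed and there is nothing to undo; otherwise the buffer is freed with its size recomputed from the page count (mn->length << PAGE_SHIFT), and the node itself is returned for the caller to free. A userspace sketch, with free() standing in for dma_free_attrs():

    #include <stdlib.h>

    struct dma_obj {
        void *vaddr;           /* NULL if allocation never happened */
        unsigned long length;  /* size in pages, as in mn->length   */
    };

    /* Return the node so the caller can free it, kernel-style. */
    void *dma_obj_dtor(struct dma_obj *o)
    {
        if (!o->vaddr)
            return o;      /* nothing was allocated */
        free(o->vaddr);    /* dma_free_attrs(dev, length << 12, ...) */
        return o;
    }
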
314 struct gk20a_instobj_iommu *node = gk20a_instobj_iommu(memory); in gk20a_instobj_dtor_iommu() local
315 struct gk20a_instmem *imem = node->base.imem; in gk20a_instobj_dtor_iommu()
317 struct nvkm_mm_node *r = node->base.mn; in gk20a_instobj_dtor_iommu()
326 if (node->base.vaddr) in gk20a_instobj_dtor_iommu()
327 gk20a_instobj_iommu_recycle_vaddr(node); in gk20a_instobj_dtor_iommu()
335 for (i = 0; i < node->base.mn->length; i++) { in gk20a_instobj_dtor_iommu()
338 dma_unmap_page(dev, node->dma_addrs[i], PAGE_SIZE, in gk20a_instobj_dtor_iommu()
340 __free_page(node->pages[i]); in gk20a_instobj_dtor_iommu()
349 return node; in gk20a_instobj_dtor_iommu()
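
The IOMMU destructor first recycles any cached vmap()ing, then walks every page of the object, unmapping it from the DMA API and freeing it individually. A sketch of that per-page teardown loop, with free() standing in for __free_page() and the unmap reduced to a comment:

    #include <stdlib.h>

    struct iommu_obj {
        void **pages;                   /* one entry per page      */
        unsigned long long *dma_addrs;  /* matching bus addresses  */
        unsigned long length;           /* page count (mn->length) */
    };

    void iommu_obj_dtor(struct iommu_obj *o)
    {
        for (unsigned long i = 0; i < o->length; i++) {
            /* dma_unmap_page(dev, o->dma_addrs[i], PAGE_SIZE, ...); */
            free(o->pages[i]);
        }
    }
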
386 struct gk20a_instobj_dma *node; in gk20a_instobj_ctor_dma() local
390 if (!(node = kzalloc(sizeof(*node), GFP_KERNEL))) in gk20a_instobj_ctor_dma()
392 *_node = &node->base; in gk20a_instobj_ctor_dma()
394 nvkm_memory_ctor(&gk20a_instobj_func_dma, &node->base.base.memory); in gk20a_instobj_ctor_dma()
395 node->base.base.memory.ptrs = &gk20a_instobj_ptrs; in gk20a_instobj_ctor_dma()
397 node->base.vaddr = dma_alloc_attrs(dev, npages << PAGE_SHIFT, in gk20a_instobj_ctor_dma()
398 &node->handle, GFP_KERNEL, in gk20a_instobj_ctor_dma()
400 if (!node->base.vaddr) { in gk20a_instobj_ctor_dma()
406 if (unlikely(node->handle & (align - 1))) in gk20a_instobj_ctor_dma()
409 &node->handle, align); in gk20a_instobj_ctor_dma()
412 node->r.type = 12; in gk20a_instobj_ctor_dma()
413 node->r.offset = node->handle >> 12; in gk20a_instobj_ctor_dma()
414 node->r.length = (npages << PAGE_SHIFT) >> 12; in gk20a_instobj_ctor_dma()
416 node->base.mn = &node->r; in gk20a_instobj_ctor_dma()
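
The DMA constructor allocates one contiguous buffer, checks that the returned bus address honours the requested alignment, and then hand-builds an mm_node describing the range in 4 KiB units (hence the >> 12 on both offset and length). A userspace approximation, with aligned_alloc() standing in for dma_alloc_attrs() and the pointer value standing in for the dma_addr_t handle; align is assumed to be a power of two:

    #include <stdint.h>
    #include <stdlib.h>

    #define PAGE_SHIFT 12

    struct mm_node { int type; uint64_t offset, length; };

    struct dma_node {
        void *vaddr;
        uint64_t handle;   /* bus address in the kernel, faked here */
        struct mm_node r;  /* hand-built node, as node->r upstream  */
        struct mm_node *mn;
    };

    int dma_node_ctor(struct dma_node *n, uint32_t npages, uint32_t align)
    {
        n->vaddr = aligned_alloc(align, (size_t)npages << PAGE_SHIFT);
        if (!n->vaddr)
            return -1;
        n->handle = (uint64_t)(uintptr_t)n->vaddr;

        if (n->handle & (align - 1))     /* upstream only warns here */
            return -1;

        n->r.type   = 12;                /* 4 KiB pages */
        n->r.offset = n->handle >> 12;
        n->r.length = ((uint64_t)npages << PAGE_SHIFT) >> 12;
        n->mn = &n->r;
        return 0;
    }
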
424 struct gk20a_instobj_iommu *node; in gk20a_instobj_ctor_iommu() local
435 if (!(node = kzalloc(sizeof(*node) + ((sizeof(node->pages[0]) + in gk20a_instobj_ctor_iommu()
436 sizeof(*node->dma_addrs)) * npages), GFP_KERNEL))) in gk20a_instobj_ctor_iommu()
438 *_node = &node->base; in gk20a_instobj_ctor_iommu()
439 node->dma_addrs = (void *)(node->pages + npages); in gk20a_instobj_ctor_iommu()
441 nvkm_memory_ctor(&gk20a_instobj_func_iommu, &node->base.base.memory); in gk20a_instobj_ctor_iommu()
442 node->base.base.memory.ptrs = &gk20a_instobj_ptrs; in gk20a_instobj_ctor_iommu()
453 node->pages[i] = p; in gk20a_instobj_ctor_iommu()
460 node->dma_addrs[i] = dma_adr; in gk20a_instobj_ctor_iommu()
477 ret = iommu_map(imem->domain, offset, node->dma_addrs[i], in gk20a_instobj_ctor_iommu()
494 node->base.mn = r; in gk20a_instobj_ctor_iommu()
503 for (i = 0; i < npages && node->pages[i] != NULL; i++) { in gk20a_instobj_ctor_iommu()
504 dma_addr_t dma_addr = node->dma_addrs[i]; in gk20a_instobj_ctor_iommu()
508 __free_page(node->pages[i]); in gk20a_instobj_ctor_iommu()
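
The IOMMU constructor sizes a single zeroed allocation to hold the struct plus two trailing arrays, then points dma_addrs just past the page-pointer array; that is exactly what node->dma_addrs = (void *)(node->pages + npages) does. On any later failure, the unwind loop frees only the pages allocated so far. A self-contained sketch of the allocation trick:

    #include <stdint.h>
    #include <stdlib.h>

    struct iommu_node {
        uint64_t *dma_addrs; /* carved out of the same allocation  */
        void *pages[];       /* flexible array member, npages long */
    };

    /* One calloc() covers the struct, npages page pointers, and
     * npages DMA addresses laid out back to back. */
    struct iommu_node *iommu_node_alloc(unsigned int npages)
    {
        struct iommu_node *n;

        n = calloc(1, sizeof(*n) + (sizeof(n->pages[0]) +
                                    sizeof(*n->dma_addrs)) * npages);
        if (!n)
            return NULL;
        n->dma_addrs = (void *)(n->pages + npages);
        return n;
    }
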
520 struct gk20a_instobj *node = NULL; in gk20a_instobj_new() local
532 align, &node); in gk20a_instobj_new()
535 align, &node); in gk20a_instobj_new()
536 *pmemory = node ? &node->base.memory : NULL; in gk20a_instobj_new()
540 node->imem = imem; in gk20a_instobj_new()
543 size, align, (u64)node->mn->offset << 12); in gk20a_instobj_new()
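
gk20a_instobj_new() is a thin dispatcher: it picks the IOMMU-backed constructor when an IOMMU domain is available and the contiguous DMA one otherwise, publishes the memory pointer only if a node was produced, stores the back-pointer to the instmem, and logs the resulting GPU address as mn->offset << 12. A sketch of that dispatch shape, with illustrative names and prototypes standing in for the two real constructors:

    #include <stdint.h>

    struct instobj;
    struct instmem { int use_iommu; };

    /* Stand-ins for gk20a_instobj_ctor_dma/_iommu. */
    int instobj_ctor_dma(struct instmem *, uint32_t npages, uint32_t align,
                         struct instobj **);
    int instobj_ctor_iommu(struct instmem *, uint32_t npages, uint32_t align,
                           struct instobj **);

    int instobj_new(struct instmem *imem, uint32_t npages, uint32_t align,
                    struct instobj **pobj)
    {
        *pobj = NULL;   /* published only on successful construction */
        return imem->use_iommu
             ? instobj_ctor_iommu(imem, npages, align, pobj)
             : instobj_ctor_dma(imem, npages, align, pobj);
    }
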