
Searched refs:pt (Results 1 – 25 of 136) sorted by relevance

/drivers/dma/amd/ptdma/
ptdma-pci.c
38 struct pt_device *pt; in pt_alloc_struct() local
40 pt = devm_kzalloc(dev, sizeof(*pt), GFP_KERNEL); in pt_alloc_struct()
42 if (!pt) in pt_alloc_struct()
44 pt->dev = dev; in pt_alloc_struct()
48 return pt; in pt_alloc_struct()
118 pt->pt_irq = 0; in pt_free_irqs()
131 if (!pt) in pt_pci_probe()
166 pt->io_regs = iomap_table[pt->dev_vdata->bar]; in pt_pci_probe()
191 if (pt->dev_vdata) in pt_pci_probe()
210 if (!pt) in pt_pci_remove()
[all …]
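
The ptdma-pci.c matches above trace the device-managed allocation pattern: devm_kzalloc() ties the structure's lifetime to the device, so no explicit kfree() appears on the error or remove paths. A minimal sketch of that shape, with struct my_dev standing in for struct pt_device:

#include <linux/device.h>
#include <linux/slab.h>

struct my_dev {
	struct device *dev;
};

static struct my_dev *my_alloc_struct(struct device *dev)
{
	struct my_dev *md;

	md = devm_kzalloc(dev, sizeof(*md), GFP_KERNEL);	/* zeroed, device-managed */
	if (!md)
		return NULL;	/* caller maps NULL to -ENOMEM */

	md->dev = dev;
	return md;	/* freed automatically when the device is unbound */
}
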
ptdma-dmaengine.c
92 struct pt_device *pt = desc->pt; in pt_do_cleanup() local
171 pt = pt_cmd->pt; in pt_dma_start_desc()
204 pt = chan->pt; in pt_handle_active_desc()
276 pt = chan->pt; in pt_cmd_callback()
328 desc->pt = chan->pt; in pt_alloc_dma_desc()
388 struct pt_device *pt = chan->pt; in pt_create_desc() local
399 pt_cmd->pt = pt; in pt_create_desc()
456 pt = chan->pt; in pt_issue_pending()
489 struct pt_device *pt = chan->pt; in pt_tx_status() local
628 chan->pt = pt; in pt_dmaengine_register()
[all …]
ptdma-dev.c
166 struct pt_device *pt = data; in pt_core_irq_handler() local
170 pt->total_interrupts++; in pt_core_irq_handler()
181 struct device *dev = pt->dev; in pt_core_init()
201 cmd_q->pt = pt; in pt_core_init()
234 ret = request_irq(pt->pt_irq, pt_core_irq_handler, 0, dev_name(pt->dev), pt); in pt_core_init()
261 ptdma_debugfs_setup(pt); in pt_core_init()
266 free_irq(pt->pt_irq, pt); in pt_core_init()
279 struct device *dev = pt->dev; in pt_core_destroy()
284 pt_dmaengine_unregister(pt); in pt_core_destroy()
297 free_irq(pt->pt_irq, pt); in pt_core_destroy()
[all …]
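
pt_core_init() and pt_core_destroy() above show the request_irq()/free_irq() pairing, with the same dev_id cookie (pt) handed back to the handler and required again on release. A hedged sketch of that lifecycle; my_* names are illustrative stand-ins:

#include <linux/device.h>
#include <linux/interrupt.h>

struct my_dev {
	struct device *dev;
	unsigned int irq;
	unsigned long total_interrupts;
};

static irqreturn_t my_irq_handler(int irq, void *data)
{
	struct my_dev *md = data;	/* the dev_id passed to request_irq() */

	md->total_interrupts++;
	return IRQ_HANDLED;
}

static int my_core_init(struct my_dev *md)
{
	/* dev_id (md) must match the later free_irq() call exactly */
	return request_irq(md->irq, my_irq_handler, 0, dev_name(md->dev), md);
}

static void my_core_destroy(struct my_dev *md)
{
	free_irq(md->irq, md);	/* mirrors the error path in pt_core_init() */
}
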
ptdma-debugfs.c
26 struct pt_device *pt = s->private; in pt_debugfs_info_show() local
32 if (pt->ver == AE4_DMA_VERSION) { in pt_debugfs_info_show()
33 ae4 = container_of(pt, struct ae4_device, pt); in pt_debugfs_info_show()
57 struct pt_device *pt = s->private; in pt_debugfs_stats_show() local
67 struct pt_device *pt; in pt_debugfs_queue_show() local
75 pt = cmd_q->pt; in pt_debugfs_queue_show()
76 if (pt->ver == AE4_DMA_VERSION) { in pt_debugfs_queue_show()
113 debugfs_create_file("info", 0400, pt->dma_dev.dbg_dev_root, pt, in ptdma_debugfs_setup()
116 debugfs_create_file("stats", 0400, pt->dma_dev.dbg_dev_root, pt, in ptdma_debugfs_setup()
121 ae4 = container_of(pt, struct ae4_device, pt); in ptdma_debugfs_setup()
[all …]
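
ptdma-debugfs.c registers seq_file-backed read-only files and recovers the device pointer through s->private; the AE4 variant then climbs to its container with container_of(). A minimal sketch of the registration half, again with a my_dev stand-in:

#include <linux/debugfs.h>
#include <linux/device.h>
#include <linux/seq_file.h>

struct my_dev {
	struct device *dev;
};

static int my_info_show(struct seq_file *s, void *unused)
{
	struct my_dev *md = s->private;	/* data pointer from create_file() */

	seq_printf(s, "device: %s\n", dev_name(md->dev));
	return 0;
}
DEFINE_SHOW_ATTRIBUTE(my_info);	/* generates my_info_fops */

static void my_debugfs_setup(struct my_dev *md, struct dentry *root)
{
	debugfs_create_file("info", 0400, root, md, &my_info_fops);
}
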
ptdma.h
165 struct pt_device *pt; member
177 struct pt_device *pt; member
186 struct pt_device *pt; member
191 struct pt_device *pt; member
315 int pt_dmaengine_register(struct pt_device *pt);
316 void pt_dmaengine_unregister(struct pt_device *pt);
318 void ptdma_debugfs_setup(struct pt_device *pt);
319 int pt_core_init(struct pt_device *pt);
320 void pt_core_destroy(struct pt_device *pt);
331 iowrite32(0, pt->cmd_q.reg_control + 0x000C); in pt_core_disable_queue_interrupts()
[all …]
/drivers/gpu/drm/i915/gt/
intel_ppgtt.c
20 pt = kmalloc(sizeof(*pt), I915_GFP_ALLOW_FAIL); in alloc_pt()
26 kfree(pt); in alloc_pt()
32 return pt; in alloc_pt()
77 container_of(pt, typeof(*pd), pt); in free_px()
252 pt->stash = stash->pt[0]; in i915_vm_alloc_pt_stash()
253 stash->pt[0] = pt; in i915_vm_alloc_pt_stash()
268 pd->pt.stash = stash->pt[1]; in i915_vm_alloc_pt_stash()
269 stash->pt[1] = &pd->pt; in i915_vm_alloc_pt_stash()
283 for (pt = stash->pt[n]; pt; pt = pt->stash) { in i915_vm_map_pt_stash()
300 while ((pt = stash->pt[n])) { in i915_vm_free_pt_stash()
[all …]
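
The stash in i915_vm_alloc_pt_stash()/i915_vm_free_pt_stash() is an intrusive singly linked list threaded through each table's ->stash pointer, with stash->pt[0] holding page tables and stash->pt[1] page directories. A generic sketch of the push/pop the matches show:

struct my_pt {
	struct my_pt *stash;	/* next pointer threaded through the object */
};

struct my_pt_stash {
	struct my_pt *pt[2];	/* [0] page tables, [1] page directories */
};

static void stash_push(struct my_pt_stash *stash, struct my_pt *pt, int n)
{
	pt->stash = stash->pt[n];
	stash->pt[n] = pt;
}

static struct my_pt *stash_pop(struct my_pt_stash *stash, int n)
{
	struct my_pt *pt = stash->pt[n];

	if (pt)
		stash->pt[n] = pt->stash;
	return pt;
}
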
gen6_ppgtt.c
23 dma_addr_t addr = pt ? px_dma(pt) : px_dma(ppgtt->base.vm.scratch[1]); in gen6_write_pde()
103 vaddr = px_vaddr(pt); in gen6_ppgtt_clear_range()
153 struct i915_page_table *pt; in gen6_flush_pd() local
187 if (!pt) { in gen6_alloc_va_range()
190 pt = stash->pt[0]; in gen6_alloc_va_range()
197 stash->pt[0] = pt->stash; in gen6_alloc_va_range()
199 pd->entry[pde] = pt; in gen6_alloc_va_range()
201 pt = pd->entry[pde]; in gen6_alloc_va_range()
263 if (pt) in gen6_ppgtt_free_pd()
309 if (!pt || atomic_read(&pt->used)) in pd_vma_unbind()
[all …]
gen8_ppgtt.c
284 if (pt->is_compact) { in __gen8_ppgtt_clear()
291 vaddr = px_vaddr(pt); in __gen8_ppgtt_clear()
301 free_px(vm, pt, lvl); in __gen8_ppgtt_clear()
342 if (!pt) { in __gen8_ppgtt_alloc()
348 pt = stash->pt[!!lvl]; in __gen8_ppgtt_alloc()
355 stash->pt[!!lvl] = pt->stash; in __gen8_ppgtt_alloc()
359 pt = pd->entry[idx]; in __gen8_ppgtt_alloc()
429 fn(vm, pt, data); in __gen8_ppgtt_foreach()
779 vaddr = px_vaddr(pt); in gen8_ppgtt_insert_entry()
809 vaddr = px_vaddr(pt); in xehp_ppgtt_insert_entry_lm()
[all …]
/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
base.c
44 const int slot = pt->base >> pt->ptp->shift; in nvkm_mmu_ptp_put()
61 kfree(pt); in nvkm_mmu_ptp_put()
71 if (!(pt = kzalloc(sizeof(*pt), GFP_KERNEL))) in nvkm_mmu_ptp_get()
106 pt->memory = pt->ptp->pt->memory; in nvkm_mmu_ptp_get()
108 pt->addr = pt->ptp->pt->addr + pt->base; in nvkm_mmu_ptp_get()
109 return pt; in nvkm_mmu_ptp_get()
144 if (pt) { in nvkm_mmu_ptc_put()
156 list_add_tail(&pt->head, &pt->ptc->item); in nvkm_mmu_ptc_put()
191 if (pt) { in nvkm_mmu_ptc_get()
202 if (!(pt = kmalloc(sizeof(*pt), GFP_KERNEL))) in nvkm_mmu_ptc_get()
[all …]
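
nvkm_mmu_ptc_get()/nvkm_mmu_ptc_put() above behave as a free-list cache: reuse a queued entry when one exists, otherwise fall back to a fresh allocation. A sketch of that shape (locking elided; my_* names are stand-ins):

#include <linux/list.h>
#include <linux/slab.h>

struct my_pt {
	struct list_head head;
};

struct my_ptc {
	struct list_head item;	/* cached, currently unused entries */
};

static struct my_pt *my_ptc_get(struct my_ptc *ptc)
{
	struct my_pt *pt;

	pt = list_first_entry_or_null(&ptc->item, struct my_pt, head);
	if (pt) {
		list_del(&pt->head);	/* cache hit: recycle */
		return pt;
	}
	return kzalloc(sizeof(*pt), GFP_KERNEL);	/* cache miss */
}

static void my_ptc_put(struct my_ptc *ptc, struct my_pt *pt)
{
	list_add_tail(&pt->head, &ptc->item);	/* as in nvkm_mmu_ptc_put() */
}
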
vmmnv44.c
33 tmp[0] = nvkm_ro32(pt->memory, pteo + 0x0); in nv44_vmm_pgt_fill()
34 tmp[1] = nvkm_ro32(pt->memory, pteo + 0x4); in nv44_vmm_pgt_fill()
35 tmp[2] = nvkm_ro32(pt->memory, pteo + 0x8); in nv44_vmm_pgt_fill()
36 tmp[3] = nvkm_ro32(pt->memory, pteo + 0xc); in nv44_vmm_pgt_fill()
66 VMM_WO032(pt, vmm, pteo + 0x0, tmp[0]); in nv44_vmm_pgt_fill()
67 VMM_WO032(pt, vmm, pteo + 0x4, tmp[1]); in nv44_vmm_pgt_fill()
68 VMM_WO032(pt, vmm, pteo + 0x8, tmp[2]); in nv44_vmm_pgt_fill()
116 nvkm_kmap(pt->memory); in nv44_vmm_pgt_dma()
140 nvkm_done(pt->memory); in nv44_vmm_pgt_dma()
150 nvkm_kmap(pt->memory); in nv44_vmm_pgt_unmap()
[all …]
vmmgp100.c
40 nvkm_kmap(pt->memory); in gp100_vmm_pfn_unmap()
42 u32 datalo = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 0); in gp100_vmm_pfn_unmap()
43 u32 datahi = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 4); in gp100_vmm_pfn_unmap()
61 u32 datalo = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 0); in gp100_vmm_pfn_clear()
62 u32 datahi = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 4); in gp100_vmm_pfn_clear()
242 if (pgt->pt[0] && !gp100_vmm_pde(pgt->pt[0], &data[0])) in gp100_vmm_pd0_pde()
244 if (pgt->pt[1] && !gp100_vmm_pde(pgt->pt[1], &data[1])) in gp100_vmm_pd0_pde()
276 u32 datalo = nvkm_ro32(pt->memory, pt->base + ptei * 16 + 0); in gp100_vmm_pd0_pfn_unmap()
277 u32 datahi = nvkm_ro32(pt->memory, pt->base + ptei * 16 + 4); in gp100_vmm_pd0_pfn_unmap()
297 u32 datalo = nvkm_ro32(pt->memory, pt->base + ptei * 16 + 0); in gp100_vmm_pd0_pfn_clear()
[all …]
vmmgh100.c
38 nvkm_kmap(pt->memory); in gh100_vmm_pgt_dma()
44 nvkm_done(pt->memory); in gh100_vmm_pgt_dma()
60 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gh100_vmm_pgt_sparse() argument
113 gh100_vmm_pde(struct nvkm_mmu_pt *pt, u64 *data) in gh100_vmm_pde() argument
115 switch (nvkm_memory_target(pt->memory)) { in gh100_vmm_pde()
133 *data |= pt->addr; in gh100_vmm_pde()
141 struct nvkm_mmu_pt *pd = pgd->pt[0]; in gh100_vmm_pd0_pde()
144 if (pgt->pt[0] && !gh100_vmm_pde(pgt->pt[0], &data[0])) in gh100_vmm_pd0_pde()
146 if (pgt->pt[1] && !gh100_vmm_pde(pgt->pt[1], &data[1])) in gh100_vmm_pd0_pde()
182 struct nvkm_mmu_pt *pd = pgd->pt[0]; in gh100_vmm_pd1_pde()
[all …]
vmmgf100.c
44 VMM_WO064(pt, vmm, ptei++ * 8, data); in gf100_vmm_pgt_pte()
51 VMM_WO064(pt, vmm, ptei++ * 8, data); in gf100_vmm_pgt_pte()
70 nvkm_kmap(pt->memory); in gf100_vmm_pgt_dma()
73 VMM_WO064(pt, vmm, ptei++ * 8, data); in gf100_vmm_pgt_dma()
76 nvkm_done(pt->memory); in gf100_vmm_pgt_dma()
109 struct nvkm_mmu_pt *pd = pgd->pt[0]; in gf100_vmm_pgd_pde()
110 struct nvkm_mmu_pt *pt; in gf100_vmm_pgd_pde() local
113 if ((pt = pgt->pt[0])) { in gf100_vmm_pgd_pde()
124 data |= pt->addr >> 8; in gf100_vmm_pgd_pde()
127 if ((pt = pgt->pt[1])) { in gf100_vmm_pgd_pde()
[all …]
vmmnv41.c
27 nv41_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv41_vmm_pgt_pte() argument
32 VMM_WO032(pt, vmm, ptei++ * 4, data); in nv41_vmm_pgt_pte()
38 nv41_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv41_vmm_pgt_sgl() argument
41 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_sgl()
45 nv41_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv41_vmm_pgt_dma() argument
49 nvkm_kmap(pt->memory); in nv41_vmm_pgt_dma()
52 VMM_WO032(pt, vmm, ptei++ * 4, data); in nv41_vmm_pgt_dma()
54 nvkm_done(pt->memory); in nv41_vmm_pgt_dma()
56 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_dma()
62 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in nv41_vmm_pgt_unmap() argument
[all …]
vmmnv04.c
28 nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv04_vmm_pgt_pte() argument
33 VMM_WO032(pt, vmm, 8 + ptei++ * 4, data); in nv04_vmm_pgt_pte()
39 nv04_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv04_vmm_pgt_sgl() argument
42 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_sgl()
46 nv04_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv04_vmm_pgt_dma() argument
50 nvkm_kmap(pt->memory); in nv04_vmm_pgt_dma()
53 nvkm_done(pt->memory); in nv04_vmm_pgt_dma()
55 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_dma()
61 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in nv04_vmm_pgt_unmap() argument
63 VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes); in nv04_vmm_pgt_unmap()
[all …]
/drivers/hv/
mshv_eventfd.c
228 synchronize_srcu(&pt->pt_irq_srcu); in mshv_irqfd_resampler_shutdown()
360 mshv_irqfd_update(pt, irqfd); in mshv_irqfd_routing_update()
397 irqfd->irqfd_partn = pt; in mshv_irqfd_assign()
445 rp->rsmplr_partn = pt; in mshv_irqfd_assign()
493 mshv_irqfd_update(pt, irqfd); in mshv_irqfd_assign()
642 __must_hold(&pt->mutex) in ioeventfd_check_collision()
658 __must_hold(&pt->mutex) in mshv_assign_ioeventfd()
750 __must_hold(&pt->mutex) in mshv_deassign_ioeventfd()
779 ioeventfd_release(p, pt->pt_id); in mshv_deassign_ioeventfd()
791 __must_hold(&pt->mutex) in mshv_set_unset_ioeventfd()
[all …]
/drivers/gpu/drm/gma500/
mmu.c
227 kfree(pt); in psb_mmu_free_pt()
249 if (pt) in psb_mmu_free_pagedir()
272 if (!pt) in psb_mmu_alloc_pt()
304 return pt; in psb_mmu_alloc_pt()
320 if (!pt) in psb_mmu_pt_alloc_map_lock()
343 pt->v = kmap_atomic(pt->p); in psb_mmu_pt_alloc_map_lock()
344 return pt; in psb_mmu_pt_alloc_map_lock()
356 if (!pt) { in psb_mmu_pt_map_lock()
360 pt->v = kmap_atomic(pt->p); in psb_mmu_pt_map_lock()
361 return pt; in psb_mmu_pt_map_lock()
[all …]
/drivers/gpu/drm/i915/selftests/
scatterlist.c
50 pfn = pt->start; in expect_pfn_sg()
51 for_each_sg(pt->st.sgl, sg, pt->st.nents, n) { in expect_pfn_sg()
72 if (pfn != pt->end) { in expect_pfn_sg()
88 pfn = pt->start; in expect_pfn_sg_page_iter()
89 for_each_sg_page(pt->st.sgl, &sgiter, pt->st.nents, 0) { in expect_pfn_sg_page_iter()
120 pfn = pt->start; in expect_pfn_sgtiter()
239 pfn = pt->start; in alloc_table()
240 sg = pt->st.sgl; in alloc_table()
263 pt->st.nents = n; in alloc_table()
264 pt->end = pfn; in alloc_table()
[all …]
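
The selftest above walks the built table with for_each_sg() and checks that every segment continues at the expected pfn. A condensed sketch of that check:

#include <linux/mm.h>
#include <linux/scatterlist.h>

static bool check_pfn_sg(struct sg_table *st, unsigned long start_pfn)
{
	struct scatterlist *sg;
	unsigned long pfn = start_pfn;
	unsigned int n;

	for_each_sg(st->sgl, sg, st->nents, n) {
		if (page_to_pfn(sg_page(sg)) != pfn)
			return false;	/* segment does not continue the run */
		pfn += sg->length >> PAGE_SHIFT;
	}
	return true;
}
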
/drivers/dma-buf/
sw_sync.c
160 list_del(&pt->link); in timeline_fence_release()
252 struct sync_pt *pt; in sync_pt_create() local
254 pt = kzalloc(sizeof(*pt), GFP_KERNEL); in sync_pt_create()
255 if (!pt) in sync_pt_create()
282 kfree(pt); in sync_pt_create()
283 pt = other; in sync_pt_create()
299 return pt; in sync_pt_create()
348 struct sync_pt *pt; in sw_sync_ioctl_create_fence() local
361 if (!pt) { in sw_sync_ioctl_create_fence()
411 struct sync_pt *pt; in sw_sync_ioctl_get_deadline() local
[all …]
/drivers/gpu/drm/xe/
xe_lmtt.c
64 pt = kzalloc(struct_size(pt, entries, num_entries), GFP_KERNEL); in lmtt_pt_alloc()
65 if (!pt) { in lmtt_pt_alloc()
87 pt->bo = bo; in lmtt_pt_alloc()
88 return pt; in lmtt_pt_alloc()
91 kfree(pt); in lmtt_pt_alloc()
99 pt->level, (u64)xe_bo_main_addr(pt->bo, XE_PAGE_SIZE)); in lmtt_pt_free()
102 kfree(pt); in lmtt_pt_free()
312 if (!pt) in lmtt_destroy_pt()
328 if (!pt) in lmtt_drop_pages()
434 pt = pt->entries[lmtt->ops->lmtt_pte_index(addr, pt->level)]; in lmtt_leaf_pt()
[all …]
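
lmtt_pt_alloc() sizes its trailing flexible array with struct_size(), which checks the element-count multiplication for overflow before the allocation. The idiom, generically:

#include <linux/overflow.h>
#include <linux/slab.h>

struct my_pt {
	unsigned int level;
	void *entries[];	/* flexible array member, sized at alloc time */
};

static struct my_pt *my_pt_alloc(unsigned int num_entries)
{
	struct my_pt *pt;

	/* struct_size() only inspects the type of pt, so using it before
	 * assignment is the standard idiom */
	pt = kzalloc(struct_size(pt, entries, num_entries), GFP_KERNEL);
	return pt;
}
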
xe_pt.c
51 return container_of(pt, struct xe_pt_dir, pt); in as_xe_pt_dir()
112 pt = (dir) ? &dir->pt : NULL; in xe_pt_create()
114 pt = kzalloc(sizeof(*pt), GFP_KERNEL); in xe_pt_create()
116 if (!pt) in xe_pt_create()
213 if (pt->level > 0 && pt->num_live) { in xe_pt_destroy()
990 struct xe_pt *pt = entries[i].pt; in xe_pt_cancel_bind() local
1048 struct xe_pt *pt = entries[i].pt; in xe_pt_commit() local
1075 struct xe_pt *pt = entries[i].pt; in xe_pt_abort_bind() local
1105 struct xe_pt *pt = entries[i].pt; in xe_pt_commit_prepare_bind() local
1768 struct xe_pt *pt = entry->pt; in xe_pt_abort_unbind() local
[all …]
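
as_xe_pt_dir() above recovers the directory from its embedded base struct with container_of(); the same idiom with stand-in types:

#include <linux/container_of.h>

struct my_pt {
	unsigned int level;
};

struct my_pt_dir {
	struct my_pt pt;	/* embedded base; container recovered below */
};

static struct my_pt_dir *as_my_pt_dir(struct my_pt *pt)
{
	return container_of(pt, struct my_pt_dir, pt);
}
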
/drivers/gpu/drm/ttm/
ttm_pool.c
287 spin_lock(&pt->lock); in ttm_pool_type_give()
289 spin_unlock(&pt->lock); in ttm_pool_type_give()
298 spin_lock(&pt->lock); in ttm_pool_type_take()
313 pt->pool = pool; in ttm_pool_type_init()
314 pt->caching = caching; in ttm_pool_type_init()
315 pt->order = order; in ttm_pool_type_init()
334 ttm_pool_free_page(pt->pool, pt->caching, pt->order, p); in ttm_pool_type_fini()
380 pt = list_first_entry(&shrinker_list, typeof(*pt), shrinker_list); in ttm_pool_shrink()
386 ttm_pool_free_page(pt->pool, pt->caching, pt->order, p); in ttm_pool_shrink()
475 if (pt) in ttm_pool_unmap_and_free()
[all …]
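
ttm_pool_type_give()/ttm_pool_type_take() above amount to a spinlock-protected free list per (pool, caching, order) bucket. A generic sketch of the give/take pair:

#include <linux/list.h>
#include <linux/spinlock.h>

struct my_elem {
	struct list_head lru;
};

struct my_pool_type {
	spinlock_t lock;
	struct list_head pages;
};

static void my_type_give(struct my_pool_type *pt, struct my_elem *p)
{
	spin_lock(&pt->lock);
	list_add(&p->lru, &pt->pages);
	spin_unlock(&pt->lock);
}

static struct my_elem *my_type_take(struct my_pool_type *pt)
{
	struct my_elem *p;

	spin_lock(&pt->lock);
	p = list_first_entry_or_null(&pt->pages, struct my_elem, lru);
	if (p)
		list_del(&p->lru);	/* claimed by the caller */
	spin_unlock(&pt->lock);
	return p;
}
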
/drivers/media/common/saa7146/
saa7146_core.c
186 if (0 != saa7146_pgtable_build_single(pci, pt, pt->slist, slen)) in saa7146_vmalloc_build_pgtable()
192 dma_unmap_sg(&pci->dev, pt->slist, pt->nents, DMA_FROM_DEVICE); in saa7146_vmalloc_build_pgtable()
196 kfree(pt->slist); in saa7146_vmalloc_build_pgtable()
197 pt->slist = NULL; in saa7146_vmalloc_build_pgtable()
206 dma_unmap_sg(&pci->dev, pt->slist, pt->nents, DMA_FROM_DEVICE); in saa7146_vfree_destroy_pgtable()
208 kfree(pt->slist); in saa7146_vfree_destroy_pgtable()
209 pt->slist = NULL; in saa7146_vfree_destroy_pgtable()
217 dma_free_coherent(&pci->dev, pt->size, pt->cpu, pt->dma); in saa7146_pgtable_free()
218 pt->cpu = NULL; in saa7146_pgtable_free()
231 pt->cpu = cpu; in saa7146_pgtable_alloc()
[all …]
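
saa7146_pgtable_alloc()/saa7146_pgtable_free() above pair dma_alloc_coherent() with dma_free_coherent() and NULL the CPU pointer afterwards. A minimal sketch; struct my_pgtable and the size choice are illustrative:

#include <linux/dma-mapping.h>

struct my_pgtable {
	void *cpu;
	dma_addr_t dma;
	size_t size;
};

static int my_pgtable_alloc(struct device *dev, struct my_pgtable *pt)
{
	pt->size = PAGE_SIZE;	/* illustrative size */
	pt->cpu = dma_alloc_coherent(dev, pt->size, &pt->dma, GFP_KERNEL);
	return pt->cpu ? 0 : -ENOMEM;
}

static void my_pgtable_free(struct device *dev, struct my_pgtable *pt)
{
	dma_free_coherent(dev, pt->size, pt->cpu, pt->dma);
	pt->cpu = NULL;	/* mirrors saa7146_pgtable_free() above */
}
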
/drivers/dma/amd/ae4dma/
ae4dma-dev.c
53 struct pt_device *pt; in ae4_core_irq_handler() local
57 pt = cmd_q->pt; in ae4_core_irq_handler()
59 pt->total_interrupts++; in ae4_core_irq_handler()
91 struct pt_device *pt = &ae4->pt; in ae4_core_init() local
93 struct device *dev = pt->dev; in ae4_core_init()
97 writel(max_hw_q, pt->io_regs); in ae4_core_init()
105 cmd_q->pt = pt; in ae4_core_init()
107 cmd_q->reg_control = pt->io_regs + ((i + 1) * AE4_Q_SZ); in ae4_core_init()
110 dev_name(pt->dev), ae4cmd_q); in ae4_core_init()
150 ret = pt_dmaengine_register(pt); in ae4_core_init()
[all …]
ae4dma-pci.c
16 struct pt_device *pt = &ae4->pt; in ae4_get_irqs() local
17 struct device *dev = pt->dev; in ae4_get_irqs()
59 struct pt_device *pt = &ae4->pt; in ae4_free_irqs() local
60 struct device *dev = pt->dev; in ae4_free_irqs()
78 struct pt_device *pt; in ae4_pci_probe() local
99 pt = &ae4->pt; in ae4_pci_probe()
100 pt->dev = dev; in ae4_pci_probe()
101 pt->ver = AE4_DMA_VERSION; in ae4_pci_probe()
103 pt->io_regs = pcim_iomap_table(pdev)[0]; in ae4_pci_probe()
104 if (!pt->io_regs) { in ae4_pci_probe()
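
ae4_pci_probe() above pulls its MMIO base from the managed iomap table and NULL-checks it. A minimal probe-side sketch using the same managed PCI helpers; the BAR index and region name are illustrative:

#include <linux/bits.h>
#include <linux/pci.h>

static int my_pci_probe(struct pci_dev *pdev, const struct pci_device_id *id)
{
	void __iomem *regs;
	int ret;

	ret = pcim_enable_device(pdev);	/* managed: auto-disabled on unbind */
	if (ret)
		return ret;

	ret = pcim_iomap_regions(pdev, BIT(0), "my-drv");	/* map BAR 0 */
	if (ret)
		return ret;

	regs = pcim_iomap_table(pdev)[0];
	if (!regs)
		return -ENOMEM;	/* same check as ae4_pci_probe() above */

	return 0;
}
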

Completed in 85 milliseconds
