Lines Matching refs:pgt

30 struct nvkm_vmm_pt *pgt = *ppgt; in nvkm_vmm_pt_del() local
31 if (pgt) { in nvkm_vmm_pt_del()
32 kvfree(pgt->pde); in nvkm_vmm_pt_del()
33 kfree(pgt); in nvkm_vmm_pt_del()
44 struct nvkm_vmm_pt *pgt; in nvkm_vmm_pt_new() local
56 if (!(pgt = kzalloc(sizeof(*pgt) + lpte, GFP_KERNEL))) in nvkm_vmm_pt_new()
58 pgt->page = page ? page->shift : 0; in nvkm_vmm_pt_new()
59 pgt->sparse = sparse; in nvkm_vmm_pt_new()
62 pgt->pde = kvcalloc(pten, sizeof(*pgt->pde), GFP_KERNEL); in nvkm_vmm_pt_new()
63 if (!pgt->pde) { in nvkm_vmm_pt_new()
64 kfree(pgt); in nvkm_vmm_pt_new()
69 return pgt; in nvkm_vmm_pt_new()
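
The nvkm_vmm_pt_new()/nvkm_vmm_pt_del() pair above allocates the software page-table node with its per-PTE bookkeeping bytes appended to the structure (kzalloc of sizeof(*pgt) + lpte, line 56) plus, for levels that carry child tables, a separately kvcalloc'd pde[] array (line 62), and frees both on teardown. A minimal user-space sketch of that layout and pairing, with calloc()/free() standing in for the kernel allocators and the struct trimmed to the fields visible in the listing:

#include <stdint.h>
#include <stdlib.h>

struct vmm_pt {
	uint8_t page;            /* page shift, 0 if unset (cf. line 58)   */
	int sparse;              /* node covers a sparse mapping           */
	struct vmm_pt **pde;     /* child nodes, PD levels only (line 62)  */
	uint8_t pte[];           /* one bookkeeping byte per PTE           */
};

static struct vmm_pt *vmm_pt_new(uint32_t pten, uint32_t lpte, int sparse)
{
	/* kzalloc(sizeof(*pgt) + lpte) in the kernel: node plus pte[] bytes */
	struct vmm_pt *pgt = calloc(1, sizeof(*pgt) + lpte);

	if (!pgt)
		return NULL;
	pgt->sparse = sparse;

	if (pten) {              /* this level has child tables */
		/* kvcalloc(pten, sizeof(*pgt->pde)) in the kernel */
		pgt->pde = calloc(pten, sizeof(*pgt->pde));
		if (!pgt->pde) {
			free(pgt);
			return NULL;
		}
	}
	return pgt;
}

static void vmm_pt_del(struct vmm_pt **ppgt)
{
	struct vmm_pt *pgt = *ppgt;

	if (pgt) {
		free(pgt->pde);  /* kvfree(pgt->pde) */
		free(pgt);       /* kfree(pgt)       */
		*ppgt = NULL;
	}
}

int main(void)
{
	struct vmm_pt *pgt = vmm_pt_new(512, 512, 0);

	vmm_pt_del(&pgt);
	return 0;
}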
143 struct nvkm_vmm_pt *pgt = it->pt[it->lvl]; in nvkm_vmm_unref_pdes() local
144 struct nvkm_mmu_pt *pt = pgt->pt[type]; in nvkm_vmm_unref_pdes()
154 pgt->pt[type] = NULL; in nvkm_vmm_unref_pdes()
155 if (!pgt->refs[!type]) { in nvkm_vmm_unref_pdes()
158 if (pgt->sparse) { in nvkm_vmm_unref_pdes()
191 if (!pgt->refs[!type]) in nvkm_vmm_unref_pdes()
192 nvkm_vmm_pt_del(&pgt); in nvkm_vmm_unref_pdes()
197 nvkm_vmm_unref_sptes(struct nvkm_vmm_iter *it, struct nvkm_vmm_pt *pgt, in nvkm_vmm_unref_sptes() argument
211 pgt->pte[lpti] -= pten; in nvkm_vmm_unref_sptes()
216 if (!pgt->refs[0]) in nvkm_vmm_unref_sptes()
221 if (pgt->pte[pteb] & NVKM_VMM_PTE_SPTES) { in nvkm_vmm_unref_sptes()
223 if (!(pgt->pte[ptei] & NVKM_VMM_PTE_SPTES)) in nvkm_vmm_unref_sptes()
235 pgt->pte[ptei] &= ~NVKM_VMM_PTE_VALID; in nvkm_vmm_unref_sptes()
237 if (pgt->pte[ptei] & NVKM_VMM_PTE_SPTES) in nvkm_vmm_unref_sptes()
239 pgt->pte[ptei] &= ~NVKM_VMM_PTE_VALID; in nvkm_vmm_unref_sptes()
242 if (pgt->pte[pteb] & NVKM_VMM_PTE_SPARSE) { in nvkm_vmm_unref_sptes()
244 pair->func->sparse(vmm, pgt->pt[0], pteb, ptes); in nvkm_vmm_unref_sptes()
252 pair->func->invalid(vmm, pgt->pt[0], pteb, ptes); in nvkm_vmm_unref_sptes()
262 struct nvkm_vmm_pt *pgt = it->pt[0]; in nvkm_vmm_unref_ptes() local
267 dma = desc->func->pfn_clear(it->vmm, pgt->pt[type], ptei, ptes); in nvkm_vmm_unref_ptes()
272 desc->func->pfn_unmap(it->vmm, pgt->pt[type], ptei, ptes); in nvkm_vmm_unref_ptes()
277 pgt->refs[type] -= ptes; in nvkm_vmm_unref_ptes()
280 if (desc->type == SPT && (pgt->refs[0] || pgt->refs[1])) in nvkm_vmm_unref_ptes()
281 nvkm_vmm_unref_sptes(it, pgt, desc, ptei, ptes); in nvkm_vmm_unref_ptes()
284 if (!pgt->refs[type]) { in nvkm_vmm_unref_ptes()
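
The unref path above hinges on two reference counts per node: pgt->refs[type] is decremented per released PTE (line 277), the hardware table for that side is detached once the count reaches zero (line 154), and the software node itself is only deleted when the other side holds no references either (lines 155, 191-192). A rough user-space model of that dual-count release, using hypothetical helper names rather than the kernel API:

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct pt_node {
	uint32_t refs[2];   /* [0] = large-page side, [1] = small-page side */
	bool     hw[2];     /* stand-in for "hardware table allocated"      */
};

/* Drop 'ptes' references from one side and report what must be torn down. */
static void unref_ptes(struct pt_node *pgt, int type, uint32_t ptes)
{
	assert(pgt->refs[type] >= ptes);
	pgt->refs[type] -= ptes;

	if (!pgt->refs[type]) {
		pgt->hw[type] = false;           /* free this side's HW PT */
		if (!pgt->refs[!type])
			printf("node unused: free software PT\n");
	}
}

int main(void)
{
	struct pt_node pgt = { .refs = { 1, 3 }, .hw = { true, true } };

	unref_ptes(&pgt, 1, 3);   /* small-page side drained, other side held */
	unref_ptes(&pgt, 0, 1);   /* last reference: whole node can go        */
	return 0;
}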
296 nvkm_vmm_ref_sptes(struct nvkm_vmm_iter *it, struct nvkm_vmm_pt *pgt, in nvkm_vmm_ref_sptes() argument
310 pgt->pte[lpti] += pten; in nvkm_vmm_ref_sptes()
315 if (!pgt->refs[0]) in nvkm_vmm_ref_sptes()
320 if (pgt->pte[pteb] & NVKM_VMM_PTE_VALID) { in nvkm_vmm_ref_sptes()
322 if (!(pgt->pte[ptei] & NVKM_VMM_PTE_VALID)) in nvkm_vmm_ref_sptes()
334 pgt->pte[ptei] |= NVKM_VMM_PTE_VALID; in nvkm_vmm_ref_sptes()
336 if (pgt->pte[ptei] & NVKM_VMM_PTE_VALID) in nvkm_vmm_ref_sptes()
338 pgt->pte[ptei] |= NVKM_VMM_PTE_VALID; in nvkm_vmm_ref_sptes()
341 if (pgt->pte[pteb] & NVKM_VMM_PTE_SPARSE) { in nvkm_vmm_ref_sptes()
348 desc->func->sparse(vmm, pgt->pt[1], spti, sptc); in nvkm_vmm_ref_sptes()
351 pair->func->unmap(vmm, pgt->pt[0], pteb, ptes); in nvkm_vmm_ref_sptes()
358 pair->func->unmap(vmm, pgt->pt[0], pteb, ptes); in nvkm_vmm_ref_sptes()
368 struct nvkm_vmm_pt *pgt = it->pt[0]; in nvkm_vmm_ref_ptes() local
371 pgt->refs[type] += ptes; in nvkm_vmm_ref_ptes()
375 nvkm_vmm_ref_sptes(it, pgt, desc, ptei, ptes); in nvkm_vmm_ref_ptes()
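
On levels with paired large/small page tables, the ref/unref helpers above also keep one bookkeeping byte per large PTE: a small-PTE count adjusted with += pten / -= pten (lines 310, 211) plus flag bits tested via NVKM_VMM_PTE_SPTES, _VALID and _SPARSE, which decide whether the large-page side gets unmapped, invalidated or re-marked sparse. A speculative sketch of that byte; the flag values are illustrative and not taken from nouveau:

#include <stdint.h>
#include <stdio.h>

#define PTE_SPARSE 0x80u   /* entry belongs to a sparse range              */
#define PTE_VALID  0x40u   /* entry currently backed by small PTEs         */
#define PTE_SPTES  0x3fu   /* low bits: count of small PTEs in this entry  */

static void ref_sptes(uint8_t *pte, uint32_t lpti, uint8_t pten)
{
	pte[lpti] += pten;        /* count the new small PTEs (cf. line 310) */
	pte[lpti] |= PTE_VALID;   /* entry now backed by small pages         */
}

static void unref_sptes(uint8_t *pte, uint32_t lpti, uint8_t pten)
{
	pte[lpti] -= pten;                  /* cf. line 211        */
	if (!(pte[lpti] & PTE_SPTES))       /* last small PTE gone */
		pte[lpti] &= ~PTE_VALID;
}

int main(void)
{
	uint8_t pte[16] = { [3] = PTE_SPARSE };

	ref_sptes(pte, 3, 4);
	unref_sptes(pte, 3, 4);
	printf("pte[3] = %#x\n", pte[3]);   /* back to just the sparse flag */
	return 0;
}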
382 struct nvkm_vmm_pt *pgt, u32 ptei, u32 ptes) in nvkm_vmm_sparse_ptes() argument
386 pgt->pde[ptei++] = NVKM_VMM_PDE_SPARSE; in nvkm_vmm_sparse_ptes()
389 memset(&pgt->pte[ptei], NVKM_VMM_PTE_SPARSE, ptes); in nvkm_vmm_sparse_ptes()
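
nvkm_vmm_sparse_ptes() records sparseness differently per level: page-directory levels store a sentinel in pde[] (line 386), while leaf levels memset the per-PTE flag bytes (line 389). A small sketch of the same two markings, with an illustrative sentinel and flag value:

#include <stdint.h>
#include <string.h>

#define PDE_SPARSE ((void *)1)   /* sentinel: this PDE is sparse   */
#define PTE_SPARSE 0x80          /* flag byte: this PTE is sparse  */

struct pt_node {
	void   *pde[512];
	uint8_t pte[512];
};

static void sparse_ptes(struct pt_node *pgt, int is_pd,
			uint32_t ptei, uint32_t ptes)
{
	if (is_pd) {                           /* cf. line 386 */
		while (ptes--)
			pgt->pde[ptei++] = PDE_SPARSE;
	} else {                               /* cf. line 389 */
		memset(&pgt->pte[ptei], PTE_SPARSE, ptes);
	}
}

int main(void)
{
	struct pt_node pd = {0}, leaf = {0};

	sparse_ptes(&pd, 1, 0, 4);     /* PD level: sentinel per PDE    */
	sparse_ptes(&leaf, 0, 0, 16);  /* leaf level: flag-byte memset  */
	return 0;
}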
417 struct nvkm_vmm_pt *pgt = pgd->pde[pdei]; in nvkm_vmm_ref_hwpt() local
418 const bool zero = !pgt->sparse && !desc->func->invalid; in nvkm_vmm_ref_hwpt()
428 pgt->pt[type] = nvkm_mmu_ptc_get(mmu, size, desc->align, zero); in nvkm_vmm_ref_hwpt()
429 if (!pgt->pt[type]) { in nvkm_vmm_ref_hwpt()
438 pt = pgt->pt[type]; in nvkm_vmm_ref_hwpt()
440 if (desc->type == LPT && pgt->refs[1]) { in nvkm_vmm_ref_hwpt()
448 bool spte = pgt->pte[ptei] & NVKM_VMM_PTE_SPTES; in nvkm_vmm_ref_hwpt()
450 bool next = pgt->pte[ptei] & NVKM_VMM_PTE_SPTES; in nvkm_vmm_ref_hwpt()
456 if (pgt->sparse) in nvkm_vmm_ref_hwpt()
460 memset(&pgt->pte[pteb], 0x00, ptes); in nvkm_vmm_ref_hwpt()
464 pgt->pte[pteb++] |= NVKM_VMM_PTE_VALID; in nvkm_vmm_ref_hwpt()
468 if (pgt->sparse) { in nvkm_vmm_ref_hwpt()
469 nvkm_vmm_sparse_ptes(desc, pgt, 0, pten); in nvkm_vmm_ref_hwpt()
487 struct nvkm_vmm_pt *pgt = pgd->pde[pdei]; in nvkm_vmm_ref_swpt() local
489 pgt = nvkm_vmm_pt_new(desc, NVKM_VMM_PDE_SPARSED(pgt), it->page); in nvkm_vmm_ref_swpt()
490 if (!pgt) { in nvkm_vmm_ref_swpt()
496 pgd->pde[pdei] = pgt; in nvkm_vmm_ref_swpt()
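
nvkm_vmm_ref_hwpt() pulls the hardware table from the MMU's PT cache, zero-filling it only when the node is not sparse and the format has no "invalid" encoding (lines 418, 428), then fixes up large PTEs that already carry small-page references; nvkm_vmm_ref_swpt() lazily allocates the software node and links it into the parent's pde[] slot (line 496). A sketch of just that lazy linking step, with hypothetical names and sentinel:

#include <stdlib.h>

#define PDE_SPARSE ((struct pt_node *)1)   /* illustrative sentinel */

struct pt_node {
	struct pt_node *pde[512];
};

static struct pt_node *ref_swpt(struct pt_node *pgd, unsigned int pdei)
{
	struct pt_node *pgt = pgd->pde[pdei];

	if (!pgt || pgt == PDE_SPARSE) {
		/* NVKM_VMM_PDE_SPARSED(pgt) in the listing records whether
		 * the slot held the sparse sentinel before allocation */
		pgt = calloc(1, sizeof(*pgt));
		if (!pgt)
			return NULL;
		pgd->pde[pdei] = pgt;          /* cf. line 496 */
	}
	return pgt;
}

int main(void)
{
	struct pt_node pgd = { { NULL } };
	struct pt_node *pgt = ref_swpt(&pgd, 7);

	free(pgt);
	return pgt ? 0 : 1;
}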
532 struct nvkm_vmm_pt *pgt = it.pt[it.lvl]; in nvkm_vmm_iter() local
541 struct nvkm_vmm_pt *pgd = pgt; in nvkm_vmm_iter()
548 it.pt[it.lvl - 1] = pgt = pgd->pde[pdei]; in nvkm_vmm_iter()
556 if (ref && !pgt->refs[desc[it.lvl - 1].type == SPT]) { in nvkm_vmm_iter()
564 struct nvkm_mmu_pt *pt = pgt->pt[type]; in nvkm_vmm_iter()
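
nvkm_vmm_iter() walks the levels with a small per-path stack: it indexes the parent's pde[] to descend one level (line 548), allocates software and hardware tables on the way down when referencing (line 556), and finally hands the leaf's hardware PT to the per-PTE callback (line 564). A stripped-down sketch of such a stacked descent over an illustrative two-level tree; the real walk is driven by the nvkm_vmm_desc array and the chosen page size:

#include <stdio.h>

#define MAX_LVL 2   /* illustrative depth; the real one comes from desc[] */

struct pt_node {
	struct pt_node *pde[4];
};

/* Descend from the root, keeping the nodes on the current path in pt[],
 * much like it.pt[it.lvl] in the listing. */
static struct pt_node *walk(struct pt_node *root, const unsigned int *pdei)
{
	struct pt_node *pt[MAX_LVL + 1];
	int lvl = MAX_LVL;

	pt[lvl] = root;
	while (lvl > 0) {
		struct pt_node *pgd = pt[lvl];
		struct pt_node *pgt = pgd->pde[pdei[lvl - 1]];

		if (!pgt)          /* the real walk allocates here when ref'ing */
			return NULL;
		pt[--lvl] = pgt;   /* cf. line 548 */
	}
	return pt[0];              /* leaf: per-PTE callback runs on its HW PT */
}

int main(void)
{
	struct pt_node leaf = { { NULL } };
	struct pt_node mid  = { { &leaf } };
	struct pt_node root = { { &mid } };
	const unsigned int pdei[MAX_LVL] = { 0, 0 };   /* index per level */

	printf("leaf reached: %s\n", walk(&root, pdei) == &leaf ? "yes" : "no");
	return 0;
}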