Lines matching references to ppgtt in the i915 gen6 ppgtt implementation (drivers/gpu/drm/i915/gt/gen6_ppgtt.c). Each entry gives the source line number, the matching line, and the enclosing function; "argument" marks sites where ppgtt is a function parameter, "local" where it is a local variable.

19 static void gen6_write_pde(const struct gen6_ppgtt *ppgtt,  in gen6_write_pde()  argument
23 dma_addr_t addr = pt ? px_dma(pt) : px_dma(ppgtt->base.vm.scratch[1]); in gen6_write_pde()
27 ppgtt->pd_addr + pde); in gen6_write_pde()
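
Taken together, the fragments above form the body of gen6_write_pde(): point a page-directory entry at its page table, or at the scratch table when there is none. A reconstruction of the whole function, assuming the lines not captured by this listing match the upstream i915 driver (GEN6_PDE_ADDR_ENCODE and GEN6_PDE_VALID come from upstream, not from the listing):

static void gen6_write_pde(const struct gen6_ppgtt *ppgtt,
                           const unsigned int pde,
                           const struct i915_page_table *pt)
{
        /* Point the PDE at the page table, or at scratch if there is none */
        dma_addr_t addr = pt ? px_dma(pt) : px_dma(ppgtt->base.vm.scratch[1]);

        /* The caller is responsible for flushing the write where ordering matters */
        iowrite32(GEN6_PDE_ADDR_ENCODE(addr) | GEN6_PDE_VALID,
                  ppgtt->pd_addr + pde);
}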
77 struct gen6_ppgtt * const ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm)); in gen6_ppgtt_clear_range() local
86 i915_pt_entry(ppgtt->base.pd, pde++); in gen6_ppgtt_clear_range()
94 ppgtt->scan_for_unused_pt = true; in gen6_ppgtt_clear_range()
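
Lines 77-94 belong to gen6_ppgtt_clear_range(). Gen6 hardware caches PDEs inside the context with no way to invalidate that cache, so a range is never unmapped structurally: the PTEs are reset to scratch, each table's use count is dropped, and the ppgtt is flagged for a later scavenging pass. A reconstruction, with the entry arithmetic and the memset32() fill assumed from upstream:

static void gen6_ppgtt_clear_range(struct i915_address_space *vm,
                                   u64 start, u64 length)
{
        struct gen6_ppgtt * const ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm));
        const unsigned int first_entry = start / I915_GTT_PAGE_SIZE;
        const gen6_pte_t scratch_pte = vm->scratch[0]->encode;
        unsigned int pde = first_entry / GEN6_PTES;
        unsigned int pte = first_entry % GEN6_PTES;
        unsigned int num_entries = length / I915_GTT_PAGE_SIZE;

        while (num_entries) {
                struct i915_page_table * const pt =
                        i915_pt_entry(ppgtt->base.pd, pde++);
                const unsigned int count = min(num_entries, GEN6_PTES - pte);
                gen6_pte_t *vaddr;

                num_entries -= count;

                /* Dropping the last reference marks the table for scavenging */
                if (!atomic_sub_return(count, &pt->used))
                        ppgtt->scan_for_unused_pt = true;

                /* The hw cannot drop a cached PDE, so only reset PTEs to scratch */
                vaddr = px_vaddr(pt);
                memset32(vaddr + pte, scratch_pte, count);

                pte = 0;
        }
}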
115 struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm); in gen6_ppgtt_insert_entries() local
116 struct i915_page_directory * const pd = ppgtt->pd; in gen6_ppgtt_insert_entries()

150 static void gen6_flush_pd(struct gen6_ppgtt *ppgtt, u64 start, u64 end) in gen6_flush_pd() argument
152 struct i915_page_directory * const pd = ppgtt->base.pd; in gen6_flush_pd()
159 mutex_lock(&ppgtt->flush); in gen6_flush_pd()
162 gen6_write_pde(ppgtt, pde, pt); in gen6_flush_pd()
165 ioread32(ppgtt->pd_addr + pde - 1); in gen6_flush_pd()
166 gen6_ggtt_invalidate(ppgtt->base.vm.gt->ggtt); in gen6_flush_pd()
169 mutex_unlock(&ppgtt->flush); in gen6_flush_pd()
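
Lines 150-169 are gen6_flush_pd(): rewrite every PDE in a range under the ppgtt->flush mutex, then force the writes to land with a posting read of the last PDE written followed by a GGTT invalidation (the PDEs themselves live in GGTT pages). A reconstruction, with the 64K rounding, the gen6_for_each_pde() walk and the mb() barriers assumed from upstream:

static void gen6_flush_pd(struct gen6_ppgtt *ppgtt, u64 start, u64 end)
{
        struct i915_page_directory * const pd = ppgtt->base.pd;
        struct i915_page_table *pt;
        unsigned int pde;

        start = round_down(start, SZ_64K);
        end = round_up(end, SZ_64K) - start;

        mutex_lock(&ppgtt->flush);

        gen6_for_each_pde(pt, pd, start, end, pde)
                gen6_write_pde(ppgtt, pde, pt);

        mb();
        /* Posting read of the last PDE flushes the preceding iowrite32()s */
        ioread32(ppgtt->pd_addr + pde - 1);
        gen6_ggtt_invalidate(ppgtt->base.vm.gt->ggtt);
        mb();

        mutex_unlock(&ppgtt->flush);
}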
176 struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm)); in gen6_alloc_va_range() local
177 struct i915_page_directory * const pd = ppgtt->base.pd; in gen6_alloc_va_range()
211 if (flush && i915_vma_is_bound(ppgtt->vma, I915_VMA_GLOBAL_BIND)) { in gen6_alloc_va_range()
215 gen6_flush_pd(ppgtt, from, start); in gen6_alloc_va_range()
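
Lines 176-215 belong to gen6_alloc_va_range(). No allocation happens here: page tables arrive preallocated in a stash and are installed into empty PDE slots under the directory lock, and if any slot was newly filled while the ppgtt is bound in the GGTT, the affected range is flushed to the hardware under a runtime-pm wakeref. A reconstruction along those lines; the stash handling and the exact locking are assumed from upstream and vary between kernel versions:

static void gen6_alloc_va_range(struct i915_address_space *vm,
                                struct i915_vm_pt_stash *stash,
                                u64 start, u64 length)
{
        struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm));
        struct i915_page_directory * const pd = ppgtt->base.pd;
        struct i915_page_table *pt;
        bool flush = false;
        u64 from = start;
        unsigned int pde;

        spin_lock(&pd->lock);
        gen6_for_each_pde(pt, pd, start, length, pde) {
                const unsigned int count = gen6_pte_count(start, length);

                if (!pt) {
                        spin_unlock(&pd->lock);

                        /* Take a preallocated table and fill it with scratch */
                        pt = stash->pt[0];
                        __i915_gem_object_pin_pages(pt->base);
                        fill32_px(pt, vm->scratch[0]->encode);

                        spin_lock(&pd->lock);
                        if (!pd->entry[pde]) {
                                stash->pt[0] = pt->stash;
                                atomic_set(&pt->used, 0);
                                pd->entry[pde] = pt;
                        } else {
                                /* Lost the race; reuse the winner's table */
                                pt = pd->entry[pde];
                        }

                        flush = true;
                }

                atomic_add(count, &pt->used);
        }
        spin_unlock(&pd->lock);

        /* Newly installed PDEs must reach the hw while it is awake */
        if (flush && i915_vma_is_bound(ppgtt->vma, I915_VMA_GLOBAL_BIND)) {
                intel_wakeref_t wakeref;

                with_intel_runtime_pm(&vm->i915->runtime_pm, wakeref)
                        gen6_flush_pd(ppgtt, from, start);
        }
}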
219 static int gen6_ppgtt_init_scratch(struct gen6_ppgtt *ppgtt) in gen6_ppgtt_init_scratch() argument
221 struct i915_address_space * const vm = &ppgtt->base.vm; in gen6_ppgtt_init_scratch()

254 static void gen6_ppgtt_free_pd(struct gen6_ppgtt *ppgtt) in gen6_ppgtt_free_pd() argument
256 struct i915_page_directory * const pd = ppgtt->base.pd; in gen6_ppgtt_free_pd()
262 free_pt(&ppgtt->base.vm, pt); in gen6_ppgtt_free_pd()

267 struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm)); in gen6_ppgtt_cleanup() local
269 gen6_ppgtt_free_pd(ppgtt); in gen6_ppgtt_cleanup()
272 if (ppgtt->base.pd) in gen6_ppgtt_cleanup()
273 free_pd(&ppgtt->base.vm, ppgtt->base.pd); in gen6_ppgtt_cleanup()
275 mutex_destroy(&ppgtt->flush); in gen6_ppgtt_cleanup()
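
Lines 254-275 cover teardown. gen6_ppgtt_free_pd() walks the top-level directory and releases every remaining page table, after which gen6_ppgtt_cleanup() frees the scratch pages and the directory itself and destroys the flush mutex. A reconstruction (free_scratch() does not appear in the listing and is assumed from upstream):

static void gen6_ppgtt_free_pd(struct gen6_ppgtt *ppgtt)
{
        struct i915_page_directory * const pd = ppgtt->base.pd;
        struct i915_page_table *pt;
        u32 pde;

        gen6_for_all_pdes(pt, pd, pde)
                if (pt)
                        free_pt(&ppgtt->base.vm, pt);
}

static void gen6_ppgtt_cleanup(struct i915_address_space *vm)
{
        struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm));

        gen6_ppgtt_free_pd(ppgtt);
        free_scratch(vm);

        if (ppgtt->base.pd)
                free_pd(&ppgtt->base.vm, ppgtt->base.pd);

        mutex_destroy(&ppgtt->flush);
}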
285 struct gen6_ppgtt *ppgtt = vma_res->private; in pd_vma_bind() local
288 ppgtt->pp_dir = ggtt_offset * sizeof(gen6_pte_t) << 10; in pd_vma_bind()
289 ppgtt->pd_addr = (gen6_pte_t __iomem *)ggtt->gsm + ggtt_offset; in pd_vma_bind()
291 gen6_flush_pd(ppgtt, 0, ppgtt->base.vm.total); in pd_vma_bind()
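
Lines 285-291 are the heart of pd_vma_bind(). The gen6 page directory lives inside the GGTT, so once its vma is assigned a GGTT offset, both the PP_DIR register value (pp_dir) and the CPU mapping of the PDEs (pd_addr, a pointer into the GSM) are derived from that offset, and every PDE is then rewritten through the new mapping. A sketch; the signature and the ggtt_offset computation are assumptions based on a recent upstream tree:

static void pd_vma_bind(struct i915_address_space *vm,
                        struct i915_vm_pt_stash *stash,
                        struct i915_vma_resource *vma_res,
                        unsigned int pat_index,
                        u32 unused)
{
        struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
        struct gen6_ppgtt *ppgtt = vma_res->private;
        u32 ggtt_offset = vma_res->start / I915_GTT_PAGE_SIZE;

        /* Both the register value and the CPU view follow the GGTT slot */
        ppgtt->pp_dir = ggtt_offset * sizeof(gen6_pte_t) << 10;
        ppgtt->pd_addr = (gen6_pte_t __iomem *)ggtt->gsm + ggtt_offset;

        gen6_flush_pd(ppgtt, 0, ppgtt->base.vm.total);
}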
297 struct gen6_ppgtt *ppgtt = vma_res->private; in pd_vma_unbind() local
298 struct i915_page_directory * const pd = ppgtt->base.pd; in pd_vma_unbind()
302 if (!ppgtt->scan_for_unused_pt) in pd_vma_unbind()
306 gen6_for_all_pdes(pt, ppgtt->base.pd, pde) { in pd_vma_unbind()
310 free_pt(&ppgtt->base.vm, pt); in pd_vma_unbind()
314 ppgtt->scan_for_unused_pt = false; in pd_vma_unbind()
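
Lines 297-314 show pd_vma_unbind() doubling as a lazy garbage collector: page tables are only freed here, and only if an earlier clear_range() set scan_for_unused_pt. A reconstruction; the pt->used test and the pd->entry[pde] reset are assumed from upstream:

static void pd_vma_unbind(struct i915_address_space *vm,
                          struct i915_vma_resource *vma_res)
{
        struct gen6_ppgtt *ppgtt = vma_res->private;
        struct i915_page_directory * const pd = ppgtt->base.pd;
        struct i915_page_table *pt;
        unsigned int pde;

        if (!ppgtt->scan_for_unused_pt)
                return;

        /* Free all page tables that no longer map anything */
        gen6_for_all_pdes(pt, ppgtt->base.pd, pde) {
                if (!pt || atomic_read(&pt->used))
                        continue;

                free_pt(&ppgtt->base.vm, pt);
                pd->entry[pde] = NULL;
        }

        ppgtt->scan_for_unused_pt = false;
}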
324 struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(base); in gen6_ppgtt_pin() local
327 GEM_BUG_ON(!kref_read(&ppgtt->base.vm.ref)); in gen6_ppgtt_pin()
335 if (atomic_add_unless(&ppgtt->pin_count, 1, 0)) in gen6_ppgtt_pin()
339 err = i915_vm_lock_objects(&ppgtt->base.vm, ww); in gen6_ppgtt_pin()
348 if (!atomic_read(&ppgtt->pin_count)) { in gen6_ppgtt_pin()
349 err = i915_ggtt_pin(ppgtt->vma, ww, GEN6_PD_ALIGN, PIN_HIGH); in gen6_ppgtt_pin()
351 GEM_BUG_ON(ppgtt->vma->fence); in gen6_ppgtt_pin()
352 clear_bit(I915_VMA_CAN_FENCE_BIT, __i915_vma_flags(ppgtt->vma)); in gen6_ppgtt_pin()
355 atomic_inc(&ppgtt->pin_count); in gen6_ppgtt_pin()
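
Lines 324-355 are the pin path. atomic_add_unless(&pin_count, 1, 0) takes an extra reference only if the directory is already pinned; otherwise the slow path locks the vm's objects and pins the directory's vma into the GGTT with PIN_HIGH, keeping the contended low GGTT free for others, before bumping pin_count. A reconstruction, with the error handling between the quoted lines assumed from upstream:

int gen6_ppgtt_pin(struct i915_ppgtt *base, struct i915_gem_ww_ctx *ww)
{
        struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(base);
        int err;

        GEM_BUG_ON(!kref_read(&ppgtt->base.vm.ref));

        /* Fast path: already pinned, just take another reference */
        if (atomic_add_unless(&ppgtt->pin_count, 1, 0))
                return 0;

        err = i915_vm_lock_objects(&ppgtt->base.vm, ww);
        if (err)
                return err;

        /* Slow path: pin the page directory high in the GGTT */
        if (!atomic_read(&ppgtt->pin_count)) {
                err = i915_ggtt_pin(ppgtt->vma, ww, GEN6_PD_ALIGN, PIN_HIGH);
                if (!err) {
                        /* The PD vma may never be fenced */
                        GEM_BUG_ON(ppgtt->vma->fence);
                        clear_bit(I915_VMA_CAN_FENCE_BIT,
                                  __i915_vma_flags(ppgtt->vma));
                }
        }
        if (!err)
                atomic_inc(&ppgtt->pin_count);

        return err;
}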
378 gen6_alloc_top_pd(struct gen6_ppgtt *ppgtt) in gen6_alloc_top_pd() argument
380 struct i915_ggtt * const ggtt = ppgtt->base.vm.gt->ggtt; in gen6_alloc_top_pd()
388 pd->pt.base = __i915_gem_object_create_internal(ppgtt->base.vm.gt->i915, in gen6_alloc_top_pd()
397 pd->pt.base->base.resv = i915_vm_resv_get(&ppgtt->base.vm); in gen6_alloc_top_pd()
398 pd->pt.base->shares_resv_from = &ppgtt->base.vm; in gen6_alloc_top_pd()
400 ppgtt->vma = i915_vma_instance(pd->pt.base, &ggtt->vm, NULL); in gen6_alloc_top_pd()
401 if (IS_ERR(ppgtt->vma)) { in gen6_alloc_top_pd()
402 err = PTR_ERR(ppgtt->vma); in gen6_alloc_top_pd()
403 ppgtt->vma = NULL; in gen6_alloc_top_pd()
408 ppgtt->vma->ops = &pd_vma_ops; in gen6_alloc_top_pd()
409 ppgtt->vma->private = ppgtt; in gen6_alloc_top_pd()
413 free_pd(&ppgtt->base.vm, pd); in gen6_alloc_top_pd()

419 struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(base); in gen6_ppgtt_unpin() local
421 GEM_BUG_ON(!atomic_read(&ppgtt->pin_count)); in gen6_ppgtt_unpin()
422 if (atomic_dec_and_test(&ppgtt->pin_count)) in gen6_ppgtt_unpin()
423 i915_vma_unpin(ppgtt->vma); in gen6_ppgtt_unpin()
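
Lines 419-423 are the matching unpin; the vma is released only when the last pin reference is dropped. The fragments cover essentially the whole function:

void gen6_ppgtt_unpin(struct i915_ppgtt *base)
{
        struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(base);

        GEM_BUG_ON(!atomic_read(&ppgtt->pin_count));
        if (atomic_dec_and_test(&ppgtt->pin_count))
                i915_vma_unpin(ppgtt->vma);
}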
429 struct gen6_ppgtt *ppgtt; in gen6_ppgtt_create() local
432 ppgtt = kzalloc(sizeof(*ppgtt), GFP_KERNEL); in gen6_ppgtt_create()
433 if (!ppgtt) in gen6_ppgtt_create()
436 mutex_init(&ppgtt->flush); in gen6_ppgtt_create()
438 ppgtt_init(&ppgtt->base, gt, 0); in gen6_ppgtt_create()
439 ppgtt->base.vm.pd_shift = ilog2(SZ_4K * SZ_4K / sizeof(gen6_pte_t)); in gen6_ppgtt_create()
440 ppgtt->base.vm.top = 1; in gen6_ppgtt_create()
442 ppgtt->base.vm.bind_async_flags = I915_VMA_LOCAL_BIND; in gen6_ppgtt_create()
443 ppgtt->base.vm.allocate_va_range = gen6_alloc_va_range; in gen6_ppgtt_create()
444 ppgtt->base.vm.clear_range = gen6_ppgtt_clear_range; in gen6_ppgtt_create()
445 ppgtt->base.vm.insert_entries = gen6_ppgtt_insert_entries; in gen6_ppgtt_create()
446 ppgtt->base.vm.cleanup = gen6_ppgtt_cleanup; in gen6_ppgtt_create()
448 ppgtt->base.vm.alloc_pt_dma = alloc_pt_dma; in gen6_ppgtt_create()
449 ppgtt->base.vm.alloc_scratch_dma = alloc_pt_dma; in gen6_ppgtt_create()
450 ppgtt->base.vm.pte_encode = ggtt->vm.pte_encode; in gen6_ppgtt_create()
452 err = gen6_ppgtt_init_scratch(ppgtt); in gen6_ppgtt_create()
456 ppgtt->base.pd = gen6_alloc_top_pd(ppgtt); in gen6_ppgtt_create()
457 if (IS_ERR(ppgtt->base.pd)) { in gen6_ppgtt_create()
458 err = PTR_ERR(ppgtt->base.pd); in gen6_ppgtt_create()
462 return &ppgtt->base; in gen6_ppgtt_create()
465 i915_vm_put(&ppgtt->base.vm); in gen6_ppgtt_create()
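
Lines 429-465 span gen6_ppgtt_create() end to end: allocate the structure, initialize the base vm as a two-level tree (vm.top = 1; pd_shift = ilog2(SZ_4K * SZ_4K / sizeof(gen6_pte_t)) = 22, i.e. each PDE covers 4 MiB of address space), install the gen6 callbacks, then set up scratch and the top-level directory, unwinding through i915_vm_put() on failure. A reconstruction, with the signature and error labels assumed from upstream:

struct i915_ppgtt *gen6_ppgtt_create(struct intel_gt *gt)
{
        struct i915_ggtt * const ggtt = gt->ggtt;
        struct gen6_ppgtt *ppgtt;
        int err;

        ppgtt = kzalloc(sizeof(*ppgtt), GFP_KERNEL);
        if (!ppgtt)
                return ERR_PTR(-ENOMEM);

        mutex_init(&ppgtt->flush);

        /* Two levels: one page directory over 4KiB page tables */
        ppgtt_init(&ppgtt->base, gt, 0);
        ppgtt->base.vm.pd_shift = ilog2(SZ_4K * SZ_4K / sizeof(gen6_pte_t));
        ppgtt->base.vm.top = 1;

        ppgtt->base.vm.bind_async_flags = I915_VMA_LOCAL_BIND;
        ppgtt->base.vm.allocate_va_range = gen6_alloc_va_range;
        ppgtt->base.vm.clear_range = gen6_ppgtt_clear_range;
        ppgtt->base.vm.insert_entries = gen6_ppgtt_insert_entries;
        ppgtt->base.vm.cleanup = gen6_ppgtt_cleanup;

        ppgtt->base.vm.alloc_pt_dma = alloc_pt_dma;
        ppgtt->base.vm.alloc_scratch_dma = alloc_pt_dma;
        ppgtt->base.vm.pte_encode = ggtt->vm.pte_encode;

        err = gen6_ppgtt_init_scratch(ppgtt);
        if (err)
                goto err_put;

        ppgtt->base.pd = gen6_alloc_top_pd(ppgtt);
        if (IS_ERR(ppgtt->base.pd)) {
                err = PTR_ERR(ppgtt->base.pd);
                goto err_put;
        }

        return &ppgtt->base;

err_put:
        i915_vm_put(&ppgtt->base.vm);
        return ERR_PTR(err);
}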