Searched refs:xe_vma_end (Results 1 – 6 of 6) sorted by relevance
688  xe_vma_end(vma), &xe_walk.base); in xe_pt_stage_bind()
827  xe_vma_end(vma), &xe_walk.base); in xe_pt_zap_ptes()
1520 .modified_end = xe_vma_end(vma), in xe_pt_stage_unbind()
1526 xe_vma_end(vma), &xe_walk.base); in xe_pt_stage_unbind()
1622 last = ALIGN(xe_vma_end(vma), 0x1ull << xe_pt_shift(level)) - 1; in xe_pt_update_ops_rfence_interval()
1653 xe_vma_start(vma), xe_vma_end(vma) - 1); in bind_op_prepare()
1720 xe_vma_start(vma), xe_vma_end(vma) - 1); in unbind_op_prepare()
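
Result line 1622 rounds the exclusive VMA end up to the granularity of one page-table entry at the given level, then subtracts 1 to get the inclusive last address of the range-fence interval. A minimal userspace sketch of that arithmetic, with the kernel's ALIGN() macro reimplemented, hypothetical numbers, and the assumption that xe_pt_shift(level) returns the address shift covered by one entry at that level:

    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t u64;

    /* Round x up to the next multiple of a (a must be a power of two),
     * mirroring the kernel's ALIGN() macro. */
    #define ALIGN_POW2(x, a) (((x) + ((a) - 1)) & ~((u64)(a) - 1))

    int main(void)
    {
        /* Hypothetical values: a VMA whose exclusive end is 0x12345,
         * covered by a page-table level whose entries each span
         * 1 << 21 bytes (2 MiB). */
        u64 vma_end = 0x12345;          /* what xe_vma_end() would return */
        unsigned int level_shift = 21;  /* stand-in for xe_pt_shift(level) */

        /* Inclusive last address of the covered page-table range,
         * as computed at result line 1622. */
        u64 last = ALIGN_POW2(vma_end, (u64)1 << level_shift) - 1;

        printf("last = 0x%llx\n", (unsigned long long)last);
        return 0;
    }

With these numbers, last comes out to 0x1fffff, the last byte of the 2 MiB entry that contains the mapping's end.
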
118 static inline u64 xe_vma_end(struct xe_vma *vma) in xe_vma_end() function
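The definition at line 118 is a one-line inline accessor in the VM header; its body is not shown in these results. A minimal sketch, assuming the end is derived from hypothetical xe_vma_start()/xe_vma_size() style helpers over a simplified stand-in struct (the real struct xe_vma derives its address and range from an embedded DRM GPUVA object rather than raw fields):

    #include <stdint.h>

    typedef uint64_t u64;

    /* Simplified stand-in for the driver's VMA bookkeeping. */
    struct xe_vma {
        u64 start; /* first GPU VA covered by the mapping */
        u64 size;  /* length of the mapping in bytes */
    };

    static inline u64 xe_vma_start(struct xe_vma *vma)
    {
        return vma->start;
    }

    static inline u64 xe_vma_size(struct xe_vma *vma)
    {
        return vma->size;
    }

    /* Exclusive end: one past the last mapped byte, which is why call
     * sites such as bind_op_prepare() above print xe_vma_end(vma) - 1. */
    static inline u64 xe_vma_end(struct xe_vma *vma)
    {
        return xe_vma_start(vma) + xe_vma_size(vma);
    }
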
96 __entry->end = xe_vma_end(vma) - 1;
413 xe_vma_end(vma), in xe_gt_tlb_invalidation_vma()
76 if (page_addr > xe_vma_end(vma) - 1 || in vma_matches()
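The page-fault hit at line 76 is truncated after the first half of its condition; the pattern it illustrates is a containment test against an inclusive last address. A hypothetical vma_contains() helper (an illustrative name, not the driver's vma_matches(), whose full condition is not shown here), reusing the same simplified stand-ins as the sketch above:

    #include <stdbool.h>
    #include <stdint.h>

    typedef uint64_t u64;

    struct xe_vma { u64 start, size; };

    static inline u64 xe_vma_start(struct xe_vma *vma) { return vma->start; }
    static inline u64 xe_vma_end(struct xe_vma *vma)   { return vma->start + vma->size; }

    /* Reject a faulting address that lies past the inclusive last byte
     * (xe_vma_end() - 1) or before the start of the mapping. */
    static bool vma_contains(struct xe_vma *vma, u64 page_addr)
    {
        if (page_addr > xe_vma_end(vma) - 1 ||
            page_addr < xe_vma_start(vma))
            return false;

        return true;
    }
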
2204 IS_ALIGNED(xe_vma_end(vma), in vm_bind_ioctl_ops_parse()
2209 xe_vma_end(vma) - in vm_bind_ioctl_ops_parse()
2211 op->remap.start = xe_vma_end(vma); in vm_bind_ioctl_ops_parse()
2248 xe_vma_end(old) - in vm_bind_ioctl_ops_parse()
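
Line 2211 shows xe_vma_end() being used to recompute the start of a remap region while a bind operation splits an existing mapping. The exact driver logic is not visible in these results; a hypothetical sketch of the underlying range arithmetic (a region that resumes at the exclusive end of another mapping), with illustrative names remap_tail() and struct range:

    #include <stdint.h>

    typedef uint64_t u64;

    struct range { u64 start, size; };

    static inline u64 range_end(const struct range *r)
    {
        return r->start + r->size; /* exclusive, like xe_vma_end() */
    }

    /* Given an old mapping and a new mapping that overlaps its front,
     * compute the trailing piece of the old mapping that survives the
     * split: it starts where the new mapping ends. */
    static struct range remap_tail(const struct range *old, const struct range *new_map)
    {
        struct range tail = {
            .start = range_end(new_map),
            .size  = range_end(old) - range_end(new_map),
        };

        return tail;
    }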