Lines Matching refs:base

24 VmAddressRegion::VmAddressRegion(VmAspace& aspace, vaddr_t base, size_t size, uint32_t vmar_flags) in VmAddressRegion() argument
25 : VmAddressRegionOrMapping(base, size, vmar_flags | VMAR_CAN_RWX_FLAGS, in VmAddressRegion()
35 VmAddressRegion::VmAddressRegion(VmAddressRegion& parent, vaddr_t base, size_t size, in VmAddressRegion() argument
37 : VmAddressRegionOrMapping(base, size, vmar_flags, parent.aspace_.get(), in VmAddressRegion()
45 : VmAddressRegion(kernel_aspace, kernel_aspace.base(), kernel_aspace.size(), in VmAddressRegion()
64 auto vmar = new (&ac) VmAddressRegion(aspace, aspace.base(), aspace.size(), vmar_flags); in CreateRoot()
265 vaddr_t base, size_t size, uint32_t vmar_flags, in OverwriteVmMapping() argument
277 VmMapping(*this, base, size, vmar_flags, in OverwriteVmMapping()
283 zx_status_t status = UnmapInternalLocked(base, size, false /* can_destroy_regions */, in OverwriteVmMapping()
359 if (!itr.IsValid() || itr->base() > addr || addr > itr->base() + itr->size() - 1) { in FindRegionLocked()
396 bool VmAddressRegion::IsRangeAvailableLocked(vaddr_t base, size_t size) { in IsRangeAvailableLocked() argument
404 auto prev = subregions_.upper_bound(base); in IsRangeAvailableLocked()
409 if (add_overflow(prev->base(), prev->size() - 1, &prev_last_byte)) { in IsRangeAvailableLocked()
412 if (prev_last_byte >= base) { in IsRangeAvailableLocked()
419 if (add_overflow(base, size - 1, &last_byte)) { in IsRangeAvailableLocked()
422 if (next->base() <= last_byte) { in IsRangeAvailableLocked()
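The IsRangeAvailableLocked hits above compute the last byte of each range (base + size - 1) and bail out if the addition wraps, so a region ending at the top of the address space never produces a bogus one-past-the-end value. A minimal standalone sketch of that overlap test, using the compiler's __builtin_add_overflow in place of fbl's add_overflow (the name RangesOverlap is mine, not Zircon's):

```cpp
#include <cstddef>
#include <cstdint>

using vaddr_t = uintptr_t;

// True if [base, base + size) overlaps [r_base, r_base + r_size). Both sizes
// must be non-zero (the callers reject size == 0 earlier). Comparing last
// bytes instead of one-past-the-end values means a range ending at the top of
// the address space cannot wrap to 0 and defeat the test.
bool RangesOverlap(vaddr_t base, size_t size, vaddr_t r_base, size_t r_size) {
    vaddr_t last_byte;
    vaddr_t r_last_byte;
    if (__builtin_add_overflow(base, size - 1, &last_byte) ||
        __builtin_add_overflow(r_base, r_size - 1, &r_last_byte)) {
        return true;  // a wrapping range is treated as conflicting
    }
    return base <= r_last_byte && r_base <= last_byte;
}
```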
445 if (add_overflow(prev->base(), prev->size(), &gap_beg) || in CheckGapLocked()
455 if (gap_beg == next->base()) { in CheckGapLocked()
458 if (sub_overflow(next->base(), 1, &gap_end) || in CheckGapLocked()
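CheckGapLocked bounds the candidate gap with the same defensive arithmetic: the previous region's end via add_overflow, the next region's last preceding byte via sub_overflow. A sketch of that gap computation under the same assumptions (Region and GapBetween are illustrative names, not the kernel's):

```cpp
#include <cstddef>
#include <cstdint>
#include <optional>
#include <utility>

using vaddr_t = uintptr_t;

struct Region {
    vaddr_t base;
    size_t size;
};

// Returns the inclusive gap [gap_beg, gap_end] between two adjacent regions,
// or nullopt if they touch or either bound computation would wrap.
std::optional<std::pair<vaddr_t, vaddr_t>> GapBetween(const Region& prev, const Region& next) {
    vaddr_t gap_beg;  // first byte of the gap
    if (__builtin_add_overflow(prev.base, prev.size, &gap_beg)) {
        return std::nullopt;
    }
    if (gap_beg == next.base) {
        return std::nullopt;  // regions are back to back, no gap
    }
    vaddr_t gap_end;  // last byte of the gap
    if (__builtin_sub_overflow(next.base, 1, &gap_end)) {
        return std::nullopt;
    }
    return std::make_pair(gap_beg, gap_end);
}
```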
568 itr = up->subregions_.upper_bound(curr->base()); in EnumerateChildrenLocked()
610 zx_status_t VmAddressRegion::Unmap(vaddr_t base, size_t size) { in Unmap() argument
614 if (size == 0 || !IS_PAGE_ALIGNED(base)) { in Unmap()
623 return UnmapInternalLocked(base, size, true /* can_destroy_regions */, in Unmap()
627 zx_status_t VmAddressRegion::UnmapAllowPartial(vaddr_t base, size_t size) { in UnmapAllowPartial() argument
631 if (size == 0 || !IS_PAGE_ALIGNED(base)) { in UnmapAllowPartial()
640 return UnmapInternalLocked(base, size, true /* can_destroy_regions */, in UnmapAllowPartial()
644 VmAddressRegion::ChildList::iterator VmAddressRegion::UpperBoundInternalLocked(vaddr_t base) { in UpperBoundInternalLocked() argument
647 auto itr = --subregions_.upper_bound(base); in UpperBoundInternalLocked()
650 } else if (base >= itr->base() + itr->size()) { in UpperBoundInternalLocked()
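UpperBoundInternalLocked takes upper_bound on the requested base, steps back one entry, and skips that entry if it ends at or before the base. The same lookup expressed against a std::map keyed by region base might look like the sketch below (illustrative only; the real subregions_ is an intrusive container, not a std::map):

```cpp
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <map>

using vaddr_t = uintptr_t;

struct Region {
    size_t size;
};

// Finds the first region that could intersect an operation starting at `addr`:
// take the first entry with base > addr, step back one, and keep it only if it
// actually extends past addr.
std::map<vaddr_t, Region>::iterator UpperBoundFrom(std::map<vaddr_t, Region>& regions,
                                                   vaddr_t addr) {
    auto itr = regions.upper_bound(addr);
    if (itr == regions.begin()) {
        return itr;  // nothing starts at or before addr
    }
    auto prev = std::prev(itr);
    if (addr >= prev->first + prev->second.size) {
        return itr;  // the preceding region ends before addr; start with the next one
    }
    return prev;  // the preceding region contains addr
}
```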
657 zx_status_t VmAddressRegion::UnmapInternalLocked(vaddr_t base, size_t size, in UnmapInternalLocked() argument
662 if (!is_in_range(base, size)) { in UnmapInternalLocked()
672 aspace_->vdso_code_mapping_->base() >= base && in UnmapInternalLocked()
673 aspace_->vdso_code_mapping_->base() - base < size) { in UnmapInternalLocked()
677 const vaddr_t end_addr = base + size; in UnmapInternalLocked()
679 auto begin = UpperBoundInternalLocked(base); in UnmapInternalLocked()
685 const vaddr_t itr_end = itr->base() + itr->size(); in UnmapInternalLocked()
687 itr->base() < base || itr_end > end_addr)) { in UnmapInternalLocked()
700 const vaddr_t curr_end = curr->base() + curr->size(); in UnmapInternalLocked()
701 const vaddr_t unmap_base = fbl::max(curr->base(), base); in UnmapInternalLocked()
705 if (unmap_base == curr->base() && unmap_size == curr->size()) { in UnmapInternalLocked()
728 __UNUSED bool intersects = GetIntersect(base, size, curr->base(), curr->size(), in UnmapInternalLocked()
735 begin = vmar->UpperBoundInternalLocked(base); in UnmapInternalLocked()
740 } else if (unmap_base == curr->base() && unmap_size == curr->size()) { in UnmapInternalLocked()
750 begin = up->subregions_.upper_bound(curr->base()); in UnmapInternalLocked()
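The UnmapInternalLocked hits show two recurring patterns: an overflow-free containment test for the vDSO code mapping (lines 672-673) and clipping the requested range against each mapping to decide between destroying it outright and trimming it (lines 700-705). A sketch of both, with names of my own choosing:

```cpp
#include <algorithm>
#include <cstddef>
#include <cstdint>

using vaddr_t = uintptr_t;

// True if addr lies in [base, base + size) without ever forming base + size,
// so the comparison cannot wrap even for a range that ends at the top of the
// address space (the shape of the vDSO check on lines 672-673).
bool Contains(vaddr_t base, size_t size, vaddr_t addr) {
    return addr >= base && addr - base < size;
}

struct Clip {
    vaddr_t base;
    size_t size;
    bool whole;  // the clip covers the entire mapping
};

// Clips an unmap request [base, end_addr) against a mapping
// [map_base, map_base + map_size); assumes the mapping's end does not wrap,
// which the aspace has already guaranteed for established mappings.
Clip ClipUnmap(vaddr_t base, vaddr_t end_addr, vaddr_t map_base, size_t map_size) {
    const vaddr_t map_end = map_base + map_size;
    const vaddr_t unmap_base = std::max(map_base, base);
    const size_t unmap_size = std::min(map_end, end_addr) - unmap_base;
    return Clip{unmap_base, unmap_size,
                unmap_base == map_base && unmap_size == map_size};
}
```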
770 zx_status_t VmAddressRegion::Protect(vaddr_t base, size_t size, uint new_arch_mmu_flags) { in Protect() argument
774 if (size == 0 || !IS_PAGE_ALIGNED(base)) { in Protect()
783 if (!is_in_range(base, size)) { in Protect()
791 const vaddr_t end_addr = base + size; in Protect()
797 auto begin = --subregions_.upper_bound(base); in Protect()
798 if (!begin.IsValid() || begin->base() + begin->size() <= base) { in Protect()
804 vaddr_t last_mapped = begin->base(); in Protect()
809 if (itr->base() != last_mapped) { in Protect()
819 last_mapped = itr->base() + itr->size(); in Protect()
821 if (last_mapped < base + size) { in Protect()
831 const vaddr_t curr_end = itr->base() + itr->size(); in Protect()
832 const vaddr_t protect_base = fbl::max(itr->base(), base); in Protect()
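Protect walks the mappings covering [base, base + size) and tracks last_mapped so it can reject any hole before changing permissions. A standalone version of that contiguity check (RangeFullyMapped is an illustrative name; it assumes base + size does not wrap, which the real code has already validated via is_in_range):

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

using vaddr_t = uintptr_t;

struct Mapping {
    vaddr_t base;
    size_t size;
};

// Checks that `mappings` (sorted by base, restricted to those intersecting the
// range) cover [base, base + size) with no holes, mirroring the last_mapped
// walk in Protect().
bool RangeFullyMapped(const std::vector<Mapping>& mappings, vaddr_t base, size_t size) {
    if (mappings.empty() || mappings.front().base > base) {
        return false;  // nothing mapped at the start of the range
    }
    vaddr_t last_mapped = mappings.front().base;
    for (const Mapping& m : mappings) {
        if (m.base != last_mapped) {
            return false;  // hole between the previous mapping and this one
        }
        last_mapped = m.base + m.size;
    }
    return last_mapped >= base + size;  // the final mapping must reach the end
}
```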
854 const vaddr_t base = 0; in LinearRegionAllocatorLocked() local
867 if (CheckGapLocked(before_iter, after_iter, spot, base, align, size, 0, arch_mmu_flags)) { in LinearRegionAllocatorLocked()
891 if (region.base() > prev_region_end) { in ForEachGap()
892 const size_t gap = region.base() - prev_region_end; in ForEachGap()
897 prev_region_end = ROUNDUP(region.base() + region.size(), align); in ForEachGap()
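ForEachGap reports the gap before each region and then rounds that region's end up to the requested alignment before continuing. A sketch of that loop (the real code also scans the tail gap after the last region, which the matched lines don't show; RoundUp stands in for the ROUNDUP macro):

```cpp
#include <cstddef>
#include <cstdint>
#include <functional>
#include <vector>

using vaddr_t = uintptr_t;

struct Region {
    vaddr_t base;
    size_t size;
};

// Rounds v up to a power-of-two alignment.
constexpr vaddr_t RoundUp(vaddr_t v, vaddr_t align) {
    return (v + align - 1) & ~(align - 1);
}

// Invokes fn(gap_base, gap_len) for the gap before each region, rounding each
// region's end up to `align` before moving on. `regions` must be sorted by
// base and non-overlapping; `align` must be a power of two.
void ForEachGap(const std::vector<Region>& regions, vaddr_t scan_base, vaddr_t align,
                const std::function<void(vaddr_t, size_t)>& fn) {
    vaddr_t prev_region_end = RoundUp(scan_base, align);
    for (const Region& r : regions) {
        if (r.base > prev_region_end) {
            fn(prev_region_end, r.base - prev_region_end);
        }
        prev_region_end = RoundUp(r.base + r.size, align);
    }
}
```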
1027 vaddr_t base; in CompactRandomizedRegionAllocatorLocked() local
1028 if (sub_overflow(after_iter->base(), size, &base) || in CompactRandomizedRegionAllocatorLocked()
1029 sub_overflow(base, PAGE_SIZE * gap_pages, &base)) { in CompactRandomizedRegionAllocatorLocked()
1033 chosen_base = base; in CompactRandomizedRegionAllocatorLocked()
1039 vaddr_t base; in CompactRandomizedRegionAllocatorLocked() local
1040 if (add_overflow(before_iter->base(), before_iter->size(), &base) || in CompactRandomizedRegionAllocatorLocked()
1041 add_overflow(base, PAGE_SIZE * gap_pages, &base)) { in CompactRandomizedRegionAllocatorLocked()
1045 chosen_base = base; in CompactRandomizedRegionAllocatorLocked()
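CompactRandomizedRegionAllocatorLocked derives a candidate base either just below the following region or just above the preceding one, leaving gap_pages of guard space and failing if either arithmetic step wraps. A sketch of those two computations (kPageSize and the function names are stand-ins, and __builtin_*_overflow replaces fbl's add_overflow/sub_overflow):

```cpp
#include <cstddef>
#include <cstdint>
#include <optional>

using vaddr_t = uintptr_t;

constexpr size_t kPageSize = 4096;  // stand-in for PAGE_SIZE

// Candidate base just below a neighbor starting at after_base: back off by the
// allocation size plus gap_pages of guard space, failing if either step wraps.
std::optional<vaddr_t> BaseBelow(vaddr_t after_base, size_t size, size_t gap_pages) {
    vaddr_t base;
    if (__builtin_sub_overflow(after_base, size, &base) ||
        __builtin_sub_overflow(base, kPageSize * gap_pages, &base)) {
        return std::nullopt;
    }
    return base;
}

// Candidate base just above a neighbor occupying [before_base, before_base +
// before_size): advance past it plus the same guard gap, with matching
// overflow checks.
std::optional<vaddr_t> BaseAbove(vaddr_t before_base, size_t before_size, size_t gap_pages) {
    vaddr_t base;
    if (__builtin_add_overflow(before_base, before_size, &base) ||
        __builtin_add_overflow(base, kPageSize * gap_pages, &base)) {
        return std::nullopt;
    }
    return base;
}
```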