Lines Matching refs:base
25 VmMapping::VmMapping(VmAddressRegion& parent, vaddr_t base, size_t size, uint32_t vmar_flags, in VmMapping() argument
27 : VmAddressRegionOrMapping(base, size, vmar_flags, in VmMapping()
76 zx_status_t VmMapping::Protect(vaddr_t base, size_t size, uint new_arch_mmu_flags) { in Protect() argument
80 if (!IS_PAGE_ALIGNED(base)) { in Protect()
91 if (size == 0 || !is_in_range(base, size)) { in Protect()
95 return ProtectLocked(base, size, new_arch_mmu_flags); in Protect()
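The hits at lines 76-95 show Protect() rejecting an unaligned base and a zero-size or out-of-range window before delegating to ProtectLocked(). A minimal standalone sketch of that guard pattern follows; the page size, error codes, and the is_in_range helper here are stand-ins for illustration, not the kernel's definitions.

#include <cstddef>
#include <cstdint>

// Stand-in constants for illustration only; the kernel defines its own.
constexpr size_t kPageSize = 4096;
constexpr int kErrInvalidArgs = -10;
constexpr int kOk = 0;

struct MappingRange {
  uintptr_t base;
  size_t size;

  // The requested window must sit entirely inside [base, base + size).
  bool is_in_range(uintptr_t b, size_t s) const {
    return b >= base && s <= size && (b - base) <= size - s;
  }
};

int ProtectSketch(const MappingRange& mapping, uintptr_t base, size_t size) {
  if (base % kPageSize != 0) {  // mirrors the IS_PAGE_ALIGNED(base) check at line 80
    return kErrInvalidArgs;
  }
  if (size == 0 || !mapping.is_in_range(base, size)) {  // mirrors line 91
    return kErrInvalidArgs;
  }
  // The real code then delegates to ProtectLocked(), as at line 95.
  return kOk;
}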
101 zx_status_t ProtectOrUnmap(const fbl::RefPtr<VmAspace>& aspace, vaddr_t base, size_t size, in ProtectOrUnmap() argument
104 return aspace->arch_aspace().Protect(base, size / PAGE_SIZE, new_arch_mmu_flags); in ProtectOrUnmap()
106 return aspace->arch_aspace().Unmap(base, size / PAGE_SIZE, nullptr); in ProtectOrUnmap()
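Lines 101-106 are the ProtectOrUnmap() helper. The condition itself is not in the hits, but the two branches call arch_aspace().Protect() and arch_aspace().Unmap(); a reasonable reading is that a range keeping some access permission is re-protected in place, while one losing all access is unmapped. A hedged model of that decision (the permission bits and stub arch-aspace type below are placeholders, not the kernel's ARCH_MMU_FLAG_* values):

#include <cstddef>
#include <cstdint>

// Placeholder permission bits and page size; the kernel uses its own
// ARCH_MMU_FLAG_PERM_* constants and PAGE_SIZE.
constexpr unsigned kPermRead = 1u << 0;
constexpr unsigned kPermWrite = 1u << 1;
constexpr unsigned kPermExecute = 1u << 2;
constexpr unsigned kPermMask = kPermRead | kPermWrite | kPermExecute;
constexpr size_t kPageSize = 4096;

// Stand-in for the arch-specific address space object; both calls are stubs.
struct ArchAspaceModel {
  int Protect(uintptr_t /*base*/, size_t /*pages*/, unsigned /*flags*/) { return 0; }
  int Unmap(uintptr_t /*base*/, size_t /*pages*/) { return 0; }
};

// If the new flags keep any access permission, update the existing hardware
// mappings in place; otherwise drop them, matching the two calls at listing
// lines 104 and 106.
int ProtectOrUnmapSketch(ArchAspaceModel& aspace, uintptr_t base, size_t size,
                         unsigned new_arch_mmu_flags) {
  if (new_arch_mmu_flags & kPermMask) {
    return aspace.Protect(base, size / kPageSize, new_arch_mmu_flags);
  }
  return aspace.Unmap(base, size / kPageSize);
}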
112 zx_status_t VmMapping::ProtectLocked(vaddr_t base, size_t size, uint new_arch_mmu_flags) { in ProtectLocked() argument
114 DEBUG_ASSERT(size != 0 && IS_PAGE_ALIGNED(base) && IS_PAGE_ALIGNED(size)); in ProtectLocked()
140 if (base_ == base && size_ == size) { in ProtectLocked()
141 zx_status_t status = ProtectOrUnmap(aspace_, base, size, new_arch_mmu_flags); in ProtectLocked()
148 if (base_ == base) { in ProtectLocked()
152 new (&ac) VmMapping(*parent_, base + size, size_ - size, flags_, in ProtectLocked()
158 zx_status_t status = ProtectOrUnmap(aspace_, base, size, new_arch_mmu_flags); in ProtectLocked()
168 if (base_ + size_ == base + size) { in ProtectLocked()
173 new (&ac) VmMapping(*parent_, base, size, flags_, in ProtectLocked()
174 object_, object_offset_ + base - base_, in ProtectLocked()
180 zx_status_t status = ProtectOrUnmap(aspace_, base, size, new_arch_mmu_flags); in ProtectLocked()
189 const size_t left_size = base - base_; in ProtectLocked()
190 const size_t right_size = (base_ + size_) - (base + size); in ProtectLocked()
191 const uint64_t center_vmo_offset = object_offset_ + base - base_; in ProtectLocked()
196 new (&ac) VmMapping(*parent_, base, size, flags_, in ProtectLocked()
202 new (&ac) VmMapping(*parent_, base + size, right_size, flags_, in ProtectLocked()
208 zx_status_t status = ProtectOrUnmap(aspace_, base, size, new_arch_mmu_flags); in ProtectLocked()
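The ProtectLocked() hits at lines 112-208 trace four cases, keyed on how the protected window [base, base + size) lines up with the mapping [base_, base_ + size_): the whole mapping, a prefix, a suffix, or an interior slice. Only the first case leaves the mapping count unchanged; the others carve out one or two sibling VmMappings so each mapping keeps a single set of arch flags. A simplified sketch of that case analysis over plain ranges (the types and printed labels here are illustrative, not the kernel's):

#include <cinttypes>
#include <cstdint>
#include <cstdio>

struct Range { uintptr_t base; size_t size; };

// Classify how a protect window overlaps an existing mapping, mirroring the
// case structure visible in ProtectLocked(). Assumes the window has already
// been validated to lie inside the mapping.
void ClassifyProtect(Range mapping, Range window) {
  const bool starts_at_base = (window.base == mapping.base);
  const bool ends_at_limit =
      (window.base + window.size == mapping.base + mapping.size);

  if (starts_at_base && ends_at_limit) {
    // Line 140: the whole mapping changes flags; nothing is split.
    std::printf("whole mapping: change flags in place\n");
  } else if (starts_at_base) {
    // Lines 148-152: this mapping keeps the protected head; a new sibling
    // covers the tail with the old flags.
    std::printf("prefix: tail sibling at %#" PRIxPTR ", size %#zx\n",
                window.base + window.size, mapping.size - window.size);
  } else if (ends_at_limit) {
    // Lines 168-174: the original keeps the head; a new sibling covers the
    // protected tail and is the one whose flags change.
    std::printf("suffix: tail sibling at %#" PRIxPTR ", size %#zx\n",
                window.base, window.size);
  } else {
    // Lines 189-202: interior slice; the original shrinks to the left piece
    // and two new siblings are created for the center and right pieces.
    const size_t left_size = window.base - mapping.base;
    const size_t right_size =
        (mapping.base + mapping.size) - (window.base + window.size);
    std::printf("split: left %#zx, center %#zx, right %#zx\n",
                left_size, window.size, right_size);
  }
}

int main() {
  ClassifyProtect({0x10000, 0x8000}, {0x12000, 0x2000});  // interior slice
  return 0;
}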
219 zx_status_t VmMapping::Unmap(vaddr_t base, size_t size) { in Unmap() argument
220 LTRACEF("%p %#" PRIxPTR " %zu\n", this, base, size); in Unmap()
222 if (!IS_PAGE_ALIGNED(base)) { in Unmap()
238 if (size == 0 || !is_in_range(base, size)) { in Unmap()
243 if (base == base_ && size == size_) { in Unmap()
247 return UnmapLocked(base, size); in Unmap()
250 zx_status_t VmMapping::UnmapLocked(vaddr_t base, size_t size) { in UnmapLocked() argument
253 DEBUG_ASSERT(size != 0 && IS_PAGE_ALIGNED(size) && IS_PAGE_ALIGNED(base)); in UnmapLocked()
254 DEBUG_ASSERT(base >= base_ && base - base_ < size_); in UnmapLocked()
255 DEBUG_ASSERT(size_ - (base - base_) >= size); in UnmapLocked()
263 DEBUG_ASSERT(parent_->state_ != LifeCycleState::DEAD || (base == base_ && size == size_)); in UnmapLocked()
272 if (base_ == base || base + size == base_ + size_) { in UnmapLocked()
273 LTRACEF("unmapping base %#lx size %#zx\n", base, size); in UnmapLocked()
274 zx_status_t status = aspace_->arch_aspace().Unmap(base, size / PAGE_SIZE, nullptr); in UnmapLocked()
279 if (base_ == base && size_ != size) { in UnmapLocked()
295 const uint64_t vmo_offset = object_offset_ + (base + size) - base_; in UnmapLocked()
296 const vaddr_t new_base = base + size; in UnmapLocked()
308 LTRACEF("unmapping base %#lx size %#zx\n", base, size); in UnmapLocked()
309 zx_status_t status = aspace_->arch_aspace().Unmap(base, size / PAGE_SIZE, nullptr); in UnmapLocked()
315 size_ = base - base_; in UnmapLocked()
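Unmap() at lines 219-247 performs the same alignment and range validation as Protect() before handing off, and UnmapLocked() at lines 250-315 then mirrors the split logic above. If the range touches either end of the mapping (line 272), the hardware range is unmapped and the mapping is trimmed in place; trimming from the front (line 279) also has to move the mapping's base and VMO offset forward. If the hole is interior, a new mapping is built for the tail starting at base + size with VMO offset object_offset_ + (base + size) - base_ (lines 295-296), and the original shrinks to size_ = base - base_ (line 315). A rough model of that interior-hole arithmetic, using illustrative types only:

#include <cstddef>
#include <cstdint>

// Illustrative model of a mapping: a virtual range plus the offset of its
// start within the backing VMO. Not the kernel's VmMapping type.
struct MappingModel {
  uintptr_t base;
  size_t size;
  uint64_t object_offset;
};

struct UnmapSplit {
  MappingModel head;  // the original mapping, shrunk to the part before the hole
  MappingModel tail;  // a new mapping covering the part after the hole
};

// Interior unmap: punch a hole [hole_base, hole_base + hole_size) out of the
// middle of `m`, mirroring the arithmetic at listing lines 295-296 and 315.
UnmapSplit SplitForUnmap(const MappingModel& m, uintptr_t hole_base,
                         size_t hole_size) {
  UnmapSplit out;

  out.tail.base = hole_base + hole_size;                                // new_base
  out.tail.size = (m.base + m.size) - out.tail.base;
  out.tail.object_offset = m.object_offset + (out.tail.base - m.base);  // vmo_offset

  out.head = m;
  out.head.size = hole_base - m.base;  // size_ = base - base_
  return out;
}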
395 VmMappingCoalescer(VmMapping* mapping, vaddr_t base);
435 VmMappingCoalescer::VmMappingCoalescer(VmMapping* mapping, vaddr_t base) in VmMappingCoalescer() argument
436 : mapping_(mapping), base_(base), count_(0), aborted_(false) {} in VmMappingCoalescer()
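The final hits (lines 395-436) show only the VmMappingCoalescer constructor, which records the owning mapping, a starting base address, an entry count, and an aborted flag. The listing does not show its methods; as an assumption from those field names, it looks like a small batcher that accumulates entries starting at base_ and flushes them together. A generic sketch of that batching pattern, purely illustrative and not the kernel's class (none of these method names come from the listing):

#include <cstddef>
#include <cstdint>
#include <vector>

// Generic batcher sketch: collect contiguous entries starting at `base`,
// flush them in one bulk operation, and remember whether a failure aborted
// the run. The kernel's VmMappingCoalescer is defined elsewhere in the file.
class CoalescerSketch {
 public:
  explicit CoalescerSketch(uintptr_t base) : base_(base), count_(0), aborted_(false) {}

  void Append(uint64_t paddr) {
    if (aborted_) {
      return;
    }
    pending_.push_back(paddr);
    count_++;
  }

  bool Flush() {
    if (aborted_ || pending_.empty()) {
      return !aborted_;
    }
    // A real implementation would hand `pending_` to one bulk map call here.
    base_ += pending_.size() * 4096;  // assume 4 KiB pages for the sketch
    pending_.clear();
    return true;
  }

  void Abort() { aborted_ = true; }

 private:
  uintptr_t base_;
  size_t count_;
  bool aborted_;
  std::vector<uint64_t> pending_;
};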