/src/arch/aarch64/hypervisor/

vm.c
  131  paddr_t end, mm_mode_t mode, struct mpool *ppool)  in arch_vm_identity_prepare() argument
  136  return mm_identity_prepare(ptable, begin, end, mode, ppool);  in arch_vm_identity_prepare()
  149  paddr_t end, mm_mode_t mode, struct mpool *ppool,  in arch_vm_identity_commit() argument
  155  mm_identity_commit(&vm_locked.vm->ptable, begin, end, mode,  in arch_vm_identity_commit()
  173  mm_vm_identity_commit(ptable, begin, end, mode, ppool, ipa);  in arch_vm_identity_commit()
  210  ipaddr_t end, mm_mode_t *mode)  in arch_vm_mem_get_mode() argument
  217  va_from_pa(pa_from_ipa(end)), mode);  in arch_vm_mem_get_mode()
  244  paddr_t end, mm_mode_t mode,  in arch_vm_iommu_mm_prepare() argument
  259  paddr_t end, mm_mode_t mode,  in arch_vm_iommu_mm_commit() argument
  271  mm_vm_identity_commit(ptable, begin, end, mode, ppool, ipa);  in arch_vm_iommu_mm_commit()
  [all …]
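Throughout these results, memory regions travel as begin/end pairs of Hafnium's paddr_t wrapper, with end exclusive and the byte size of a range recovered via pa_difference(). The following is a minimal standalone sketch of that half-open convention; the struct and helper definitions are stand-ins written for this example, not the real ones from Hafnium's address headers.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Stand-in for Hafnium's opaque physical-address wrapper. */
typedef struct {
        uintptr_t pa;
} paddr_t;

static paddr_t pa_init(uintptr_t pa)
{
        return (paddr_t){.pa = pa};
}

static uintptr_t pa_addr(paddr_t pa)
{
        return pa.pa;
}

/* Advance a physical address by n bytes. */
static paddr_t pa_add(paddr_t pa, size_t n)
{
        return pa_init(pa_addr(pa) + n);
}

/* Byte size of the half-open range [begin, end). */
static size_t pa_difference(paddr_t begin, paddr_t end)
{
        return pa_addr(end) - pa_addr(begin);
}

int main(void)
{
        paddr_t begin = pa_init(0x80000000U);
        paddr_t end = pa_add(begin, 0x10000);

        printf("[%#lx, %#lx) is %zu bytes\n", (unsigned long)pa_addr(begin),
               (unsigned long)pa_addr(end), pa_difference(begin, end));
        return 0;
}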
other_world.c
  55  params->ns_mem_ranges[i].end,  in arch_other_world_vm_init()
  71  params->ns_device_mem_ranges[i].end,  in arch_other_world_vm_init()
/src/

partition_pkg.c
  26  pa_addr(pkg->total.end));  in dump_partition_package()
  28  pa_addr(pkg->pm.end));  in dump_partition_package()
  30  pa_addr(pkg->img.end));  in dump_partition_package()
  33  pa_addr(pkg->boot_info.end));  in dump_partition_package()
  50  pkg->total.end = pa_add(pkg_start, total_mem_size);  in partition_pkg_from_sp_pkg()
  53  pkg->pm.end = pa_add(pkg->pm.begin, header.pm_size);  in partition_pkg_from_sp_pkg()
  68  pkg->hob.end = pa_init(0);  in partition_pkg_from_sp_pkg()
  73  pkg->total.end, MM_MODE_R,  in partition_pkg_from_sp_pkg()
  98  mem_range->end = pa_init(0);  in partition_pkg_init_memory_range_from_te()
  119  pkg->total.end = pa_add(pkg_start, tl->size);  in partition_pkg_from_tl()
  [all …]
mm.c
  164  va_init(end));  in mm_invalidate_tlb()
  372  end = level_end;  in mm_map_level()
  376  while (begin < end) {  in mm_map_level()
  462  end, ptable_end);  in mm_ptable_identity_map()
  463  end = ptable_end;  in mm_ptable_identity_map()
  466  if (begin >= end) {  in mm_ptable_identity_map()
  470  begin, end);  in mm_ptable_identity_map()
  877  end = level_end;  in mm_ptable_get_attrs_level()
  933  if (begin >= end) {  in mm_get_attrs()
  937  begin, end);  in mm_get_attrs()
  [all …]
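The mm_ptable_identity_map() matches above (lines 462-470) clamp the requested end to the last address the page table can cover, then bail out early if nothing is left in the range. A sketch of that clamp-then-check step on plain integer addresses follows; clamp_range() and the log message are illustrative stand-ins, not the Hafnium code.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/*
 * Clamp [*begin, *end) to [0, limit) and report whether anything remains to
 * be mapped, mirroring the "end = ptable_end; if (begin >= end)" checks in
 * mm_ptable_identity_map().
 */
static bool clamp_range(uintptr_t *begin, uintptr_t *end, uintptr_t limit)
{
        if (*end > limit) {
                printf("truncating end %#lx to table limit %#lx\n",
                       (unsigned long)*end, (unsigned long)limit);
                *end = limit;
        }

        /* An empty (or inverted) range is a successful no-op. */
        return *begin < *end;
}

int main(void)
{
        uintptr_t begin = 0x40000000;
        uintptr_t end = 0xc0000000;
        uintptr_t ptable_end = 0x80000000;

        if (clamp_range(&begin, &end, ptable_end)) {
                printf("mapping [%#lx, %#lx)\n", (unsigned long)begin,
                       (unsigned long)end);
        }
        return 0;
}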
fdt_handler.c
  37  bool fdt_find_initrd(const struct fdt *fdt, paddr_t *begin, paddr_t *end)  in fdt_find_initrd() argument
  59  *end = pa_init(initrd_end);  in fdt_find_initrd()
  155  mem_ranges[mem_range_index].end =  in fdt_find_memory_ranges()
  222  paddr_t end = pa_add(begin, fdt_size(fdt));  in fdt_unmap() local
  224  if (!mm_unmap(stage1_locked, begin, end, ppool)) {  in fdt_unmap()
  255  *size = pa_difference(mem_range.begin, mem_range.end);  in fdt_get_memory_size()
fdt_handler_test.cc
  110  EXPECT_THAT(pa_addr(params.mem_ranges[0].end), Eq(0x20000000));  in TEST()
  112  EXPECT_THAT(pa_addr(params.mem_ranges[1].end), Eq(0x30010000));  in TEST()
  114  EXPECT_THAT(pa_addr(params.mem_ranges[2].end), Eq(0x30030000));  in TEST()
load.c
  69  paddr_t end, const struct manifest_vm *manifest_vm,  in load_kernel() argument
  83  if (pa_difference(begin, end) < size) {  in load_kernel()
  366  params->mem_ranges[i].end,  in load_primary()
  381  params->device_mem_ranges[i].end,  in load_primary()
  431  paddr_t end, size_t fdt_max_size,  in load_secondary_fdt() argument
  879  *found_end = mem_ranges[i].end;  in carve_out_mem_range()
  882  mem_ranges[i].end = *found_begin;  in carve_out_mem_range()
  914  .end = after[i].begin;  in update_reserved_ranges()
  917  if (pa_addr(after[i].end) < pa_addr(before[i].end)) {  in update_reserved_ranges()
  925  .begin = after[i].end;  in update_reserved_ranges()
  [all …]
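carve_out_mem_range() (lines 879-882 above) hands out a chunk from the top of an existing memory range and shrinks that range's end to match. Below is a rough sketch of the idea on plain integer ranges; carve_out() and struct range are simplified stand-ins for the real helper, which works on paddr_t mem_range entries and performs additional validation.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct range {
        uint64_t begin;
        uint64_t end; /* exclusive */
};

/*
 * Carve size bytes out of the top of the first range large enough to hold
 * them, shrinking that range and returning the carved piece.
 */
static bool carve_out(struct range *ranges, size_t n, uint64_t size,
                      struct range *found)
{
        for (size_t i = 0; i < n; i++) {
                if (ranges[i].end - ranges[i].begin >= size) {
                        found->end = ranges[i].end;
                        found->begin = found->end - size;
                        ranges[i].end = found->begin;
                        return true;
                }
        }
        return false;
}

int main(void)
{
        struct range ranges[] = {
                {0x80000000, 0x80100000}, /* 1 MiB: too small */
                {0x90000000, 0xa0000000}, /* 256 MiB: used */
        };
        struct range carved;

        if (carve_out(ranges, 2, 0x200000, &carved)) {
                printf("carved [%#llx, %#llx), donor now ends at %#llx\n",
                       (unsigned long long)carved.begin,
                       (unsigned long long)carved.end,
                       (unsigned long long)ranges[1].end);
        }
        return 0;
}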
boot_info.c
  105  pa_difference(pkg->boot_info.begin, pkg->boot_info.end);  in ffa_boot_info_node()
  144  pa_difference(pkg->pm.begin, pkg->pm.end);  in ffa_boot_info_node()
  167  pa_difference(pkg->hob.begin, pkg->hob.end);  in ffa_boot_info_node()
fdt_patch.c
  44  static bool add_mem_reservation(void *fdt, paddr_t begin, paddr_t end)  in add_mem_reservation() argument
  46  size_t len = pa_difference(begin, end);  in add_mem_reservation()
  131  rsv &= add_mem_reservation(fdt, range.begin, range.end);  in fdt_patch()
vm.c
  299  bool vm_identity_map(struct vm_locked vm_locked, paddr_t begin, paddr_t end,  in vm_identity_map() argument
  302  if (!vm_identity_prepare(vm_locked, begin, end, mode, ppool)) {  in vm_identity_map()
  306  vm_identity_commit(vm_locked, begin, end, mode, ppool, ipa);  in vm_identity_map()
  324  return arch_vm_identity_prepare(vm_locked, begin, end, mode, ppool);  in vm_identity_prepare()
  335  arch_vm_identity_commit(vm_locked, begin, end, mode, ppool, ipa);  in vm_identity_commit()
  344  bool vm_unmap(struct vm_locked vm_locked, paddr_t begin, paddr_t end,  in vm_unmap() argument
  347  return arch_vm_unmap(vm_locked, begin, end, ppool);  in vm_unmap()
  382  bool vm_mem_get_mode(struct vm_locked vm_locked, ipaddr_t begin, ipaddr_t end,  in vm_mem_get_mode() argument
  385  return arch_vm_mem_get_mode(vm_locked, begin, end, mode);  in vm_mem_get_mode()
  389  paddr_t end, mm_mode_t mode, struct mpool *ppool,  in vm_iommu_mm_identity_map() argument
  [all …]
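vm_identity_map() (lines 299-306 above) splits a mapping into a prepare step that may fail and a commit step that cannot, so a failed mapping leaves the page tables untouched. The sketch below shows only that two-phase shape; prepare_tables(), commit_tables() and identity_map() are invented stand-ins rather than the Hafnium functions, which also take a vm_locked, a mode and an mpool.

#include <stdbool.h>
#include <stdio.h>

/* All fallible work (e.g. allocating page-table pages) happens here. */
static bool prepare_tables(unsigned long begin, unsigned long end)
{
        printf("prepare [%#lx, %#lx): reserve page-table memory\n", begin,
               end);
        return true; /* a real prepare may return false on allocation failure */
}

/* By construction, the commit step cannot fail. */
static void commit_tables(unsigned long begin, unsigned long end)
{
        printf("commit  [%#lx, %#lx): write the entries\n", begin, end);
}

static bool identity_map(unsigned long begin, unsigned long end)
{
        if (!prepare_tables(begin, end)) {
                return false; /* nothing was modified */
        }
        commit_tables(begin, end);
        return true;
}

int main(void)
{
        return identity_map(0x80000000UL, 0x80200000UL) ? 0 : 1;
}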
ffa_memory.c
  426  ipaddr_t end = ipa_add(begin, size);  in constituents_get_mode() local
  446  __func__, begin.ipa, end.ipa);  in constituents_get_mode()
  1169  if (constituent == end) {  in ffa_region_group_fragments_revert_protect()
  1301  stage1_locked, begin, end,  in clear_memory()
  1304  size_t size = pa_difference(begin, end);  in clear_memory()
  1312  mm_unmap(stage1_locked, begin, end, ppool);  in clear_memory()
  1355  paddr_t end = pa_add(begin, size);  in ffa_clear_memory_constituents() local
  1381  ipa_addr(end) > ipa_addr(in_begin));  in is_memory_range_within()
  1418  ipaddr_t end = ipa_add(begin, size - 1);  in ffa_memory_check_overlap() local
  1436  begin, end)) {  in ffa_memory_check_overlap()
  [all …]
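Note the two end conventions visible in ffa_memory.c: ffa_memory_check_overlap() (line 1418) builds an inclusive end with ipa_add(begin, size - 1), while other helpers keep end exclusive. The sketch below shows overlap and containment tests on half-open integer ranges; it illustrates the kind of check involved and is not the FF-A implementation.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* True if half-open ranges [a_begin, a_end) and [b_begin, b_end) overlap. */
static bool ranges_overlap(uint64_t a_begin, uint64_t a_end, uint64_t b_begin,
                           uint64_t b_end)
{
        return a_begin < b_end && b_begin < a_end;
}

/* True if [in_begin, in_end) lies entirely within [out_begin, out_end). */
static bool range_within(uint64_t in_begin, uint64_t in_end,
                         uint64_t out_begin, uint64_t out_end)
{
        return out_begin <= in_begin && in_end <= out_end;
}

int main(void)
{
        printf("%d\n", ranges_overlap(0x1000, 0x2000, 0x1800, 0x3000)); /* 1 */
        printf("%d\n", ranges_overlap(0x1000, 0x2000, 0x2000, 0x3000)); /* 0 */
        printf("%d\n", range_within(0x1100, 0x1200, 0x1000, 0x2000));   /* 1 */
        return 0;
}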
init.c
  104  pa_addr(params->mem_ranges[i].end) - 1);  in one_time_init()
mpool_test.cc
  34  sort(allocs.begin(), allocs.end());  in check_allocs()
  35  sort(chunks.begin(), chunks.end(),  in check_allocs()
manifest.c
  541  uintptr_t end = pa_addr(ranges[i].end);  in dump_memory_ranges() local
  543  align_up(pa_difference(ranges[i].begin, ranges[i].end),  in dump_memory_ranges()
  547  dlog(" [%lx - %lx (%zu pages)]\n", begin, end, page_count);  in dump_memory_ranges()
  613  mem_ranges[*mem_regions_index].end =  in check_and_record_memory_used()
  1550  total_mem_size = pa_difference(pkg.total.begin, pkg.total.end);  in parse_ffa_partition_package()
  1561  pm_size = pa_difference(pkg.pm.begin, pkg.pm.end);  in parse_ffa_partition_package()
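dump_memory_ranges() (lines 541-547 above) rounds each range's byte size up to whole pages before logging a page count. The arithmetic is sketched below with an assumed 4 KiB PAGE_SIZE and a local align_up(); Hafnium has its own align_up helper, so this one exists only to keep the example self-contained.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE 4096U /* assumed page size for this example */

/* Round v up to the next multiple of a power-of-two alignment. */
static size_t align_up(size_t v, size_t align)
{
        return (v + align - 1) & ~(align - 1);
}

int main(void)
{
        uint64_t begin = 0x80000000;
        uint64_t end = 0x80001800; /* 6 KiB, i.e. not a whole page count */
        size_t page_count = align_up(end - begin, PAGE_SIZE) / PAGE_SIZE;

        printf("  [%llx - %llx (%zu pages)]\n", (unsigned long long)begin,
               (unsigned long long)end, page_count);
        return 0;
}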
manifest_test.cc
  81  stdout->insert(stdout->end(), buf.begin(),  in exec()
  389  std::copy(vec.begin(), vec.end(), manifest_dtb);  in init()
  401  params->mem_ranges[0].end = pa_init((uintpaddr_t)0x8ffffff);  in boot_params_init()
  412  params->mem_ranges[1].end = pa_init(mem_end);  in boot_params_init()
  417  params->ns_mem_ranges[0].end = pa_init((uintpaddr_t)0x8ffffff);  in boot_params_init()
  422  params->ns_device_mem_ranges[0].end =  in boot_params_init()
  428  params->device_mem_ranges[0].end =  in boot_params_init()
mm_test.cc
  68  std::end(table->entries));  in get_table()
  1105  const paddr_t end = pa_add(begin, 4 * mm_entry_size(1));  in TEST_F() local
  1108  ASSERT_TRUE(mm_vm_unmap(&ptable, begin, end, &ppool));  in TEST_F()
  1111  ASSERT_TRUE(mm_vm_identity_map(&ptable, middle, end, mode, &ppool,  in TEST_F()
/src/memory_protect/

absent.c
  14  struct ffa_value arch_memory_protect(paddr_t begin, paddr_t end,  in arch_memory_protect() argument
  18  (void)end;  in arch_memory_protect()
  24  bool arch_memory_unprotect(paddr_t begin, paddr_t end)  in arch_memory_unprotect() argument
  27  (void)end;  in arch_memory_unprotect()
/src/arch/aarch64/memory_protect/

rme.c
  23  struct ffa_value arch_memory_protect(paddr_t begin, paddr_t end,  in arch_memory_protect() argument
  26  uintptr_t size = pa_difference(begin, end);  in arch_memory_protect()
  73  bool arch_memory_unprotect(paddr_t begin, paddr_t end)  in arch_memory_unprotect() argument
  75  uintptr_t size = pa_difference(begin, end);  in arch_memory_unprotect()
/src/boot_flow/

spmc.c
  40  paddr_t *end)  in plat_boot_flow_get_initrd_range() argument
  45  *end = pa_init(0);  in plat_boot_flow_get_initrd_range()
linux.c
  41  paddr_t *end)  in plat_boot_flow_get_initrd_range() argument
  43  return fdt_find_initrd(fdt, begin, end);  in plat_boot_flow_get_initrd_range()
/src/iommu/

absent.c
  30  paddr_t end, mm_mode_t mode)  in plat_iommu_identity_map() argument
  34  (void)end;  in plat_iommu_identity_map()
/src/arch/fake/hypervisor/

ffa.c
  546  paddr_t end, mm_mode_t mode, struct mpool *ppool)  in arch_vm_identity_prepare() argument
  550  (void)end;  in arch_vm_identity_prepare()
  558  paddr_t end, mm_mode_t mode, struct mpool *ppool,  in arch_vm_identity_commit() argument
  563  (void)end;  in arch_vm_identity_commit()
  569  bool arch_vm_unmap(struct vm_locked vm_locked, paddr_t begin, paddr_t end,  in arch_vm_unmap() argument
  574  (void)end;  in arch_vm_unmap()
  587  ipaddr_t end, mm_mode_t *mode) // NOLINT  in arch_vm_mem_get_mode() argument
  591  (void)end;  in arch_vm_mem_get_mode()
  627  paddr_t end, mm_mode_t mode,  in arch_vm_iommu_mm_identity_map() argument
  633  (void)end;  in arch_vm_iommu_mm_identity_map()
/src/arch/aarch64/

mm.c
  272  uintvaddr_t end = va_addr(va_end);  in arch_mm_invalidate_stage1_range() local
  283  if ((end - begin) > (MAX_TLBI_OPS * PAGE_SIZE)) {  in arch_mm_invalidate_stage1_range()
  291  end >>= 12;  in arch_mm_invalidate_stage1_range()
  293  for (it = begin; it < end;  in arch_mm_invalidate_stage1_range()
  327  uintpaddr_t end = ipa_addr(va_end);  in arch_mm_invalidate_stage2_range() local
  352  if ((end - begin) > (MAX_TLBI_OPS * PAGE_SIZE)) {  in arch_mm_invalidate_stage2_range()
  360  end >>= 12;  in arch_mm_invalidate_stage2_range()
  367  for (it = begin; it < end;  in arch_mm_invalidate_stage2_range()
  416  uintptr_t end = (uintptr_t)base + size;  in arch_mm_flush_dcache() local
  418  while (line_begin < end) {  in arch_mm_flush_dcache()
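arch_mm_invalidate_stage1_range() and arch_mm_invalidate_stage2_range() share one shape: if the range spans more than MAX_TLBI_OPS pages, invalidate the whole TLB rather than issuing per-page operations; otherwise shift begin/end down to page numbers (the ">>= 12" lines) and loop. The sketch below reproduces that control flow only; invalidate_page(), invalidate_all() and the MAX_TLBI_OPS value are stand-ins for the real tlbi sequences and the tuned threshold.

#include <stdint.h>
#include <stdio.h>

#define PAGE_BITS 12
#define PAGE_SIZE (1UL << PAGE_BITS)
#define MAX_TLBI_OPS 512UL /* assumed value for the example */

/* Hypothetical stand-ins for the tlbi instructions the real code issues. */
static void invalidate_page(uintptr_t page)
{
        printf("tlbi page %#lx\n", (unsigned long)(page << PAGE_BITS));
}

static void invalidate_all(void)
{
        printf("tlbi all\n");
}

static void invalidate_range(uintptr_t begin, uintptr_t end)
{
        /* Past a certain size, one full invalidation beats many per-page ones. */
        if ((end - begin) > (MAX_TLBI_OPS * PAGE_SIZE)) {
                invalidate_all();
                return;
        }

        /* Work in page numbers rather than byte addresses. */
        begin >>= PAGE_BITS;
        end >>= PAGE_BITS;

        for (uintptr_t it = begin; it < end; it++) {
                invalidate_page(it);
        }
}

int main(void)
{
        invalidate_range(0x80000000UL, 0x80003000UL); /* three pages */
        return 0;
}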
/src/arch/aarch64/hftest/

el0_entry.S
  9  .macro ffa_mem_perm_set start:req end:req perm:req
  13  adrp x30, \end
  14  add x30, x30, :lo12:\end
/src/arch/aarch64/plat/interrupts/

gicv3.c
  535  gic_mem_ranges[0].end = pa_init(GICD_BASE + GICD_SIZE);  in fdt_find_gics()
  537  gic_mem_ranges[1].end = pa_init(  in fdt_find_gics()
  555  gic_mem_ranges[ic_reg_idx].end = pa_init(addr + len);  in fdt_find_gics()
  578  .end.pa;  in fdt_find_gics()
  786  gic_mem_ranges[gicd_idx].end,  in gicv3_driver_init()
  829  gic_mem_ranges[gicr_idx + num_gic_dist].end,  in gicv3_driver_init()
  862  gic_mem_ranges[chip_idx].end = pa_init(0);  in plat_interrupts_controller_driver_init()
|