/xen-4.10.0-shim-comet/xen/include/xen/

  domain_page.h
     26  void *map_domain_page(mfn_t mfn);
     47  #define __map_domain_page(pg) map_domain_page(_mfn(__page_to_mfn(pg)))
     56  #define map_domain_page(mfn) __mfn_to_virt(mfn_x(mfn))   macro

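This listing collects the definition and the call sites of map_domain_page() in the xen-4.10.0-shim-comet tree. The declaration at line 26 of domain_page.h is the interface itself: it returns a temporary virtual mapping of the frame named by the MFN, released again with unmap_domain_page() from the same header; the macro at line 56 is the fallback form used when a full directmap makes a dynamic mapcache unnecessary. Below is a minimal sketch of the usual map/use/unmap pattern, not code from the tree: the helper name copy_to_frame() is invented for illustration, everything else is the header's API.

    #include <xen/domain_page.h>
    #include <xen/mm.h>
    #include <xen/string.h>

    /*
     * Illustrative only: copy one page of data into the frame 'mfn'.
     * map_domain_page() yields a temporary virtual address for the frame;
     * every successful map must be paired with unmap_domain_page().
     */
    static void copy_to_frame(mfn_t mfn, const void *src)
    {
        void *dst = map_domain_page(mfn);

        memcpy(dst, src, PAGE_SIZE);
        unmap_domain_page(dst);
    }

    /* Given a struct page_info *pg, __map_domain_page(pg) maps the same frame. */

The call sites below follow this pairing throughout; where the mapcache is compiled out, both calls collapse to cheap directmap arithmetic.
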
/xen-4.10.0-shim-comet/xen/arch/x86/

  debug.c
    108  l4t = map_domain_page(mfn);   in dbg_pv_va2mfn()
    120  l3t = map_domain_page(mfn);   in dbg_pv_va2mfn()
    134  l2t = map_domain_page(mfn);   in dbg_pv_va2mfn()
    146  l1t = map_domain_page(mfn);   in dbg_pv_va2mfn()
    177  va = map_domain_page(mfn);   in dbg_rw_guest_mem()

  x86_emulate.c
     33  memset(((stb).ptr = map_domain_page(_mfn(this_cpu(stubs.mfn)))) + \

  extable.c
    153  uint8_t *ptr = map_domain_page(_mfn(this_cpu(stubs.mfn))) +   in stub_selftest()

  tboot.c
    162  pt_vaddr = (struct dma_pte *)map_domain_page(_mfn(paddr_to_pfn(pt_maddr)));   in update_iommu_mac()
    195  void *pg = map_domain_page(_mfn(mfn));   in update_pagetable_mac()

  traps.c
    1211  l4t = map_domain_page(_mfn(mfn));   in __page_fault_type()
    1220  l3t = map_domain_page(_mfn(mfn));   in __page_fault_type()
    1231  l2t = map_domain_page(_mfn(mfn));   in __page_fault_type()
    1242  l1t = map_domain_page(_mfn(mfn));   in __page_fault_type()

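The debug.c and traps.c hits above are all the same idiom: a software walk of the 4-level page table in which each level's frame is mapped, the entry for the address of interest is read, and the mapping is dropped before descending. A condensed sketch of that idiom follows; it omits the present-bit and superpage checks the real functions perform, and the name walk_to_l1e() is invented for this example.

    #include <xen/domain_page.h>
    #include <asm/page.h>

    /* Condensed walk pattern (no error handling, no superpages). */
    static l1_pgentry_t walk_to_l1e(mfn_t root_mfn, unsigned long addr)
    {
        l4_pgentry_t *l4t, l4e;
        l3_pgentry_t *l3t, l3e;
        l2_pgentry_t *l2t, l2e;
        l1_pgentry_t *l1t, l1e;

        l4t = map_domain_page(root_mfn);       /* map the root frame        */
        l4e = l4t[l4_table_offset(addr)];      /* read the relevant entry   */
        unmap_domain_page(l4t);                /* drop it before descending */

        l3t = map_domain_page(l4e_get_mfn(l4e));
        l3e = l3t[l3_table_offset(addr)];
        unmap_domain_page(l3t);

        l2t = map_domain_page(l3e_get_mfn(l3e));
        l2e = l2t[l2_table_offset(addr)];
        unmap_domain_page(l2t);

        l1t = map_domain_page(l2e_get_mfn(l2e));
        l1e = l1t[l1_table_offset(addr)];
        unmap_domain_page(l1t);

        return l1e;
    }

The x86_emulate.c and extable.c hits are a different use of the same primitive: they map the per-CPU stub page (this_cpu(stubs.mfn)) so its contents can be written through an ordinary pointer.
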
/xen-4.10.0-shim-comet/xen/arch/x86/mm/

  paging.c
     94  mfn_t *node = map_domain_page(mfn);   in paging_new_log_dirty_node()
    153  l3 = map_domain_page(l4[i4]);   in paging_free_log_dirty_bitmap()
    160  l2 = map_domain_page(l3[i3]);   in paging_free_log_dirty_bitmap()
    310  l3 = map_domain_page(mfn);   in paging_mark_pfn_dirty()
    318  l2 = map_domain_page(mfn);   in paging_mark_pfn_dirty()
    326  l1 = map_domain_page(mfn);   in paging_mark_pfn_dirty()
    380  l4 = map_domain_page(mfn);   in paging_mfn_is_dirty()
    386  l3 = map_domain_page(mfn);   in paging_mfn_is_dirty()
    392  l2 = map_domain_page(mfn);   in paging_mfn_is_dirty()
    398  l1 = map_domain_page(mfn);   in paging_mfn_is_dirty()
    [all …]

  p2m-pt.c
    159  l1_pgentry_t *l3_table = map_domain_page(l1e_get_mfn(*p2m_entry));   in p2m_free_entry()
    245  l1_entry = map_domain_page(mfn);   in p2m_next_level()
    270  next = map_domain_page(l1e_get_mfn(*p2m_entry));   in p2m_next_level()
    294  table = map_domain_page(pagetable_get_mfn(p2m_get_pagetable(p2m)));   in p2m_pt_set_recalc_range()
    356  table = map_domain_page(pagetable_get_mfn(p2m_get_pagetable(p2m)));   in do_recalc()
    541  table = map_domain_page(pagetable_get_mfn(p2m_get_pagetable(p2m)));   in p2m_pt_set_entry()
    778  l4_pgentry_t *l4e = map_domain_page(mfn);   in p2m_pt_get_entry()
    792  l3_pgentry_t *l3e = map_domain_page(mfn);   in p2m_pt_get_entry()
    834  l2e = map_domain_page(mfn);   in p2m_pt_get_entry()
    872  l1e = map_domain_page(mfn);   in p2m_pt_get_entry()
    [all …]

  p2m-ept.c
     246  table = map_domain_page(mfn);   in ept_set_middle_entry()
     266  ept_entry_t *epte = map_domain_page(_mfn(ept_entry->mfn));   in ept_free_entry()
     294  table = map_domain_page(_mfn(new_ept.mfn));   in ept_split_super_page()
     382  *table = map_domain_page(_mfn(mfn));   in ept_next_level()
     395  ept_entry_t *epte = map_domain_page(mfn);   in ept_invalidate_emt()
     437  table = map_domain_page(pagetable_get_mfn(p2m_get_pagetable(p2m)));   in ept_invalidate_emt_range()
     521  epte = map_domain_page(_mfn(mfn));   in resolve_misconfig()
     721  table = map_domain_page(pagetable_get_mfn(p2m_get_pagetable(p2m)));   in ept_set_entry()
     919  map_domain_page(pagetable_get_mfn(p2m_get_pagetable(p2m)));   in ept_get_entry()
    1030  map_domain_page(pagetable_get_mfn(p2m_get_pagetable(p2m)));   in ept_walk_table()
    [all …]

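The expression that recurs throughout p2m-pt.c and p2m-ept.c is map_domain_page(pagetable_get_mfn(p2m_get_pagetable(p2m))): the p2m root is stored as a pagetable_t, so its MFN is extracted and mapped before each walk starts. A small illustration of that step, assuming a valid struct p2m_domain pointer; the wrapper name map_p2m_root() is made up here and does not exist in the tree.

    #include <xen/domain_page.h>
    #include <asm/p2m.h>

    /* Map the root frame of a domain's p2m, as the walkers above do. */
    static void *map_p2m_root(struct p2m_domain *p2m)
    {
        return map_domain_page(pagetable_get_mfn(p2m_get_pagetable(p2m)));
    }

    /* The caller is responsible for the matching unmap_domain_page(). */
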
/xen-4.10.0-shim-comet/xen/arch/x86/x86_64/

  traps.c
    196  l4t = map_domain_page(_mfn(mfn));   in show_page_walk()
    208  l3t = map_domain_page(_mfn(mfn));   in show_page_walk()
    222  l2t = map_domain_page(_mfn(mfn));   in show_page_walk()
    236  l1t = map_domain_page(_mfn(mfn));   in show_page_walk()
    312  stub_page = map_domain_page(_mfn(this_cpu(stubs.mfn)));   in subarch_percpu_traps_init()

  mm.c
      58  l4t = map_domain_page(_mfn(mfn));   in do_page_walk()
      76  l2t = map_domain_page(_mfn(mfn));   in do_page_walk()
      88  l1t = map_domain_page(_mfn(mfn));   in do_page_walk()
      96  return map_domain_page(_mfn(mfn)) + (addr & ~PAGE_MASK);   in do_page_walk()
    1194  pl4e = map_domain_page(_mfn(mfn));   in handle_memadd_fault()
    1203  pl3e = map_domain_page(_mfn(mfn));   in handle_memadd_fault()
    1211  pl2e = map_domain_page(_mfn(mfn));   in handle_memadd_fault()

/xen-4.10.0-shim-comet/xen/common/

  tmem_xen.c
     71  return map_domain_page(_mfn(*pcli_mfn));   in cli_get_page()
     98  tmem_va = map_domain_page(_mfn(tmem_mfn));   in tmem_copy_from_client()
    168  tmem_va = map_domain_page(_mfn(tmem_mfn));   in tmem_copy_to_client()

  kimage.c
    500  for ( ptr = map_domain_page(_mfn(paddr_to_pfn(image->head))); \
    503  (unmap_domain_page(ptr), map_domain_page(_mfn(paddr_to_pfn(entry)))) \
    753  dest_va = map_domain_page(_mfn(dest_mfn));   in kimage_load_crash_segment()
    871  page = map_domain_page(ind_mfn);   in kimage_build_ind()
    897  page = map_domain_page(mfn);   in kimage_build_ind()

/xen-4.10.0-shim-comet/xen/drivers/passthrough/amd/

  iommu_map.c
     42  table = map_domain_page(_mfn(l1_mfn));   in clear_iommu_pte_present()
    115  table = map_domain_page(_mfn(pt_mfn));   in set_iommu_pte_present()
    349  table = map_domain_page(_mfn(pt_mfn));   in iommu_update_pde_count()
    354  ntable = map_domain_page(_mfn(paddr_to_pfn(ntable_maddr)));   in iommu_update_pde_count()
    400  table = map_domain_page(_mfn(pt_mfn));   in iommu_merge_pages()
    412  ntable = map_domain_page(_mfn(ntable_mfn));   in iommu_merge_pages()
    467  next_table_vaddr = map_domain_page(_mfn(next_table_mfn));   in iommu_pde_from_gfn()

  iommu_guest.c
    204  log_base = map_domain_page(_mfn(mfn));   in guest_iommu_add_ppr_log()
    253  log_base = map_domain_page(_mfn(mfn));   in guest_iommu_add_event_log()
    378  vaddr = map_domain_page(get_gfn(d, gfn ,&p2mt));   in do_completion_wait()
    426  dte_base = map_domain_page(_mfn(dte_mfn));   in do_invalidate_dte()
    507  cmd_base = map_domain_page(_mfn(cmd_mfn));   in guest_iommu_process_command()

/xen-4.10.0-shim-comet/xen/arch/x86/mm/shadow/

  multi.c
     228  l3p = map_domain_page(gw->l3mfn);   in shadow_check_gwalk()
     235  l2p = map_domain_page(gw->l2mfn);   in shadow_check_gwalk()
     245  l1p = map_domain_page(gw->l1mfn);   in shadow_check_gwalk()
     263  l1p = map_domain_page(gw->l1mfn);   in shadow_check_gl1e()
     842  map = map_domain_page(mfn);   in shadow_write_entries()
    1575  l4e = map_domain_page(m4mfn);   in sh_make_monitor_table()
    2024  l3e = map_domain_page(m3mfn);   in sh_destroy_monitor_table()
    2316  snp = map_domain_page(snpmfn);   in sh_resync_l1()
    2317  gp = map_domain_page(gl1mfn);   in sh_resync_l1()
    2441  sl1p = map_domain_page(map_mfn);   in sh_map_and_validate()
    [all …]

/xen-4.10.0-shim-comet/misc/coverity/

  model.c
     89  void *map_domain_page(unsigned long mfn)   in map_domain_page()   function

/xen-4.10.0-shim-comet/xen/arch/x86/pv/

  grant_table.c
     86  pl1e = map_domain_page(gl1mfn) + (addr & ~PAGE_MASK);   in create_grant_pv_mapping()
    241  pl1e = map_domain_page(gl1mfn) + (addr & ~PAGE_MASK);   in replace_grant_pv_mapping()

  mm.c
     61  return (l1_pgentry_t *)map_domain_page(*gl1mfn) + l1_table_offset(linear);   in map_guest_l1e()

  descriptor-tables.c
    170  gdt_pent = map_domain_page(_mfn(mfn));   in do_update_descriptor()

  domain.c
     38  l4tab = map_domain_page(mfn);   in setup_compat_l4()

/xen-4.10.0-shim-comet/xen/drivers/passthrough/vtd/x86/

  vtd.c
     43  return map_domain_page(_mfn(paddr_to_pfn(maddr)));   in map_vtd_domain_page()

/xen-4.10.0-shim-comet/xen/arch/x86/mm/hap/

  guest_walk.c
     90  top_map = map_domain_page(top_mfn);   in hap_p2m_ga_to_gfn()

/xen-4.10.0-shim-comet/xen/include/asm-x86/

  page.h
    195  #define map_l1t_from_l2e(x) (l1_pgentry_t *)map_domain_page(l2e_get_mfn(x))
    196  #define map_l2t_from_l3e(x) (l2_pgentry_t *)map_domain_page(l3e_get_mfn(x))
    197  #define map_l3t_from_l4e(x) (l3_pgentry_t *)map_domain_page(l4e_get_mfn(x))

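These page.h macros layer thin wrappers over the same primitive: each extracts the next-level MFN from a page-table entry, maps it, and casts the result to the appropriate table type. With them, one step of the walk sketched earlier can be written as below; read_l1e() is a name invented for this fragment.

    #include <xen/domain_page.h>
    #include <asm/page.h>

    /* One walk step using the page.h wrapper instead of the raw calls. */
    static l1_pgentry_t read_l1e(l2_pgentry_t l2e, unsigned long addr)
    {
        l1_pgentry_t *l1t = map_l1t_from_l2e(l2e);
        l1_pgentry_t l1e = l1t[l1_table_offset(addr)];

        unmap_domain_page(l1t);

        return l1e;
    }
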
/xen-4.10.0-shim-comet/xen/arch/arm/

  mm.c
    224  mapping = map_domain_page(mfn_add(root_mfn, root_table));   in dump_pt_walk()
    241  mapping = map_domain_page(_mfn(pte.walk.base));   in dump_pt_walk()
    395  void *map_domain_page(mfn_t mfn)   in map_domain_page()   function
    499  void *v = map_domain_page(_mfn(mfn));   in flush_page_to_ram()