| /hypervisor/quirks/ |
| security_vm_fixup.c |
| 170 | void *p = NULL; | in efi_search_smbios_eps() local |
| 177 | if (p != NULL) { | in efi_search_smbios_eps() |
| 181 | if (p != NULL) { | in efi_search_smbios_eps() |
| 187 | return (p != NULL); | in efi_search_smbios_eps() |
| 220 | static int is_smbios3_present(uint8_t *p) | in is_smbios3_present() argument |
| 223 | (calculate_sum8(p, ((struct smbios3_entry_point *)p)->length)) == 0); | in is_smbios3_present() |
| 229 | (calculate_sum8(p, ((struct smbios2_entry_point *)p)->length)) == 0); | in is_smbios2_present() |
| 236 | uint8_t *p; | in mem_search_smbios_eps() local |
| 243 | for (p = start; p < end; p += 16) { | in mem_search_smbios_eps() |
| 244 | if (is_smbios3_present(p)) { | in mem_search_smbios_eps() |
| [all …] |
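The matches above are the SMBIOS entry-point scan: candidate addresses are walked in 16-byte steps and accepted only when the anchor string and the byte-wise checksum both validate. A minimal sketch of that pattern, assuming the spec-defined "_SM3_" anchor and a plain byte-sum calculate_sum8(); the real smbios3_entry_point has more fields than shown here:

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Sketch only: layout follows the SMBIOS 3.0 spec, truncated to the fields
 * the check actually reads. */
struct smbios3_entry_point {
	char anchor[5];      /* "_SM3_" */
	uint8_t checksum;    /* makes the whole structure sum to zero */
	uint8_t length;      /* structure length, used as the checksum range */
} __attribute__((packed));

/* Byte-wise sum over len bytes; a valid entry point sums to 0 modulo 256. */
static uint8_t calculate_sum8(const uint8_t *buf, uint8_t len)
{
	uint8_t sum = 0U;

	for (uint8_t i = 0U; i < len; i++) {
		sum += buf[i];
	}
	return sum;
}

static int is_smbios3_present(const uint8_t *p)
{
	return (memcmp(p, "_SM3_", 5) == 0) &&
	       (calculate_sum8(p, ((const struct smbios3_entry_point *)p)->length) == 0U);
}

/* Walk a candidate region on 16-byte boundaries, as mem_search_smbios_eps() does. */
static const uint8_t *scan_for_smbios3(const uint8_t *start, const uint8_t *end)
{
	for (const uint8_t *p = start; p < end; p += 16) {
		if (is_smbios3_present(p)) {
			return p;
		}
	}
	return NULL;
}
```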
|
| /hypervisor/include/arch/x86/asm/lib/ |
| atomic.h |
| 73 | static inline uint32_t atomic_readandclear32(uint32_t *p) | in atomic_readandclear32() argument |
| 75 | return atomic_swap32(p, 0U); | in atomic_readandclear32() |
| 82 | static inline uint64_t atomic_readandclear64(uint64_t *p) | in atomic_readandclear64() argument |
| 84 | return atomic_swap64(p, 0UL); | in atomic_readandclear64() |
| 113 | static inline int32_t atomic_add_return(int32_t *p, int32_t v) | in atomic_add_return() argument |
| 115 | return (atomic_xadd32(p, v) + v); | in atomic_add_return() |
| 118 | static inline int32_t atomic_sub_return(int32_t *p, int32_t v) | in atomic_sub_return() argument |
| 120 | return (atomic_xadd32(p, -v) - v); | in atomic_sub_return() |
| 133 | static inline int64_t atomic_add64_return(int64_t *p, int64_t v) | in atomic_add64_return() argument |
| 135 | return (atomic_xadd64(p, v) + v); | in atomic_add64_return() |
| [all …] |
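These wrappers build richer operations from two primitives: read-and-clear is an atomic swap with zero, and add/sub-return recovers the new value from the old value that xadd returns. A sketch of the same semantics using GCC/Clang __atomic builtins; the sequentially-consistent ordering is an assumption, since the hypervisor's own atomic_swap32()/atomic_xadd32() are lock-prefixed x86 primitives:

```c
#include <stdint.h>

static inline uint32_t atomic_readandclear32_sketch(uint32_t *p)
{
	/* returns the old value and atomically stores 0, like atomic_swap32(p, 0U) */
	return __atomic_exchange_n(p, 0U, __ATOMIC_SEQ_CST);
}

static inline int32_t atomic_add_return_sketch(int32_t *p, int32_t v)
{
	/* fetch-and-add yields the old value; add v again to return the new one,
	 * matching (atomic_xadd32(p, v) + v) */
	return __atomic_fetch_add(p, v, __ATOMIC_SEQ_CST) + v;
}

static inline int32_t atomic_sub_return_sketch(int32_t *p, int32_t v)
{
	/* mirrors (atomic_xadd32(p, -v) - v) */
	return __atomic_fetch_sub(p, v, __ATOMIC_SEQ_CST) - v;
}
```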
|
| /hypervisor/arch/x86/guest/ |
| hyperv.c |
| 68 | struct HV_REFERENCE_TSC_PAGE *p; | in hyperv_setup_tsc_page() local |
| 74 | p = (struct HV_REFERENCE_TSC_PAGE *)gpa2hva(vcpu->vm, ref_tsc_page->gpfn << PAGE_SHIFT); | in hyperv_setup_tsc_page() |
| 75 | if (p != NULL) { | in hyperv_setup_tsc_page() |
| 77 | p->tsc_scale = vcpu->vm->arch_vm.hyperv.tsc_scale; | in hyperv_setup_tsc_page() |
| 78 | p->tsc_offset = vcpu->vm->arch_vm.hyperv.tsc_offset; | in hyperv_setup_tsc_page() |
| 80 | tsc_seq = p->tsc_sequence + 1U; | in hyperv_setup_tsc_page() |
| 84 | p->tsc_sequence = tsc_seq; | in hyperv_setup_tsc_page() |
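hyperv_setup_tsc_page() fills in the Hyper-V reference TSC page: scale and offset are written first and the sequence number last, so a guest using the TLFS seqlock protocol never consumes a half-updated page. A guest-side sketch of that read loop, assuming the TLFS field layout and the reference-time formula ((tsc * scale) >> 64) + offset; the sequence == 0 "page invalid" special case is omitted:

```c
#include <stdint.h>

/* Guest-side view of the reference TSC page; this is a sketch, not ACRN code. */
struct hv_ref_tsc_page {
	volatile uint32_t tsc_sequence;
	uint32_t reserved;
	volatile uint64_t tsc_scale;
	volatile int64_t tsc_offset;
};

static inline uint64_t rdtsc_sketch(void)
{
	uint32_t lo, hi;

	__asm__ volatile ("rdtsc" : "=a"(lo), "=d"(hi));
	return ((uint64_t)hi << 32) | lo;
}

/* Seqlock-style read: retry if the hypervisor bumped tsc_sequence while we
 * were reading, which is why the setup path writes the new sequence last. */
static uint64_t read_reference_time(const struct hv_ref_tsc_page *p)
{
	uint32_t seq;
	uint64_t scale, tsc, ref;
	int64_t offset;

	do {
		seq = p->tsc_sequence;
		scale = p->tsc_scale;
		offset = p->tsc_offset;
		tsc = rdtsc_sketch();
		/* ref = ((tsc * scale) >> 64) + offset, per the TLFS */
		ref = (uint64_t)(((unsigned __int128)tsc * scale) >> 64) + (uint64_t)offset;
	} while (seq != p->tsc_sequence);

	return ref;
}
```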
|
| /hypervisor/debug/ |
| npk_log.c |
| 139 | const char *p = buf; | in npk_log_write() local |
| 156 | for (sz = 0; sz >= 0; p += sz) | in npk_log_write() |
| 157 | sz = npk_write(p, &(channel->Dn), buf + len - p); | in npk_log_write() |
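npk_log_write() pushes the buffer out in chunks, advancing p by whatever the backend accepted and stopping once it returns a negative value. A sketch of that loop with the channel argument dropped and npk_write() replaced by a hypothetical callback:

```c
#include <stddef.h>
#include <sys/types.h>	/* ssize_t */

/* write_fn stands in for npk_write(); it is assumed to return the number of
 * bytes it consumed, or a negative value once nothing is left (or on error),
 * which is what ends the loop. */
static void chunked_write_sketch(const char *buf, size_t len,
				 ssize_t (*write_fn)(const char *chunk, size_t remaining))
{
	const char *p = buf;
	ssize_t sz;

	/* advance p by whatever the backend accepted on the previous pass */
	for (sz = 0; sz >= 0; p += sz) {
		sz = write_fn(p, (size_t)(buf + len - p));
	}
}
```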
|
| printf.c |
| 21 | const char *p = s; | in charout() local |
| 31 | nchars += (s - p); | in charout() |
|
| /hypervisor/lib/ |
| sprintf.c |
| 187 | uint32_t p = 0U, w = 0U; | in format_number() local |
| 194 | p = param->vars.precision - width; | in format_number() |
| 198 | if (param->vars.width > (width + p)) { | in format_number() |
| 199 | w = param->vars.width - (width + p); | in format_number() |
| 239 | param->emit(PRINT_CMD_FILL, "0", p, param->data); | in format_number() |
| 550 | char *p = param->dst + param->wrtn; | in charmem() local |
| 559 | *p = *s; | in charmem() |
| 561 | p++; | in charmem() |
| 573 | (void)memset(p, (uint8_t)*s, n); | in charmem() |
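In format_number(), p counts the leading zeros needed to reach the requested precision and w the leading spaces needed to reach the field width; both are then emitted via PRINT_CMD_FILL. A simplified sketch of that arithmetic, ignoring flags such as left-justification and with printf standing in for the emit callback:

```c
#include <stdint.h>
#include <stdio.h>

/* Names mirror the snippet: ndigits is the length of the converted number,
 * precision and width come from the format specifier. */
static void emit_padded_number(const char *digits, uint32_t ndigits,
			       uint32_t precision, uint32_t width)
{
	uint32_t p = 0U, w = 0U;

	if (precision > ndigits) {
		p = precision - ndigits;	/* leading zeros */
	}
	if (width > (ndigits + p)) {
		w = width - (ndigits + p);	/* leading spaces */
	}

	printf("%*s", (int)w, "");		/* fill with ' ' */
	for (uint32_t i = 0U; i < p; i++) {	/* fill with '0' */
		(void)putchar('0');
	}
	(void)fputs(digits, stdout);		/* the converted digits */
}

/* Example: emit_padded_number("42", 2, 5, 8) prints "   00042". */
```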
|
| /hypervisor/common/ |
| ptdev.c |
| 64 | struct hlist_node *p; | in find_ptirq_entry() local |
| 72 | hlist_for_each(p, b) { | in find_ptirq_entry() |
| 73 | n = hlist_entry(p, struct ptirq_remapping_info, phys_link); | in find_ptirq_entry() |
| 83 | hlist_for_each(p, b) { | in find_ptirq_entry() |
| 84 | n = hlist_entry(p, struct ptirq_remapping_info, virt_link); | in find_ptirq_entry() |
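find_ptirq_entry() walks a hash bucket and converts each embedded hlist_node back into its containing ptirq_remapping_info with hlist_entry(), the usual container_of() idiom. A self-contained sketch of that lookup pattern; the node layout and the lookup key used here are assumptions:

```c
#include <stddef.h>
#include <stdint.h>

struct hlist_node {
	struct hlist_node *next;
	struct hlist_node **pprev;
};

struct hlist_head {
	struct hlist_node *first;
};

/* Recover the structure that holds an embedded list node. */
#define hlist_entry(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

#define hlist_for_each(pos, head) \
	for ((pos) = (head)->first; (pos) != NULL; (pos) = (pos)->next)

struct ptirq_remapping_info_sketch {
	uint32_t phys_sid;		/* hypothetical lookup key */
	struct hlist_node phys_link;	/* node embedded in the bucket list */
};

static struct ptirq_remapping_info_sketch *
find_by_phys_sid(struct hlist_head *b, uint32_t sid)
{
	struct hlist_node *p;
	struct ptirq_remapping_info_sketch *n;

	hlist_for_each(p, b) {
		n = hlist_entry(p, struct ptirq_remapping_info_sketch, phys_link);
		if (n->phys_sid == sid) {
			return n;
		}
	}
	return NULL;
}
```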
|
| hypercall.c |
| 138 | uint32_t p = n; | in nearest_pow2() local |
| 141 | p = fls32(2U*n - 1U); | in nearest_pow2() |
| 144 | return p; | in nearest_pow2() |
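nearest_pow2() returns the exponent of the smallest power of two that is greater than or equal to n, using the fls32(2*n - 1) trick. A sketch with fls32() assumed to return the zero-based index of the highest set bit, emulated here with a compiler builtin:

```c
#include <stdint.h>

/* Zero-based index of the highest set bit; the argument must be non-zero. */
static inline uint32_t fls32_sketch(uint32_t v)
{
	return 31U - (uint32_t)__builtin_clz(v);
}

/* Returns e such that 2^e is the smallest power of two >= n (n itself for n < 2). */
static uint32_t nearest_pow2_sketch(uint32_t n)
{
	uint32_t p = n;

	if (n >= 2U) {
		p = fls32_sketch((2U * n) - 1U);
	}
	return p;
}

/* Example: n = 5 gives fls32(9) = 3, i.e. 2^3 = 8 is the nearest power of two
 * >= 5; n = 8 gives fls32(15) = 3 as well, since 8 is already a power of two. */
```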
|
| /hypervisor/include/arch/x86/asm/ |
| mmu.h |
| 185 | void flush_cacheline(const volatile void *p); |
| 186 | void flush_cache_range(const volatile void *p, uint64_t size); |
|
| cpu.h |
| 608 | static inline void clflush(const volatile void *p) | in clflush() argument |
| 610 | asm volatile ("clflush (%0)" :: "r"(p)); | in clflush() |
| 613 | static inline void clflushopt(const volatile void *p) | in clflushopt() argument |
| 615 | asm volatile ("clflushopt (%0)" :: "r"(p)); | in clflushopt() |
|
| pgtable.h |
| 207 | …void (*clflush_pagewalk)(const void *p); /**< Function to flush a page table entry from the cache.… |
|
| vtd.h |
| 723 | void iommu_flush_cache(const void *p, uint32_t size); |
|
| /hypervisor/arch/x86/ |
| mmu.c |
| 348 | void flush_cacheline(const volatile void *p) | in flush_cacheline() argument |
| 350 | clflush(p); | in flush_cacheline() |
| 353 | void flush_cache_range(const volatile void *p, uint64_t size) | in flush_cache_range() argument |
| 358 | clflushopt(p + i); | in flush_cache_range() |
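flush_cacheline() flushes a single line with clflush, while flush_cache_range() walks the buffer and issues clflushopt per line. A sketch of the range flush, assuming a 64-byte line size as the loop stride; the trailing fence is added here because clflushopt is weakly ordered, and whether the original code fences at this point is not shown in the snippet:

```c
#include <stdint.h>

#define CACHE_LINE_SIZE	64UL	/* assumed stride; x86 cache lines are 64 bytes */

static inline void clflushopt_sketch(const volatile void *p)
{
	__asm__ volatile ("clflushopt (%0)" : : "r"(p) : "memory");
}

/* Flush every cache line touched by [p, p + size). */
static void flush_cache_range_sketch(const volatile void *p, uint64_t size)
{
	for (uint64_t i = 0UL; i < size; i += CACHE_LINE_SIZE) {
		clflushopt_sketch((const volatile char *)p + i);
	}
	/* order the flushes before any later reliance on them (assumption) */
	__asm__ volatile ("mfence" : : : "memory");
}
```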
|
| tsc.c |
| 102 | static inline uint64_t tsc_read_hpet(uint64_t *p) | in tsc_read_hpet() argument |
| 107 | *p = hpet_read(HPET_COUNTER); | in tsc_read_hpet() |
|
| vtd.c |
| 265 | void iommu_flush_cache(const void *p, uint32_t size) | in iommu_flush_cache() argument |
| 269 | flush_cache_range(p, size); | in iommu_flush_cache() |
|
| /hypervisor/boot/guest/ |
| elf_loader.c |
| 248 | uint32_t *p = (uint32_t *)vm->sw.kernel_info.kernel_src_addr; | in find_img_multiboot_header() local |
| 252 | if (p[i] == MULTIBOOT_HEADER_MAGIC) { | in find_img_multiboot_header() |
| 264 | sum += p[j + i]; | in find_img_multiboot_header() |
| 268 | ret = (struct multiboot_header *)(p + i); | in find_img_multiboot_header() |
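find_img_multiboot_header() scans the kernel image for the Multiboot 1 magic on 32-bit boundaries and accepts a candidate only if the three-word header checksum wraps to zero. A sketch of that search, assuming the spec's 8 KiB search window and a truncated header structure:

```c
#include <stddef.h>
#include <stdint.h>

#define MULTIBOOT_HEADER_MAGIC	0x1BADB002U
#define MULTIBOOT_SEARCH_WORDS	(8192U / 4U)	/* spec: header lies in the first 8 KiB */

struct multiboot_header_sketch {
	uint32_t magic;
	uint32_t flags;
	uint32_t checksum;	/* magic + flags + checksum wraps to 0 */
	/* optional address/video fields omitted */
};

/* Scan on 32-bit boundaries for the magic and validate it with the
 * three-word wrap-around checksum. */
static struct multiboot_header_sketch *find_multiboot_header_sketch(void *image)
{
	uint32_t *p = (uint32_t *)image;

	for (uint32_t i = 0U; i < MULTIBOOT_SEARCH_WORDS; i++) {
		if (p[i] == MULTIBOOT_HEADER_MAGIC) {
			uint32_t sum = 0U;

			for (uint32_t j = 0U; j < 3U; j++) {
				sum += p[i + j];	/* unsigned wrap-around */
			}
			if (sum == 0U) {
				return (struct multiboot_header_sketch *)(p + i);
			}
		}
	}
	return NULL;
}
```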
|
| /hypervisor/dm/ |
| io_req.c |
| 73 | struct asyncio_desc *p; | in asyncio_is_conflict() local |
| 79 | p = container_of(pos, struct asyncio_desc, list); | in asyncio_is_conflict() |
| 80 | info = &(p->asyncio_info); | in asyncio_is_conflict() |
|