| /hypervisor/include/arch/x86/asm/lib/ |
| spinlock.h |
|    25  (void)memset(lock, 0U, sizeof(spinlock_t));        in spinlock_init()
|    46  [head] "m"(lock->head),                             in spinlock_obtain()
|    47  [tail] "m"(lock->tail)                              in spinlock_obtain()
|    56  : [tail] "m"(lock->tail)                            in spinlock_release()
|    68  .macro spinlock_obtain lock
|    70  lea \lock, %rbx
|    83  .macro spinlock_release lock
|    84  lea \lock, %rbx
|    85  lock incl SYNC_SPINLOCK_TAIL_OFFSET(%rbx)
|    95  spinlock_obtain(lock); \
|    [all …]
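
The fragments above outline a ticket lock: spinlock_init() zeroes a head/tail pair, spinlock_obtain() spins on the two counters, and spinlock_release() bumps the tail with a lock incl. The sketch below restates that head/tail discipline in plain C with GCC __atomic builtins; the field names come from the listing, but the builtin-based implementation is only an illustration of the idea, not the hypervisor's actual inline assembly.

#include <stdint.h>

/* Field names taken from the listing; using C builtins here is an
 * illustrative assumption, the real lock is written in inline assembly. */
typedef struct {
	uint32_t head;   /* next ticket handed to an arriving CPU */
	uint32_t tail;   /* ticket currently allowed to hold the lock */
} ticket_lock_sketch_t;

/* Obtain: take a ticket, then spin until it is being served. */
static inline void ticket_obtain_sketch(ticket_lock_sketch_t *lock)
{
	uint32_t ticket = __atomic_fetch_add(&lock->head, 1U, __ATOMIC_ACQUIRE);

	while (__atomic_load_n(&lock->tail, __ATOMIC_ACQUIRE) != ticket) {
		__builtin_ia32_pause();   /* same role as "pause" in a spin loop */
	}
}

/* Release: serve the next ticket, the counterpart of
 * "lock incl SYNC_SPINLOCK_TAIL_OFFSET(%rbx)" at line 85. */
static inline void ticket_release_sketch(ticket_lock_sketch_t *lock)
{
	(void)__atomic_fetch_add(&lock->tail, 1U, __ATOMIC_RELEASE);
}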
|
| bits.h |
|   185  #define build_bitmap_set(name, op_len, op_type, lock) \             (macro argument)
|   190  asm volatile(lock "or" op_len " %1,%0" \
|   205  #define build_bitmap_clear(name, op_len, op_type, lock) \           (macro argument)
|   210  asm volatile(lock "and" op_len " %1,%0" \
|   252  #define build_bitmap_testandset(name, op_len, op_type, lock) \      (macro argument)
|   258  asm volatile(lock "bts" op_len " %2,%1\n\tsbbl %0,%0" \
|   276  #define build_bitmap_testandclear(name, op_len, op_type, lock) \    (macro argument)
|   282  asm volatile(lock "btr" op_len " %2,%1\n\tsbbl %0,%0" \
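
The build_bitmap_* macros generate one helper per operation (or/and/bts/btr), with the lock parameter deciding whether the instruction carries a lock prefix and is therefore atomic across CPUs; the bts/btr variants additionally use sbb to turn the carry flag into the previous bit value. A minimal sketch of the test-and-set case follows; the asm template mirrors line 258, while the function name, the 64-bit instantiation, and the operand constraints are assumptions for illustration.

#include <stdbool.h>
#include <stdint.h>

/* Roughly what a lock-prefixed, 64-bit instantiation of
 * build_bitmap_testandset() could expand to (illustrative only). */
static inline bool bitmap_test_and_set_sketch(uint16_t nr_arg,
					      volatile uint64_t *addr)
{
	int32_t ret = 0;
	uint16_t nr = nr_arg & 0x3fU;   /* keep the bit index within one 64-bit word */

	asm volatile("lock btsq %2,%1\n\tsbbl %0,%0"
		     : "=r"(ret), "+m"(*addr)
		     : "r"((uint64_t)nr)
		     : "cc", "memory");

	return ret != 0;   /* sbbl leaves -1 when the bit was already set */
}

A non-locked instantiation would drop the prefix and is only safe when the bitmap is private to one CPU or already guarded by a spinlock.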
|
| /hypervisor/common/ |
| event.c |
|    13  spinlock_init(&event->lock);                        in init_event()
|    22  spinlock_irqsave_obtain(&event->lock, &rflag);      in reset_event()
|    25  spinlock_irqrestore_release(&event->lock, rflag);   in reset_event()
|    39  spinlock_irqsave_obtain(&event->lock, &rflag);      in wait_event()
|    44  spinlock_irqrestore_release(&event->lock, rflag);   in wait_event()
|    46  spinlock_irqsave_obtain(&event->lock, &rflag);      in wait_event()
|    50  spinlock_irqrestore_release(&event->lock, rflag);   in wait_event()
|    57  spinlock_irqsave_obtain(&event->lock, &rflag);      in signal_event()
|    62  spinlock_irqrestore_release(&event->lock, rflag);   in signal_event()
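
Every path through event.c brackets its state changes with the IRQ-saving lock pair, so events can be reset, waited on, and signalled without racing against interrupt context. A sketch of the signal side is shown below; only the lock calls and the spinlock_t member (event.h line 6) come from the listing, while the "set" flag, the elided wake-up, and the include path (inferred from the directory at the top of this listing) are assumptions.

#include <stdbool.h>
#include <stdint.h>
#include <asm/lib/spinlock.h>   /* path inferred from the directory listed above */

/* Illustrative event layout: only the spinlock_t member is confirmed
 * (event.h line 6); the "set" flag is an assumption. */
struct sched_event_sketch {
	spinlock_t lock;
	bool set;
};

/* Sketch of the signal side (compare signal_event(), lines 57 and 62):
 * flip the state under the IRQ-saving lock, then restore RFLAGS. */
static void signal_event_sketch(struct sched_event_sketch *event)
{
	uint64_t rflag;

	spinlock_irqsave_obtain(&event->lock, &rflag);     /* save RFLAGS, disable IRQs, take the lock */
	event->set = true;
	/* the real code also wakes up the thread blocked in wait_event() here */
	spinlock_irqrestore_release(&event->lock, rflag);  /* drop the lock, restore RFLAGS */
}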
|
| irq.c |
|    89  spinlock_irqsave_obtain(&desc->lock, &rflags);      in free_irq()
|    93  spinlock_irqrestore_release(&desc->lock, rflags);   in free_irq()
|   143  spinlock_irqsave_obtain(&desc->lock, &rflags);      in request_irq()
|   147  spinlock_irqrestore_release(&desc->lock, rflags);   in request_irq()
|   167  spinlock_irqsave_obtain(&desc->lock, &rflags);      in set_irq_trigger_mode()
|   173  spinlock_irqrestore_release(&desc->lock, rflags);   in set_irq_trigger_mode()
|   214  spinlock_init(&desc->lock);                         in init_irq_descs()
|
| hypercall.c |
|  1003  spinlock_obtain(&vpci->lock);                       in hcall_set_ptdev_intr_info()
|  1005  spinlock_release(&vpci->lock);                      in hcall_set_ptdev_intr_info()
|  1056  spinlock_obtain(&vpci->lock);                       in hcall_reset_ptdev_intr_info()
|  1058  spinlock_release(&vpci->lock);                      in hcall_reset_ptdev_intr_info()
|
| /hypervisor/arch/x86/ |
| page.c |
|    30  spinlock_obtain(&pool->lock);                       in alloc_page()
|    43  spinlock_release(&pool->lock);                      in alloc_page()
|    67  spinlock_obtain(&pool->lock);                       in free_page()
|    71  spinlock_release(&pool->lock);                      in free_page()
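
alloc_page() and free_page() use the plain obtain/release pair, serializing all pool bookkeeping on the per-pool lock documented at page.h line 114. Below is a trimmed-down sketch of the allocation side; only the lock member comes from the listing, the free-flag array and struct names are illustrative stand-ins for the real bookkeeping.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <asm/lib/spinlock.h>   /* path inferred from the directory listed above */

struct page_sketch {
	uint8_t contents[4096];
};

/* Hypothetical pool: only the "lock" member (page.h line 114) is from
 * the listing; everything else is an assumption for illustration. */
struct page_pool_sketch {
	spinlock_t lock;            /* protects everything below */
	struct page_sketch *pages;
	bool *used;                 /* one flag per page */
	uint64_t count;
};

static struct page_sketch *alloc_page_sketch(struct page_pool_sketch *pool)
{
	struct page_sketch *page = NULL;
	uint64_t i;

	spinlock_obtain(&pool->lock);          /* compare page.c line 30 */
	for (i = 0UL; i < pool->count; i++) {
		if (!pool->used[i]) {
			pool->used[i] = true;
			page = &pool->pages[i];
			break;
		}
	}
	spinlock_release(&pool->lock);         /* compare page.c line 43 */

	return page;
}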
|
| vtd.c |
|   125  spinlock_t lock;                                    (struct member)
|   366  spinlock_obtain(&(dmar_unit->lock));                in dmar_enable_intr_remapping()
|   385  spinlock_obtain(&(dmar_unit->lock));                in dmar_enable_translation()
|   405  spinlock_obtain(&(dmar_unit->lock));                in dmar_disable_intr_remapping()
|   420  spinlock_obtain(&(dmar_unit->lock));                in dmar_disable_translation()
|   437  spinlock_init(&dmar_unit->lock);                    in dmar_register_hrhd()
|   550  spinlock_obtain(&(dmar_unit->lock));                in dmar_issue_qi_request()
|   673  spinlock_obtain(&(dmar_unit->lock));                in dmar_set_intr_remap_table()
|   717  spinlock_obtain(&(dmar_unit->lock));                in dmar_set_root_table()
|  1283  spinlock_obtain(&dmar_unit->lock);                  in alloc_irtes()
|    [all …]
|
| /hypervisor/debug/ |
| logmsg.c |
|    23  spinlock_t lock;                                          (struct member)
|    32  spinlock_init(&(logmsg_ctl.lock));                        in init_logmsg()
|    84  spinlock_irqsave_obtain(&(logmsg_ctl.lock), &rflags);     in do_logmsg()
|    89  spinlock_irqrestore_release(&(logmsg_ctl.lock), rflags);  in do_logmsg()
|
| /hypervisor/dm/vpci/ |
| vroot_port.c |
|   143  spinlock_obtain(&vm->vpci.lock);                    in create_vrp()
|   145  spinlock_release(&vm->vpci.lock);                   in create_vrp()
|   165  spinlock_obtain(&vpci->lock);                       in destroy_vrp()
|   167  spinlock_release(&vpci->lock);                      in destroy_vrp()
|
| vpci.c |
|   266  spinlock_init(&vm->vpci.lock);                      in init_vpci()
|   652  spinlock_obtain(&vpci->lock);                       in vpci_read_cfg()
|   666  spinlock_release(&vpci->lock);                      in vpci_read_cfg()
|   679  spinlock_obtain(&vpci->lock);                       in vpci_write_cfg()
|   694  spinlock_release(&vpci->lock);                      in vpci_write_cfg()
|   810  spinlock_obtain(&service_vm->vpci.lock);            in vpci_assign_pcidev()
|   830  spinlock_obtain(&tgt_vm->vpci.lock);                in vpci_assign_pcidev()
|   867  spinlock_release(&tgt_vm->vpci.lock);               in vpci_assign_pcidev()
|   875  spinlock_release(&service_vm->vpci.lock);           in vpci_assign_pcidev()
|   900  spinlock_obtain(&vpci->lock);                       in vpci_deassign_pcidev()
|    [all …]
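
Besides guarding vpci_read_cfg()/vpci_write_cfg(), the per-VM vpci lock is taken in a fixed nesting order when a device moves between VMs: vpci_assign_pcidev() obtains the Service VM's lock first (line 810), then the target VM's (line 830), and releases them in reverse order (lines 867 and 875), which avoids deadlock as long as every caller honors the same order. A skeleton of that pattern follows, with placeholder struct and function names; only the lock member (vpci.h line 175) is confirmed by the listing.

#include <stdint.h>
#include <asm/lib/spinlock.h>   /* path inferred from the directory listed above */

/* Placeholder vpci container: only the lock member is from the listing. */
struct vpci_sketch {
	spinlock_t lock;
	/* ... vdev list, config-space state, etc. ... */
};

/* Skeleton of the nested locking in vpci_assign_pcidev():
 * Service VM lock first, target VM lock second, released in reverse. */
static int32_t assign_pcidev_sketch(struct vpci_sketch *service_vpci,
				    struct vpci_sketch *tgt_vpci)
{
	int32_t ret = 0;

	spinlock_obtain(&service_vpci->lock);    /* line 810 */
	/* look up and detach the vdev under the Service VM's lock */

	spinlock_obtain(&tgt_vpci->lock);        /* line 830 */
	/* attach the vdev to the target VM's vpci */
	spinlock_release(&tgt_vpci->lock);       /* line 867 */

	spinlock_release(&service_vpci->lock);   /* line 875 */

	return ret;
}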
|
| vmcs9900.c |
|   181  spinlock_obtain(&vm->vpci.lock);                    in create_vmcs9900_vdev()
|   183  spinlock_release(&vm->vpci.lock);                   in create_vmcs9900_vdev()
|   209  spinlock_obtain(&vpci->lock);                       in destroy_vmcs9900_vdev()
|   211  spinlock_release(&vpci->lock);                      in destroy_vmcs9900_vdev()
|
| ivshmem.c |
|   656  spinlock_obtain(&vm->vpci.lock);                    in create_ivshmem_vdev()
|   663  spinlock_release(&vm->vpci.lock);                   in create_ivshmem_vdev()
|   709  spinlock_obtain(&vpci->lock);                       in destroy_ivshmem_vdev()
|   711  spinlock_release(&vpci->lock);                      in destroy_ivshmem_vdev()
|
| /hypervisor/dm/ |
| vpic.c |
|   555  spinlock_irqsave_obtain(&(vpic->lock), &rflags);    in vpic_set_irqline()
|   623  spinlock_irqsave_obtain(&(vpic->lock), &rflags);    in vpic_pending_intr()
|   643  spinlock_irqrestore_release(&(vpic->lock), rflags); in vpic_pending_intr()
|   678  spinlock_irqsave_obtain(&(vpic->lock), &rflags);    in vpic_intr_accepted()
|   695  spinlock_irqrestore_release(&(vpic->lock), rflags); in vpic_intr_accepted()
|   704  spinlock_irqsave_obtain(&(vpic->lock), &rflags);    in vpic_read()
|   730  spinlock_irqrestore_release(&(vpic->lock), rflags); in vpic_read()
|   745  spinlock_irqsave_obtain(&(vpic->lock), &rflags);    in vpic_write()
|   780  spinlock_irqrestore_release(&(vpic->lock), rflags); in vpic_write()
|   899  spinlock_irqsave_obtain(&(vpic->lock), &rflags);    in vpic_elc_handler()
|    [all …]
|
| vioapic.c |
|   203  spinlock_irqsave_obtain(&(vioapic->lock), &rflags);     in vioapic_set_irqline_lock()
|   205  spinlock_irqrestore_release(&(vioapic->lock), rflags);  in vioapic_set_irqline_lock()
|   400  spinlock_irqsave_obtain(&(vioapic->lock), &rflags);     in vioapic_mmio_rw()
|   429  spinlock_irqrestore_release(&(vioapic->lock), rflags);  in vioapic_mmio_rw()
|   463  spinlock_irqsave_obtain(&(vioapic->lock), &rflags);     in vioapic_process_eoi()
|   478  spinlock_irqrestore_release(&(vioapic->lock), rflags);  in vioapic_process_eoi()
|   541  spinlock_init(&(vioapic->lock));                        in vioapic_init()
|
| vuart.c |
|    53  #define init_vuart_lock(vu)              spinlock_init(&((vu)->lock))
|    54  #define obtain_vuart_lock(vu, flags)     spinlock_irqsave_obtain(&((vu)->lock), &(flags))
|    55  #define release_vuart_lock(vu, flags)    spinlock_irqrestore_release(&((vu)->lock), (flags))
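
These wrappers hide the lock member behind vUART-specific names and consistently pick the IRQ-saving variants. A hypothetical register-write handler showing the intended usage is sketched below; only the macros above and the lock member (vuart.h line 106) come from the listing, the handler, struct, and field layout are assumptions, and the sketch presumes it sits in vuart.c where the macros are visible.

#include <stdint.h>
#include <asm/lib/spinlock.h>   /* path inferred from the directory listed above */

/* Placeholder vUART state: only the lock member is from the listing. */
struct vuart_sketch {
	spinlock_t lock;
	/* ... emulated UART registers and FIFOs ... */
};

static void vuart_reg_write_sketch(struct vuart_sketch *vu, uint8_t value)
{
	uint64_t rflags;

	obtain_vuart_lock(vu, rflags);     /* expands to spinlock_irqsave_obtain(&vu->lock, &rflags) */
	/* ... update the emulated registers with "value" under the lock ... */
	(void)value;
	release_vuart_lock(vu, rflags);    /* expands to spinlock_irqrestore_release(&vu->lock, rflags) */
}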
|
| /hypervisor/include/common/ |
| event.h |      6  spinlock_t lock;                                 (struct member)
|
| irq.h |       46  spinlock_t lock;                                 (struct member)
|
| /hypervisor/include/arch/x86/asm/ |
| page.h |     114  spinlock_t lock;  /**< The spinlock to protect simultaneous access of the page pool. */   (struct member)
|
| /hypervisor/include/dm/ |
| vioapic.h |   56  spinlock_t lock;                                 (struct member)
|
| vpic.h |     133  spinlock_t lock;                                 (struct member)
|
| vuart.h |    106  spinlock_t lock;  /**< The spinlock to protect simultaneous access of all elements. */   (struct member)
|
| vpci.h |     175  spinlock_t lock;                                 (struct member)
|
| /hypervisor/arch/x86/guest/ |
| ept.c |      180  spinlock_init(&ept_page_pool[vm_id].lock);       in init_ept_pgtable()
|
| vept.c |     552  spinlock_init(&sept_page_pool.lock);             in init_vept()
|