Searched refs:vcpu_is_preempted (Results 1 – 16 of 16) sorted by relevance
21 #define vcpu_is_preempted vcpu_is_preempted macro
22 static inline bool vcpu_is_preempted(int cpu) in vcpu_is_preempted() function
60 #define vcpu_is_preempted vcpu_is_preempted macro
61 static inline bool vcpu_is_preempted(long cpu) in vcpu_is_preempted() function
229 struct paravirt_callee_save vcpu_is_preempted; member
606 return PVOP_ALT_CALLEE1(bool, lock.vcpu_is_preempted, cpu, in pv_vcpu_is_preempted()
129 #define vcpu_is_preempted vcpu_is_preempted macro
130 static inline bool vcpu_is_preempted(int cpu) in vcpu_is_preempted() function
32 return pv_ops.lock.vcpu_is_preempted.func == in pv_is_native_vcpu_is_preempted()
265 .lock.vcpu_is_preempted =
643 if (!idle_cpu(cpu) && vcpu_is_preempted(cpu)) { in kvm_smp_send_call_func_ipi()
826 pv_ops.lock.vcpu_is_preempted = in kvm_guest_init()
85 pv_ops.lock.vcpu_is_preempted = PV_CALLEE_SAVE(hv_vcpu_is_preempted); in hv_init_spinlocks()
371 if (vcpu_is_preempted(owner)) in propagate_sleepy()
391 if (node->sleepy || vcpu_is_preempted(prev_cpu)) { in yield_to_prev()
403 if (vcpu_is_preempted(get_owner_cpu(val))) in yield_to_prev()
691 if (vcpu_is_preempted(next_cpu)) in queued_spin_lock_mcs_queue()
25 #define vcpu_is_preempted arch_vcpu_is_preempted macro
143 pv_ops.lock.vcpu_is_preempted = PV_CALLEE_SAVE(xen_vcpu_stolen); in xen_init_spinlocks()
147 vcpu_is_preempted(node_cpu(node->prev)))) in osq_lock()
2155 #ifndef vcpu_is_preempted
2156 static inline bool vcpu_is_preempted(int cpu) in vcpu_is_preempted() function
2176 return READ_ONCE(owner->on_cpu) && !vcpu_is_preempted(task_cpu(owner)); in owner_on_cpu()
191 if (!vcpu_is_preempted(tcpu_cpu)) in __diag_time_slice_end_directed()
231 if (vcpu_is_preempted(cpu)) in available_idle_cpu()
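Taken together, the hits trace a single mechanism: a generic fallback that always reports false, which architectures may override, plus scheduler and locking call sites that consult it so they stop spinning on, or selecting, a vCPU whose physical CPU the host has reclaimed. Below is a minimal compilable sketch of that shape, paraphrased from the #ifndef fallback, available_idle_cpu(), and owner_on_cpu() hits above; the stubs and the demo framing are mine and stand in for the real kernel definitions.

```c
#include <stdbool.h>

/* Generic fallback (the #ifndef hit at 2155/2156): without hypervisor
 * help, assume the vCPU still owns its physical CPU. */
#ifndef vcpu_is_preempted
static inline bool vcpu_is_preempted(int cpu)
{
	(void)cpu;
	return false;
}
#endif

/* Stub standing in for the scheduler's real idleness check. */
static bool idle_cpu(int cpu)
{
	(void)cpu;
	return true;
}

/* Mirrors the available_idle_cpu() hit at 231: an idle CPU whose vCPU
 * has been preempted by the host is not really available for work. */
static int available_idle_cpu(int cpu)
{
	if (!idle_cpu(cpu))
		return 0;
	if (vcpu_is_preempted(cpu))
		return 0;
	return 1;
}

int main(void)
{
	/* With the fallback in effect, an idle CPU always looks available. */
	return available_idle_cpu(0) ? 0 : 1;
}
```

The owner_on_cpu() hit at 2176 applies the same test from the other direction: a lock waiter keeps optimistically spinning only while the owner is on a CPU and that CPU's vCPU is not preempted, which is also the condition guarded by the osq_lock() hit and by the powerpc yield_to_prev()/queued_spin_lock_mcs_queue() hits.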
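The x86 hits show how the override is wired: lock.vcpu_is_preempted is a struct paravirt_callee_save member of pv_ops, pv_vcpu_is_preempted() dispatches through it via PVOP_ALT_CALLEE1(), and each guest type installs its own probe at init, as the kvm_guest_init(), hv_init_spinlocks(), and xen_init_spinlocks() hits show, each using PV_CALLEE_SAVE(). A simplified, compilable sketch of that install pattern follows; the paravirt_callee_save layout, the PV_CALLEE_SAVE() macro, and the demo_ probe here are stand-ins, not the kernel's real callee-save thunk machinery.

```c
#include <stdbool.h>

/* Simplified stand-in for the kernel's callee-save wrapper; the real
 * struct holds an opaque thunk pointer, not a typed function pointer. */
struct paravirt_callee_save {
	bool (*func)(long cpu);
};
#define PV_CALLEE_SAVE(fn) ((struct paravirt_callee_save){ .func = (fn) })

/* Mirrors the "member" hit at 229: the lock ops carry the probe. */
struct pv_lock_ops {
	struct paravirt_callee_save vcpu_is_preempted;
};
static struct { struct pv_lock_ops lock; } pv_ops;

/* Hypothetical guest probe; a real backend (KVM, Hyper-V, Xen) reads
 * host-maintained steal-time or runstate data for the cpu instead. */
static bool demo_vcpu_is_preempted(long cpu)
{
	(void)cpu;
	return false;
}

/* The install pattern from the kvm_guest_init(), hv_init_spinlocks(),
 * and xen_init_spinlocks() hits above. */
static void demo_init_spinlocks(void)
{
	pv_ops.lock.vcpu_is_preempted = PV_CALLEE_SAVE(demo_vcpu_is_preempted);
}

/* Mirrors pv_vcpu_is_preempted(): callers dispatch through the op. */
static bool pv_vcpu_is_preempted(long cpu)
{
	return pv_ops.lock.vcpu_is_preempted.func(cpu);
}

int main(void)
{
	demo_init_spinlocks();
	return pv_vcpu_is_preempted(0) ? 1 : 0;
}
```

With this wiring, pv_is_native_vcpu_is_preempted() (the hit at 32) reduces to a pointer comparison against the native default, letting callers detect whether any hypervisor probe was actually installed.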