
Searched refs: per_cpu (results 1 – 25 of 29, sorted by relevance)

/hypervisor/common/
schedule.c
21 return (obj == &per_cpu(idle, pcpu_id)); in is_idle_thread()
41 struct sched_control *ctl = &per_cpu(sched_ctl, pcpu_id); in obtain_schedule_lock()
47 struct sched_control *ctl = &per_cpu(sched_ctl, pcpu_id); in release_schedule_lock()
53 struct sched_control *ctl = &per_cpu(sched_ctl, pcpu_id); in get_scheduler()
67 struct sched_control *ctl = &per_cpu(sched_ctl, pcpu_id); in init_sched()
69 per_cpu(mode_to_idle, pcpu_id) = IDLE_MODE_HLT; in init_sched()
70 per_cpu(mode_to_kick_pcpu, pcpu_id) = DEL_MODE_IPI; in init_sched()
95 struct sched_control *ctl = &per_cpu(sched_ctl, pcpu_id); in deinit_sched()
145 struct sched_control *ctl = &per_cpu(sched_ctl, pcpu_id); in sched_get_current()
154 struct sched_control *ctl = &per_cpu(sched_ctl, pcpu_id); in make_reschedule_request()
[all …]
softirq.c
32 bitmap_set_lock(nr, &per_cpu(softirq_pending, get_pcpu_id())); in fire_softirq()
38 &per_cpu(softirq_pending, cpu_id); in do_softirq_internal()
55 if (per_cpu(softirq_servicing, cpu_id) == 0U) { in do_softirq()
56 per_cpu(softirq_servicing, cpu_id) = 1U; in do_softirq()
63 per_cpu(softirq_servicing, cpu_id) = 0U; in do_softirq()
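The softirq.c hits above trace a classic per-CPU pattern: fire_softirq() sets a bit in the current CPU's softirq_pending word, and do_softirq() drains it behind a per-CPU softirq_servicing flag so the drain loop never nests. Below is a minimal sketch of that pattern with stand-in atomic types and a stubbed-out handler dispatch; ACRN's real bitmap_set_lock() and handler table are not shown by the snippet, so this is illustrative only.

```c
#include <stdatomic.h>
#include <stdint.h>

#define NR_SOFTIRQS   8U
#define MAX_PCPU_NUM  8U   /* assumption: illustrative CPU count */

/* Stand-in per-CPU slots for the two fields these hits reference. */
static struct {
	atomic_uint_fast64_t softirq_pending;   /* one bit per softirq type */
	uint32_t softirq_servicing;             /* re-entrancy guard */
} percpu[MAX_PCPU_NUM];

#define per_cpu(name, id) (percpu[(id)].name)

/* fire_softirq() pattern (line 32): mark a softirq pending on this CPU. */
static void fire_softirq_sketch(uint16_t nr, uint16_t cpu_id)
{
	atomic_fetch_or(&per_cpu(softirq_pending, cpu_id), 1ULL << nr);
}

/* do_softirq() pattern (lines 55-63): the servicing flag keeps the drain
 * loop from nesting if another softirq fires mid-drain. */
static void do_softirq_sketch(uint16_t cpu_id)
{
	if (per_cpu(softirq_servicing, cpu_id) == 0U) {
		per_cpu(softirq_servicing, cpu_id) = 1U;
		uint64_t pending = atomic_exchange(&per_cpu(softirq_pending, cpu_id), 0U);
		for (uint16_t nr = 0U; nr < NR_SOFTIRQS; nr++) {
			if ((pending & (1ULL << nr)) != 0ULL) {
				/* dispatch the handler registered for softirq nr */
			}
		}
		per_cpu(softirq_servicing, cpu_id) = 0U;
	}
}
```

Because both fields live in the calling CPU's own slot, the servicing flag needs no lock; only the pending word, which other contexts may set, is touched atomically.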
timer.c
112 cpu_timer = &per_cpu(cpu_timers, pcpu_id); in add_timer()
176 cpu_timer = &per_cpu(cpu_timers, pcpu_id); in init_percpu_timer()
189 cpu_timer = &per_cpu(cpu_timers, pcpu_id); in timer_softirq()
thermal.c
19 vcpu = per_cpu(vcpu_array, pcpu_id)[idx]; in thermal_softirq()
sched_noop.c
12 struct sched_noop_control *noop_ctl = &per_cpu(sched_noop_ctl, ctl->pcpu_id); in sched_noop_init()
sched_iorr.c
106 struct sched_iorr_control *iorr_ctl = &per_cpu(sched_iorr_ctl, ctl->pcpu_id); in sched_iorr_add_timer()
132 struct sched_iorr_control *iorr_ctl = &per_cpu(sched_iorr_ctl, ctl->pcpu_id); in sched_iorr_init()
hv_main.c
97 struct thread_object *idle = &per_cpu(idle, pcpu_id); in run_idle_thread()
sched_prio.c
19 struct sched_prio_control *prio_ctl = &per_cpu(sched_prio_ctl, ctl->pcpu_id); in sched_prio_init()
irq.c
196 per_cpu(irq_count, get_pcpu_id())[irq]++; in do_irq()
ptdev.c
127 …entry = get_first_item(&per_cpu(softirq_dev_entry_list, pcpu_id), struct ptirq_remapping_info, sof… in ptirq_dequeue_softirq()
/hypervisor/debug/
profiling.c
322 sbuf = per_cpu(sbuf, get_pcpu_id())[ACRN_SEP]; in profiling_generate_data()
389 sbuf = per_cpu(sbuf, get_pcpu_id())[ACRN_SOCWATCH]; in profiling_generate_data()
738 per_cpu(profiling_info.ipi_cmd, i) = IPI_PMU_START; in profiling_start_pmu()
769 per_cpu(profiling_info.ipi_cmd, i) = IPI_PMU_STOP; in profiling_stop_pmu()
784 per_cpu(profiling_info.s_state, i).nofrozen_pmi); in profiling_stop_pmu()
819 per_cpu(profiling_info.ipi_cmd, i) = IPI_MSR_OP; in profiling_msr_ops_all_cpus()
858 = per_cpu(lapic_id, i); in profiling_vm_list_info()
1041 per_cpu(profiling_info.soc_state, i) in profiling_set_control()
1049 per_cpu(profiling_info.soc_state, i) in profiling_set_control()
1087 (per_cpu(profiling_info.s_state, i).pmu_state == in profiling_configure_pmi()
[all …]
sbuf.c
24 per_cpu(sbuf, pcpu_id)[sbuf_id] = (struct shared_buf *) hva; in sbuf_share_setup()
37 per_cpu(sbuf, pcpu_id)[sbuf_id] = 0U; in sbuf_reset()
npk_log.c
105 per_cpu(npk_log_ref, i) = 0U; in npk_log_setup()
149 ref = (atomic_inc_return((int32_t *)&per_cpu(npk_log_ref, cpu_id)) - 1) in npk_log_write()
161 atomic_dec32(&per_cpu(npk_log_ref, cpu_id)); in npk_log_write()
logmsg.c
62 buffer = per_cpu(logbuf, pcpu_id); in do_logmsg()
95 struct shared_buf *sbuf = per_cpu(sbuf, pcpu_id)[ACRN_HVLOG]; in do_logmsg()
trace.c
44 if (per_cpu(sbuf, cpu_id)[ACRN_TRACE] == NULL) { in trace_check()
53 struct shared_buf *sbuf = per_cpu(sbuf, cpu_id)[ACRN_TRACE]; in trace_put()
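All of the debug hits revolve around one per-CPU structure: a table of shared_buf pointers indexed by consumer ID (ACRN_HVLOG, ACRN_TRACE, ACRN_SEP, ACRN_SOCWATCH in the snippets). A NULL slot means that consumer never registered a buffer on that CPU. A sketch of the layout, assuming illustrative enum values and an opaque shared_buf type:

```c
#include <stddef.h>
#include <stdint.h>

#define MAX_PCPU_NUM 8U

/* Consumer IDs seen in the hits; the values here are illustrative. */
enum sbuf_id { ACRN_TRACE = 0, ACRN_HVLOG, ACRN_SEP, ACRN_SOCWATCH, ACRN_SBUF_ID_MAX };

struct shared_buf;   /* opaque here; a ring shared with the consumer side */

static struct {
	struct shared_buf *sbuf[ACRN_SBUF_ID_MAX];
} percpu[MAX_PCPU_NUM];

#define per_cpu(name, id) (percpu[(id)].name)

/* sbuf_share_setup() pattern (sbuf.c line 24): publish a mapped buffer. */
static void sbuf_setup_sketch(uint16_t pcpu_id, uint32_t sbuf_id, void *hva)
{
	per_cpu(sbuf, pcpu_id)[sbuf_id] = (struct shared_buf *)hva;
}

/* trace_check() pattern (trace.c line 44): a NULL slot means tracing was
 * never set up on this CPU, so the event is silently dropped. */
static int trace_enabled_sketch(uint16_t pcpu_id)
{
	return per_cpu(sbuf, pcpu_id)[ACRN_TRACE] != NULL;
}
```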
/hypervisor/arch/x86/
lapic.c
116 per_cpu(lapic_ldr, pcpu_id) = (uint32_t) msr_read(MSR_IA32_EXT_APIC_LDR); in init_lapic()
206 icr.value_32.hi_32 = per_cpu(lapic_id, dest_pcpu_id); in send_startup_ipi()
265 icr.value_32.hi_32 = per_cpu(lapic_id, pcpu_id); in send_single_ipi()
290 icr.value_32.hi_32 = per_cpu(lapic_id, pcpu_id); in send_single_init()
299 if (per_cpu(mode_to_kick_pcpu, pcpu_id) == DEL_MODE_INIT) { in kick_pcpu()
cpu.c
74 per_cpu(lapic_id, i) = lapic_id_array[i]; in init_percpu_lapic_id()
92 per_cpu(boot_state, pcpu_id) = state; in pcpu_set_current_state()
347 if (per_cpu(lapic_id, i) == lapic_id) { in get_pcpu_id_from_lapic_id()
422 bitmap_set_lock(NEED_OFFLINE, &per_cpu(pcpu_flag, pcpu_id)); in make_pcpu_offline()
430 return bitmap_test_and_clear_lock(NEED_OFFLINE, &per_cpu(pcpu_flag, pcpu_id)); in need_offline()
473 if (per_cpu(mode_to_idle, pcpu_id) == IDLE_MODE_HLT) { in cpu_do_idle()
notify.c
32 &per_cpu(smp_call_info, pcpu_id); in kick_notification()
60 smp_call = &per_cpu(smp_call_info, pcpu_id); in smp_call_function()
pm.c
170 per_cpu(tsc_suspend, get_pcpu_id()) = rdtsc(); in suspend_tsc()
175 msr_write(MSR_IA32_TIME_STAMP_COUNTER, per_cpu(tsc_suspend, get_pcpu_id())); in resume_tsc()
trampoline.c
50 stack_sym_addr = (uint64_t)&per_cpu(stack, pcpu_id)[CONFIG_STACK_SIZE - 1]; in write_trampoline_stack_sym()
/hypervisor/include/arch/x86/asm/
per_cpu.h
81 #define per_cpu(name, pcpu_id) \ macro
85 #define get_cpu_var(name) per_cpu(name, get_pcpu_id())
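The hit at line 81 is the macro behind every other result in this listing. The search snippet cuts the definition off at the backslash, but together with get_cpu_var() at line 85 the shape is clear: per_cpu() selects a named field out of a per-CPU region array, and get_cpu_var() binds it to the calling CPU. A sketch of that expansion, assuming a per_cpu_data array whose exact name and size come from the source tree; only a few fields from the hits are shown.

```c
#include <stdint.h>

#define MAX_PCPU_NUM 8U   /* assumption: config-derived in the real tree */

/* One instance of every per-CPU variable, one array slot per physical CPU.
 * The fields here are a small subset picked from the hits above. */
struct per_cpu_region {
	uint64_t softirq_pending;
	uint32_t lapic_id;
	uint32_t lapic_ldr;
	/* ... one member per per_cpu(name, ...) used across the tree ... */
};

static struct per_cpu_region per_cpu_data[MAX_PCPU_NUM];

static uint16_t get_pcpu_id(void) { return 0U; }   /* stub for the sketch */

/* per_cpu() indexes the region array and selects the field, so
 * per_cpu(lapic_id, 2U) is just the lvalue per_cpu_data[2U].lapic_id. */
#define per_cpu(name, pcpu_id)	(per_cpu_data[(pcpu_id)].name)

/* get_cpu_var(), per line 85 above, is per_cpu() bound to the calling CPU. */
#define get_cpu_var(name)	per_cpu(name, get_pcpu_id())
```

Every hit in this listing is some instance of that expansion: either against an explicit pcpu_id, or, via get_cpu_var(), against the caller's own slot.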
/hypervisor/arch/x86/guest/
vcpu.c
217 per_cpu(whose_iwkey, pcpuid_from_vcpu(vcpu)) = NULL; in init_iwkey()
286 return per_cpu(ever_run_vcpu, pcpu_id); in get_ever_run_vcpu()
528 per_cpu(ever_run_vcpu, pcpu_id) = vcpu; in create_vcpu()
539 if (per_cpu(mode_to_idle, pcpu_id) == IDLE_MODE_HLT) { in create_vcpu()
540 per_cpu(mode_to_idle, pcpu_id) = IDLE_MODE_PAUSE; in create_vcpu()
543 per_cpu(mode_to_kick_pcpu, pcpu_id) = DEL_MODE_INIT; in create_vcpu()
545 per_cpu(mode_to_kick_pcpu, pcpu_id) = DEL_MODE_IPI; in create_vcpu()
546 per_cpu(mode_to_idle, pcpu_id) = IDLE_MODE_HLT; in create_vcpu()
549 per_cpu(mode_to_kick_pcpu, pcpu_id), is_using_init_ipi()); in create_vcpu()
599 per_cpu(vcpu_array, pcpu_id)[vm->vm_id] = vcpu; in create_vcpu()
[all …]
vm_reset.c
63 &per_cpu(shutdown_vm_bitmap, pcpuid_from_vcpu(vcpu))); in triple_fault_shutdown_vm()
113 &per_cpu(shutdown_vm_bitmap, pcpuid_from_vcpu(vcpu))); in handle_common_reset_reg_write()
243 uint64_t *vms = &per_cpu(shutdown_vm_bitmap, pcpu_id); in shutdown_vm_from_idle()
pm.c
170 bitmap_set_nolock(vm->vm_id, &per_cpu(shutdown_vm_bitmap, pcpu_id)); in enter_s5()
355 bitmap_set_nolock(vm->vm_id, &per_cpu(shutdown_vm_bitmap, pcpuid_from_vcpu(vcpu))); in prelaunched_vm_sleep_io_write()
assign.c
68 dest_cluster_id = per_cpu(lapic_ldr, pcpu_id) & X2APIC_LDR_CLUSTER_ID_MASK; in calculate_logical_dest_mask()
71 cluster_id = per_cpu(lapic_ldr, pcpu_id) & X2APIC_LDR_CLUSTER_ID_MASK; in calculate_logical_dest_mask()
73 logical_id_mask |= (per_cpu(lapic_ldr, pcpu_id) & X2APIC_LDR_LOGICAL_ID_MASK); in calculate_logical_dest_mask()
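The assign.c hits read per_cpu(lapic_ldr, ...) to build an x2APIC logical destination. Architecturally (Intel SDM), the x2APIC LDR carries a cluster ID in bits 31:16 and a one-hot logical ID in bits 15:0, which matches the two masks in the hits. Below is a sketch of the calculate_logical_dest_mask() idea under the assumption that it walks a pcpu bitmap; the actual control flow and mask values in assign.c are not shown by the snippet.

```c
#include <stdbool.h>
#include <stdint.h>

#define MAX_PCPU_NUM 8U

/* Architectural x2APIC LDR layout: cluster ID in bits 31:16, one-hot
 * logical ID in bits 15:0; mask names follow the hits above. */
#define X2APIC_LDR_CLUSTER_ID_MASK  0xFFFF0000U
#define X2APIC_LDR_LOGICAL_ID_MASK  0x0000FFFFU

static struct { uint32_t lapic_ldr; } percpu[MAX_PCPU_NUM];
#define per_cpu(name, id) (percpu[(id)].name)

/* OR together the logical IDs of all requested CPUs, but only while they
 * share one cluster: a logical-cluster-mode interrupt cannot span clusters. */
static uint32_t logical_dest_mask_sketch(uint64_t pcpu_bitmap)
{
	uint32_t logical_id_mask = 0U;
	uint32_t dest_cluster_id = 0U;
	bool first = true;

	for (uint16_t id = 0U; id < MAX_PCPU_NUM; id++) {
		if ((pcpu_bitmap & (1ULL << id)) == 0ULL) {
			continue;
		}
		uint32_t cluster_id = per_cpu(lapic_ldr, id) & X2APIC_LDR_CLUSTER_ID_MASK;
		if (first) {
			dest_cluster_id = cluster_id;
			first = false;
		} else if (cluster_id != dest_cluster_id) {
			return 0U;   /* CPUs straddle clusters: no single mask exists */
		}
		logical_id_mask |= (per_cpu(lapic_ldr, id) & X2APIC_LDR_LOGICAL_ID_MASK);
	}
	return dest_cluster_id | logical_id_mask;
}
```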
