Searched refs:prev_cpu (Results 1 – 10 of 10) sorted by relevance

/arch/x86/include/asm/trace/
irq_vectors.h 155 unsigned int prev_cpu),
157 TP_ARGS(irq, vector, cpu, prev_vector, prev_cpu),
164 __field( unsigned int, prev_cpu )
172 __entry->prev_cpu = prev_cpu;
178 __entry->prev_vector, __entry->prev_cpu)
185 unsigned int prev_cpu), \
186 TP_ARGS(irq, vector, cpu, prev_vector, prev_cpu), NULL, NULL); \
/arch/powerpc/lib/
qspinlock.c 260 static struct qnode *get_tail_qnode(struct qspinlock *lock, int prev_cpu) in get_tail_qnode() argument
262 struct qnodes *qnodesp = per_cpu_ptr(&qnodes, prev_cpu); in get_tail_qnode()
376 static __always_inline bool yield_to_prev(struct qspinlock *lock, struct qnode *node, int prev_cpu,… in yield_to_prev() argument
391 if (node->sleepy || vcpu_is_preempted(prev_cpu)) { in yield_to_prev()
418 yield_count = yield_count_of(prev_cpu); in yield_to_prev()
430 yield_to_preempted(prev_cpu, yield_count); in yield_to_prev()
575 int prev_cpu = decode_tail_cpu(old); in queued_spin_lock_mcs_queue() local
576 struct qnode *prev = get_tail_qnode(lock, prev_cpu); in queued_spin_lock_mcs_queue()
586 if (yield_to_prev(lock, node, prev_cpu, paravirt)) in queued_spin_lock_mcs_queue()
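
The qspinlock.c hits above are the powerpc paravirt queued spinlock: a new queuer decodes the previous tail's CPU from the lock word and, if that vCPU looks preempted, donates its time slice to it instead of spinning uselessly. Below is a minimal userspace sketch of just that decision; vcpu_is_preempted(), yield_count_of() and yield_to_preempted() are toy stand-ins for the hypervisor interfaces, and the node->sleepy handling of the real yield_to_prev() is omitted.

#include <stdbool.h>
#include <stdio.h>

/* Toy stand-ins for the hypervisor facilities the kernel relies on. */
static bool vcpu_is_preempted(int cpu) { return cpu == 2; }     /* pretend vCPU 2 is preempted */
static unsigned int yield_count_of(int cpu) { return 41; }      /* odd count: target not running */
static void yield_to_preempted(int cpu, unsigned int count)
{
        printf("yielding to vCPU %d (yield_count %u)\n", cpu, count);
}

/* Sketch of the yield_to_prev() idea: only give up our time slice when
 * the previous queue entry's CPU appears to be preempted. */
static bool maybe_yield_to_prev(int prev_cpu)
{
        if (!vcpu_is_preempted(prev_cpu))
                return false;

        unsigned int yield_count = yield_count_of(prev_cpu);
        if (!(yield_count & 1))         /* even count: target vCPU is running */
                return false;

        yield_to_preempted(prev_cpu, yield_count);
        return true;
}

int main(void)
{
        printf("yielded: %d\n", maybe_yield_to_prev(2));
        return 0;
}
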
/arch/riscv/include/asm/
switch_to.h 99 bool thread_migrated = smp_processor_id() != task->thread.prev_cpu; in switch_to_should_flush_icache()
108 #define __set_prev_cpu(thread) ((thread).prev_cpu = smp_processor_id())
processor.h 123 unsigned int prev_cpu; member
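
On riscv, task->thread.prev_cpu records the hart a task last ran on so that switch_to_should_flush_icache() can tell whether the task migrated and may be seeing a stale instruction cache. A small userspace model of that check, with sched_getcpu() standing in for smp_processor_id() and the actual fence.i/flush machinery left out:

#define _GNU_SOURCE
#include <sched.h>
#include <stdbool.h>
#include <stdio.h>

struct thread_info {
        int prev_cpu;   /* CPU this task last ran on */
};

/* Model of the migration test: a flush is only a concern when the task
 * is resuming on a different CPU than the one it last ran on. */
static bool migrated_since_last_run(const struct thread_info *t)
{
        return sched_getcpu() != t->prev_cpu;
}

/* In the kernel this is recorded at context-switch time (__set_prev_cpu). */
static void record_prev_cpu(struct thread_info *t)
{
        t->prev_cpu = sched_getcpu();
}

int main(void)
{
        struct thread_info t;

        record_prev_cpu(&t);
        printf("migrated since last run: %s\n",
               migrated_since_last_run(&t) ? "yes" : "no");
        return 0;
}
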
/arch/sparc/kernel/
cpumap.c 193 int n, id, cpu, prev_cpu, last_cpu, level; in build_cpuinfo_tree() local
204 prev_cpu = cpu = cpumask_first(cpu_online_mask); in build_cpuinfo_tree()
268 (cpu == last_cpu) ? cpu : prev_cpu; in build_cpuinfo_tree()
290 prev_cpu = cpu; in build_cpuinfo_tree()
/arch/x86/kernel/apic/
vector.c 31 unsigned int prev_cpu; member
174 apicd->prev_cpu = apicd->cpu; in apic_update_vector()
357 apicd->prev_cpu); in clear_irq_vector()
368 per_cpu(vector_irq, apicd->prev_cpu)[vector] = VECTOR_SHUTDOWN; in clear_irq_vector()
369 irq_matrix_free(vector_matrix, apicd->prev_cpu, vector, managed); in clear_irq_vector()
648 seq_printf(m, "%*sPrevious target: %5u\n", ind, "", apicd.prev_cpu); in x86_vector_debug_show()
894 unsigned int cpu = apicd->prev_cpu; in free_moved_vector()
934 if (!vector || (apicd->cpu != cpu && apicd->prev_cpu != cpu)) in apic_force_complete_move()
1088 unsigned int cpu = apicd->prev_cpu; in __vector_schedule_cleanup()
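
In the x86 vector allocation domain, apicd->prev_cpu (together with prev_vector) remembers where an interrupt was targeted before an affinity change, so the old per-CPU vector can be released only after the move has completed. A simplified model of that two-slot bookkeeping, with plain prints in place of the kernel's irq matrix and per-CPU vector tables:

#include <stdio.h>

struct apic_chip_data {
        unsigned int cpu, vector;               /* current target */
        unsigned int prev_cpu, prev_vector;     /* old target, kept until cleanup */
};

/* Retargeting keeps the previous slot alive so interrupts still in flight
 * toward the old CPU have somewhere valid to land until cleanup runs. */
static void update_vector(struct apic_chip_data *apicd,
                          unsigned int new_cpu, unsigned int new_vector)
{
        apicd->prev_cpu = apicd->cpu;
        apicd->prev_vector = apicd->vector;
        apicd->cpu = new_cpu;
        apicd->vector = new_vector;
}

/* Cleanup releases the previous CPU's vector once the move is finished. */
static void free_moved_vector(struct apic_chip_data *apicd)
{
        printf("freeing vector %u on CPU %u\n",
               apicd->prev_vector, apicd->prev_cpu);
        apicd->prev_vector = 0;
}

int main(void)
{
        struct apic_chip_data apicd = { .cpu = 0, .vector = 33 };

        update_vector(&apicd, 3, 34);   /* affinity changed to CPU 3 */
        free_moved_vector(&apicd);      /* tear down the CPU 0 slot */
        return 0;
}
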
/arch/powerpc/kvm/
book3s_hv.c 3088 vcpu->arch.prev_cpu = -1; in kvmppc_core_vcpu_create_hv()
3297 int prev_cpu; in kvmppc_prepare_radix_vcpu() local
3303 prev_cpu = nested->prev_cpu[vcpu->arch.nested_vcpu_id]; in kvmppc_prepare_radix_vcpu()
3305 prev_cpu = vcpu->arch.prev_cpu; in kvmppc_prepare_radix_vcpu()
3318 if (prev_cpu != pcpu) { in kvmppc_prepare_radix_vcpu()
3319 if (prev_cpu >= 0) { in kvmppc_prepare_radix_vcpu()
3320 if (cpu_first_tlb_thread_sibling(prev_cpu) != in kvmppc_prepare_radix_vcpu()
3322 radix_flush_cpu(kvm, prev_cpu, vcpu); in kvmppc_prepare_radix_vcpu()
3324 smp_call_function_single(prev_cpu, in kvmppc_prepare_radix_vcpu()
3328 nested->prev_cpu[vcpu->arch.nested_vcpu_id] = pcpu; in kvmppc_prepare_radix_vcpu()
[all …]
book3s_hv_nested.c 744 memset(gp->prev_cpu, -1, sizeof(gp->prev_cpu)); in kvmhv_alloc_nested()
/arch/powerpc/include/asm/
kvm_book3s_64.h 34 short prev_cpu[NR_CPUS]; member
kvm_host.h 790 int prev_cpu; member
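
The KVM hits track the last physical CPU a (possibly nested) vCPU ran on: when the vCPU is about to run somewhere else, kvmppc_prepare_radix_vcpu() flushes the guest translations the old CPU may still be caching. A stripped-down model of that pattern; the real code also skips the flush when the old and new CPUs share a TLB (cpu_first_tlb_thread_sibling()), which this sketch ignores.

#include <stdio.h>

struct vcpu {
        int prev_cpu;   /* last physical CPU this vCPU ran on, -1 = never ran */
};

/* Stand-in for radix_flush_cpu(): the kernel flushes guest mappings that
 * the previous CPU may still hold before the vCPU runs elsewhere. */
static void flush_old_cpu(int prev_cpu)
{
        printf("flushing guest TLB state left on CPU %d\n", prev_cpu);
}

/* Model of the kvmppc_prepare_radix_vcpu() pattern: flush only when the
 * vCPU is moving to a different physical CPU than last time. */
static void prepare_vcpu(struct vcpu *v, int pcpu)
{
        if (v->prev_cpu != pcpu) {
                if (v->prev_cpu >= 0)
                        flush_old_cpu(v->prev_cpu);
                v->prev_cpu = pcpu;
        }
}

int main(void)
{
        struct vcpu v = { .prev_cpu = -1 };

        prepare_vcpu(&v, 5);    /* first run on CPU 5: nothing to flush */
        prepare_vcpu(&v, 9);    /* migrated to CPU 9: flush what CPU 5 cached */
        return 0;
}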
