Searched for refs:kvm_vcpu_arch (Results 1 – 19 of 19), sorted by relevance

/arch/riscv/kernel/
asm-offsets.c
173 OFFSET(KVM_ARCH_HOST_RA, kvm_vcpu_arch, host_context.ra); in asm_offsets()
174 OFFSET(KVM_ARCH_HOST_SP, kvm_vcpu_arch, host_context.sp); in asm_offsets()
175 OFFSET(KVM_ARCH_HOST_GP, kvm_vcpu_arch, host_context.gp); in asm_offsets()
176 OFFSET(KVM_ARCH_HOST_TP, kvm_vcpu_arch, host_context.tp); in asm_offsets()
177 OFFSET(KVM_ARCH_HOST_T0, kvm_vcpu_arch, host_context.t0); in asm_offsets()
178 OFFSET(KVM_ARCH_HOST_T1, kvm_vcpu_arch, host_context.t1); in asm_offsets()
179 OFFSET(KVM_ARCH_HOST_T2, kvm_vcpu_arch, host_context.t2); in asm_offsets()
180 OFFSET(KVM_ARCH_HOST_S0, kvm_vcpu_arch, host_context.s0); in asm_offsets()
181 OFFSET(KVM_ARCH_HOST_S1, kvm_vcpu_arch, host_context.s1); in asm_offsets()
182 OFFSET(KVM_ARCH_HOST_A0, kvm_vcpu_arch, host_context.a0); in asm_offsets()
[all …]
/arch/mips/kernel/
asm-offsets.c
368 OFFSET(VCPU_FPR0, kvm_vcpu_arch, fpu.fpr[0]); in output_kvm_defines()
369 OFFSET(VCPU_FPR1, kvm_vcpu_arch, fpu.fpr[1]); in output_kvm_defines()
370 OFFSET(VCPU_FPR2, kvm_vcpu_arch, fpu.fpr[2]); in output_kvm_defines()
371 OFFSET(VCPU_FPR3, kvm_vcpu_arch, fpu.fpr[3]); in output_kvm_defines()
372 OFFSET(VCPU_FPR4, kvm_vcpu_arch, fpu.fpr[4]); in output_kvm_defines()
373 OFFSET(VCPU_FPR5, kvm_vcpu_arch, fpu.fpr[5]); in output_kvm_defines()
374 OFFSET(VCPU_FPR6, kvm_vcpu_arch, fpu.fpr[6]); in output_kvm_defines()
375 OFFSET(VCPU_FPR7, kvm_vcpu_arch, fpu.fpr[7]); in output_kvm_defines()
376 OFFSET(VCPU_FPR8, kvm_vcpu_arch, fpu.fpr[8]); in output_kvm_defines()
377 OFFSET(VCPU_FPR9, kvm_vcpu_arch, fpu.fpr[9]); in output_kvm_defines()
[all …]
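
Both asm-offsets.c hits above (and the loongarch one further down) use the kernel's OFFSET() helper, which reduces to offsetof(struct kvm_vcpu_arch, member): the build exports each field's byte offset as a named constant so hand-written assembly can address members of the C structure. The following is only a minimal userspace sketch of that idea, using a made-up struct toy_vcpu_arch and printing plain #define lines instead of going through the real kbuild asm-offsets machinery.

/* Minimal sketch of the asm-offsets idea: export struct member offsets
 * as named constants for assembly code. The struct and the constant
 * names below are hypothetical stand-ins, not the kernel's layout. */
#include <stddef.h>
#include <stdio.h>

struct toy_context {
	unsigned long ra;
	unsigned long sp;
	unsigned long gp;
};

struct toy_vcpu_arch {
	unsigned long pc;
	struct toy_context host_context;
};

/* Print one "#define NAME offset" line, like a generated asm-offsets header. */
#define OFFSET(sym, type, member) \
	printf("#define %-24s %zu\n", #sym, offsetof(struct type, member))

int main(void)
{
	OFFSET(TOY_ARCH_PC,      toy_vcpu_arch, pc);
	OFFSET(TOY_ARCH_HOST_RA, toy_vcpu_arch, host_context.ra);
	OFFSET(TOY_ARCH_HOST_SP, toy_vcpu_arch, host_context.sp);
	OFFSET(TOY_ARCH_HOST_GP, toy_vcpu_arch, host_context.gp);
	return 0;
}

Run as-is, this prints one constant per field; in the kernel the corresponding values end up in a generated header that the low-level guest entry/exit assembly includes.
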
/arch/mips/kvm/
entry.c
246 UASM_i_LW(&p, GPR_T0, offsetof(struct kvm_vcpu_arch, pc), GPR_K1); in kvm_mips_build_enter_guest()
304 UASM_i_SW(&p, GPR_K0, offsetof(struct kvm_vcpu_arch, host_entryhi), in kvm_mips_build_enter_guest()
352 UASM_i_LW(&p, GPR_K0, offsetof(struct kvm_vcpu_arch, hi), GPR_K1); in kvm_mips_build_enter_guest()
355 UASM_i_LW(&p, GPR_K0, offsetof(struct kvm_vcpu_arch, lo), GPR_K1); in kvm_mips_build_enter_guest()
543 UASM_i_SW(&p, GPR_T0, offsetof(struct kvm_vcpu_arch, hi), GPR_K1); in kvm_mips_build_exit()
546 UASM_i_SW(&p, GPR_T0, offsetof(struct kvm_vcpu_arch, lo), GPR_K1); in kvm_mips_build_exit()
575 uasm_i_sw(&p, GPR_K0, offsetof(struct kvm_vcpu_arch, in kvm_mips_build_exit()
581 uasm_i_sw(&p, GPR_K0, offsetof(struct kvm_vcpu_arch, in kvm_mips_build_exit()
611 uasm_i_sw(&p, GPR_T0, offsetof(struct kvm_vcpu_arch, fpu.fcr31), in kvm_mips_build_exit()
647 offsetof(struct kvm_vcpu_arch, host_pgd), GPR_K1); in kvm_mips_build_exit()
[all …]
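
In entry.c the same offsetof(struct kvm_vcpu_arch, ...) values are used differently: they become load/store displacements, relative to a base register holding the vcpu arch pointer, in the guest entry/exit code that the MIPS uasm micro-assembler builds at runtime. The fragment below only sketches that displacement pattern with an invented emit_load() helper and a trimmed dummy struct; it is not the uasm API.

/* Loose sketch of generating "load field of the vcpu arch struct":
 * the displacement is simply offsetof() of the field. emit_load() and
 * the struct are hypothetical; the kernel uses the uasm interfaces. */
#include <stddef.h>
#include <stdio.h>

struct toy_vcpu_arch {
	unsigned long pc;
	unsigned long hi;
	unsigned long lo;
};

/* Pretend to emit "lw dest, disp(base)" by printing it. */
static void emit_load(const char *dest, size_t disp, const char *base)
{
	printf("lw %s, %zu(%s)\n", dest, disp, base);
}

int main(void)
{
	/* The base register is assumed to hold a pointer to the arch struct. */
	emit_load("t0", offsetof(struct toy_vcpu_arch, pc), "k1");
	emit_load("k0", offsetof(struct toy_vcpu_arch, hi), "k1");
	emit_load("k0", offsetof(struct toy_vcpu_arch, lo), "k1");
	return 0;
}
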
vz.c
1081 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_vz_gpsi_cache()
1216 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_trap_vz_handle_gpsi()
1298 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_trap_vz_handle_gsfc()
emulate.c
45 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_compute_return_epc()
/arch/loongarch/include/asm/
kvm_host.h
169 struct kvm_vcpu_arch {
266 static inline bool kvm_guest_has_fpu(struct kvm_vcpu_arch *arch) in kvm_guest_has_fpu()
271 static inline bool kvm_guest_has_lsx(struct kvm_vcpu_arch *arch) in kvm_guest_has_lsx()
276 static inline bool kvm_guest_has_lasx(struct kvm_vcpu_arch *arch) in kvm_guest_has_lasx()
281 static inline bool kvm_guest_has_lbt(struct kvm_vcpu_arch *arch) in kvm_guest_has_lbt()
286 static inline bool kvm_guest_has_pmu(struct kvm_vcpu_arch *arch) in kvm_guest_has_pmu()
291 static inline int kvm_get_pmu_num(struct kvm_vcpu_arch *arch) in kvm_get_pmu_num()
310 static inline void update_pc(struct kvm_vcpu_arch *arch) in update_pc()
322 static inline bool kvm_is_ifetch_fault(struct kvm_vcpu_arch *arch) in kvm_is_ifetch_fault()
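
The loongarch header hits show the usual shape of struct kvm_vcpu_arch consumers inside a header: small static inline predicates and helpers that take a struct kvm_vcpu_arch * and inspect per-vCPU state. Below is a hedged, self-contained approximation of that shape; the struct layout, the features bitmask and the TOY_FEATURE_* bits are invented for illustration and do not match the real LoongArch definitions.

/* Sketch of per-vCPU feature predicates on a kvm_vcpu_arch-like struct.
 * The struct, field names and feature bits are hypothetical. */
#include <stdbool.h>
#include <stdio.h>

#define TOY_FEATURE_FPU  (1u << 0)
#define TOY_FEATURE_LSX  (1u << 1)
#define TOY_FEATURE_PMU  (1u << 2)

struct toy_vcpu_arch {
	unsigned int features;   /* bitmask of enabled guest features */
	unsigned long pc;
};

static inline bool toy_guest_has_fpu(struct toy_vcpu_arch *arch)
{
	return arch->features & TOY_FEATURE_FPU;
}

static inline bool toy_guest_has_lsx(struct toy_vcpu_arch *arch)
{
	return arch->features & TOY_FEATURE_LSX;
}

static inline bool toy_guest_has_pmu(struct toy_vcpu_arch *arch)
{
	return arch->features & TOY_FEATURE_PMU;
}

int main(void)
{
	struct toy_vcpu_arch arch = { .features = TOY_FEATURE_FPU | TOY_FEATURE_PMU };

	printf("fpu=%d lsx=%d pmu=%d\n",
	       toy_guest_has_fpu(&arch),
	       toy_guest_has_lsx(&arch),
	       toy_guest_has_pmu(&arch));
	return 0;
}
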
/arch/loongarch/kernel/
asm-offsets.c
287 OFFSET(VCPU_FCC, kvm_vcpu_arch, fpu.fcc); in output_kvm_defines()
288 OFFSET(VCPU_FCSR0, kvm_vcpu_arch, fpu.fcsr); in output_kvm_defines()
296 OFFSET(KVM_ARCH_HSP, kvm_vcpu_arch, host_sp); in output_kvm_defines()
297 OFFSET(KVM_ARCH_HTP, kvm_vcpu_arch, host_tp); in output_kvm_defines()
298 OFFSET(KVM_ARCH_HPGD, kvm_vcpu_arch, host_pgd); in output_kvm_defines()
299 OFFSET(KVM_ARCH_KVMPGD, kvm_vcpu_arch, kvm_pgd); in output_kvm_defines()
303 OFFSET(KVM_ARCH_GPC, kvm_vcpu_arch, pc); in output_kvm_defines()
304 OFFSET(KVM_ARCH_GGPR, kvm_vcpu_arch, gprs); in output_kvm_defines()
305 OFFSET(KVM_ARCH_HBADI, kvm_vcpu_arch, badi); in output_kvm_defines()
306 OFFSET(KVM_ARCH_HBADV, kvm_vcpu_arch, badv); in output_kvm_defines()
[all …]
/arch/mips/include/asm/
kvm_host.h
290 struct kvm_vcpu_arch {
698 static inline bool kvm_mips_guest_has_fpu(struct kvm_vcpu_arch *vcpu) in kvm_mips_guest_has_fpu()
710 static inline bool kvm_mips_guest_has_msa(struct kvm_vcpu_arch *vcpu) in kvm_mips_guest_has_msa()
776 void __kvm_save_fpu(struct kvm_vcpu_arch *vcpu);
777 void __kvm_restore_fpu(struct kvm_vcpu_arch *vcpu);
778 void __kvm_restore_fcsr(struct kvm_vcpu_arch *vcpu);
779 void __kvm_save_msa(struct kvm_vcpu_arch *vcpu);
780 void __kvm_restore_msa(struct kvm_vcpu_arch *vcpu);
781 void __kvm_restore_msa_upper(struct kvm_vcpu_arch *vcpu);
782 void __kvm_restore_msacsr(struct kvm_vcpu_arch *vcpu);
[all …]
/arch/riscv/include/asm/
kvm_host.h
182 struct kvm_vcpu_arch {
302 void __kvm_riscv_switch_to(struct kvm_vcpu_arch *vcpu_arch);
kvm_nacl.h
15 struct kvm_vcpu_arch;
48 void __kvm_riscv_nacl_switch_to(struct kvm_vcpu_arch *vcpu_arch,
/arch/riscv/kvm/
tlb.c
193 struct kvm_vcpu_arch *varch = &vcpu->arch; in vcpu_hfence_dequeue()
218 struct kvm_vcpu_arch *varch = &vcpu->arch; in vcpu_hfence_enqueue()
vcpu.c
366 struct kvm_vcpu_arch *v = &vcpu->arch; in kvm_riscv_vcpu_sync_interrupts()
/arch/arm64/include/asm/
kvm_host.h
793 struct kvm_vcpu_arch {
1408 static inline void kvm_arm_pvtime_vcpu_init(struct kvm_vcpu_arch *vcpu_arch) in kvm_arm_pvtime_vcpu_init()
1413 static inline bool kvm_arm_is_pvtime_enabled(struct kvm_vcpu_arch *vcpu_arch) in kvm_arm_is_pvtime_enabled()
/arch/s390/include/asm/
kvm_host.h
414 struct kvm_vcpu_arch {
/arch/powerpc/include/asm/
kvm_host.h
528 struct kvm_vcpu_arch {
/arch/x86/include/asm/
kvm_host.h
779 struct kvm_vcpu_arch {
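
The kvm_host.h hits for arm64, s390, powerpc and x86 (like the loongarch, mips and riscv ones earlier) are the per-architecture definitions of struct kvm_vcpu_arch itself. The generic struct kvm_vcpu embeds one of these as its arch member, which is why so many of the C hits in this listing start by taking a shorthand pointer via &vcpu->arch. The sketch below mirrors that layering with hypothetical, heavily trimmed structs rather than the real kernel definitions.

/* Sketch of the generic/arch split: a generic vcpu struct embeds an
 * architecture-specific arch member, and arch code aliases it locally.
 * All structs and fields here are illustrative stand-ins. */
#include <stdio.h>

struct toy_vcpu_arch {            /* per-architecture state */
	unsigned long pc;
	unsigned long host_sp;
};

struct toy_vcpu {                 /* generic, architecture-independent part */
	int vcpu_id;
	struct toy_vcpu_arch arch;    /* embedded arch-specific block */
};

static void toy_sync_interrupts(struct toy_vcpu *vcpu)
{
	/* Same idiom as the search hits: take a shorthand pointer to ->arch. */
	struct toy_vcpu_arch *v = &vcpu->arch;

	v->pc += 4;  /* stand-in for real per-vCPU bookkeeping */
}

int main(void)
{
	struct toy_vcpu vcpu = { .vcpu_id = 0, .arch = { .pc = 0x1000 } };

	toy_sync_interrupts(&vcpu);
	printf("vcpu %d pc=0x%lx\n", vcpu.vcpu_id, vcpu.arch.pc);
	return 0;
}
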
/arch/powerpc/kvm/
book3s_hv.c
4196 struct kvm_vcpu_arch *arch; in kvmhv_get_l1_to_l2_cs_time_vcpu()
4211 struct kvm_vcpu_arch *arch; in kvmhv_get_l2_to_l1_cs_time_vcpu()
4226 struct kvm_vcpu_arch *arch; in kvmhv_get_l2_runtime_agg_vcpu()
book3s_pr.c
516 struct kvm_vcpu_arch *a = &vcpu->arch; in kvmppc_set_msr_pr()
/arch/x86/kvm/
x86.c
3205 struct kvm_vcpu_arch *vcpu = &v->arch; in kvm_guest_time_update()
