
Searched refs:vcpu_cpsr (Results 1 – 14 of 14) sorted by relevance

/linux/arch/arm64/kvm/
debug.c 50 (*vcpu_cpsr(vcpu) & DBG_SPSR_SS); in save_guest_debug_regs()
63 *vcpu_cpsr(vcpu) |= DBG_SPSR_SS; in restore_guest_debug_regs()
65 *vcpu_cpsr(vcpu) &= ~DBG_SPSR_SS; in restore_guest_debug_regs()
212 *vcpu_cpsr(vcpu) |= DBG_SPSR_SS; in kvm_arm_setup_debug()
214 *vcpu_cpsr(vcpu) &= ~DBG_SPSR_SS; in kvm_arm_setup_debug()
225 trace_kvm_arm_set_dreg32("SPSR_EL2", *vcpu_cpsr(vcpu)); in kvm_arm_setup_debug()
290 if (!(*vcpu_cpsr(vcpu) & DBG_SPSR_SS)) in kvm_arm_clear_debug()
inject_fault.c 32 switch(*vcpu_cpsr(vcpu) & PSR_MODE_MASK) { in pend_sync_exception()
59 unsigned long cpsr = *vcpu_cpsr(vcpu); in inject_abt64()
trace_arm.h 379 __entry->spsr_el2 = *vcpu_cpsr(vcpu);
381 __entry->source_mode = *vcpu_cpsr(vcpu) & (PSR_MODE_MASK | PSR_MODE32_BIT);
guest.c 277 if (*vcpu_cpsr(vcpu) & PSR_MODE32_BIT) { in set_core_reg()
280 switch (*vcpu_cpsr(vcpu) & PSR_AA32_MODE_MASK) { in set_core_reg()
at.c 458 write_sysreg_s(*vcpu_cpsr(vcpu) & PSTATE_PAN, SYS_PSTATE_PAN); in at_s1e1p_fast()
815 pan = *vcpu_cpsr(vcpu) & PSR_PAN_BIT; in handle_at_slow()
emulate-nested.c 2337 spsr = *vcpu_cpsr(vcpu); in kvm_check_illegal_exception_return()
2392 *vcpu_cpsr(vcpu) = spsr; in kvm_emulate_nested_eret()
2439 pstate = *vcpu_cpsr(vcpu); in kvm_inject_nested()
sys_regs.c 3800 cp, *vcpu_pc(vcpu), *vcpu_cpsr(vcpu)); in unhandled_cp_access()
4055 *vcpu_pc(vcpu), *vcpu_cpsr(vcpu)); in emulate_sys_reg()
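
A pattern running through the kvm/ hits above is that *vcpu_cpsr(vcpu) is treated as a writable PSTATE image: debug.c saves, sets and clears the software-step bit (DBG_SPSR_SS) in it while emulating guest single-step. Below is a minimal user-space sketch of that bit handling; the vcpu struct and accessor are hypothetical simplified stand-ins, only the flag manipulation mirrors the listing.

#include <stdio.h>

#define DBG_SPSR_SS	(1UL << 21)	/* PSTATE.SS: software step pending */

struct fake_vcpu {
	unsigned long pstate;		/* stand-in for the saved guest PSTATE */
};

/* Stand-in for vcpu_cpsr(): returns a pointer so callers can read and write. */
static unsigned long *vcpu_cpsr(struct fake_vcpu *vcpu)
{
	return &vcpu->pstate;
}

int main(void)
{
	struct fake_vcpu vcpu = { .pstate = 0x3c5 };
	int guest_single_step = 1;	/* pretend the guest had SS pending */

	/* restore_guest_debug_regs()-style: put the guest's SS bit back. */
	if (guest_single_step)
		*vcpu_cpsr(&vcpu) |= DBG_SPSR_SS;
	else
		*vcpu_cpsr(&vcpu) &= ~DBG_SPSR_SS;

	/* kvm_arm_clear_debug()-style test: has the pending step completed? */
	if (!(*vcpu_cpsr(&vcpu) & DBG_SPSR_SS))
		printf("single step completed\n");
	else
		printf("SS still pending: pstate=%#lx\n", *vcpu_cpsr(&vcpu));

	return 0;
}
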
/linux/arch/arm64/kvm/hyp/
aarch32.c 76 cpsr = *vcpu_cpsr(vcpu); in kvm_condition_valid32()
113 unsigned long cpsr = *vcpu_cpsr(vcpu); in kvm_adjust_itstate()
133 *vcpu_cpsr(vcpu) = cpsr; in kvm_adjust_itstate()
145 is_thumb = !!(*vcpu_cpsr(vcpu) & PSR_AA32_T_BIT); in kvm_skip_instr32()
exception.c 97 mode = *vcpu_cpsr(vcpu) & (PSR_MODE_MASK | PSR_MODE32_BIT); in enter_exception64()
126 old = *vcpu_cpsr(vcpu); in enter_exception64()
170 *vcpu_cpsr(vcpu) = new; in enter_exception64()
197 old = *vcpu_cpsr(vcpu); in get_except32_cpsr()
288 unsigned long spsr = *vcpu_cpsr(vcpu); in enter_exception32()
293 *vcpu_cpsr(vcpu) = get_except32_cpsr(vcpu, mode); in enter_exception32()
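
The hyp/ hits read the mode field out of the same PSTATE image: exception.c masks *vcpu_cpsr(vcpu) with PSR_MODE_MASK and PSR_MODE32_BIT to decide which exception level and register width the guest was in. A small sketch of that mask logic follows; the constants carry their usual arm64 values, while the vcpu type and accessor are simplified stand-ins.

#include <stdio.h>

#define PSR_MODE_EL1h	0x00000005UL
#define PSR_MODE_MASK	0x0000000fUL
#define PSR_MODE32_BIT	0x00000010UL

struct fake_vcpu { unsigned long pstate; };

static unsigned long *vcpu_cpsr(struct fake_vcpu *vcpu)
{
	return &vcpu->pstate;
}

int main(void)
{
	struct fake_vcpu vcpu = { .pstate = PSR_MODE_EL1h };

	/* enter_exception64()-style: exception level plus register width. */
	unsigned long mode = *vcpu_cpsr(&vcpu) & (PSR_MODE_MASK | PSR_MODE32_BIT);

	if (mode & PSR_MODE32_BIT)
		printf("guest was executing in an AArch32 mode\n");
	else if (mode == PSR_MODE_EL1h)
		printf("guest was at EL1h\n");
	else
		printf("guest mode bits: %#lx\n", mode);

	return 0;
}
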
/linux/arch/arm64/kvm/hyp/include/hyp/
adjust_pc.h 22 *vcpu_cpsr(vcpu) &= ~PSR_BTYPE_MASK; in kvm_skip_instr()
26 *vcpu_cpsr(vcpu) &= ~DBG_SPSR_SS; in kvm_skip_instr()
switch.h 308 *vcpu_cpsr(vcpu) &= ~DBG_SPSR_SS; in kvm_hyp_handle_mops()
309 write_sysreg_el2(*vcpu_cpsr(vcpu), SYS_SPSR); in kvm_hyp_handle_mops()
651 *vcpu_cpsr(vcpu) & DBG_SPSR_SS && in synchronize_vcpu_pstate()
653 write_sysreg_el2(*vcpu_cpsr(vcpu), SYS_SPSR); in synchronize_vcpu_pstate()
/linux/arch/arm64/include/asm/
kvm_emulate.h 130 static __always_inline unsigned long *vcpu_cpsr(const struct kvm_vcpu *vcpu) in vcpu_cpsr() function
137 return !!(*vcpu_cpsr(vcpu) & PSR_MODE32_BIT); in vcpu_mode_is_32bit()
150 *vcpu_cpsr(vcpu) |= PSR_AA32_T_BIT; in vcpu_set_thumb()
265 mode = *vcpu_cpsr(vcpu) & PSR_AA32_MODE_MASK; in vcpu_mode_priv()
269 mode = *vcpu_cpsr(vcpu) & PSR_MODE_MASK; in vcpu_mode_priv()
453 *vcpu_cpsr(vcpu) |= PSR_AA32_E_BIT; in kvm_vcpu_set_be()
464 return !!(*vcpu_cpsr(vcpu) & PSR_AA32_E_BIT); in kvm_vcpu_is_be()
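
The kvm_emulate.h hits show where vcpu_cpsr() itself is defined (line 130) and how the surrounding helpers are thin wrappers over it, testing or setting single PSTATE bits. Below is a hedged, simplified sketch of that accessor-plus-wrappers shape; the struct layout and field names are hypothetical stand-ins rather than the kernel's real kvm_vcpu, and only the bit tests mirror the listing.

#include <stdbool.h>
#include <stdio.h>

#define PSR_MODE32_BIT	0x00000010UL
#define PSR_AA32_T_BIT	0x00000020UL
#define PSR_AA32_E_BIT	0x00000200UL

struct fake_regs { unsigned long pstate; };
struct fake_vcpu { struct fake_regs regs; };

/* Pointer-returning accessor: every helper reads or writes through it. */
static inline unsigned long *vcpu_cpsr(struct fake_vcpu *vcpu)
{
	return &vcpu->regs.pstate;
}

static inline bool vcpu_mode_is_32bit(struct fake_vcpu *vcpu)
{
	return !!(*vcpu_cpsr(vcpu) & PSR_MODE32_BIT);
}

static inline void vcpu_set_thumb(struct fake_vcpu *vcpu)
{
	*vcpu_cpsr(vcpu) |= PSR_AA32_T_BIT;
}

static inline bool kvm_vcpu_is_be(struct fake_vcpu *vcpu)
{
	return !!(*vcpu_cpsr(vcpu) & PSR_AA32_E_BIT);
}

int main(void)
{
	struct fake_vcpu vcpu = { .regs = { .pstate = PSR_MODE32_BIT } };

	vcpu_set_thumb(&vcpu);
	printf("32bit=%d thumb=%d big-endian=%d\n",
	       vcpu_mode_is_32bit(&vcpu),
	       !!(*vcpu_cpsr(&vcpu) & PSR_AA32_T_BIT),
	       kvm_vcpu_is_be(&vcpu));
	return 0;
}
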
/linux/arch/arm64/kvm/hyp/vhe/
switch.c 446 u64 mode = *vcpu_cpsr(vcpu) & (PSR_MODE_MASK | PSR_MODE32_BIT); in early_exit_filter()
457 *vcpu_cpsr(vcpu) &= ~(PSR_MODE_MASK | PSR_MODE32_BIT); in early_exit_filter()
458 *vcpu_cpsr(vcpu) |= mode; in early_exit_filter()
/linux/arch/arm64/kvm/hyp/nvhe/
sys_regs.c 40 *vcpu_cpsr(vcpu) = read_sysreg_el2(SYS_SPSR); in inject_undef64()
49 write_sysreg_el2(*vcpu_cpsr(vcpu), SYS_SPSR); in inject_undef64()
