Home
last modified time | relevance | path

Searched refs:vcpu_cpsr (Results 1 – 15 of 15) sorted by relevance

/arch/arm64/kvm/hyp/
aarch32.c:76 cpsr = *vcpu_cpsr(vcpu); in kvm_condition_valid32()
113 unsigned long cpsr = *vcpu_cpsr(vcpu); in kvm_adjust_itstate()
133 *vcpu_cpsr(vcpu) = cpsr; in kvm_adjust_itstate()
145 is_thumb = !!(*vcpu_cpsr(vcpu) & PSR_AA32_T_BIT); in kvm_skip_instr32()
exception.c:99 mode = *vcpu_cpsr(vcpu) & (PSR_MODE_MASK | PSR_MODE32_BIT); in enter_exception64()
128 old = *vcpu_cpsr(vcpu); in enter_exception64()
172 *vcpu_cpsr(vcpu) = new; in enter_exception64()
199 old = *vcpu_cpsr(vcpu); in get_except32_cpsr()
290 unsigned long spsr = *vcpu_cpsr(vcpu); in enter_exception32()
295 *vcpu_cpsr(vcpu) = get_except32_cpsr(vcpu, mode); in enter_exception32()
/arch/arm64/kvm/
debug.c:163 if (*vcpu_cpsr(vcpu) & DBG_SPSR_SS) in kvm_vcpu_load_debug()
169 *vcpu_cpsr(vcpu) |= DBG_SPSR_SS; in kvm_vcpu_load_debug()
171 *vcpu_cpsr(vcpu) &= ~DBG_SPSR_SS; in kvm_vcpu_load_debug()
194 if (!(*vcpu_cpsr(vcpu) & DBG_SPSR_SS)) in kvm_vcpu_put_debug()
200 *vcpu_cpsr(vcpu) &= ~DBG_SPSR_SS; in kvm_vcpu_put_debug()
202 *vcpu_cpsr(vcpu) |= DBG_SPSR_SS; in kvm_vcpu_put_debug()
inject_fault.c:30 switch(*vcpu_cpsr(vcpu) & PSR_MODE_MASK) { in exception_target_el()
107 unsigned long cpsr = *vcpu_cpsr(vcpu); in inject_abt64()
218 return (*vcpu_cpsr(vcpu) & PSR_A_BIT) && in kvm_sea_target_is_el2()
275 return (*vcpu_cpsr(vcpu) & PSR_A_BIT) && !effective_sctlr2_nmea(vcpu); in serror_is_masked()
293 return *vcpu_cpsr(vcpu) & PSR_A_BIT; in kvm_serror_target_is_el2()
trace_arm.h:379 __entry->spsr_el2 = *vcpu_cpsr(vcpu);
381 __entry->source_mode = *vcpu_cpsr(vcpu) & (PSR_MODE_MASK | PSR_MODE32_BIT);
handle_exit.c:204 *vcpu_cpsr(vcpu) |= DBG_SPSR_SS; in kvm_handle_guest_debug()
guest.c:277 if (*vcpu_cpsr(vcpu) & PSR_MODE32_BIT) { in set_core_reg()
280 switch (*vcpu_cpsr(vcpu) & PSR_AA32_MODE_MASK) { in set_core_reg()
at.c:525 write_sysreg_s(*vcpu_cpsr(vcpu) & PSTATE_PAN, SYS_PSTATE_PAN); in at_s1e1p_fast()
1141 (*vcpu_cpsr(vcpu) & PSR_PAN_BIT); in handle_at_slow()
emulate-nested.c:2657 spsr = *vcpu_cpsr(vcpu); in kvm_check_illegal_exception_return()
2706 *vcpu_cpsr(vcpu) = spsr; in kvm_emulate_nested_eret()
2760 pstate = *vcpu_cpsr(vcpu); in kvm_inject_nested()
sys_regs.c:4463 cp, *vcpu_pc(vcpu), *vcpu_cpsr(vcpu)); in unhandled_cp_access()
4722 *vcpu_pc(vcpu), *vcpu_cpsr(vcpu)); in emulate_sys_reg()
/arch/arm64/kvm/hyp/include/hyp/
adjust_pc.h:22 *vcpu_cpsr(vcpu) &= ~PSR_BTYPE_MASK; in kvm_skip_instr()
26 *vcpu_cpsr(vcpu) &= ~DBG_SPSR_SS; in kvm_skip_instr()
switch.h:547 *vcpu_cpsr(vcpu) &= ~DBG_SPSR_SS; in kvm_hyp_handle_mops()
548 write_sysreg_el2(*vcpu_cpsr(vcpu), SYS_SPSR); in kvm_hyp_handle_mops()
987 *vcpu_cpsr(vcpu) & DBG_SPSR_SS && in synchronize_vcpu_pstate()
989 write_sysreg_el2(*vcpu_cpsr(vcpu), SYS_SPSR); in synchronize_vcpu_pstate()
/arch/arm64/include/asm/
kvm_emulate.h:153 static __always_inline unsigned long *vcpu_cpsr(const struct kvm_vcpu *vcpu) in vcpu_cpsr() function
160 return !!(*vcpu_cpsr(vcpu) & PSR_MODE32_BIT); in vcpu_mode_is_32bit()
173 *vcpu_cpsr(vcpu) |= PSR_AA32_T_BIT; in vcpu_set_thumb()
306 mode = *vcpu_cpsr(vcpu) & PSR_AA32_MODE_MASK; in vcpu_mode_priv()
310 mode = *vcpu_cpsr(vcpu) & PSR_MODE_MASK; in vcpu_mode_priv()
512 *vcpu_cpsr(vcpu) |= PSR_AA32_E_BIT; in kvm_vcpu_set_be()
523 return !!(*vcpu_cpsr(vcpu) & PSR_AA32_E_BIT); in kvm_vcpu_is_be()
/arch/arm64/kvm/hyp/vhe/
switch.c:539 u64 mode = *vcpu_cpsr(vcpu) & (PSR_MODE_MASK | PSR_MODE32_BIT); in fixup_guest_exit()
550 *vcpu_cpsr(vcpu) &= ~(PSR_MODE_MASK | PSR_MODE32_BIT); in fixup_guest_exit()
551 *vcpu_cpsr(vcpu) |= mode; in fixup_guest_exit()
/arch/arm64/kvm/hyp/nvhe/
sys_regs.c:255 *vcpu_cpsr(vcpu) = read_sysreg_el2(SYS_SPSR); in inject_undef64()
264 write_sysreg_el2(*vcpu_cpsr(vcpu), SYS_SPSR); in inject_undef64()

Completed in 49 milliseconds