Searched refs:X86_CR0_TS (Results 1 – 15 of 15) sorted by relevance
57   v->arch.pv_vcpu.ctrlreg[0] |= X86_CR0_TS; in do_fpu_taskswitch()
62   v->arch.pv_vcpu.ctrlreg[0] &= ~X86_CR0_TS; in do_fpu_taskswitch()
708  *val = (read_cr0() & ~X86_CR0_TS) | curr->arch.pv_vcpu.ctrlreg[0]; in read_cr()
752  if ( (val ^ read_cr0()) & ~X86_CR0_TS ) in write_cr()
758  do_fpu_taskswitch(!!(val & X86_CR0_TS)); in write_cr()
201  if ( !(n1cr0 & X86_CR0_TS) && (n1vmcb->_cr0 & X86_CR0_TS) ) { in nestedsvm_fpu_vmentry()
205  vcr0 |= X86_CR0_TS; in nestedsvm_fpu_vmentry()
207  } else if ( !(vcr0 & X86_CR0_TS) && (n2vmcb->_cr0 & X86_CR0_TS) ) { in nestedsvm_fpu_vmentry()
210  vcr0 &= ~X86_CR0_TS; in nestedsvm_fpu_vmentry()
220  if ( !(guest_cr0 & X86_CR0_TS) && (n2vmcb->_cr0 & X86_CR0_TS) ) { in nestedsvm_fpu_vmexit()
223  n1vmcb->_cr0 |= X86_CR0_TS; in nestedsvm_fpu_vmexit()
225  } else if ( !(n1cr0 & X86_CR0_TS) && (n1vmcb->_cr0 & X86_CR0_TS) ) { in nestedsvm_fpu_vmexit()
228  n1vmcb->_cr0 &= ~X86_CR0_TS; in nestedsvm_fpu_vmexit()
478  ASSERT(read_cr0() & X86_CR0_TS); in svm_fpu_leave()
486  if ( !(v->arch.hvm_vcpu.guest_cr[0] & X86_CR0_TS) ) in svm_fpu_leave()
491  vmcb_set_cr0(n1vmcb, vmcb_get_cr0(n1vmcb) | X86_CR0_TS); in svm_fpu_leave()
546  if ( !(v->arch.hvm_vcpu.guest_cr[0] & X86_CR0_TS) ) in svm_update_guest_cr()
549  hw_cr0_mask |= X86_CR0_TS; in svm_update_guest_cr()
550  else if ( vmcb_get_cr0(vmcb) & X86_CR0_TS ) in svm_update_guest_cr()
1767 if ( v->arch.hvm_vcpu.guest_cr[0] & X86_CR0_TS ) in svm_fpu_dirty_intercept()
1770 vmcb_set_cr0(n1vmcb, vmcb_get_cr0(n1vmcb) & ~X86_CR0_TS); in svm_fpu_dirty_intercept()
1774 if ( !(v->arch.hvm_vcpu.guest_cr[0] & X86_CR0_TS) ) in svm_fpu_dirty_intercept()
1775 vmcb_set_cr0(vmcb, vmcb_get_cr0(vmcb) & ~X86_CR0_TS); in svm_fpu_dirty_intercept()
36 #define X86_CR0_TS 0x00000008 /* Task Switched (RW) */ macro
315 write_cr0(X86_CR0_TS|read_cr0()); in stts()
734  cr0 &= ~X86_CR0_TS; in handle_xsetbv()
737  if ( cr0 & X86_CR0_TS ) in handle_xsetbv()
771  if ( cr0 & X86_CR0_TS ) in read_bndcfgu()
798  if ( cr0 & X86_CR0_TS ) in xstate_set_init()
1719 if ( curr->arch.pv_vcpu.ctrlreg[0] & X86_CR0_TS ) in do_device_not_available()
1722 curr->arch.pv_vcpu.ctrlreg[0] &= ~X86_CR0_TS; in do_device_not_available()
956  v->arch.pv_vcpu.ctrlreg[0] &= X86_CR0_TS; in arch_set_info_guest()
957  v->arch.pv_vcpu.ctrlreg[0] |= read_cr0() & ~X86_CR0_TS; in arch_set_info_guest()
57 andl $~(X86_CR0_AM | X86_CR0_WP | X86_CR0_TS | X86_CR0_EM), %eax
970  v->arch.hvm_vmx.host_cr0 &= ~X86_CR0_TS; in vmx_fpu_enter()
977  ASSERT(read_cr0() & X86_CR0_TS); in vmx_fpu_leave()
979  if ( !(v->arch.hvm_vmx.host_cr0 & X86_CR0_TS) ) in vmx_fpu_leave()
981  v->arch.hvm_vmx.host_cr0 |= X86_CR0_TS; in vmx_fpu_leave()
991  if ( !(v->arch.hvm_vcpu.guest_cr[0] & X86_CR0_TS) ) in vmx_fpu_leave()
993  v->arch.hvm_vcpu.hw_cr[0] |= X86_CR0_TS; in vmx_fpu_leave()
1581 if ( !(v->arch.hvm_vcpu.guest_cr[0] & X86_CR0_TS) ) in vmx_update_guest_cr()
1584 hw_cr0_mask |= X86_CR0_TS; in vmx_update_guest_cr()
2562 if ( !(curr->arch.hvm_vcpu.guest_cr[0] & X86_CR0_TS) ) in vmx_fpu_dirty_intercept()
2564 curr->arch.hvm_vcpu.hw_cr[0] &= ~X86_CR0_TS; in vmx_fpu_dirty_intercept()
[all …]
2534 if ( cr0_gh_mask & X86_CR0_TS ) in nvmx_n2_vmexit_handler()
2540 set_vvmcs(v, GUEST_CR0, (guest_cr0 & ~X86_CR0_TS)); in nvmx_n2_vmexit_handler()
2549 old_val &= X86_CR0_PE|X86_CR0_MP|X86_CR0_EM|X86_CR0_TS; in nvmx_n2_vmexit_handler()
2551 (X86_CR0_PE|X86_CR0_MP|X86_CR0_EM|X86_CR0_TS); in nvmx_n2_vmexit_handler()
1149 v->arch.hvm_vmx.host_cr0 = read_cr0() | X86_CR0_TS; in construct_vmcs()
926  X86_CR0_TS | X86_CR0_ET | X86_CR0_NE | \
3070 v->arch.hvm_vcpu.guest_cr[0] |= X86_CR0_TS; in hvm_task_switch()
1067 generate_exception_if((cr0 & X86_CR0_TS) && in _get_fpu()
5387 (rc = ops->write_cr(0, dst.val & ~X86_CR0_TS, ctxt)) != X86EMUL_OKAY ) in x86_emulate()
Completed in 54 milliseconds
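
Taken together, the hits show the lazy-FPU pattern that Xen builds around CR0.TS: stts() sets the bit when a vCPU is scheduled without its FPU state loaded, the first FPU/SSE instruction then faults with #NM (handled by do_device_not_available for PV guests and by the svm/vmx *_fpu_dirty_intercept paths for HVM), and the handler clears TS and restores the state. Below is a minimal sketch of that pattern in plain C; it is not Xen code, and all names in it (sketch_vcpu, sketch_context_switch, sketch_do_device_not_available, CR0_TS, set_ts, clear_ts) are invented for illustration.

/*
 * Illustrative sketch only -- not Xen code.  A context switch sets CR0.TS
 * instead of restoring FPU state eagerly; the first FPU/SSE instruction
 * then raises #NM, and the handler clears TS and restores the state once.
 */
#include <stdbool.h>
#include <stdio.h>

#define CR0_TS 0x00000008UL          /* same bit value as X86_CR0_TS */

static unsigned long cr0;            /* stand-in for the real CR0 */

struct sketch_vcpu {
    bool fpu_loaded;                 /* FPU state restored since last switch? */
    unsigned char fpu_state[512];    /* saved FXSAVE-style image */
};

static void set_ts(void)   { cr0 |= CR0_TS;  }   /* cf. stts() in the hits */
static void clear_ts(void) { cr0 &= ~CR0_TS; }   /* cf. the clts instruction */

/* Context switch: arm the #NM trap instead of restoring FPU state now. */
static void sketch_context_switch(struct sketch_vcpu *next)
{
    next->fpu_loaded = false;
    set_ts();                        /* next FPU insn will fault with #NM */
}

/* #NM handler: lazily restore the owner's FPU state, exactly once. */
static void sketch_do_device_not_available(struct sketch_vcpu *curr)
{
    clear_ts();                      /* allow FPU instructions again */
    if ( !curr->fpu_loaded )
    {
        /* real code would fxrstor/xrstor curr->fpu_state here */
        curr->fpu_loaded = true;
    }
}

int main(void)
{
    struct sketch_vcpu v = { 0 };

    sketch_context_switch(&v);            /* TS set: FPU "not available" */
    sketch_do_device_not_available(&v);   /* guest touched the FPU -> restore */
    printf("TS %s, fpu_loaded=%d\n",
           (cr0 & CR0_TS) ? "set" : "clear", v.fpu_loaded);
    return 0;
}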