Lines Matching refs:tss
2908 struct tss32 tss; in hvm_task_switch() local
2955 if ( tr.limit < (sizeof(tss)-1) ) in hvm_task_switch()
2962 &tss, prev_tr.base, sizeof(tss), PFEC_page_present, &pfinfo); in hvm_task_switch()
2972 tss.eip = regs->eip; in hvm_task_switch()
2973 tss.eflags = eflags; in hvm_task_switch()
2974 tss.eax = regs->eax; in hvm_task_switch()
2975 tss.ecx = regs->ecx; in hvm_task_switch()
2976 tss.edx = regs->edx; in hvm_task_switch()
2977 tss.ebx = regs->ebx; in hvm_task_switch()
2978 tss.esp = regs->esp; in hvm_task_switch()
2979 tss.ebp = regs->ebp; in hvm_task_switch()
2980 tss.esi = regs->esi; in hvm_task_switch()
2981 tss.edi = regs->edi; in hvm_task_switch()
2984 tss.es = segr.sel; in hvm_task_switch()
2986 tss.cs = segr.sel; in hvm_task_switch()
2988 tss.ss = segr.sel; in hvm_task_switch()
2990 tss.ds = segr.sel; in hvm_task_switch()
2992 tss.fs = segr.sel; in hvm_task_switch()
2994 tss.gs = segr.sel; in hvm_task_switch()
2996 tss.ldt = segr.sel; in hvm_task_switch()
2998 rc = hvm_copy_to_guest_linear(prev_tr.base + offsetof(typeof(tss), eip), in hvm_task_switch()
2999 &tss.eip, in hvm_task_switch()
3000 offsetof(typeof(tss), trace) - in hvm_task_switch()
3001 offsetof(typeof(tss), eip), in hvm_task_switch()
3009 &tss, tr.base, sizeof(tss), PFEC_page_present, &pfinfo); in hvm_task_switch()
3019 new_cpl = tss.eflags & X86_EFLAGS_VM ? 3 : tss.cs & 3; in hvm_task_switch()
3021 if ( hvm_load_segment_selector(x86_seg_ldtr, tss.ldt, new_cpl, 0) ) in hvm_task_switch()
3024 rc = hvm_set_cr3(tss.cr3, 1); in hvm_task_switch()
3030 regs->rip = tss.eip; in hvm_task_switch()
3031 regs->rflags = tss.eflags | X86_EFLAGS_MBS; in hvm_task_switch()
3032 regs->rax = tss.eax; in hvm_task_switch()
3033 regs->rcx = tss.ecx; in hvm_task_switch()
3034 regs->rdx = tss.edx; in hvm_task_switch()
3035 regs->rbx = tss.ebx; in hvm_task_switch()
3036 regs->rsp = tss.esp; in hvm_task_switch()
3037 regs->rbp = tss.ebp; in hvm_task_switch()
3038 regs->rsi = tss.esi; in hvm_task_switch()
3039 regs->rdi = tss.edi; in hvm_task_switch()
3042 if ( hvm_load_segment_selector(x86_seg_es, tss.es, new_cpl, tss.eflags) || in hvm_task_switch()
3043 hvm_load_segment_selector(x86_seg_cs, tss.cs, new_cpl, tss.eflags) || in hvm_task_switch()
3044 hvm_load_segment_selector(x86_seg_ss, tss.ss, new_cpl, tss.eflags) || in hvm_task_switch()
3045 hvm_load_segment_selector(x86_seg_ds, tss.ds, new_cpl, tss.eflags) || in hvm_task_switch()
3046 hvm_load_segment_selector(x86_seg_fs, tss.fs, new_cpl, tss.eflags) || in hvm_task_switch()
3047 hvm_load_segment_selector(x86_seg_gs, tss.gs, new_cpl, tss.eflags) ) in hvm_task_switch()
3053 tss.back_link = prev_tr.sel; in hvm_task_switch()
3055 rc = hvm_copy_to_guest_linear(tr.base + offsetof(typeof(tss), back_link), in hvm_task_switch()
3056 &tss.back_link, sizeof(tss.back_link), 0, in hvm_task_switch()
3109 if ( (tss.trace & 1) && !exn_raised ) in hvm_task_switch()
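
For context: the matches above read and write a struct tss32 image of the guest's 32-bit hardware TSS inside hvm_task_switch() (in Xen's HVM code, presumably xen/arch/x86/hvm/hvm.c). Below is a minimal, self-contained sketch of that layout. The field names follow the references above; the explicit padding and offsets are an assumption based on the architected 104-byte 32-bit TSS, not copied from the Xen headers, so treat it as illustrative only.

#include <stdint.h>
#include <stddef.h>

/*
 * Sketch of the 32-bit hardware TSS image held in the local `tss`
 * variable.  Offsets in the comments follow the architected layout;
 * the real Xen definition of struct tss32 may differ in detail.
 */
struct tss32 {
    uint16_t back_link, _pad0;   /* 0x00: previous task link */
    uint32_t esp0;               /* 0x04 */
    uint16_t ss0,  _pad1;        /* 0x08 */
    uint32_t esp1;               /* 0x0c */
    uint16_t ss1,  _pad2;        /* 0x10 */
    uint32_t esp2;               /* 0x14 */
    uint16_t ss2,  _pad3;        /* 0x18 */
    uint32_t cr3;                /* 0x1c */
    uint32_t eip;                /* 0x20: start of the write-back span */
    uint32_t eflags;             /* 0x24 */
    uint32_t eax, ecx, edx, ebx; /* 0x28 - 0x34 */
    uint32_t esp, ebp, esi, edi; /* 0x38 - 0x44 */
    uint16_t es,   _pad4;        /* 0x48 */
    uint16_t cs,   _pad5;        /* 0x4c */
    uint16_t ss,   _pad6;        /* 0x50 */
    uint16_t ds,   _pad7;        /* 0x54 */
    uint16_t fs,   _pad8;        /* 0x58 */
    uint16_t gs,   _pad9;        /* 0x5c */
    uint16_t ldt,  _pad10;       /* 0x60: end of the write-back span */
    uint16_t trace;              /* 0x64: bit 0 is the T (debug trap) flag */
    uint16_t iomap;              /* 0x66: I/O permission bitmap base */
};

int main(void)
{
    /*
     * The partial copy at lines 2998-3001 only writes back the fields
     * the task switch actually modified: everything from eip up to,
     * but not including, trace (EIP, EFLAGS, GPRs, segment selectors,
     * LDT selector).
     */
    size_t span = offsetof(struct tss32, trace) -
                  offsetof(struct tss32, eip);   /* 0x44 == 68 bytes */
    (void)span;
    return 0;
}

Under this layout, the full-structure reads at lines 2962 and 3009 cover all 104 bytes of the outgoing and incoming TSS, while the back-link write at lines 3055-3056 touches only the first two bytes of the new TSS; the trace check at line 3109 tests the hardware T flag of the incoming task.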