Lines Matching refs:ctxt

560 if ( hvmemul_ctxt->ctxt.addr_size < 64 ) in hvmemul_map_linear_addr()
576 x86_emul_pagefault(pfinfo.ec, pfinfo.linear, &hvmemul_ctxt->ctxt); in hvmemul_map_linear_addr()
697 reverse = (hvmemul_ctxt->ctxt.regs->eflags & X86_EFLAGS_DF) && (*reps > 1); in hvmemul_linear_to_phys()
715 x86_emul_pagefault(pfec, addr, &hvmemul_ctxt->ctxt); in hvmemul_linear_to_phys()
740 x86_emul_pagefault(pfec, addr & PAGE_MASK, &hvmemul_ctxt->ctxt); in hvmemul_linear_to_phys()
795 if ( (hvmemul_ctxt->ctxt.regs->eflags & X86_EFLAGS_DF) && (*reps > 1) ) in hvmemul_virtual_to_linear()
807 if ( hvmemul_ctxt->ctxt.addr_size != 64 ) in hvmemul_virtual_to_linear()
833 : TRAP_gp_fault, 0, &hvmemul_ctxt->ctxt); in hvmemul_virtual_to_linear()
1064 x86_emul_pagefault(pfinfo.ec, pfinfo.linear, &hvmemul_ctxt->ctxt); in __hvmemul_read()
1086 struct x86_emulate_ctxt *ctxt) in hvmemul_read() argument
1089 container_of(ctxt, struct hvm_emulate_ctxt, ctxt); in hvmemul_read()
1096 container_of(ctxt, struct hvm_emulate_ctxt, ctxt)); in hvmemul_read()
1104 struct x86_emulate_ctxt *ctxt) in hvmemul_insn_fetch() argument
1107 container_of(ctxt, struct hvm_emulate_ctxt, ctxt); in hvmemul_insn_fetch()
1151 struct x86_emulate_ctxt *ctxt) in hvmemul_write() argument
1154 container_of(ctxt, struct hvm_emulate_ctxt, ctxt); in hvmemul_write()
1195 struct x86_emulate_ctxt *ctxt) in hvmemul_write_discard() argument
1207 struct x86_emulate_ctxt *ctxt) in hvmemul_rep_ins_discard() argument
1219 struct x86_emulate_ctxt *ctxt) in hvmemul_rep_movs_discard() argument
1230 struct x86_emulate_ctxt *ctxt) in hvmemul_rep_stos_discard() argument
1241 struct x86_emulate_ctxt *ctxt) in hvmemul_rep_outs_discard() argument
1252 struct x86_emulate_ctxt *ctxt) in hvmemul_cmpxchg_discard() argument
1261 struct x86_emulate_ctxt *ctxt) in hvmemul_read_io_discard() argument
1270 struct x86_emulate_ctxt *ctxt) in hvmemul_write_io_discard() argument
1278 struct x86_emulate_ctxt *ctxt) in hvmemul_write_msr_discard() argument
1284 struct x86_emulate_ctxt *ctxt) in hvmemul_wbinvd_discard() argument
1295 struct x86_emulate_ctxt *ctxt) in hvmemul_cmpxchg() argument
1298 return hvmemul_write(seg, offset, p_new, bytes, ctxt); in hvmemul_cmpxchg()
1303 struct x86_emulate_ctxt *ctxt) in hvmemul_validate() argument
1306 container_of(ctxt, struct hvm_emulate_ctxt, ctxt); in hvmemul_validate()
1308 return !hvmemul_ctxt->validate || hvmemul_ctxt->validate(state, ctxt) in hvmemul_validate()
1318 struct x86_emulate_ctxt *ctxt) in hvmemul_rep_ins() argument
1321 container_of(ctxt, struct hvm_emulate_ctxt, ctxt); in hvmemul_rep_ins()
1347 !!(ctxt->regs->eflags & X86_EFLAGS_DF), gpa); in hvmemul_rep_ins()
1356 struct x86_emulate_ctxt *ctxt) in hvmemul_rep_outs_set_context() argument
1383 struct x86_emulate_ctxt *ctxt) in hvmemul_rep_outs() argument
1386 container_of(ctxt, struct hvm_emulate_ctxt, ctxt); in hvmemul_rep_outs()
1395 bytes_per_rep, reps, ctxt); in hvmemul_rep_outs()
1416 !!(ctxt->regs->eflags & X86_EFLAGS_DF), gpa); in hvmemul_rep_outs()
1426 struct x86_emulate_ctxt *ctxt) in hvmemul_rep_movs() argument
1429 container_of(ctxt, struct hvm_emulate_ctxt, ctxt); in hvmemul_rep_movs()
1435 int rc, df = !!(ctxt->regs->eflags & X86_EFLAGS_DF); in hvmemul_rep_movs()
1581 struct x86_emulate_ctxt *ctxt) in hvmemul_rep_stos() argument
1584 container_of(ctxt, struct hvm_emulate_ctxt, ctxt); in hvmemul_rep_stos()
1589 bool_t df = !!(ctxt->regs->eflags & X86_EFLAGS_DF); in hvmemul_rep_stos()
1698 struct x86_emulate_ctxt *ctxt) in hvmemul_read_segment() argument
1701 container_of(ctxt, struct hvm_emulate_ctxt, ctxt); in hvmemul_read_segment()
1715 struct x86_emulate_ctxt *ctxt) in hvmemul_write_segment() argument
1718 container_of(ctxt, struct hvm_emulate_ctxt, ctxt); in hvmemul_write_segment()
1735 struct x86_emulate_ctxt *ctxt) in hvmemul_read_io() argument
1738 container_of(ctxt, struct hvm_emulate_ctxt, ctxt); in hvmemul_read_io()
1752 struct x86_emulate_ctxt *ctxt) in hvmemul_write_io() argument
1760 struct x86_emulate_ctxt *ctxt) in hvmemul_read_cr() argument
1781 struct x86_emulate_ctxt *ctxt) in hvmemul_write_cr() argument
1811 x86_emul_hw_exception(TRAP_gp_fault, 0, ctxt); in hvmemul_write_cr()
1819 struct x86_emulate_ctxt *ctxt) in hvmemul_read_msr() argument
1824 x86_emul_hw_exception(TRAP_gp_fault, 0, ctxt); in hvmemul_read_msr()
1832 struct x86_emulate_ctxt *ctxt) in hvmemul_write_msr() argument
1837 x86_emul_hw_exception(TRAP_gp_fault, 0, ctxt); in hvmemul_write_msr()
1843 struct x86_emulate_ctxt *ctxt) in hvmemul_wbinvd() argument
1850 struct cpuid_leaf *res, struct x86_emulate_ctxt *ctxt) in hvmemul_cpuid() argument
1860 struct x86_emulate_ctxt *ctxt) in hvmemul_get_fpu() argument
1919 struct x86_emulate_ctxt *ctxt, in hvmemul_put_fpu() argument
2003 struct x86_emulate_ctxt *ctxt) in hvmemul_invlpg() argument
2006 container_of(ctxt, struct hvm_emulate_ctxt, ctxt); in hvmemul_invlpg()
2023 x86_emul_reset_event(ctxt); in hvmemul_invlpg()
2034 struct x86_emulate_ctxt *ctxt) in hvmemul_vmfunc() argument
2040 rc = hvm_funcs.altp2m_vcpu_emulate_vmfunc(ctxt->regs); in hvmemul_vmfunc()
2042 x86_emul_hw_exception(TRAP_invalid_op, X86_EVENT_NO_EC, ctxt); in hvmemul_vmfunc()
2101 const struct cpu_user_regs *regs = hvmemul_ctxt->ctxt.regs; in _hvm_emulate_one()
2112 switch ( rc = x86_emulate(&hvmemul_ctxt->ctxt, ops) ) in _hvm_emulate_one()
2130 if ( hvmemul_ctxt->ctxt.retire.singlestep ) in _hvm_emulate_one()
2136 if ( hvmemul_ctxt->ctxt.retire.mov_ss ) in _hvm_emulate_one()
2142 if ( hvmemul_ctxt->ctxt.retire.sti ) in _hvm_emulate_one()
2148 if ( hvmemul_ctxt->ctxt.retire.unblock_nmi ) in _hvm_emulate_one()
2157 if ( hvmemul_ctxt->ctxt.retire.hlt && in _hvm_emulate_one()
2187 struct hvm_emulate_ctxt ctxt; in hvm_emulate_one_mmio() local
2201 hvm_emulate_init_once(&ctxt, x86_insn_is_mem_write, in hvm_emulate_one_mmio()
2203 ctxt.ctxt.data = &mmio_ro_ctxt; in hvm_emulate_one_mmio()
2204 rc = _hvm_emulate_one(&ctxt, ops); in hvm_emulate_one_mmio()
2209 hvm_dump_emulation_state(XENLOG_G_WARNING, "MMCFG", &ctxt, rc); in hvm_emulate_one_mmio()
2212 hvm_inject_event(&ctxt.ctxt.event); in hvm_emulate_one_mmio()
2215 hvm_emulate_writeback(&ctxt); in hvm_emulate_one_mmio()
2275 hvm_inject_event(&ctx.ctxt.event); in hvm_emulate_one_vm_event()
2296 hvmemul_ctxt->ctxt.regs = regs; in hvm_emulate_init_once()
2297 hvmemul_ctxt->ctxt.vendor = curr->domain->arch.cpuid->x86_vendor; in hvm_emulate_init_once()
2298 hvmemul_ctxt->ctxt.force_writeback = true; in hvm_emulate_init_once()
2310 hvmemul_ctxt->ctxt.lma = hvm_long_mode_active(curr); in hvm_emulate_init_per_insn()
2312 if ( hvmemul_ctxt->ctxt.lma && in hvm_emulate_init_per_insn()
2314 hvmemul_ctxt->ctxt.addr_size = hvmemul_ctxt->ctxt.sp_size = 64; in hvm_emulate_init_per_insn()
2317 hvmemul_ctxt->ctxt.addr_size = in hvm_emulate_init_per_insn()
2319 hvmemul_ctxt->ctxt.sp_size = in hvm_emulate_init_per_insn()
2326 hvmemul_ctxt->insn_buf_eip = hvmemul_ctxt->ctxt.regs->rip; in hvm_emulate_init_per_insn()
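Nearly every hook listed above recovers its struct hvm_emulate_ctxt from the generic struct x86_emulate_ctxt it is handed, via container_of(ctxt, struct hvm_emulate_ctxt, ctxt). The standalone sketch below illustrates that embedding-plus-container_of idiom; the struct members, the local container_of macro and the demo hook are simplified stand-ins for illustration, not the real Xen definitions.

/*
 * Minimal standalone sketch of the embedded-context idiom seen above:
 * struct hvm_emulate_ctxt embeds a struct x86_emulate_ctxt named "ctxt",
 * and each emulator hook recovers the outer structure with container_of().
 * Types and fields here are simplified stand-ins, not the Xen definitions.
 */
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

struct x86_emulate_ctxt {
    unsigned int addr_size;        /* stand-in for the fields referenced above */
};

struct hvm_emulate_ctxt {
    struct x86_emulate_ctxt ctxt;  /* generic context embedded by value */
    unsigned long insn_buf_eip;    /* illustrative HVM-specific state */
};

/* Illustrative hook: receives only the generic context, like hvmemul_read(). */
static int demo_hook(struct x86_emulate_ctxt *ctxt)
{
    struct hvm_emulate_ctxt *hvmemul_ctxt =
        container_of(ctxt, struct hvm_emulate_ctxt, ctxt);

    /* Same shape as the hit at source line 560 above. */
    return hvmemul_ctxt->ctxt.addr_size < 64;
}

int main(void)
{
    struct hvm_emulate_ctxt hctxt = { .ctxt = { .addr_size = 32 } };

    /* The emulator core only ever passes &hctxt.ctxt to the hooks. */
    printf("non-64-bit address size: %d\n", demo_hook(&hctxt.ctxt));
    return 0;
}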