Lines matching refs: ghcb — cross-reference hits for the ghcb identifier in the x86 SEV-ES guest code. Each entry gives the line number in the source file, the matching source line, and the containing function; hits in declarations are tagged argument, local, or member.

37 static struct ghcb boot_ghcb_page __bss_decrypted __aligned(PAGE_SIZE);
43 static struct ghcb __initdata *boot_ghcb;
47 struct ghcb ghcb_page;
56 struct ghcb backup_ghcb;
83 struct ghcb *ghcb; member
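The declarations above belong to the bookkeeping that the get/put helpers below operate on: a regular per-CPU GHCB page, a backup copy for nested #VC handling, and a small state token recording whether a backup was taken. A minimal sketch of the assumed layout; the container name sev_es_runtime_data and the *_active flags are not visible in the hits and are assumptions, while struct ghcb itself comes from the SVM/GHCB definitions:

    /* Assumed per-CPU container for the GHCB pages referenced on lines 47 and 56. */
    struct sev_es_runtime_data {
        struct ghcb ghcb_page;       /* line 47: the normal per-CPU GHCB            */
        struct ghcb backup_ghcb;     /* line 56: spare copy used when #VC nests     */
        bool ghcb_active;            /* assumed: ghcb_page currently handed out     */
        bool backup_ghcb_active;     /* assumed: backup_ghcb currently holds a copy */
    };

    /* Line 83: token passed between __sev_get_ghcb() and __sev_put_ghcb(). */
    struct ghcb_state {
        struct ghcb *ghcb;           /* NULL, or the backup to restore on put       */
    };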
169 static noinstr struct ghcb *__sev_get_ghcb(struct ghcb_state *state) in __sev_get_ghcb()
172 struct ghcb *ghcb; in __sev_get_ghcb() local
177 ghcb = &data->ghcb_page; in __sev_get_ghcb()
200 state->ghcb = &data->backup_ghcb; in __sev_get_ghcb()
203 *state->ghcb = *ghcb; in __sev_get_ghcb()
205 state->ghcb = NULL; in __sev_get_ghcb()
209 return ghcb; in __sev_get_ghcb()
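Lines 177-209 imply how nesting is handled when a #VC exception arrives while the per-CPU GHCB is already in use (for example from NMI context): the live contents are copied into the backup page and state->ghcb remembers where to restore them from. A hedged reconstruction of that core; the *_active flags and the unlikely() annotation are assumptions:

    ghcb = &data->ghcb_page;                        /* line 177 */

    if (unlikely(data->ghcb_active)) {              /* assumed flag */
        /* GHCB already in use (nested #VC) - save its contents first. */
        data->backup_ghcb_active = true;            /* assumed flag */
        state->ghcb = &data->backup_ghcb;           /* line 200 */
        *state->ghcb = *ghcb;                       /* line 203: backup GHCB content */
    } else {
        state->ghcb = NULL;                         /* line 205: nothing to restore later */
        data->ghcb_active = true;                   /* assumed */
    }

    return ghcb;                                    /* line 209: same per-CPU page either way */

The nested user is handed the same page; only its contents are saved and later restored, which keeps a single GHCB per CPU.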
450 static enum es_result vc_slow_virt_to_phys(struct ghcb *ghcb, struct es_em_ctxt *ctxt, in vc_slow_virt_to_phys() argument
491 struct ghcb *ghcb; in __sev_put_ghcb() local
496 ghcb = &data->ghcb_page; in __sev_put_ghcb()
498 if (state->ghcb) { in __sev_put_ghcb()
500 *ghcb = *state->ghcb; in __sev_put_ghcb()
502 state->ghcb = NULL; in __sev_put_ghcb()
508 vc_ghcb_invalidate(ghcb); in __sev_put_ghcb()
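The put side (lines 491-508) mirrors this: if a backup exists it is copied back and the state pointer cleared, otherwise the GHCB is invalidated so stale fields cannot appear valid on the next use. A sketch, with the *_active bookkeeping again assumed:

    ghcb = &data->ghcb_page;                        /* line 496 */

    if (state->ghcb) {                              /* line 498: this was a nested use */
        /* Restore the outer user's GHCB contents from the backup. */
        *ghcb = *state->ghcb;                       /* line 500 */
        data->backup_ghcb_active = false;           /* assumed */
        state->ghcb = NULL;                         /* line 502 */
    } else {
        /* Outermost user done - wipe the valid-bitmap for the next exception. */
        vc_ghcb_invalidate(ghcb);                   /* line 508 */
        data->ghcb_active = false;                  /* assumed */
    }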
516 struct ghcb *ghcb; in __sev_es_nmi_complete() local
518 ghcb = __sev_get_ghcb(&state); in __sev_es_nmi_complete()
520 vc_ghcb_invalidate(ghcb); in __sev_es_nmi_complete()
521 ghcb_set_sw_exit_code(ghcb, SVM_VMGEXIT_NMI_COMPLETE); in __sev_es_nmi_complete()
522 ghcb_set_sw_exit_info_1(ghcb, 0); in __sev_es_nmi_complete()
523 ghcb_set_sw_exit_info_2(ghcb, 0); in __sev_es_nmi_complete()
525 sev_es_wr_ghcb_msr(__pa_nodebug(ghcb)); in __sev_es_nmi_complete()
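Lines 516-525 contain the canonical GHCB call sequence almost in full; only the statements without a ghcb reference are missing from the hits. Filling those in as assumptions (the VMGEXIT() that actually traps to the hypervisor, the closing __sev_put_ghcb(), and the empty signature), the function plausibly reads:

    void __sev_es_nmi_complete(void)
    {
        struct ghcb_state state;
        struct ghcb *ghcb;                                      /* line 516 */

        ghcb = __sev_get_ghcb(&state);                          /* line 518 */

        vc_ghcb_invalidate(ghcb);                               /* line 520: clear the valid-bitmap */
        ghcb_set_sw_exit_code(ghcb, SVM_VMGEXIT_NMI_COMPLETE);  /* line 521 */
        ghcb_set_sw_exit_info_1(ghcb, 0);                       /* line 522 */
        ghcb_set_sw_exit_info_2(ghcb, 0);                       /* line 523 */

        sev_es_wr_ghcb_msr(__pa_nodebug(ghcb));                 /* line 525: tell the HV where the GHCB is */
        VMGEXIT();                                              /* assumed: world switch to the hypervisor */

        __sev_put_ghcb(&state);                                 /* assumed: restore/release the GHCB */
    }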
535 struct ghcb *ghcb; in get_jump_table_addr() local
540 ghcb = __sev_get_ghcb(&state); in get_jump_table_addr()
542 vc_ghcb_invalidate(ghcb); in get_jump_table_addr()
543 ghcb_set_sw_exit_code(ghcb, SVM_VMGEXIT_AP_JUMP_TABLE); in get_jump_table_addr()
544 ghcb_set_sw_exit_info_1(ghcb, SVM_VMGEXIT_GET_AP_JUMP_TABLE); in get_jump_table_addr()
545 ghcb_set_sw_exit_info_2(ghcb, 0); in get_jump_table_addr()
547 sev_es_wr_ghcb_msr(__pa(ghcb)); in get_jump_table_addr()
550 if (ghcb_sw_exit_info_1_is_valid(ghcb) && in get_jump_table_addr()
551 ghcb_sw_exit_info_2_is_valid(ghcb)) in get_jump_table_addr()
552 ret = ghcb->save.sw_exit_info_2; in get_jump_table_addr()
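get_jump_table_addr() (lines 535-552) follows the same setup but also consumes a reply: after the exit, the hypervisor's answer is only used if it marked the corresponding GHCB fields valid. The check-then-read tail, with the VMGEXIT assumed:

    /* Same setup as lines 540-545, then: */
    sev_es_wr_ghcb_msr(__pa(ghcb));                 /* line 547 */
    VMGEXIT();                                      /* assumed */

    /* Trust the reply only if the hypervisor set both valid bits. */
    if (ghcb_sw_exit_info_1_is_valid(ghcb) &&       /* line 550 */
        ghcb_sw_exit_info_2_is_valid(ghcb))         /* line 551 */
            ret = ghcb->save.sw_exit_info_2;        /* line 552: AP jump table address */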
626 static enum es_result vc_handle_msr(struct ghcb *ghcb, struct es_em_ctxt *ctxt) in vc_handle_msr() argument
635 ghcb_set_rcx(ghcb, regs->cx); in vc_handle_msr()
637 ghcb_set_rax(ghcb, regs->ax); in vc_handle_msr()
638 ghcb_set_rdx(ghcb, regs->dx); in vc_handle_msr()
641 ret = sev_es_ghcb_hv_call(ghcb, true, ctxt, SVM_EXIT_MSR, in vc_handle_msr()
645 regs->ax = ghcb->save.rax; in vc_handle_msr()
646 regs->dx = ghcb->save.rdx; in vc_handle_msr()
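The MSR handler (lines 626-646) marshals guest registers into the GHCB, lets the hypervisor emulate RDMSR/WRMSR, and copies the result back. A hedged reconstruction; the WRMSR-vs-RDMSR test via the decoded opcode and the exit_info_1 encoding (1 = write, 0 = read) are inferred, since line 641's call continues on a line without a ghcb hit:

    static enum es_result vc_handle_msr(struct ghcb *ghcb, struct es_em_ctxt *ctxt)
    {
        struct pt_regs *regs = ctxt->regs;
        enum es_result ret;
        u64 exit_info_1;

        /* Assumed: opcode 0f 30 is WRMSR, 0f 32 is RDMSR. */
        exit_info_1 = (ctxt->insn.opcode.bytes[1] == 0x30) ? 1 : 0;

        ghcb_set_rcx(ghcb, regs->cx);               /* line 635: MSR index              */
        if (exit_info_1) {
            ghcb_set_rax(ghcb, regs->ax);           /* line 637: value to write ...     */
            ghcb_set_rdx(ghcb, regs->dx);           /* line 638: ... as edx:eax         */
        }

        ret = sev_es_ghcb_hv_call(ghcb, true, ctxt, SVM_EXIT_MSR,
                                  exit_info_1, 0);  /* line 641 (arguments assumed)     */

        if (ret == ES_OK && !exit_info_1) {
            regs->ax = ghcb->save.rax;              /* line 645: RDMSR result, low half  */
            regs->dx = ghcb->save.rdx;              /* line 646: RDMSR result, high half */
        }

        return ret;
    }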
678 struct ghcb *ghcb; in sev_es_ap_hlt_loop() local
680 ghcb = __sev_get_ghcb(&state); in sev_es_ap_hlt_loop()
683 vc_ghcb_invalidate(ghcb); in sev_es_ap_hlt_loop()
684 ghcb_set_sw_exit_code(ghcb, SVM_VMGEXIT_AP_HLT_LOOP); in sev_es_ap_hlt_loop()
685 ghcb_set_sw_exit_info_1(ghcb, 0); in sev_es_ap_hlt_loop()
686 ghcb_set_sw_exit_info_2(ghcb, 0); in sev_es_ap_hlt_loop()
688 sev_es_wr_ghcb_msr(__pa(ghcb)); in sev_es_ap_hlt_loop()
692 if (ghcb_sw_exit_info_2_is_valid(ghcb) && in sev_es_ap_hlt_loop()
693 ghcb->save.sw_exit_info_2) in sev_es_ap_hlt_loop()
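sev_es_ap_hlt_loop() (lines 678-693) parks an AP inside the hypervisor until a wakeup is signalled back through sw_exit_info_2; the retry check on lines 692-693 implies a surrounding loop, which together with the VMGEXIT is an assumption here:

    ghcb = __sev_get_ghcb(&state);                              /* line 680 */

    while (true) {                                              /* assumed loop */
        vc_ghcb_invalidate(ghcb);                               /* line 683 */
        ghcb_set_sw_exit_code(ghcb, SVM_VMGEXIT_AP_HLT_LOOP);   /* line 684 */
        ghcb_set_sw_exit_info_1(ghcb, 0);                       /* line 685 */
        ghcb_set_sw_exit_info_2(ghcb, 0);                       /* line 686 */

        sev_es_wr_ghcb_msr(__pa(ghcb));                         /* line 688 */
        VMGEXIT();                                              /* assumed */

        /* Wakeup signal delivered by the hypervisor? */
        if (ghcb_sw_exit_info_2_is_valid(ghcb) &&               /* line 692 */
            ghcb->save.sw_exit_info_2)                          /* line 693 */
                break;
    }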
831 static enum es_result vc_do_mmio(struct ghcb *ghcb, struct es_em_ctxt *ctxt, in vc_do_mmio() argument
835 unsigned long ghcb_pa = __pa(ghcb); in vc_do_mmio()
846 res = vc_slow_virt_to_phys(ghcb, ctxt, (unsigned long)ref, &paddr); in vc_do_mmio()
858 ghcb_set_sw_scratch(ghcb, ghcb_pa + offsetof(struct ghcb, shared_buffer)); in vc_do_mmio()
860 return sev_es_ghcb_hv_call(ghcb, true, ctxt, exit_code, exit_info_1, exit_info_2); in vc_do_mmio()
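vc_do_mmio() (lines 831-860) is the shared MMIO back end: translate the faulting virtual address, point sw_scratch at the GHCB's own shared_buffer so the data crosses the encryption boundary through shared memory, then let sev_es_ghcb_hv_call() issue the exit. A sketch of that middle part; ref, read, and bytes come from the truncated parameter list and instruction decode, and the exit code/info encoding is an assumption:

    unsigned long ghcb_pa = __pa(ghcb);             /* line 835 */
    u64 exit_code, exit_info_1, exit_info_2;
    enum es_result res;
    phys_addr_t paddr;

    /* Assumed: dedicated VMGEXIT exit codes for MMIO reads and writes. */
    exit_code = read ? SVM_VMGEXIT_MMIO_READ : SVM_VMGEXIT_MMIO_WRITE;

    res = vc_slow_virt_to_phys(ghcb, ctxt, (unsigned long)ref, &paddr);     /* line 846 */
    if (res != ES_OK)
        return res;

    exit_info_1 = paddr;        /* assumed: target physical address */
    exit_info_2 = bytes;        /* assumed: access size, at most 8  */

    /* The data itself travels through the GHCB's shared (unencrypted) buffer. */
    ghcb_set_sw_scratch(ghcb, ghcb_pa + offsetof(struct ghcb, shared_buffer));       /* line 858 */

    return sev_es_ghcb_hv_call(ghcb, true, ctxt, exit_code, exit_info_1, exit_info_2);       /* line 860 */

The memcpy() hits in the callers below (e.g. lines 892, 1021, 1058) move the data between guest registers and that shared_buffer on either side of the exit.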
863 static enum es_result vc_handle_mmio_twobyte_ops(struct ghcb *ghcb, in vc_handle_mmio_twobyte_ops() argument
881 ret = vc_do_mmio(ghcb, ctxt, bytes, true); in vc_handle_mmio_twobyte_ops()
892 memcpy(reg_data, ghcb->shared_buffer, bytes); in vc_handle_mmio_twobyte_ops()
903 ret = vc_do_mmio(ghcb, ctxt, bytes, true); in vc_handle_mmio_twobyte_ops()
913 u8 *val = (u8 *)ghcb->shared_buffer; in vc_handle_mmio_twobyte_ops()
917 u16 *val = (u16 *)ghcb->shared_buffer; in vc_handle_mmio_twobyte_ops()
923 memcpy(reg_data, ghcb->shared_buffer, bytes); in vc_handle_mmio_twobyte_ops()
1000 static enum es_result vc_handle_mmio(struct ghcb *ghcb, in vc_handle_mmio() argument
1021 memcpy(ghcb->shared_buffer, reg_data, bytes); in vc_handle_mmio()
1023 ret = vc_do_mmio(ghcb, ctxt, bytes, false); in vc_handle_mmio()
1033 memcpy(ghcb->shared_buffer, insn->immediate1.bytes, bytes); in vc_handle_mmio()
1035 ret = vc_do_mmio(ghcb, ctxt, bytes, false); in vc_handle_mmio()
1046 ret = vc_do_mmio(ghcb, ctxt, bytes, true); in vc_handle_mmio()
1058 memcpy(reg_data, ghcb->shared_buffer, bytes); in vc_handle_mmio()
1073 ret = vc_handle_mmio_twobyte_ops(ghcb, ctxt); in vc_handle_mmio()
1082 static enum es_result vc_handle_dr7_write(struct ghcb *ghcb, in vc_handle_dr7_write() argument
1109 ghcb_set_rax(ghcb, val); in vc_handle_dr7_write()
1110 ret = sev_es_ghcb_hv_call(ghcb, true, ctxt, SVM_EXIT_WRITE_DR7, 0, 0); in vc_handle_dr7_write()
1120 static enum es_result vc_handle_dr7_read(struct ghcb *ghcb, in vc_handle_dr7_read() argument
1137 static enum es_result vc_handle_wbinvd(struct ghcb *ghcb, in vc_handle_wbinvd() argument
1140 return sev_es_ghcb_hv_call(ghcb, true, ctxt, SVM_EXIT_WBINVD, 0, 0); in vc_handle_wbinvd()
1143 static enum es_result vc_handle_rdpmc(struct ghcb *ghcb, struct es_em_ctxt *ctxt) in vc_handle_rdpmc() argument
1147 ghcb_set_rcx(ghcb, ctxt->regs->cx); in vc_handle_rdpmc()
1149 ret = sev_es_ghcb_hv_call(ghcb, true, ctxt, SVM_EXIT_RDPMC, 0, 0); in vc_handle_rdpmc()
1153 if (!(ghcb_rax_is_valid(ghcb) && ghcb_rdx_is_valid(ghcb))) in vc_handle_rdpmc()
1156 ctxt->regs->ax = ghcb->save.rax; in vc_handle_rdpmc()
1157 ctxt->regs->dx = ghcb->save.rdx; in vc_handle_rdpmc()
1162 static enum es_result vc_handle_monitor(struct ghcb *ghcb, in vc_handle_monitor() argument
1172 static enum es_result vc_handle_mwait(struct ghcb *ghcb, in vc_handle_mwait() argument
1179 static enum es_result vc_handle_vmmcall(struct ghcb *ghcb, in vc_handle_vmmcall() argument
1184 ghcb_set_rax(ghcb, ctxt->regs->ax); in vc_handle_vmmcall()
1185 ghcb_set_cpl(ghcb, user_mode(ctxt->regs) ? 3 : 0); in vc_handle_vmmcall()
1188 x86_platform.hyper.sev_es_hcall_prepare(ghcb, ctxt->regs); in vc_handle_vmmcall()
1190 ret = sev_es_ghcb_hv_call(ghcb, true, ctxt, SVM_EXIT_VMMCALL, 0, 0); in vc_handle_vmmcall()
1194 if (!ghcb_rax_is_valid(ghcb)) in vc_handle_vmmcall()
1197 ctxt->regs->ax = ghcb->save.rax; in vc_handle_vmmcall()
1205 !x86_platform.hyper.sev_es_hcall_finish(ghcb, ctxt->regs)) in vc_handle_vmmcall()
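The VMMCALL handler (lines 1179-1205) adds two details not seen in the other exits: the guest's CPL is passed along so the hypervisor can apply its usual privilege checks, and hypervisor-specific hooks may prepare extra register state and validate the result. A trimmed sketch; the NULL checks on the hooks and the error result are assumptions:

    ghcb_set_rax(ghcb, ctxt->regs->ax);                         /* line 1184 */
    ghcb_set_cpl(ghcb, user_mode(ctxt->regs) ? 3 : 0);          /* line 1185: let the HV enforce CPL */

    if (x86_platform.hyper.sev_es_hcall_prepare)                /* assumed NULL check */
        x86_platform.hyper.sev_es_hcall_prepare(ghcb, ctxt->regs);      /* line 1188 */

    ret = sev_es_ghcb_hv_call(ghcb, true, ctxt, SVM_EXIT_VMMCALL, 0, 0);        /* line 1190 */
    if (ret != ES_OK)
        return ret;

    if (!ghcb_rax_is_valid(ghcb))                               /* line 1194 */
        return ES_VMM_ERROR;                                    /* assumed error result */

    ctxt->regs->ax = ghcb->save.rax;                            /* line 1197 */

    /* Line 1205: a present sev_es_hcall_finish() hook gets a chance to veto the result. */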
1211 static enum es_result vc_handle_trap_ac(struct ghcb *ghcb, in vc_handle_trap_ac() argument
1225 struct ghcb *ghcb, in vc_handle_exitcode() argument
1232 result = vc_handle_dr7_read(ghcb, ctxt); in vc_handle_exitcode()
1235 result = vc_handle_dr7_write(ghcb, ctxt); in vc_handle_exitcode()
1238 result = vc_handle_trap_ac(ghcb, ctxt); in vc_handle_exitcode()
1242 result = vc_handle_rdtsc(ghcb, ctxt, exit_code); in vc_handle_exitcode()
1245 result = vc_handle_rdpmc(ghcb, ctxt); in vc_handle_exitcode()
1252 result = vc_handle_cpuid(ghcb, ctxt); in vc_handle_exitcode()
1255 result = vc_handle_ioio(ghcb, ctxt); in vc_handle_exitcode()
1258 result = vc_handle_msr(ghcb, ctxt); in vc_handle_exitcode()
1261 result = vc_handle_vmmcall(ghcb, ctxt); in vc_handle_exitcode()
1264 result = vc_handle_wbinvd(ghcb, ctxt); in vc_handle_exitcode()
1267 result = vc_handle_monitor(ghcb, ctxt); in vc_handle_exitcode()
1270 result = vc_handle_mwait(ghcb, ctxt); in vc_handle_exitcode()
1273 result = vc_handle_mmio(ghcb, ctxt); in vc_handle_exitcode()
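vc_handle_exitcode() (lines 1225-1273) dispatches on the #VC error code, which is an SVM exit code. The hits show only the handler calls, not the case labels, so the mapping below is an assumption inferred from the handler names (the notable one being SVM_EXIT_NPF for MMIO); several arms are elided for brevity:

    switch (exit_code) {                                    /* case labels assumed */
    case SVM_EXIT_READ_DR7:
        result = vc_handle_dr7_read(ghcb, ctxt);            /* line 1232 */
        break;
    case SVM_EXIT_WRITE_DR7:
        result = vc_handle_dr7_write(ghcb, ctxt);           /* line 1235 */
        break;
    case SVM_EXIT_RDTSC:
    case SVM_EXIT_RDTSCP:
        result = vc_handle_rdtsc(ghcb, ctxt, exit_code);    /* line 1242 */
        break;
    case SVM_EXIT_CPUID:
        result = vc_handle_cpuid(ghcb, ctxt);               /* line 1252 */
        break;
    case SVM_EXIT_IOIO:
        result = vc_handle_ioio(ghcb, ctxt);                /* line 1255 */
        break;
    case SVM_EXIT_MSR:
        result = vc_handle_msr(ghcb, ctxt);                 /* line 1258 */
        break;
    /* AC trap, RDPMC, VMMCALL, WBINVD, MONITOR and MWAIT dispatched the same way. */
    case SVM_EXIT_NPF:
        /* Assumed: MMIO accesses arrive with the NPF exit code. */
        result = vc_handle_mmio(ghcb, ctxt);                /* line 1273 */
        break;
    default:
        result = ES_UNSUPPORTED;                            /* assumed fallback */
        break;
    }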
1337 struct ghcb *ghcb; in vc_raw_handle_exception() local
1340 ghcb = __sev_get_ghcb(&state); in vc_raw_handle_exception()
1342 vc_ghcb_invalidate(ghcb); in vc_raw_handle_exception()
1346 result = vc_handle_exitcode(&ctxt, ghcb, error_code); in vc_raw_handle_exception()
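Finally, lines 1337-1346 outline the #VC handler core itself: take the per-CPU GHCB, invalidate its valid-bitmap so only freshly written fields count, and pass the decoded context to the dispatcher. With the instruction-decode step and the closing put filled in as assumptions, the flow is roughly:

    struct ghcb_state state;
    struct es_em_ctxt ctxt;
    enum es_result result;
    struct ghcb *ghcb;                                          /* line 1337 */

    ghcb = __sev_get_ghcb(&state);                              /* line 1340 */

    vc_ghcb_invalidate(ghcb);                                   /* line 1342 */
    result = vc_init_em_ctxt(&ctxt, regs, error_code);          /* assumed: decode the faulting instruction */

    if (result == ES_OK)
        result = vc_handle_exitcode(&ctxt, ghcb, error_code);   /* line 1346 */

    __sev_put_ghcb(&state);                                     /* assumed: restore/release the GHCB */

Here regs and error_code are assumed to be the parameters of vc_raw_handle_exception(); what happens with the result afterwards falls outside the lines matched by this query.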