Lines Matching refs:static_call
836 if (static_call(kvm_x86_get_cpl)(vcpu) <= required_cpl) in kvm_require_cpl()
957 static_call(kvm_x86_get_cs_db_l_bits)(vcpu, &cs_db, &cs_l); in kvm_set_cr0()
971 static_call(kvm_x86_set_cr0)(vcpu, cr0); in kvm_set_cr0()
1093 if (static_call(kvm_x86_get_cpl)(vcpu) != 0 || in kvm_emulate_xsetbv()
1118 static_call(kvm_x86_is_valid_cr4)(vcpu, cr4); in kvm_is_valid_cr4()
1189 static_call(kvm_x86_set_cr4)(vcpu, cr4); in kvm_set_cr4()
1330 static_call(kvm_x86_set_dr7)(vcpu, dr7); in kvm_update_dr7()
1667 return static_call(kvm_x86_get_msr_feature)(msr); in kvm_get_msr_feature()
1746 r = static_call(kvm_x86_set_efer)(vcpu, efer); in set_efer()
1872 return static_call(kvm_x86_set_msr)(vcpu, &msr); in __kvm_set_msr()
1914 ret = static_call(kvm_x86_get_msr)(vcpu, &msr); in __kvm_get_msr()
1982 return static_call(kvm_x86_complete_emulated_msr)(vcpu, vcpu->run->msr.error); in complete_fast_msr_access()
2046 return static_call(kvm_x86_complete_emulated_msr)(vcpu, r); in kvm_emulate_rdmsr()
2071 return static_call(kvm_x86_complete_emulated_msr)(vcpu, r); in kvm_emulate_wrmsr()
2581 static_call(kvm_x86_get_l2_tsc_offset)(vcpu), in kvm_vcpu_write_tsc_offset()
2582 static_call(kvm_x86_get_l2_tsc_multiplier)(vcpu)); in kvm_vcpu_write_tsc_offset()
2586 static_call(kvm_x86_write_tsc_offset)(vcpu, vcpu->arch.tsc_offset); in kvm_vcpu_write_tsc_offset()
2597 static_call(kvm_x86_get_l2_tsc_multiplier)(vcpu)); in kvm_vcpu_write_tsc_multiplier()
2602 static_call(kvm_x86_write_tsc_multiplier)( in kvm_vcpu_write_tsc_multiplier()
3421 static_call(kvm_x86_flush_tlb_all)(vcpu); in kvm_vcpu_flush_tlb_all()
3442 static_call(kvm_x86_flush_tlb_guest)(vcpu); in kvm_vcpu_flush_tlb_guest()
3455 static_call(kvm_x86_flush_tlb_current)(vcpu); in kvm_vcpu_flush_tlb_current()
4483 r = static_call(kvm_x86_has_emulated_msr)(kvm, MSR_IA32_SMBASE); in kvm_vm_ioctl_check_extension()
4725 if (static_call(kvm_x86_has_wbinvd_exit)()) in kvm_arch_vcpu_load()
4732 static_call(kvm_x86_vcpu_load)(vcpu, cpu); in kvm_arch_vcpu_load()
4827 vcpu->arch.preempted_in_kernel = !static_call(kvm_x86_get_cpl)(vcpu); in kvm_arch_vcpu_put()
4841 static_call(kvm_x86_vcpu_put)(vcpu); in kvm_arch_vcpu_put()
4965 static_call(kvm_x86_setup_mce)(vcpu); in kvm_vcpu_ioctl_x86_setup_mce()
5125 events->interrupt.shadow = static_call(kvm_x86_get_interrupt_shadow)(vcpu); in kvm_vcpu_ioctl_x86_get_vcpu_events()
5129 events->nmi.masked = static_call(kvm_x86_get_nmi_mask)(vcpu); in kvm_vcpu_ioctl_x86_get_vcpu_events()
5211 static_call(kvm_x86_set_interrupt_shadow)(vcpu, in kvm_vcpu_ioctl_x86_set_vcpu_events()
5217 static_call(kvm_x86_set_nmi_mask)(vcpu, events->nmi.masked); in kvm_vcpu_ioctl_x86_set_vcpu_events()
5530 return static_call(kvm_x86_enable_l2_tlb_flush)(vcpu); in kvm_vcpu_ioctl_enable_cap()
5999 ret = static_call(kvm_x86_set_tss_addr)(kvm, addr); in kvm_vm_ioctl_set_tss_addr()
6006 return static_call(kvm_x86_set_identity_map_addr)(kvm, ident_addr); in kvm_vm_ioctl_set_identity_map_addr()
6321 r = static_call(kvm_x86_vm_copy_enc_context_from)(kvm, cap->args[0]); in kvm_vm_ioctl_enable_cap()
6328 r = static_call(kvm_x86_vm_move_enc_context_from)(kvm, cap->args[0]); in kvm_vm_ioctl_enable_cap()
6961 r = static_call(kvm_x86_mem_enc_ioctl)(kvm, argp); in kvm_arch_vm_ioctl()
6975 r = static_call(kvm_x86_mem_enc_register_region)(kvm, &region); in kvm_arch_vm_ioctl()
6989 r = static_call(kvm_x86_mem_enc_unregister_region)(kvm, &region); in kvm_arch_vm_ioctl()
7116 if (!static_call(kvm_x86_has_emulated_msr)(NULL, emulated_msrs_all[i])) in kvm_init_msr_list()
7179 static_call(kvm_x86_set_segment)(vcpu, var, seg); in kvm_set_segment()
7185 static_call(kvm_x86_get_segment)(vcpu, var, seg); in kvm_get_segment()
7208 u64 access = (static_call(kvm_x86_get_cpl)(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_mmu_gva_to_gpa_read()
7218 u64 access = (static_call(kvm_x86_get_cpl)(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_mmu_gva_to_gpa_write()
7271 u64 access = (static_call(kvm_x86_get_cpl)(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_fetch_guest_virt()
7296 u64 access = (static_call(kvm_x86_get_cpl)(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_read_guest_virt()
7319 else if (static_call(kvm_x86_get_cpl)(vcpu) == 3) in emulator_read_std()
7364 else if (static_call(kvm_x86_get_cpl)(vcpu) == 3) in emulator_write_std()
7385 return static_call(kvm_x86_can_emulate_instruction)(vcpu, emul_type, in kvm_can_emulate_insn()
7434 u64 access = ((static_call(kvm_x86_get_cpl)(vcpu) == 3) ? PFERR_USER_MASK : 0) in vcpu_mmio_gva_to_gpa()
7851 return static_call(kvm_x86_get_segment_base)(vcpu, seg); in get_segment_base()
7864 if (static_call(kvm_x86_has_wbinvd_exit)()) { in kvm_emulate_wbinvd_noskip()
7969 return static_call(kvm_x86_get_cpl)(emul_to_vcpu(ctxt)); in emulator_get_cpl()
7974 static_call(kvm_x86_get_gdt)(emul_to_vcpu(ctxt), dt); in emulator_get_gdt()
7979 static_call(kvm_x86_get_idt)(emul_to_vcpu(ctxt), dt); in emulator_get_idt()
7984 static_call(kvm_x86_set_gdt)(emul_to_vcpu(ctxt), dt); in emulator_set_gdt()
7989 static_call(kvm_x86_set_idt)(emul_to_vcpu(ctxt), dt); in emulator_set_idt()
8139 return static_call(kvm_x86_check_intercept)(emul_to_vcpu(ctxt), info, stage, in emulator_intercept()
8182 static_call(kvm_x86_set_nmi_mask)(emul_to_vcpu(ctxt), masked); in emulator_set_nmi_mask()
8270 u32 int_shadow = static_call(kvm_x86_get_interrupt_shadow)(vcpu); in toggle_interruptibility()
8281 static_call(kvm_x86_set_interrupt_shadow)(vcpu, mask); in toggle_interruptibility()
8322 static_call(kvm_x86_get_cs_db_l_bits)(vcpu, &cs_db, &cs_l); in init_emulate_ctxt()
8378 static_call(kvm_x86_get_exit_info)(vcpu, (u32 *)&info[0], &info[1], in prepare_emulation_failure_exit()
8457 if (!is_guest_mode(vcpu) && static_call(kvm_x86_get_cpl)(vcpu) == 0) { in handle_emulation_failure()
8623 unsigned long rflags = static_call(kvm_x86_get_rflags)(vcpu); in kvm_skip_emulated_instruction()
8626 r = static_call(kvm_x86_skip_emulated_instruction)(vcpu); in kvm_skip_emulated_instruction()
8658 shadow = static_call(kvm_x86_get_interrupt_shadow)(vcpu); in kvm_is_code_breakpoint_inhibited()
8934 unsigned long rflags = static_call(kvm_x86_get_rflags)(vcpu); in x86_emulate_instruction()
9350 return static_call(kvm_x86_check_processor_compatibility)(); in kvm_x86_check_processor_compatibility()
9480 static_call(kvm_x86_hardware_unsetup)(); in __kvm_x86_vendor_init()
9522 static_call(kvm_x86_hardware_unsetup)(); in kvm_x86_vendor_exit()
9648 ulong vcpu_reasons = static_call(kvm_x86_vcpu_get_apicv_inhibit_reasons)(vcpu); in kvm_vcpu_apicv_activated()
9751 if (static_call(kvm_x86_get_cpl)(vcpu) != 0) { in kvm_emulate_hypercall()
9841 static_call(kvm_x86_patch_hypercall)(vcpu, instruction); in emulator_fix_hypercall()
9858 kvm_run->if_flag = static_call(kvm_x86_get_if_flag)(vcpu); in post_kvm_run_save()
9893 static_call(kvm_x86_update_cr8_intercept)(vcpu, tpr, max_irr); in update_cr8_intercept()
9916 static_call(kvm_x86_inject_exception)(vcpu); in kvm_inject_exception()
10002 static_call(kvm_x86_inject_nmi)(vcpu); in kvm_check_and_inject_events()
10004 static_call(kvm_x86_inject_irq)(vcpu, true); in kvm_check_and_inject_events()
10089 r = can_inject ? static_call(kvm_x86_smi_allowed)(vcpu, true) : -EBUSY; in kvm_check_and_inject_events()
10098 static_call(kvm_x86_enable_smi_window)(vcpu); in kvm_check_and_inject_events()
10103 r = can_inject ? static_call(kvm_x86_nmi_allowed)(vcpu, true) : -EBUSY; in kvm_check_and_inject_events()
10109 static_call(kvm_x86_inject_nmi)(vcpu); in kvm_check_and_inject_events()
10111 WARN_ON(static_call(kvm_x86_nmi_allowed)(vcpu, true) < 0); in kvm_check_and_inject_events()
10114 static_call(kvm_x86_enable_nmi_window)(vcpu); in kvm_check_and_inject_events()
10118 r = can_inject ? static_call(kvm_x86_interrupt_allowed)(vcpu, true) : -EBUSY; in kvm_check_and_inject_events()
10123 static_call(kvm_x86_inject_irq)(vcpu, false); in kvm_check_and_inject_events()
10124 WARN_ON(static_call(kvm_x86_interrupt_allowed)(vcpu, true) < 0); in kvm_check_and_inject_events()
10127 static_call(kvm_x86_enable_irq_window)(vcpu); in kvm_check_and_inject_events()
10168 if (static_call(kvm_x86_get_nmi_mask)(vcpu) || vcpu->arch.nmi_injected) in process_nmi()
10207 static_call(kvm_x86_refresh_apicv_exec_ctrl)(vcpu); in __kvm_vcpu_update_apicv()
10530 static_call(kvm_x86_msr_filter_changed)(vcpu); in vcpu_enter_guest()
10533 static_call(kvm_x86_update_cpu_dirty_logging)(vcpu); in vcpu_enter_guest()
10555 static_call(kvm_x86_enable_irq_window)(vcpu); in vcpu_enter_guest()
10570 static_call(kvm_x86_prepare_switch_to_guest)(vcpu); in vcpu_enter_guest()
10620 static_call(kvm_x86_request_immediate_exit)(vcpu); in vcpu_enter_guest()
10652 exit_fastpath = static_call(kvm_x86_vcpu_run)(vcpu); in vcpu_enter_guest()
10673 static_call(kvm_x86_sync_dirty_debug_regs)(vcpu); in vcpu_enter_guest()
10702 static_call(kvm_x86_handle_exit_irqoff)(vcpu); in vcpu_enter_guest()
10748 r = static_call(kvm_x86_handle_exit)(vcpu, exit_fastpath); in vcpu_enter_guest()
10754 static_call(kvm_x86_cancel_injection)(vcpu); in vcpu_enter_guest()
11068 r = static_call(kvm_x86_vcpu_pre_run)(vcpu); in kvm_arch_vcpu_ioctl_run()
11188 static_call(kvm_x86_get_idt)(vcpu, &dt); in __get_sregs_common()
11191 static_call(kvm_x86_get_gdt)(vcpu, &dt); in __get_sregs_common()
11386 static_call(kvm_x86_set_idt)(vcpu, &dt); in __set_sregs_common()
11389 static_call(kvm_x86_set_gdt)(vcpu, &dt); in __set_sregs_common()
11400 static_call(kvm_x86_set_efer)(vcpu, sregs->efer); in __set_sregs_common()
11403 static_call(kvm_x86_set_cr0)(vcpu, sregs->cr0); in __set_sregs_common()
11407 static_call(kvm_x86_set_cr4)(vcpu, sregs->cr4); in __set_sregs_common()
11577 static_call(kvm_x86_update_exception_bitmap)(vcpu); in kvm_arch_vcpu_ioctl_set_guest_debug()
11707 return static_call(kvm_x86_vcpu_precreate)(kvm); in kvm_arch_vcpu_precreate()
11796 r = static_call(kvm_x86_vcpu_create)(vcpu); in kvm_arch_vcpu_create()
11854 static_call(kvm_x86_vcpu_free)(vcpu); in kvm_arch_vcpu_destroy()
11975 static_call(kvm_x86_vcpu_reset)(vcpu, init_event); in kvm_vcpu_reset()
11994 static_call(kvm_x86_set_cr0)(vcpu, new_cr0); in kvm_vcpu_reset()
11995 static_call(kvm_x86_set_cr4)(vcpu, 0); in kvm_vcpu_reset()
11996 static_call(kvm_x86_set_efer)(vcpu, 0); in kvm_vcpu_reset()
11997 static_call(kvm_x86_update_exception_bitmap)(vcpu); in kvm_vcpu_reset()
12054 ret = static_call(kvm_x86_hardware_enable)(); in kvm_arch_hardware_enable()
12136 static_call(kvm_x86_hardware_disable)(); in kvm_arch_hardware_disable()
12162 static_call(kvm_x86_sched_in)(vcpu, cpu); in kvm_arch_sched_in()
12188 ret = static_call(kvm_x86_vm_init)(kvm); in kvm_arch_init_vm()
12693 static_call(kvm_x86_guest_apic_has_interrupt)(vcpu)); in kvm_guest_apic_has_interrupt()
12713 static_call(kvm_x86_nmi_allowed)(vcpu, false))) in kvm_vcpu_has_events()
12719 static_call(kvm_x86_smi_allowed)(vcpu, false))) in kvm_vcpu_has_events()
12750 static_call(kvm_x86_dy_apicv_has_pending_interrupt)(vcpu)) in kvm_arch_dy_has_pending_interrupt()
12791 return static_call(kvm_x86_interrupt_allowed)(vcpu, false); in kvm_arch_interrupt_allowed()
12817 rflags = static_call(kvm_x86_get_rflags)(vcpu); in kvm_get_rflags()
12829 static_call(kvm_x86_set_rflags)(vcpu, rflags); in __kvm_set_rflags()
12941 static_call(kvm_x86_get_cpl)(vcpu) == 0) in kvm_can_deliver_async_pf()
13100 ret = static_call(kvm_x86_pi_update_irte)(irqfd->kvm, in kvm_arch_irq_bypass_add_producer()
13125 ret = static_call(kvm_x86_pi_update_irte)(irqfd->kvm, prod->irq, irqfd->gsi, 0); in kvm_arch_irq_bypass_del_producer()
13136 return static_call(kvm_x86_pi_update_irte)(kvm, host_irq, guest_irq, set); in kvm_arch_update_irqfd_routing()