/linux/arch/powerpc/kvm/

booke_emulate.c
    26   vcpu->arch.regs.nip = vcpu->arch.shared->srr0;   in kvmppc_emul_rfi()
    32   vcpu->arch.regs.nip = vcpu->arch.dsrr0;   in kvmppc_emul_rfdi()
    38   vcpu->arch.regs.nip = vcpu->arch.csrr0;   in kvmppc_emul_rfci()
    90   vcpu->arch.shared->msr = (vcpu->arch.shared->msr & ~MSR_EE)   in kvmppc_booke_emulate_op()
    96   vcpu->arch.shared->msr = (vcpu->arch.shared->msr & ~MSR_EE)   in kvmppc_booke_emulate_op()
    133  vcpu->arch.csrr0 = spr_val;   in kvmppc_booke_emulate_mtspr()
    136  vcpu->arch.csrr1 = spr_val;   in kvmppc_booke_emulate_mtspr()
    139  vcpu->arch.dsrr0 = spr_val;   in kvmppc_booke_emulate_mtspr()
    142  vcpu->arch.dsrr1 = spr_val;   in kvmppc_booke_emulate_mtspr()
    298  vcpu->arch.ivpr = spr_val;   in kvmppc_booke_emulate_mtspr()
    [all …]

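The booke_emulate.c hits show the return-from-interrupt family (rfi/rfdi/rfci) being emulated by copying a saved PC/MSR pair back into the guest's live state. Below is a minimal, userspace-compilable sketch of that pattern; struct guest_state and emulate_rfi() are invented for illustration and do not reflect the kernel's kvm_vcpu layout.

```c
#include <stdint.h>
#include <stdio.h>

/* Hypothetical, simplified guest state -- not the kernel's kvm_vcpu layout. */
struct guest_state {
	uint64_t nip;	/* next instruction pointer */
	uint64_t msr;	/* machine state register */
	uint64_t srr0;	/* PC saved at interrupt entry */
	uint64_t srr1;	/* MSR saved at interrupt entry */
};

/*
 * Emulate an rfi-style return: the saved SRR0/SRR1 pair becomes the new
 * PC/MSR, mirroring the assignments visible in kvmppc_emul_rfi() above.
 */
static void emulate_rfi(struct guest_state *g)
{
	g->nip = g->srr0;
	g->msr = g->srr1;
}

int main(void)
{
	struct guest_state g = { .nip = 0x100, .msr = 0, .srr0 = 0xc000, .srr1 = 0x8000 };

	emulate_rfi(&g);
	printf("resume at nip=0x%llx msr=0x%llx\n",
	       (unsigned long long)g.nip, (unsigned long long)g.msr);
	return 0;
}
```
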
book3s_hv_tm.c
    29   vcpu->arch.tfiar = tfiar;   in emulate_tx_failure()
    31   vcpu->arch.texasr = (vcpu->arch.texasr & 0x3ffffff) | texasr;   in emulate_tx_failure()
    56   vcpu->arch.regs.nip -= 4;   in kvmhv_p9_tm_emulation()
    79   vcpu->arch.cfar = vcpu->arch.regs.nip;   in kvmhv_p9_tm_emulation()
    80   vcpu->arch.regs.nip = vcpu->arch.shregs.srr0;   in kvmhv_p9_tm_emulation()
    103  bescr = vcpu->arch.bescr;   in kvmhv_p9_tm_emulation()
    113  vcpu->arch.cfar = vcpu->arch.regs.nip;   in kvmhv_p9_tm_emulation()
    114  vcpu->arch.regs.nip = vcpu->arch.ebbrr;   in kvmhv_p9_tm_emulation()
    156  vcpu->arch.regs.ccr = (vcpu->arch.regs.ccr & 0x0fffffff) |   in kvmhv_p9_tm_emulation()
    203  vcpu->arch.regs.ccr = (vcpu->arch.regs.ccr & 0x0fffffff) |   in kvmhv_p9_tm_emulation()
    [all …]

book3s_hv_p9_entry.c
    14   mtspr(SPRN_TAR, vcpu->arch.tar);   in load_spr_state()
    38   if (vcpu->arch.uamor != 0)   in load_spr_state()
    44   if (vcpu->arch.pspb != 0)   in load_spr_state()
    54   if (!(vcpu->arch.ctrl & 1))   in load_spr_state()
    173  if (vcpu->arch.uamor != 0)   in restore_p9_host_os_sprs()
    181  if (vcpu->arch.pspb != 0)   in restore_p9_host_os_sprs()
    208  vcpu->arch.load_ebb++;   in restore_p9_host_os_sprs()
    332  lpid = kvm->arch.lpid;   in switch_mmu_to_guest_hpt()
    347  mtslb(vcpu->arch.slb[i].orige, vcpu->arch.slb[i].origv);   in switch_mmu_to_guest_hpt()
    561  vcpu->arch.ceded = 0;   in kvmhv_vcpu_entry_p9()
    [all …]

emulate_loadstore.c
    86   vcpu->arch.mmio_vsx_offset = 0;   in kvmppc_emulate_loadstore()
    88   vcpu->arch.mmio_sp64_extend = 0;   in kvmppc_emulate_loadstore()
    89   vcpu->arch.mmio_sign_extend = 0;   in kvmppc_emulate_loadstore()
    91   vcpu->arch.mmio_vmx_offset = 0;   in kvmppc_emulate_loadstore()
    148  vcpu->arch.mmio_copy_type =   in kvmppc_emulate_loadstore()
    151  vcpu->arch.mmio_copy_type =   in kvmppc_emulate_loadstore()
    154  vcpu->arch.mmio_copy_type =   in kvmppc_emulate_loadstore()
    157  vcpu->arch.mmio_copy_type =   in kvmppc_emulate_loadstore()
    162  vcpu->arch.mmio_vmx_offset =   in kvmppc_emulate_loadstore()
    274  vcpu->arch.mmio_copy_type =   in kvmppc_emulate_loadstore()
    [all …]

timing.c
    27   mutex_lock(&vcpu->arch.exit_timing_lock);   in kvmppc_init_timing_stats()
    29   vcpu->arch.last_exit_type = 0xDEAD;   in kvmppc_init_timing_stats()
    31   vcpu->arch.timing_count_type[i] = 0;   in kvmppc_init_timing_stats()
    32   vcpu->arch.timing_max_duration[i] = 0;   in kvmppc_init_timing_stats()
    34   vcpu->arch.timing_sum_duration[i] = 0;   in kvmppc_init_timing_stats()
    37   vcpu->arch.timing_last_exit = 0;   in kvmppc_init_timing_stats()
    38   vcpu->arch.timing_exit.tv64 = 0;   in kvmppc_init_timing_stats()
    39   vcpu->arch.timing_last_enter.tv64 = 0;   in kvmppc_init_timing_stats()
    50   vcpu->arch.timing_count_type[type]++;   in add_exit_timing()
    88   vcpu->arch.timing_last_exit = vcpu->arch.timing_exit.tv64;   in kvmppc_update_timing_stats()
    [all …]

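The timing.c matches are per-exit-type bookkeeping: counters, sums, and extrema are zeroed in kvmppc_init_timing_stats() and accumulated in add_exit_timing(). A standalone sketch of that bookkeeping follows; struct exit_timing, NR_EXIT_TYPES, and the function names are loosely modeled on the fields above, not copied from the kernel.

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define NR_EXIT_TYPES 8	/* placeholder; the real table has one slot per exit reason */

/* Illustrative stats block, loosely modeled on the fields named above. */
struct exit_timing {
	uint64_t count[NR_EXIT_TYPES];
	uint64_t sum_duration[NR_EXIT_TYPES];
	uint64_t min_duration[NR_EXIT_TYPES];
	uint64_t max_duration[NR_EXIT_TYPES];
};

static void init_timing_stats(struct exit_timing *t)
{
	memset(t, 0, sizeof(*t));
	for (int i = 0; i < NR_EXIT_TYPES; i++)
		t->min_duration[i] = UINT64_MAX;
}

static void add_exit_timing(struct exit_timing *t, int type, uint64_t duration)
{
	t->count[type]++;
	t->sum_duration[type] += duration;
	if (duration < t->min_duration[type])
		t->min_duration[type] = duration;
	if (duration > t->max_duration[type])
		t->max_duration[type] = duration;
}

int main(void)
{
	struct exit_timing t;

	init_timing_stats(&t);
	add_exit_timing(&t, 0, 120);
	add_exit_timing(&t, 0, 80);
	printf("type 0: count=%llu avg=%llu\n",
	       (unsigned long long)t.count[0],
	       (unsigned long long)(t.sum_duration[0] / t.count[0]));
	return 0;
}
```
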
booke.c
    95   vcpu->arch.shared->msr);   in kvmppc_dump_vcpu()
    97   vcpu->arch.regs.ctr);   in kvmppc_dump_vcpu()
    189  vcpu->arch.shadow_msr |= vcpu->arch.shared->msr & MSR_FP;   in kvmppc_vcpu_sync_fpu()
    233  vcpu->arch.shadow_msr |= vcpu->arch.shared->msr & MSR_DE;   in kvmppc_vcpu_sync_debug()
    530  vcpu->arch.regs.nip = vcpu->arch.ivpr |   in kvmppc_booke_irqprio_deliver()
    679  if ((vcpu->arch.tcr & TCR_DIE) && (vcpu->arch.tsr & TSR_DIS))   in update_timer_ints()
    684  if ((vcpu->arch.tcr & TCR_WIE) && (vcpu->arch.tsr & TSR_WIS))   in update_timer_ints()
    848  __func__, vcpu->arch.regs.nip, vcpu->arch.last_inst);   in emulation_exit()
    896  vcpu->arch.dbsr = 0;   in kvmppc_handle_debug()
    898  run->debug.arch.address = vcpu->arch.regs.nip;   in kvmppc_handle_debug()
    [all …]

book3s_hv_p9_perf.c
    41   lp = vcpu->arch.vpa.pinned_addr;   in switch_pmu_to_guest()
    88   mtspr(SPRN_PMC1, vcpu->arch.pmc[0]);   in switch_pmu_to_guest()
    89   mtspr(SPRN_PMC2, vcpu->arch.pmc[1]);   in switch_pmu_to_guest()
    96   mtspr(SPRN_SDAR, vcpu->arch.sdar);   in switch_pmu_to_guest()
    97   mtspr(SPRN_SIAR, vcpu->arch.siar);   in switch_pmu_to_guest()
    111  if (!vcpu->arch.nested &&   in switch_pmu_to_guest()
    113  vcpu->arch.hfscr |= HFSCR_PM;   in switch_pmu_to_guest()
    124  lp = vcpu->arch.vpa.pinned_addr;   in switch_pmu_to_host()
    140  freeze_pmu(vcpu->arch.mmcr[0], vcpu->arch.mmcra);   in switch_pmu_to_host()
    182  if (!vcpu->arch.nested)   in switch_pmu_to_host()
    [all …]

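The book3s_hv_p9_perf.c hits are the guest/host PMU context switch: the running counters are frozen and saved, the other context's saved PMC/SIAR/SDAR values are loaded, and the reverse happens on the way back. Below is a deliberately simplified, userspace-compilable model of that save/swap step; struct pmu_ctx and the hw_pmc[] array are stand-ins for the real SPRs, which the kernel reads and writes with mfspr()/mtspr() and which it only swaps under additional conditions not modeled here.

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define NR_PMCS 6

/* Stand-in for the hardware PMC registers (really SPRs behind mtspr/mfspr). */
static uint64_t hw_pmc[NR_PMCS];

struct pmu_ctx {
	uint64_t pmc[NR_PMCS];
};

/* Save the current counters into 'out' and load 'in' into the "hardware",
 * the same swap switch_pmu_to_guest()/switch_pmu_to_host() perform around
 * guest entry and exit. */
static void switch_pmu(struct pmu_ctx *out, const struct pmu_ctx *in)
{
	memcpy(out->pmc, hw_pmc, sizeof(hw_pmc));
	memcpy(hw_pmc, in->pmc, sizeof(hw_pmc));
}

int main(void)
{
	struct pmu_ctx host = { 0 }, guest = { .pmc = { 100, 200 } };

	switch_pmu(&host, &guest);	/* enter guest: guest counters live */
	hw_pmc[0] += 5;			/* guest "runs" and counts events   */
	switch_pmu(&guest, &host);	/* exit: save guest, restore host   */
	printf("guest pmc0=%llu\n", (unsigned long long)guest.pmc[0]);
	return 0;
}
```
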
book3s_hv_nestedv2.c
    265  vcpu->arch.regs.gpr[i]);   in gs_msg_ops_vcpu_fill_info()
    278  vcpu->arch.regs.link);   in gs_msg_ops_vcpu_fill_info()
    285  vcpu->arch.shregs.srr0);   in gs_msg_ops_vcpu_fill_info()
    289  vcpu->arch.shregs.srr1);   in gs_msg_ops_vcpu_fill_info()
    309  vcpu->arch.shregs.dar);   in gs_msg_ops_vcpu_fill_info()
    317  vcpu->arch.shregs.msr);   in gs_msg_ops_vcpu_fill_info()
    321  vcpu->arch.vcore->vtb);   in gs_msg_ops_vcpu_fill_info()
    611  vcpu->arch.dec_expires =   in gs_msg_ops_vcpu_refresh_info()
    943  vcpu->arch.fault_dar = 0;   in kvmhv_nestedv2_parse_output()
    944  vcpu->arch.fault_dsisr = 0;   in kvmhv_nestedv2_parse_output()
    [all …]

book3s_hv_tm_builtin.c
    22   u32 instr = vcpu->arch.emul_inst;   in kvmhv_p9_tm_emulation_early()
    46   vcpu->arch.cfar = vcpu->arch.regs.nip - 4;   in kvmhv_p9_tm_emulation_early()
    47   vcpu->arch.regs.nip = vcpu->arch.shregs.srr0;   in kvmhv_p9_tm_emulation_early()
    52   msr = vcpu->arch.shregs.msr;   in kvmhv_p9_tm_emulation_early()
    68   vcpu->arch.shregs.msr = msr;   in kvmhv_p9_tm_emulation_early()
    69   vcpu->arch.cfar = vcpu->arch.regs.nip - 4;   in kvmhv_p9_tm_emulation_early()
    77   msr = vcpu->arch.shregs.msr;   in kvmhv_p9_tm_emulation_early()
    90   msr = vcpu->arch.shregs.msr;   in kvmhv_p9_tm_emulation_early()
    101  vcpu->arch.regs.ccr = (vcpu->arch.regs.ccr & 0x0fffffff) |   in kvmhv_p9_tm_emulation_early()
    116  vcpu->arch.regs.nip = vcpu->arch.tfhar;   in kvmhv_emulate_tm_rollback()
    [all …]

book3s_emulate.c
    92   vcpu->arch.ppr_tm = vcpu->arch.ppr;   in kvmppc_copyto_vcpu_tm()
    93   vcpu->arch.dscr_tm = vcpu->arch.dscr;   in kvmppc_copyto_vcpu_tm()
    94   vcpu->arch.amr_tm = vcpu->arch.amr;   in kvmppc_copyto_vcpu_tm()
    95   vcpu->arch.ctr_tm = vcpu->arch.regs.ctr;   in kvmppc_copyto_vcpu_tm()
    96   vcpu->arch.tar_tm = vcpu->arch.tar;   in kvmppc_copyto_vcpu_tm()
    98   vcpu->arch.cr_tm = vcpu->arch.regs.ccr;   in kvmppc_copyto_vcpu_tm()
    111  vcpu->arch.ppr = vcpu->arch.ppr_tm;   in kvmppc_copyfrom_vcpu_tm()
    112  vcpu->arch.dscr = vcpu->arch.dscr_tm;   in kvmppc_copyfrom_vcpu_tm()
    113  vcpu->arch.amr = vcpu->arch.amr_tm;   in kvmppc_copyfrom_vcpu_tm()
    115  vcpu->arch.tar = vcpu->arch.tar_tm;   in kvmppc_copyfrom_vcpu_tm()
    [all …]

book3s_hv.c
    489   vcpu->arch.regs.nip, vcpu->arch.shregs.msr, vcpu->arch.trap);   in kvmppc_dump_regs()
    495   vcpu->arch.regs.ctr, vcpu->arch.regs.link);   in kvmppc_dump_regs()
    503   vcpu->arch.regs.ccr, vcpu->arch.regs.xer, vcpu->arch.shregs.dsisr);   in kvmppc_dump_regs()
    512   vcpu->arch.vcore->lpcr, vcpu->kvm->arch.sdr1,   in kvmppc_dump_regs()
    756   vcpu->arch.dtl_ptr = vcpu->arch.dtl.pinned_addr;   in kvmppc_update_vpas()
    1807  vcpu->arch.fault_dar, vcpu->arch.fault_dsisr);   in kvmppc_handle_exit_hv()
    2996  vcpu->arch.shared = &vcpu->arch.shregs;   in kvmppc_core_vcpu_create_hv()
    4301  vcpu->arch.regs.msr = vcpu->arch.shregs.msr;   in kvmhv_vcpu_entry_p9_nested()
    4337  vcpu->arch.shregs.msr = vcpu->arch.regs.msr;   in kvmhv_vcpu_entry_p9_nested()
    5104  vcpu->arch.waitp = &vcpu->arch.vcore->wait;   in kvmppc_vcpu_run_hv()
    [all …]

e500_emulate.c
    75   int cpir = cvcpu->arch.shared->pir;   in kvmppc_e500_emul_msgsnd()
    94   vcpu->run->debug.arch.address = vcpu->arch.regs.nip;   in kvmppc_e500_emul_ehpriv()
    95   vcpu->run->debug.arch.status = 0;   in kvmppc_e500_emul_ehpriv()
    273  vcpu->arch.pwrmgtcr0 = spr_val;   in kvmppc_core_emulate_mtspr_e500()
    360  *spr_val = vcpu->arch.decar;   in kvmppc_core_emulate_mfspr_e500()
    363  *spr_val = vcpu->arch.tlbcfg[0];   in kvmppc_core_emulate_mfspr_e500()
    366  *spr_val = vcpu->arch.tlbcfg[1];   in kvmppc_core_emulate_mfspr_e500()
    371  *spr_val = vcpu->arch.tlbps[0];   in kvmppc_core_emulate_mfspr_e500()
    376  *spr_val = vcpu->arch.tlbps[1];   in kvmppc_core_emulate_mfspr_e500()
    399  *spr_val = vcpu->arch.mmucfg;   in kvmppc_core_emulate_mfspr_e500()
    [all …]

book3s_pr.c
    307   vcpu->arch.purr += get_tb() - vcpu->arch.entry_tb;   in kvmppc_copy_from_svcpu()
    308   vcpu->arch.spurr += get_tb() - vcpu->arch.entry_tb;   in kvmppc_copy_from_svcpu()
    311   vcpu->arch.ic += mfspr(SPRN_IC) - vcpu->arch.entry_ic;   in kvmppc_copy_from_svcpu()
    555   vcpu->arch.pvr = pvr;   in kvmppc_set_pvr_pr()
    1048  vcpu->arch.fscr = fscr;   in kvmppc_set_fscr()
    1053  vcpu->arch.fscr = fscr;   in kvmppc_set_fscr()
    1743  vcpu->arch.shadow_vcpu =   in kvmppc_core_vcpu_create_pr()
    1772  vcpu->arch.pvr = 0x84202;   in kvmppc_core_vcpu_create_pr()
    1773  vcpu->arch.intr_msr = 0;   in kvmppc_core_vcpu_create_pr()
    1776  vcpu->arch.slb_nr = 64;   in kvmppc_core_vcpu_create_pr()
    [all …]

e500mc.c
    107  vcpu->arch.pid = pid;   in kvmppc_set_pid()
    128  vcpu->arch.epsc = vcpu->arch.eplc;   in kvmppc_core_vcpu_load_e500mc()
    129  mtspr(SPRN_EPLC, vcpu->arch.eplc);   in kvmppc_core_vcpu_load_e500mc()
    130  mtspr(SPRN_EPSC, vcpu->arch.epsc);   in kvmppc_core_vcpu_load_e500mc()
    143  mtspr(SPRN_GEPR, vcpu->arch.epr);   in kvmppc_core_vcpu_load_e500mc()
    167  vcpu->arch.epr = mfspr(SPRN_GEPR);   in kvmppc_core_vcpu_put_e500mc()
    211  vcpu->arch.pvr = mfspr(SPRN_PVR);   in kvmppc_core_vcpu_setup()
    318  vcpu->arch.oldpir = 0xffffffff;   in kvmppc_core_vcpu_create_e500mc()
    325  if (!vcpu->arch.shared) {   in kvmppc_core_vcpu_create_e500mc()
    361  kvm->arch.lpid = lpid;   in kvmppc_core_init_vm_e500mc()
    [all …]

/linux/arch/mips/kvm/

emulate.c
    45    struct kvm_vcpu_arch *arch = &vcpu->arch;   in kvm_compute_return_epc()  local
    1003  vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr,   in kvm_mips_emulate_store()
    1013  vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr,   in kvm_mips_emulate_store()
    1022  vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr,   in kvm_mips_emulate_store()
    1031  vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr,   in kvm_mips_emulate_store()
    1061  vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr,   in kvm_mips_emulate_store()
    1091  vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr,   in kvm_mips_emulate_store()
    1139  vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr,   in kvm_mips_emulate_store()
    1186  vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr,   in kvm_mips_emulate_store()
    1290  vcpu->arch.io_pc = vcpu->arch.pc;   in kvm_mips_emulate_load()
    [all …]

/linux/tools/perf/util/

perf_regs.c
    37   if (!strcmp(arch, "csky"))   in perf_reg_name()
    41   else if (!strcmp(arch, "mips"))   in perf_reg_name()
    45   else if (!strcmp(arch, "riscv"))   in perf_reg_name()
    47   else if (!strcmp(arch, "s390"))   in perf_reg_name()
    49   else if (!strcmp(arch, "x86"))   in perf_reg_name()
    51   else if (!strcmp(arch, "arm"))   in perf_reg_name()
    53   else if (!strcmp(arch, "arm64"))   in perf_reg_name()
    88   if (!strcmp(arch, "arm"))   in perf_arch_reg_ip()
    104  else if (!strcmp(arch, "x86"))   in perf_arch_reg_ip()
    113  if (!strcmp(arch, "arm"))   in perf_arch_reg_sp()
    [all …]

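perf_regs.c resolves register names and the IP/SP register numbers by comparing the architecture string in an if/else strcmp chain. The cut-down illustration below shows that dispatch shape; the return values here are placeholder register names chosen for the example, not perf's actual PERF_REG_* mapping, and perf_arch_reg_ip() really returns a register index rather than a string.

```c
#include <stdio.h>
#include <string.h>

/* Toy mapping from perf's arch string to the name of its instruction-pointer
 * register, modeled on the strcmp chain visible in the listing above. */
static const char *arch_ip_reg_name(const char *arch)
{
	if (!strcmp(arch, "x86"))
		return "ip";
	else if (!strcmp(arch, "arm") || !strcmp(arch, "arm64"))
		return "pc";
	else if (!strcmp(arch, "riscv") || !strcmp(arch, "mips") || !strcmp(arch, "csky"))
		return "pc";
	else if (!strcmp(arch, "s390"))
		return "psw addr";	/* placeholder label */
	return "unknown";
}

int main(void)
{
	printf("%s\n", arch_ip_reg_name("arm64"));
	return 0;
}
```
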
/linux/arch/s390/kvm/

guestdbg.c
    132  vcpu->arch.guestdbg.cr0 = vcpu->arch.sie_block->gcr[0];   in kvm_s390_backup_guest_per_regs()
    133  vcpu->arch.guestdbg.cr9 = vcpu->arch.sie_block->gcr[9];   in kvm_s390_backup_guest_per_regs()
    134  vcpu->arch.guestdbg.cr10 = vcpu->arch.sie_block->gcr[10];   in kvm_s390_backup_guest_per_regs()
    135  vcpu->arch.guestdbg.cr11 = vcpu->arch.sie_block->gcr[11];   in kvm_s390_backup_guest_per_regs()
    140  vcpu->arch.sie_block->gcr[0] = vcpu->arch.guestdbg.cr0;   in kvm_s390_restore_guest_per_regs()
    141  vcpu->arch.sie_block->gcr[9] = vcpu->arch.guestdbg.cr9;   in kvm_s390_restore_guest_per_regs()
    142  vcpu->arch.sie_block->gcr[10] = vcpu->arch.guestdbg.cr10;   in kvm_s390_restore_guest_per_regs()
    143  vcpu->arch.sie_block->gcr[11] = vcpu->arch.guestdbg.cr11;   in kvm_s390_restore_guest_per_regs()
    211  if (dbg->arch.nr_hw_bp <= 0 || !dbg->arch.hw_bp)   in kvm_s390_import_bp_data()
    216  bp_data = memdup_array_user(dbg->arch.hw_bp, dbg->arch.nr_hw_bp,   in kvm_s390_import_bp_data()
    [all …]

kvm-s390.c
    312   kvm->arch.epoch = vcpu->arch.sie_block->epoch;   in kvm_clock_sync()
    313   kvm->arch.epdx = vcpu->arch.sie_block->epdx;   in kvm_clock_sync()
    3266  kvm->arch.crypto.crycb = &kvm->arch.sie_page2->crycb;   in kvm_s390_crypto_init()
    3351  kvm->arch.model.fac_list = kvm->arch.sie_page2->fac_list;   in kvm_arch_init_vm()
    3744  vcpu->arch.sie_block->epoch = vcpu->kvm->arch.epoch;   in kvm_arch_vcpu_postcreate()
    3745  vcpu->arch.sie_block->epdx = vcpu->kvm->arch.epdx;   in kvm_arch_vcpu_postcreate()
    3749  vcpu->arch.gmap = vcpu->kvm->arch.gmap;   in kvm_arch_vcpu_postcreate()
    3755  vcpu->arch.enabled_gmap = vcpu->arch.gmap;   in kvm_arch_vcpu_postcreate()
    4565  vcpu->arch.sie_block->epoch = kvm->arch.epoch;   in __kvm_s390_set_tod_clock()
    4566  vcpu->arch.sie_block->epdx = kvm->arch.epdx;   in __kvm_s390_set_tod_clock()
    [all …]

/linux/scripts/

head-object-list.txt
    14  arch/alpha/kernel/head.o
    15  arch/arc/kernel/head.o
    17  arch/arm/kernel/head.o
    18  arch/csky/kernel/head.o
    21  arch/m68k/68000/head.o
    22  arch/m68k/coldfire/head.o
    23  arch/m68k/kernel/head.o
    26  arch/mips/kernel/head.o
    27  arch/nios2/kernel/head.o
    29  arch/parisc/kernel/head.o
    [all …]

checkstack.pl
    42  my $arch = shift;
    43  if ($arch eq "") {
    44  $arch = `uname -m`;
    45  chomp($arch);
    61  } elsif ($arch eq 'arm') {
    65  } elsif ($arch =~ /^x86(_64)?$/ || $arch =~ /^i[3456]86$/) {
    71  } elsif ($arch eq 'm68k') {
    78  } elsif ($arch eq 'mips') {
    87  } elsif ($arch eq 'parisc' || $arch eq 'parisc64') {
    89  } elsif ($arch eq 'powerpc' || $arch =~ /^ppc(64)?(le)?$/ ) {
    [all …]

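checkstack.pl defaults the architecture to `uname -m` when the caller does not pass one, then selects a per-arch pattern for stack-adjusting instructions. The same default can be expressed in C with uname(2); this is only a sketch of the fallback, pick_arch() is an invented helper, and the per-arch pattern table is omitted.

```c
#include <stdio.h>
#include <string.h>
#include <sys/utsname.h>

/* Pick an architecture string: use the caller's choice if non-empty,
 * otherwise fall back to the machine field of uname(2) -- the same
 * default checkstack.pl takes from `uname -m`. */
static const char *pick_arch(const char *requested, char *buf, size_t len)
{
	struct utsname u;

	if (requested && *requested)
		return requested;
	if (uname(&u) == 0) {
		snprintf(buf, len, "%s", u.machine);
		return buf;
	}
	return "unknown";
}

int main(int argc, char **argv)
{
	char buf[256];

	printf("arch: %s\n", pick_arch(argc > 1 ? argv[1] : NULL, buf, sizeof(buf)));
	return 0;
}
```
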
/linux/arch/powerpc/kernel/

asm-offsets.c
    365  OFFSET(VCPU_VRS, kvm_vcpu, arch.vr.vr);   in main()
    371  OFFSET(VCPU_TAR, kvm_vcpu, arch.tar);   in main()
    419  OFFSET(KVM_LPID, kvm, arch.lpid);   in main()
    423  OFFSET(KVM_SDR1, kvm, arch.sdr1);   in main()
    439  OFFSET(VCPU_PURR, kvm_vcpu, arch.purr);   in main()
    441  OFFSET(VCPU_IC, kvm_vcpu, arch.ic);   in main()
    443  OFFSET(VCPU_AMR, kvm_vcpu, arch.amr);   in main()
    460  OFFSET(VCPU_PMC, kvm_vcpu, arch.pmc);   in main()
    464  OFFSET(VCPU_SLB, kvm_vcpu, arch.slb);   in main()
    473  OFFSET(VCPU_PPR, kvm_vcpu, arch.ppr);   in main()
    [all …]

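asm-offsets.c records the byte offsets of kvm_vcpu and kvm fields (VCPU_TAR, KVM_LPID, ...) so that assembly code can address them symbolically; the values come from offsetof() at build time. The standalone illustration below shows the idea with a made-up struct and a simplified OFFSET() macro that prints #define lines; the real file emits markers that the kbuild machinery turns into a generated header rather than printing directly.

```c
#include <stddef.h>
#include <stdio.h>

/* Made-up structure standing in for kvm_vcpu; only the pattern matters. */
struct toy_vcpu {
	unsigned long pc;
	unsigned long msr;
	unsigned long gpr[32];
};

/* Emit "#define NAME offset" lines from offsetof(), the same idea
 * asm-offsets.c uses to feed C struct layouts to assembly sources. */
#define OFFSET(name, type, member) \
	printf("#define %-16s %zu\n", #name, offsetof(struct type, member))

int main(void)
{
	OFFSET(VCPU_PC,   toy_vcpu, pc);
	OFFSET(VCPU_MSR,  toy_vcpu, msr);
	OFFSET(VCPU_GPRS, toy_vcpu, gpr);
	return 0;
}
```
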
/linux/arch/riscv/kvm/

vcpu.c
    67   vcpu->arch.last_exit_cpu = -1;   in kvm_riscv_reset_vcpu()
    88   vcpu->arch.hfence_head = 0;   in kvm_riscv_reset_vcpu()
    89   vcpu->arch.hfence_tail = 0;   in kvm_riscv_reset_vcpu()
    90   memset(vcpu->arch.hfence_queue, 0, sizeof(vcpu->arch.hfence_queue));   in kvm_riscv_reset_vcpu()
    114  vcpu->arch.ran_atleast_once = false;   in kvm_arch_vcpu_create()
    124  vcpu->arch.mimpid = sbi_get_mimpid();   in kvm_arch_vcpu_create()
    592  vcpu->arch.isa);   in kvm_arch_vcpu_load()
    595  vcpu->arch.isa);   in kvm_arch_vcpu_load()
    613  vcpu->arch.isa);   in kvm_arch_vcpu_put()
    618  vcpu->arch.isa);   in kvm_arch_vcpu_put()
    [all …]

/linux/tools/testing/selftests/kvm/x86_64/

debug_regs.c
    131  run->debug.arch.dr6 == target_dr6,   in main()
    136  run->debug.arch.dr6, target_dr6);   in main()
    154  run->debug.arch.dr6 == target_dr6,   in main()
    159  run->debug.arch.dr6, target_dr6);   in main()
    174  debug.arch.debugreg[7] = 0x00000400;   in main()
    179  run->debug.arch.pc == target_rip &&   in main()
    180  run->debug.arch.dr6 == target_dr6,   in main()
    184  run->debug.arch.pc, target_rip, run->debug.arch.dr6,   in main()
    191  debug.arch.debugreg[7] = 0x400 | DR7_GD;   in main()
    198  run->debug.arch.dr6 == target_dr6,   in main()
    [all …]

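The debug_regs.c excerpts compare the DR6 value and program counter reported in run->debug.arch against expected targets after each debug exit. A schematic of that assertion follows; struct debug_exit and check_debug_exit() are invented stand-ins, not the KVM selftest harness, and the sample values are arbitrary.

```c
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Invented stand-in for the debug-exit fields checked in the selftest. */
struct debug_exit {
	uint64_t pc;
	uint64_t dr6;
};

/* Fail loudly if the reported exit state does not match the expectation,
 * mirroring the pc/dr6 comparisons visible in the listing above. */
static void check_debug_exit(const struct debug_exit *got,
			     uint64_t target_pc, uint64_t target_dr6)
{
	if (got->pc != target_pc || got->dr6 != target_dr6) {
		fprintf(stderr,
			"unexpected debug exit: pc=%#llx (want %#llx) dr6=%#llx (want %#llx)\n",
			(unsigned long long)got->pc, (unsigned long long)target_pc,
			(unsigned long long)got->dr6, (unsigned long long)target_dr6);
		exit(1);
	}
}

int main(void)
{
	struct debug_exit e = { .pc = 0x401000, .dr6 = 0xffff4ff0 };

	check_debug_exit(&e, 0x401000, 0xffff4ff0);
	return 0;
}
```
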
/linux/arch/loongarch/kvm/

vcpu.c
    164   ghc = &vcpu->arch.st.cache;   in kvm_update_stolen_time()
    399   vcpu->arch.gprs[i], vcpu->arch.gprs[i + 1],   in kvm_arch_vcpu_dump_regs()
    400   vcpu->arch.gprs[i + 2], vcpu->arch.gprs[i + 3]);   in kvm_arch_vcpu_dump_regs()
    873   memset(&vcpu->arch.irq_pending, 0, sizeof(vcpu->arch.irq_pending));   in kvm_set_one_reg()
    874   memset(&vcpu->arch.irq_clear, 0, sizeof(vcpu->arch.irq_clear));   in kvm_set_one_reg()
    1308  if (!kvm_guest_has_fpu(&vcpu->arch) || !kvm_guest_has_lsx(&vcpu->arch))   in kvm_own_lsx()
    1344  …if (!kvm_guest_has_fpu(&vcpu->arch) || !kvm_guest_has_lsx(&vcpu->arch) || !kvm_guest_has_lasx(&vcp…   in kvm_own_lasx()
    1457  vcpu->arch.vpid = 0;   in kvm_arch_vcpu_create()
    1466  if (!vcpu->arch.csr)   in kvm_arch_vcpu_create()
    1485  csr = vcpu->arch.csr;   in kvm_arch_vcpu_create()
    [all …]

/linux/arch/powerpc/include/asm/

kvm_book3s_64.h
    596  vcpu->arch.amr = vcpu->arch.amr_tm;   in copy_from_checkpoint()
    597  vcpu->arch.ppr = vcpu->arch.ppr_tm;   in copy_from_checkpoint()
    598  vcpu->arch.dscr = vcpu->arch.dscr_tm;   in copy_from_checkpoint()
    599  vcpu->arch.tar = vcpu->arch.tar_tm;   in copy_from_checkpoint()
    602  vcpu->arch.fp = vcpu->arch.fp_tm;   in copy_from_checkpoint()
    603  vcpu->arch.vr = vcpu->arch.vr_tm;   in copy_from_checkpoint()
    613  vcpu->arch.amr_tm = vcpu->arch.amr;   in copy_to_checkpoint()
    614  vcpu->arch.ppr_tm = vcpu->arch.ppr;   in copy_to_checkpoint()
    616  vcpu->arch.tar_tm = vcpu->arch.tar;   in copy_to_checkpoint()
    619  vcpu->arch.fp_tm = vcpu->arch.fp;   in copy_to_checkpoint()
    [all …]

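kvm_book3s_64.h keeps a checkpointed ("_tm") shadow of several registers and copies between the live and checkpointed sets while emulating transactional memory, as the copy_from_checkpoint()/copy_to_checkpoint() hits above show. A small model of that copy-to/copy-from pattern with a reduced register set follows; struct tm_regs is invented for the example and says nothing about which TM instruction triggers each direction in KVM.

```c
#include <stdint.h>

/* Reduced model of the live vs. checkpointed ("_tm") register pairs seen in
 * copy_from_checkpoint()/copy_to_checkpoint() above. */
struct tm_regs {
	uint64_t amr, ppr, dscr, tar;
	uint64_t amr_tm, ppr_tm, dscr_tm, tar_tm;
};

/* Save the live values into the checkpointed copies. */
static void copy_to_checkpoint(struct tm_regs *r)
{
	r->amr_tm  = r->amr;
	r->ppr_tm  = r->ppr;
	r->dscr_tm = r->dscr;
	r->tar_tm  = r->tar;
}

/* Restore the live values from the checkpointed copies. */
static void copy_from_checkpoint(struct tm_regs *r)
{
	r->amr  = r->amr_tm;
	r->ppr  = r->ppr_tm;
	r->dscr = r->dscr_tm;
	r->tar  = r->tar_tm;
}

int main(void)
{
	struct tm_regs r = { .amr = 1, .ppr = 2, .dscr = 3, .tar = 4 };

	copy_to_checkpoint(&r);		/* snapshot live state             */
	r.tar = 99;			/* change made inside the "tx"     */
	copy_from_checkpoint(&r);	/* roll back to the snapshot       */
	return r.tar == 4 ? 0 : 1;
}
```
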