Lines matching refs: kvm_vcpu (the leading number on each match is its line number in the searched source file)

79 	struct kvm_vcpu *runnable_threads[MAX_SMT_THREADS];
86 struct kvm_vcpu *runner;
146 extern void kvmppc_mmu_pte_flush(struct kvm_vcpu *vcpu, ulong ea, ulong ea_mask);
147 extern void kvmppc_mmu_pte_vflush(struct kvm_vcpu *vcpu, u64 vp, u64 vp_mask);
148 extern void kvmppc_mmu_pte_pflush(struct kvm_vcpu *vcpu, ulong pa_start, ulong pa_end);
149 extern void kvmppc_set_msr(struct kvm_vcpu *vcpu, u64 new_msr);
150 extern void kvmppc_mmu_book3s_64_init(struct kvm_vcpu *vcpu);
151 extern void kvmppc_mmu_book3s_32_init(struct kvm_vcpu *vcpu);
152 extern void kvmppc_mmu_book3s_hv_init(struct kvm_vcpu *vcpu);
153 extern int kvmppc_mmu_map_page(struct kvm_vcpu *vcpu, struct kvmppc_pte *pte,
155 extern void kvmppc_mmu_unmap_page(struct kvm_vcpu *vcpu, struct kvmppc_pte *pte);
156 extern int kvmppc_mmu_map_segment(struct kvm_vcpu *vcpu, ulong eaddr);
157 extern void kvmppc_mmu_flush_segment(struct kvm_vcpu *vcpu, ulong eaddr, ulong seg_size);
158 extern void kvmppc_mmu_flush_segments(struct kvm_vcpu *vcpu);
159 extern int kvmppc_book3s_hv_page_fault(struct kvm_vcpu *vcpu,
163 extern int kvmppc_hv_emulate_mmio(struct kvm_vcpu *vcpu,
166 extern void kvmppc_mmu_hpte_cache_map(struct kvm_vcpu *vcpu, struct hpte_cache *pte);
167 extern struct hpte_cache *kvmppc_mmu_hpte_cache_next(struct kvm_vcpu *vcpu);
169 extern void kvmppc_mmu_hpte_destroy(struct kvm_vcpu *vcpu);
170 extern int kvmppc_mmu_hpte_init(struct kvm_vcpu *vcpu);
171 extern void kvmppc_mmu_invalidate_pte(struct kvm_vcpu *vcpu, struct hpte_cache *pte);
177 extern int kvmppc_book3s_radix_page_fault(struct kvm_vcpu *vcpu,
182 extern long kvmhv_copy_from_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr,
184 extern long kvmhv_copy_to_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr,
186 extern int kvmppc_mmu_walk_radix_tree(struct kvm_vcpu *vcpu, gva_t eaddr,
189 extern int kvmppc_mmu_radix_translate_table(struct kvm_vcpu *vcpu, gva_t eaddr,
192 extern int kvmppc_mmu_radix_xlate(struct kvm_vcpu *vcpu, gva_t eaddr,
203 extern int kvmppc_book3s_instantiate_page(struct kvm_vcpu *vcpu,
227 extern int kvmppc_ld(struct kvm_vcpu *vcpu, ulong *eaddr, int size, void *ptr, bool data);
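kvmppc_ld() above and the kvmhv_copy_{from,to}_guest_radix() routines at lines 182-184 read or write guest memory through the guest's own translation. Below is a minimal sketch of a caller, assuming the usual EMULATE_DONE success convention for kvmppc_ld(); the helper name and the error mapping are invented for illustration, not taken from the header.

#include <linux/kvm_host.h>
#include <asm/kvm_ppc.h>        /* EMULATE_DONE */
#include <asm/kvm_book3s.h>     /* kvmppc_ld() */

/* Hypothetical helper: fetch one 64-bit value from a guest effective address. */
static int read_guest_u64(struct kvm_vcpu *vcpu, ulong eaddr, u64 *val)
{
        ulong addr = eaddr;

        /* data = true: translate as a data access, not an instruction fetch. */
        if (kvmppc_ld(vcpu, &addr, sizeof(*val), val, true) != EMULATE_DONE)
                return -EFAULT;  /* assumption: collapse all failure modes to -EFAULT */

        return 0;
}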
228 extern void kvmppc_book3s_queue_irqprio(struct kvm_vcpu *vcpu, unsigned int vec);
229 extern void kvmppc_book3s_dequeue_irqprio(struct kvm_vcpu *vcpu,
231 extern void kvmppc_inject_interrupt(struct kvm_vcpu *vcpu, int vec, u64 flags);
232 extern void kvmppc_trigger_fac_interrupt(struct kvm_vcpu *vcpu, ulong fac);
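kvmppc_book3s_queue_irqprio() at line 228 queues a Book3S interrupt vector against the vcpu, while kvmppc_inject_interrupt() at line 231 delivers one immediately with SRR1 flags. A hedged sketch of queueing a decrementer exception follows; BOOK3S_INTERRUPT_DECREMENTER comes from asm/kvm_asm.h, and using it here is this sketch's assumption about a typical caller rather than anything the header spells out.

#include <linux/kvm_host.h>
#include <asm/kvm_book3s.h>
#include <asm/kvm_asm.h>        /* BOOK3S_INTERRUPT_DECREMENTER */

/* Hypothetical caller: make a decrementer exception pending on this vcpu. */
static void kick_guest_decrementer(struct kvm_vcpu *vcpu)
{
        /* Queued vectors are held by priority and delivered on the next guest entry. */
        kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_DECREMENTER);
}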
233 extern void kvmppc_set_bat(struct kvm_vcpu *vcpu, struct kvmppc_bat *bat,
235 extern void kvmppc_giveup_ext(struct kvm_vcpu *vcpu, ulong msr);
236 extern int kvmppc_emulate_paired_single(struct kvm_vcpu *vcpu);
237 extern kvm_pfn_t kvmppc_gpa_to_pfn(struct kvm_vcpu *vcpu, gpa_t gpa,
266 extern void kvmppc_set_fscr(struct kvm_vcpu *vcpu, u64 fscr);
268 extern int kvmhv_p9_tm_emulation_early(struct kvm_vcpu *vcpu);
269 extern int kvmhv_p9_tm_emulation(struct kvm_vcpu *vcpu);
270 extern void kvmhv_emulate_tm_rollback(struct kvm_vcpu *vcpu);
274 extern u32 kvmppc_alignment_dsisr(struct kvm_vcpu *vcpu, unsigned int inst);
275 extern ulong kvmppc_alignment_dar(struct kvm_vcpu *vcpu, unsigned int inst);
276 extern int kvmppc_h_pr(struct kvm_vcpu *vcpu, unsigned long cmd);
280 extern void kvmppc_copy_to_svcpu(struct kvm_vcpu *vcpu);
281 extern void kvmppc_copy_from_svcpu(struct kvm_vcpu *vcpu);
284 void kvmppc_set_msr_hv(struct kvm_vcpu *vcpu, u64 msr);
285 void kvmppc_inject_interrupt_hv(struct kvm_vcpu *vcpu, int vec, u64 srr1_flags);
288 void kvmppc_save_tm_pr(struct kvm_vcpu *vcpu);
289 void kvmppc_restore_tm_pr(struct kvm_vcpu *vcpu);
290 void kvmppc_save_tm_sprs(struct kvm_vcpu *vcpu);
291 void kvmppc_restore_tm_sprs(struct kvm_vcpu *vcpu);
293 static inline void kvmppc_save_tm_pr(struct kvm_vcpu *vcpu) {} in kvmppc_save_tm_pr()
294 static inline void kvmppc_restore_tm_pr(struct kvm_vcpu *vcpu) {} in kvmppc_restore_tm_pr()
295 static inline void kvmppc_save_tm_sprs(struct kvm_vcpu *vcpu) {} in kvmppc_save_tm_sprs()
296 static inline void kvmppc_restore_tm_sprs(struct kvm_vcpu *vcpu) {} in kvmppc_restore_tm_sprs()
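Lines 288-296 show the usual optional-feature pattern: real kvmppc_save/restore_tm_pr() and _tm_sprs() implementations when transactional-memory support is built in, and empty static inline stubs otherwise, so callers need no #ifdef of their own. A condensed sketch of that shape; CONFIG_PPC_TRANSACTIONAL_MEM is the obvious guard, but the config symbol itself is inferred rather than visible in this listing.

#ifdef CONFIG_PPC_TRANSACTIONAL_MEM
void kvmppc_save_tm_pr(struct kvm_vcpu *vcpu);
void kvmppc_restore_tm_pr(struct kvm_vcpu *vcpu);
#else
/* No TM support built in: callers still compile, the calls simply vanish. */
static inline void kvmppc_save_tm_pr(struct kvm_vcpu *vcpu) {}
static inline void kvmppc_restore_tm_pr(struct kvm_vcpu *vcpu) {}
#endif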
303 long kvmhv_set_partition_table(struct kvm_vcpu *vcpu);
304 long kvmhv_copy_tofrom_guest_nested(struct kvm_vcpu *vcpu);
308 long kvmhv_enter_nested_guest(struct kvm_vcpu *vcpu);
309 long kvmhv_do_nested_tlbie(struct kvm_vcpu *vcpu);
310 long do_h_rpt_invalidate_pat(struct kvm_vcpu *vcpu, unsigned long lpid,
313 int kvmhv_run_single_vcpu(struct kvm_vcpu *vcpu,
315 void kvmhv_save_hv_regs(struct kvm_vcpu *vcpu, struct hv_guest_state *hr);
316 void kvmhv_restore_hv_return_state(struct kvm_vcpu *vcpu,
318 long int kvmhv_nested_page_fault(struct kvm_vcpu *vcpu);
320 void kvmppc_giveup_fac(struct kvm_vcpu *vcpu, ulong fac);
351 int __kvmhv_nestedv2_reload_ptregs(struct kvm_vcpu *vcpu, struct pt_regs *regs);
352 int __kvmhv_nestedv2_mark_dirty_ptregs(struct kvm_vcpu *vcpu, struct pt_regs *regs);
353 int __kvmhv_nestedv2_mark_dirty(struct kvm_vcpu *vcpu, u16 iden);
354 int __kvmhv_nestedv2_cached_reload(struct kvm_vcpu *vcpu, u16 iden);
356 static inline int kvmhv_nestedv2_reload_ptregs(struct kvm_vcpu *vcpu, in kvmhv_nestedv2_reload_ptregs()
363 static inline int kvmhv_nestedv2_mark_dirty_ptregs(struct kvm_vcpu *vcpu, in kvmhv_nestedv2_mark_dirty_ptregs()
371 static inline int kvmhv_nestedv2_mark_dirty(struct kvm_vcpu *vcpu, u16 iden) in kvmhv_nestedv2_mark_dirty()
378 static inline int kvmhv_nestedv2_cached_reload(struct kvm_vcpu *vcpu, u16 iden) in kvmhv_nestedv2_cached_reload()
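The __kvmhv_nestedv2_*() workers at lines 351-354 are wrapped by the static inlines at lines 356-378, which do nothing unless the VM is actually running under the nested-v2 (guest state buffer) API. A sketch of the likely shape of one wrapper; the kvmhv_is_nestedv2() predicate is an assumption borrowed from the surrounding KVM HV code, not something shown in this listing.

static inline int kvmhv_nestedv2_mark_dirty(struct kvm_vcpu *vcpu, u16 iden)
{
        /* Only nested-v2 guests track dirty registers in a guest state buffer. */
        if (kvmhv_is_nestedv2())
                return __kvmhv_nestedv2_mark_dirty(vcpu, iden);

        return 0;
}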
387 static inline struct kvmppc_vcpu_book3s *to_book3s(struct kvm_vcpu *vcpu) in to_book3s()
401 static inline void kvmppc_set_gpr(struct kvm_vcpu *vcpu, int num, ulong val) in kvmppc_set_gpr()
407 static inline ulong kvmppc_get_gpr(struct kvm_vcpu *vcpu, int num) in kvmppc_get_gpr()
413 static inline void kvmppc_set_cr(struct kvm_vcpu *vcpu, u32 val) in kvmppc_set_cr()
419 static inline u32 kvmppc_get_cr(struct kvm_vcpu *vcpu) in kvmppc_get_cr()
425 static inline void kvmppc_set_xer(struct kvm_vcpu *vcpu, ulong val) in kvmppc_set_xer()
431 static inline ulong kvmppc_get_xer(struct kvm_vcpu *vcpu) in kvmppc_get_xer()
437 static inline void kvmppc_set_ctr(struct kvm_vcpu *vcpu, ulong val) in kvmppc_set_ctr()
443 static inline ulong kvmppc_get_ctr(struct kvm_vcpu *vcpu) in kvmppc_get_ctr()
449 static inline void kvmppc_set_lr(struct kvm_vcpu *vcpu, ulong val) in kvmppc_set_lr()
455 static inline ulong kvmppc_get_lr(struct kvm_vcpu *vcpu) in kvmppc_get_lr()
461 static inline void kvmppc_set_pc(struct kvm_vcpu *vcpu, ulong val) in kvmppc_set_pc()
467 static inline ulong kvmppc_get_pc(struct kvm_vcpu *vcpu) in kvmppc_get_pc()
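The accessors from line 401 onward are how emulation code is expected to touch guest register state, since the backing storage may live in vcpu->arch, in a shadow area, or in a nested guest state buffer. Here is a small sketch of a typical emulation step built only on the getters and setters listed here; the "add" semantics and the fixed 4-byte step are illustrative.

#include <linux/kvm_host.h>
#include <asm/kvm_book3s.h>

/* Hypothetical: emulate "add rD,rA,rB" and step the guest past the instruction. */
static void emulate_add(struct kvm_vcpu *vcpu, int rd, int ra, int rb)
{
        ulong sum = kvmppc_get_gpr(vcpu, ra) + kvmppc_get_gpr(vcpu, rb);

        kvmppc_set_gpr(vcpu, rd, sum);
        kvmppc_set_pc(vcpu, kvmppc_get_pc(vcpu) + 4);   /* advance to the next instruction */
}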
473 static inline u64 kvmppc_get_msr(struct kvm_vcpu *vcpu);
474 static inline bool kvmppc_need_byteswap(struct kvm_vcpu *vcpu) in kvmppc_need_byteswap()
479 static inline ulong kvmppc_get_fault_dar(struct kvm_vcpu *vcpu) in kvmppc_get_fault_dar()
484 static inline u64 kvmppc_get_fpr(struct kvm_vcpu *vcpu, int i) in kvmppc_get_fpr()
490 static inline void kvmppc_set_fpr(struct kvm_vcpu *vcpu, int i, u64 val) in kvmppc_set_fpr()
496 static inline u64 kvmppc_get_fpscr(struct kvm_vcpu *vcpu) in kvmppc_get_fpscr()
502 static inline void kvmppc_set_fpscr(struct kvm_vcpu *vcpu, u64 val) in kvmppc_set_fpscr()
509 static inline u64 kvmppc_get_vsx_fpr(struct kvm_vcpu *vcpu, int i, int j) in kvmppc_get_vsx_fpr()
515 static inline void kvmppc_set_vsx_fpr(struct kvm_vcpu *vcpu, int i, int j, in kvmppc_set_vsx_fpr()
523 static inline void kvmppc_get_vsx_vr(struct kvm_vcpu *vcpu, int i, vector128 *v) in kvmppc_get_vsx_vr()
529 static inline void kvmppc_set_vsx_vr(struct kvm_vcpu *vcpu, int i, in kvmppc_set_vsx_vr()
536 static inline u32 kvmppc_get_vscr(struct kvm_vcpu *vcpu) in kvmppc_get_vscr()
542 static inline void kvmppc_set_vscr(struct kvm_vcpu *vcpu, u32 val) in kvmppc_set_vscr()
550 static inline void kvmppc_set_##reg(struct kvm_vcpu *vcpu, u##size val) \
558 static inline u##size kvmppc_get_##reg(struct kvm_vcpu *vcpu) \
578 static inline void kvmppc_set_##reg(struct kvm_vcpu *vcpu, u##size val) \
585 static inline u##size kvmppc_get_##reg(struct kvm_vcpu *vcpu) \
602 static inline u64 kvmppc_get_tb_offset(struct kvm_vcpu *vcpu) in kvmppc_get_tb_offset()
607 static inline u64 kvmppc_get_dec_expires(struct kvm_vcpu *vcpu) in kvmppc_get_dec_expires()
613 static inline void kvmppc_set_dec_expires(struct kvm_vcpu *vcpu, u64 val) in kvmppc_set_dec_expires()
620 static inline u64 kvmppc_dec_expires_host_tb(struct kvm_vcpu *vcpu) in kvmppc_dec_expires_host_tb()
631 static inline bool kvmppc_supports_magic_page(struct kvm_vcpu *vcpu) in kvmppc_supports_magic_page()
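Lines 550/558 and 578/585 above are fragments of token-pasting macros that stamp out whole families of kvmppc_get_<reg>()/kvmppc_set_<reg>() accessors rather than hand-writing each pair. A minimal sketch of the technique; the macro name, the "tar" example and the bare vcpu->arch.<reg> backing store are illustrative, and the real macros in the header fold in extra work such as the nested-v2 dirty tracking shown earlier.

/* Illustrative only: stamp out trivial vcpu->arch.<reg> accessors. */
#define KVMPPC_VCPU_ACCESSOR(reg, size)                                  \
static inline void kvmppc_set_##reg(struct kvm_vcpu *vcpu, u##size val) \
{                                                                        \
        vcpu->arch.reg = val;                                            \
}                                                                        \
                                                                         \
static inline u##size kvmppc_get_##reg(struct kvm_vcpu *vcpu)           \
{                                                                        \
        return vcpu->arch.reg;                                           \
}

/* Expands to kvmppc_set_tar()/kvmppc_get_tar() backed by vcpu->arch.tar. */
KVMPPC_VCPU_ACCESSOR(tar, 64)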
637 extern int kvmppc_h_logical_ci_load(struct kvm_vcpu *vcpu);
638 extern int kvmppc_h_logical_ci_store(struct kvm_vcpu *vcpu);