/arch/arm/mach-omap2/

vc.c
  118: struct omap_vc_channel *vc = voltdm->vc;  (in omap_vc_config_channel(), local)
  128: vc->cfg_channel << vc->cfg_channel_sa_shift,  (in omap_vc_config_channel())
  139: struct omap_vc_channel *vc = voltdm->vc;  (in omap_vc_pre_scale(), local)
  194: struct omap_vc_channel *vc = voltdm->vc;  (in omap_vc_bypass_scale(), local)
  207: (vc->volt_reg_addr << vc->common->regaddr_shift) |  (in omap_vc_bypass_scale())
  208: (vc->i2c_slave_addr << vc->common->slaveaddr_shift);  (in omap_vc_bypass_scale())
  334: if (vc.vd)  (in omap3_vc_init_pmic_signaling())
  744: struct omap_vc_channel *vc = voltdm->vc;  (in omap_vc_i2c_init(), local)
  807: struct omap_vc_channel *vc = voltdm->vc;  (in omap_vc_init_channel(), local)
  835: vc->i2c_slave_addr << __ffs(vc->smps_sa_mask),  (in omap_vc_init_channel())
  [all …]
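
Note on the omap_vc_bypass_scale() hits above: lines 207-208 build the voltage-controller bypass command by shifting the PMIC register address and I2C slave address into one word. Below is a minimal, self-contained sketch of that shift-and-OR packing; the field positions and the VALID bit are hypothetical stand-ins, not the real struct omap_vc_common layout.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical field layout; the real shifts live in struct omap_vc_common. */
    #define SLAVEADDR_SHIFT  0
    #define REGADDR_SHIFT    8
    #define DATA_SHIFT      16
    #define VALID_BIT       (1u << 24)

    static uint32_t vc_bypass_value(uint8_t i2c_slave_addr, uint8_t volt_reg_addr,
                                    uint8_t target_vsel)
    {
        /* Mirror the shift-and-OR composition seen at vc.c lines 207-208. */
        return (target_vsel << DATA_SHIFT) |
               (volt_reg_addr << REGADDR_SHIFT) |
               (i2c_slave_addr << SLAVEADDR_SHIFT) |
               VALID_BIT;
    }

    int main(void)
    {
        printf("bypass cmd: 0x%08x\n", vc_bypass_value(0x12, 0x00, 0x3a));
        return 0;
    }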
|
voltagedomains54xx_data.c
  39: .vc = &omap4_vc_mpu,
  50: .vc = &omap4_vc_iva,
  61: .vc = &omap4_vc_core,
|
voltagedomains44xx_data.c
  53: .vc = &omap4_vc_mpu,
  64: .vc = &omap4_vc_iva,
  75: .vc = &omap4_vc_core,
|
voltagedomains3xxx_data.c
  54: .vc = &omap3_vc_mpu,
  65: .vc = &omap3_vc_core,
|
voltage.h
  66: struct omap_vc_channel *vc;  (member)
|
voltage.c
  261: if (voltdm->vc) {  (in omap_voltage_late_init())
|
/arch/mips/math-emu/

ieee754int.h
  61: vc = IEEE754_CLASS_INF; \
  63: vc = IEEE754_CLASS_QNAN; \
  65: vc = IEEE754_CLASS_SNAN; \
  69: vc = IEEE754_CLASS_DNORM; \
  71: vc = IEEE754_CLASS_ZERO; \
  75: vc = IEEE754_CLASS_NORM; \
  99: vc = IEEE754_CLASS_INF; \
  101: vc = IEEE754_CLASS_QNAN; \
  103: vc = IEEE754_CLASS_SNAN; \
  109: vc = IEEE754_CLASS_ZERO; \
  [all …]
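
The ieee754int.h hits come from the EXPLODE macros, which split a floating-point value into sign, exponent, and mantissa and set vc to one of the IEEE754_CLASS_* values. Below is a hedged, standalone equivalent for single precision, based on the standard IEEE 754 binary32 layout rather than the kernel macro itself.

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    enum ieee754_class { CLASS_NORM, CLASS_DNORM, CLASS_ZERO, CLASS_INF, CLASS_SNAN, CLASS_QNAN };

    static enum ieee754_class classify32(float f)
    {
        uint32_t bits;
        memcpy(&bits, &f, sizeof(bits));           /* reinterpret without UB */

        uint32_t exp  = (bits >> 23) & 0xff;       /* 8-bit biased exponent */
        uint32_t frac = bits & 0x7fffff;           /* 23-bit mantissa */

        if (exp == 0xff) {
            if (frac == 0)
                return CLASS_INF;
            /* MSB of the mantissa distinguishes quiet from signalling NaN. */
            return (frac & 0x400000) ? CLASS_QNAN : CLASS_SNAN;
        }
        if (exp == 0)
            return frac ? CLASS_DNORM : CLASS_ZERO;
        return CLASS_NORM;
    }

    int main(void)
    {
        printf("%d %d\n", classify32(1.0f), classify32(0.0f));  /* NORM, ZERO */
        return 0;
    }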
|
/arch/powerpc/math-emu/

math_efp.c
  210: pr_debug("vc: %08x %08x\n", vc.wp[0], vc.wp[1]);  (in do_spe_mathemu())
  278: vc.wp[1] = 0;  (in do_spe_mathemu())
  302: vc.wp[1] = 0;  (in do_spe_mathemu())
  313: vc.wp[1] = 0;  (in do_spe_mathemu())
  409: vc.wp[1] = 0;  (in do_spe_mathemu())
  433: vc.dp[0] = 0;  (in do_spe_mathemu())
  444: vc.wp[1] = 0;  (in do_spe_mathemu())
  455: vc.wp[1] = 0;  (in do_spe_mathemu())
  569: vc.wp[0] = 0;  (in do_spe_mathemu())
  577: vc.wp[1] = 0;  (in do_spe_mathemu())
  [all …]
|
/arch/powerpc/kvm/

book3s_hv.c
  312: vc->stolen_tb += tb - vc->preempt_tb;  (in kvmppc_core_end_stolen())
  341: if (vc->runner == vcpu && vc->vcore_state >= VCORE_SLEEPING)  (in kvmppc_core_vcpu_load_hv())
  3452: if (vc->num_threads < threads_per_vcore(vc->kvm)) {  (in kvmppc_vcore_preempt())
  3503: cip->vc[0] = vc;  (in init_core_info())
  3548: if (one_vm_per_core && vc->kvm != cip->vc[0]->kvm)  (in can_dynamic_split())
  3561: cip->vc[sub] = vc;  (in can_dynamic_split())
  3638: vc = cip->vc[sub];  (in recheck_signals_and_mmu())
  3703: if (vc->n_runnable > 0 && vc->runner == NULL) {  (in post_guest_process())
  3886: split_info.vc[sub] = core_info.vc[sub];  (in kvmppc_run_core())
  5425: if (!vc)  (in kvmppc_update_lpcr())
  [all …]
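
Line 312 shows the virtual-core stolen-time pattern: record a timestamp when the vcore is preempted, then accumulate the elapsed delta when it resumes. The sketch below illustrates that accumulate-on-resume idea with a hypothetical struct and a plain monotonic clock standing in for the PowerPC timebase.

    #include <stdint.h>
    #include <time.h>
    #include <stdio.h>

    /* Hypothetical stand-in for the preempt_tb/stolen_tb pair in kvmppc_vcore. */
    struct vcore_stolen {
        uint64_t preempt_ns;   /* timestamp taken when the vcore was preempted */
        uint64_t stolen_ns;    /* accumulated time the vcore was not running */
    };

    static uint64_t now_ns(void)
    {
        struct timespec ts;
        clock_gettime(CLOCK_MONOTONIC, &ts);
        return (uint64_t)ts.tv_sec * 1000000000ull + ts.tv_nsec;
    }

    static void vcore_start_stolen(struct vcore_stolen *vc)
    {
        vc->preempt_ns = now_ns();                   /* analogous to recording vc->preempt_tb */
    }

    static void vcore_end_stolen(struct vcore_stolen *vc)
    {
        vc->stolen_ns += now_ns() - vc->preempt_ns;  /* vc->stolen_tb += tb - vc->preempt_tb */
    }

    int main(void)
    {
        struct vcore_stolen vc = { 0, 0 };
        vcore_start_stolen(&vc);
        vcore_end_stolen(&vc);
        printf("stolen: %llu ns\n", (unsigned long long)vc.stolen_ns);
        return 0;
    }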
|
book3s_hv_builtin.c
  121: set_bit(ptid, &vc->conferring_threads);  (in kvmppc_rm_h_confer())
  123: threads_running = VCORE_ENTRY_MAP(vc);  (in kvmppc_rm_h_confer())
  124: threads_ceded = vc->napping_threads;  (in kvmppc_rm_h_confer())
  131: clear_bit(ptid, &vc->conferring_threads);  (in kvmppc_rm_h_confer())
  241: int cpu = vc->pcpu;  (in kvmhv_interrupt_vcore())
  261: ee = vc->entry_exit_map;  (in kvmhv_commence_exit())
  284: vc = sip->vc[i];  (in kvmhv_commence_exit())
  285: if (!vc)  (in kvmhv_commence_exit())
  288: ee = vc->entry_exit_map;  (in kvmhv_commence_exit())
  292: } while (cmpxchg(&vc->entry_exit_map, ee,  (in kvmhv_commence_exit())
  [all …]
|
book3s_hv_ras.c
  143: struct kvmppc_vcore *vc = vcpu->arch.vcore;  (in kvmppc_p9_realmode_hmi_handler(), local)
  156: if (vc->tb_offset_applied) {  (in kvmppc_p9_realmode_hmi_handler())
  157: u64 new_tb = mftb() - vc->tb_offset_applied;  (in kvmppc_p9_realmode_hmi_handler())
  163: vc->tb_offset_applied = 0;  (in kvmppc_p9_realmode_hmi_handler())
  178: u64 new_tb = mftb() + vc->tb_offset;  (in kvmppc_p9_realmode_hmi_handler())
  184: vc->tb_offset_applied = kvmppc_get_tb_offset(vcpu);  (in kvmppc_p9_realmode_hmi_handler())
|
book3s_hv_p9_entry.c
  224: u64 tb = mftb() - vc->tb_offset_applied;  (in accumulate_time())
  596: if (vc->tb_offset) {  (in kvmhv_vcpu_entry_p9())
  597: u64 new_tb = *tb + vc->tb_offset;  (in kvmhv_vcpu_entry_p9())
  604: vc->tb_offset_applied = vc->tb_offset;  (in kvmhv_vcpu_entry_p9())
  607: mtspr(SPRN_VTB, vc->vtb);  (in kvmhv_vcpu_entry_p9())
  611: if (vc->pcr)  (in kvmhv_vcpu_entry_p9())
  612: mtspr(SPRN_PCR, vc->pcr | PCR_MASK);  (in kvmhv_vcpu_entry_p9())
  843: vc->vtb = mfspr(SPRN_VTB);  (in kvmhv_vcpu_entry_p9())
  851: if (vc->tb_offset_applied) {  (in kvmhv_vcpu_entry_p9())
  859: vc->tb_offset_applied = 0;  (in kvmhv_vcpu_entry_p9())
  [all …]
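
The kvmhv_vcpu_entry_p9() hits sketch the guest timebase-offset protocol: add vc->tb_offset on entry, remember it in vc->tb_offset_applied, and undo and clear it on exit so the host timebase is restored. A compact illustration of that apply/undo pairing follows, using an in-memory counter instead of mftb()/mtspr().

    #include <stdint.h>
    #include <assert.h>

    /* Hypothetical stand-in for the vcore fields used in kvmhv_vcpu_entry_p9(). */
    struct vcore_tb {
        uint64_t tb_offset;          /* guest-requested timebase offset */
        uint64_t tb_offset_applied;  /* offset currently applied to the timebase */
    };

    static void enter_guest(struct vcore_tb *vc, uint64_t *tb)
    {
        if (vc->tb_offset) {
            *tb += vc->tb_offset;               /* new_tb = *tb + vc->tb_offset */
            vc->tb_offset_applied = vc->tb_offset;
        }
    }

    static void exit_guest(struct vcore_tb *vc, uint64_t *tb)
    {
        if (vc->tb_offset_applied) {
            *tb -= vc->tb_offset_applied;       /* undo the entry-time adjustment */
            vc->tb_offset_applied = 0;
        }
    }

    int main(void)
    {
        struct vcore_tb vc = { .tb_offset = 1000, .tb_offset_applied = 0 };
        uint64_t tb = 5000;
        enter_guest(&vc, &tb);
        exit_guest(&vc, &tb);
        assert(tb == 5000 && vc.tb_offset_applied == 0);
        return 0;
    }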
|
book3s_hv_nested.c
  32: struct kvmppc_vcore *vc = vcpu->arch.vcore;  (in kvmhv_save_hv_regs(), local)
  34: hr->pcr = vc->pcr | PCR_MASK;  (in kvmhv_save_hv_regs())
  37: hr->tb_offset = vc->tb_offset;  (in kvmhv_save_hv_regs())
  44: hr->vtb = vc->vtb;  (in kvmhv_save_hv_regs())
  112: hr->vtb = vc->vtb;  (in save_hv_return_state())
  145: vc->pcr = hr->pcr | PCR_MASK;  (in restore_hv_regs())
  154: vc->vtb = hr->vtb;  (in restore_hv_regs())
  184: vc->vtb = hr->vtb;  (in kvmhv_restore_hv_return_state())
  366: vc->tb_offset += l2_hv.tb_offset;  (in kvmhv_enter_nested_guest())
  393: delta_vtb = vc->vtb - l2_hv.vtb;  (in kvmhv_enter_nested_guest())
  [all …]
|
trace_hv.h
  407: TP_PROTO(struct kvmppc_vcore *vc, int where),
  409: TP_ARGS(vc, where),
  419: __entry->runner_vcpu = vc->runner->vcpu_id;
  420: __entry->n_runnable = vc->n_runnable;
|
/arch/alpha/math-emu/

math.c
  105: unsigned long res, va, vb, vc, swcr, fpcr;  (in alpha_fp_emul(), local)
  174: vc = 0x4000000000000000UL;  (in alpha_fp_emul())
  185: case FOP_FNC_CMPxEQ: if (res) vc = 0; break;  (in alpha_fp_emul())
  237: vc = 0;  (in alpha_fp_emul())
  239: FP_TO_INT_ROUND_D(vc, DB, 64, 2);  (in alpha_fp_emul())
  271: FP_PACK_SP(&vc, SR);  (in alpha_fp_emul())
  273: vc = 0;  (in alpha_fp_emul())
  274: alpha_write_fp_reg_s(fc, vc);  (in alpha_fp_emul())
  278: FP_PACK_DP(&vc, DR);  (in alpha_fp_emul())
  280: vc = 0;  (in alpha_fp_emul())
  [all …]
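
In alpha_fp_emul(), vc is preloaded with 0x4000000000000000UL, the IEEE 754 double bit pattern for 2.0, which Alpha uses as the "true" result of a floating-point compare, and cleared to 0 when the compare is false. A quick standalone check of that bit pattern:

    #include <stdint.h>
    #include <string.h>
    #include <assert.h>

    int main(void)
    {
        uint64_t true_bits = 0x4000000000000000ULL;
        double d;
        memcpy(&d, &true_bits, sizeof(d));  /* reinterpret the bit pattern */
        assert(d == 2.0);                   /* biased exponent 0x400, zero mantissa -> 2.0 */
        return 0;
    }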
|
/arch/x86/include/asm/vdso/

gettimeofday.h
  264: static inline bool arch_vdso_clocksource_ok(const struct vdso_clock *vc)  (in arch_vdso_clocksource_ok(), argument)
  303: static __always_inline u64 vdso_calc_ns(const struct vdso_clock *vc, u64 cycles, u64 base)  (in vdso_calc_ns(), argument)
  305: u64 delta = cycles - vc->cycle_last;  (in vdso_calc_ns())
  317: if (unlikely(delta > vc->max_cycles)) {  (in vdso_calc_ns())
  324: return base >> vc->shift;  (in vdso_calc_ns())
  327: return mul_u64_u32_add_u64_shr(delta & S64_MAX, vc->mult, base, vc->shift);  (in vdso_calc_ns())
  330: return ((delta * vc->mult) + base) >> vc->shift;  (in vdso_calc_ns())
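
The vdso_calc_ns() hits spell out the vDSO's clocksource-to-nanoseconds conversion: take the cycle delta since cycle_last, multiply by mult, add the pre-scaled base, and shift right by shift. Below is a hedged, self-contained version of that arithmetic (omitting the overflow-checked mul_u64_u32_add_u64_shr() fallback seen on line 327).

    #include <stdint.h>
    #include <stdio.h>

    /* Minimal stand-in for the vdso_clock fields used by vdso_calc_ns(). */
    struct clk_scale {
        uint64_t cycle_last;  /* counter value at the last timekeeping update */
        uint32_t mult;        /* fixed-point multiplier */
        uint32_t shift;       /* fixed-point shift */
    };

    static uint64_t calc_ns(const struct clk_scale *vc, uint64_t cycles, uint64_t base)
    {
        uint64_t delta = cycles - vc->cycle_last;
        /* ns = (delta * mult + base) >> shift, matching line 330 above. */
        return ((delta * vc->mult) + base) >> vc->shift;
    }

    int main(void)
    {
        /* Example: a counter scaled with mult = 1 << 24 and shift = 24 (1 cycle = 1 ns). */
        struct clk_scale vc = { .cycle_last = 1000, .mult = 1u << 24, .shift = 24 };
        printf("%llu ns\n", (unsigned long long)calc_ns(&vc, 1500, 0));  /* prints 500 */
        return 0;
    }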
|
/arch/arm64/include/asm/vdso/

vsyscall.h
  16: void __arch_update_vdso_clock(struct vdso_clock *vc)  (in __arch_update_vdso_clock(), argument)
  18: vc->mask = VDSO_PRECISION_MASK;  (in __arch_update_vdso_clock())
|
compat_gettimeofday.h
  158: static inline bool vdso_clocksource_ok(const struct vdso_clock *vc)  (in vdso_clocksource_ok(), argument)
  160: return vc->clock_mode == VDSO_CLOCKMODE_ARCHTIMER;  (in vdso_clocksource_ok())
|
/arch/powerpc/kernel/

vecemu.c
  266: unsigned int va, vb, vc, vd;  (in emulate_altivec(), local)
  278: vc = (word >> 6) & 0x1f;  (in emulate_altivec())
  283: switch (vc) {  (in emulate_altivec())
  341: vmaddfp(&vrs[vd], &vrs[va], &vrs[vb], &vrs[vc]);  (in emulate_altivec())
  344: vnmsubfp(&vrs[vd], &vrs[va], &vrs[vb], &vrs[vc]);  (in emulate_altivec())
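
In emulate_altivec(), vc is the third source register of a VA-form AltiVec instruction, pulled from bits 6..10 of the instruction word (line 278). The sketch below decodes all the register fields, assuming the standard VA-form layout (primary opcode, VRT, VRA, VRB, VRC, 6-bit extended opcode); the example word is built from arbitrary field values, not a specific instruction.

    #include <stdint.h>
    #include <stdio.h>

    struct va_form {
        unsigned vd, va, vb, vc, xo;
    };

    /* Decode a 32-bit VA-form instruction: each register field is 5 bits wide. */
    static struct va_form decode_va_form(uint32_t word)
    {
        struct va_form f = {
            .vd = (word >> 21) & 0x1f,   /* VRT: destination vector register */
            .va = (word >> 16) & 0x1f,   /* VRA: first source */
            .vb = (word >> 11) & 0x1f,   /* VRB: second source */
            .vc = (word >>  6) & 0x1f,   /* VRC: third source, the 'vc' above */
            .xo = word & 0x3f,           /* 6-bit extended opcode */
        };
        return f;
    }

    int main(void)
    {
        /* Build a word with vd=1, va=2, vb=3, vc=4 and an arbitrary xo, then decode it back. */
        uint32_t word = (4u << 26) | (1u << 21) | (2u << 16) | (3u << 11) | (4u << 6) | 0x2e;
        struct va_form f = decode_va_form(word);
        printf("vd=%u va=%u vb=%u vc=%u xo=%#x\n", f.vd, f.va, f.vb, f.vc, f.xo);
        return 0;
    }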
|
/arch/x86/coco/sev/

Makefile
  3: obj-y += core.o sev-nmi.o vc-handle.o
|
/arch/powerpc/include/asm/

kvm_host.h
  340: #define VCORE_ENTRY_MAP(vc) ((vc)->entry_exit_map & 0xff)  (macro argument)
  341: #define VCORE_EXIT_MAP(vc) ((vc)->entry_exit_map >> 8)  (macro argument)
  342: #define VCORE_IS_EXITING(vc) (VCORE_EXIT_MAP(vc) != 0)  (macro argument)
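
The kvm_host.h macros pack two per-thread bitmaps into one entry_exit_map word: the low 8 bits mark threads that have entered the guest, the bits above mark threads on their way out. Here is a standalone sketch of the same packing with hypothetical helper functions.

    #include <stdint.h>
    #include <assert.h>

    /* entry_exit_map layout (as in the macros above):
     *   bits 0-7 : one bit per thread that has entered the guest
     *   bits 8+  : one bit per thread that is exiting
     */
    #define ENTRY_MAP(m)   ((m) & 0xff)
    #define EXIT_MAP(m)    ((m) >> 8)
    #define IS_EXITING(m)  (EXIT_MAP(m) != 0)

    static uint16_t mark_entered(uint16_t map, unsigned int thread)
    {
        return map | (1u << thread);
    }

    static uint16_t mark_exiting(uint16_t map, unsigned int thread)
    {
        return map | (1u << (8 + thread));
    }

    int main(void)
    {
        uint16_t map = 0;
        map = mark_entered(map, 0);
        map = mark_entered(map, 3);
        assert(ENTRY_MAP(map) == 0x09 && !IS_EXITING(map));
        map = mark_exiting(map, 3);
        assert(IS_EXITING(map));
        return 0;
    }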
|
kvm_book3s_asm.h
  73: struct kvmppc_vcore *vc[MAX_SUBCORES];
|
/arch/x86/boot/compressed/

Makefile
  50: CFLAGS_sev-handle-vc.o += -I$(objtree)/arch/x86/lib/
  99: vmlinux-objs-$(CONFIG_AMD_MEM_ENCRYPT) += $(obj)/sev.o $(obj)/sev-handle-vc.o
|
/arch/powerpc/include/asm/vdso/

gettimeofday.h
  102: static inline bool vdso_clocksource_ok(const struct vdso_clock *vc)  (in vdso_clocksource_ok(), argument)
|
/arch/arm/boot/dts/broadcom/

bcm2711-rpi.dtsi
  27: pinctrl-names = "i2c0", "i2c0-vc";
|