Lines Matching refs:insn
64 #define INSN_IS_16BIT(insn) (((insn) & INSN_16BIT_MASK) != INSN_16BIT_MASK) argument
66 #define INSN_LEN(insn) (INSN_IS_16BIT(insn) ? 2 : 4) argument
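INSN_IS_16BIT and INSN_LEN encode the standard RISC-V length rule: a 32-bit instruction has both of its low two bits set, so anything else is a 16-bit compressed (RVC) encoding. A minimal standalone sketch of the same check, assuming INSN_16BIT_MASK is 0x3 (the two length bits):

#include <stdio.h>

/* Illustrative stand-ins for the macros listed above; INSN_16BIT_MASK is
 * assumed to be 0x3, the length bits defined by the RISC-V base encoding. */
#define INSN_16BIT_MASK		0x3
#define INSN_IS_16BIT(insn)	(((insn) & INSN_16BIT_MASK) != INSN_16BIT_MASK)
#define INSN_LEN(insn)		(INSN_IS_16BIT(insn) ? 2 : 4)

int main(void)
{
	unsigned long lw   = 0x0005a503;	/* lw a0,0(a1): low bits 11 -> 32-bit */
	unsigned long c_lw = 0x00004108;	/* c.lw a0,0(a0): low bits 00 -> 16-bit */

	printf("lw len=%d, c.lw len=%d\n", INSN_LEN(lw), INSN_LEN(c_lw));	/* 4, 2 */
	return 0;
}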
96 #define RVC_RS1S(insn) (8 + RV_X(insn, SH_RD, 3)) argument
97 #define RVC_RS2S(insn) (8 + RV_X(insn, SH_RS2C, 3)) argument
98 #define RVC_RS2(insn) RV_X(insn, SH_RS2C, 5) argument
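The RVC_* helpers recover register numbers from compressed encodings: the 3-bit rs1'/rs2' fields can only name x8-x15, hence the "8 +" bias, while RVC_RS2 reads the full 5-bit rs2 field used by the stack-pointer-relative forms. A sketch using the usual RV_X bit-extraction helper; the SH_RD (7) and SH_RS2C (2) field positions are stated here as assumptions taken from the compressed CL/CS formats:

#include <stdio.h>

/* Illustrative re-definitions: RV_X(x, s, n) extracts n bits of x at bit s. */
#define RV_X(x, s, n)	(((x) >> (s)) & ((1 << (n)) - 1))
#define SH_RD		7	/* rd'/rs1' position (assumed) */
#define SH_RS2C		2	/* rs2' position (assumed) */

#define RVC_RS1S(insn)	(8 + RV_X(insn, SH_RD, 3))	/* 3-bit field -> x8..x15 */
#define RVC_RS2S(insn)	(8 + RV_X(insn, SH_RS2C, 3))	/* 3-bit field -> x8..x15 */
#define RVC_RS2(insn)	RV_X(insn, SH_RS2C, 5)		/* full 5-bit rs2 -> x0..x31 */

int main(void)
{
	unsigned long c_sw = 0xc10c;	/* c.sw a1,0(a0) */

	printf("rs1' = x%lu, rs2' = x%lu\n",
	       RVC_RS1S(c_sw), RVC_RS2S(c_sw));	/* x10 (a0), x11 (a1) */
	return 0;
}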
106 #define REG_OFFSET(insn, pos) \ argument
107 (SHIFT_RIGHT((insn), (pos) - LOG_REGBYTES) & REG_MASK)
109 #define REG_PTR(insn, pos, regs) \ argument
110 ((ulong *)((ulong)(regs) + REG_OFFSET(insn, pos)))
112 #define GET_RM(insn) (((insn) >> 12) & 7) argument
114 #define GET_RS1(insn, regs) (*REG_PTR(insn, SH_RS1, regs)) argument
115 #define GET_RS2(insn, regs) (*REG_PTR(insn, SH_RS2, regs)) argument
116 #define GET_RS1S(insn, regs) (*REG_PTR(RVC_RS1S(insn), 0, regs)) argument
117 #define GET_RS2S(insn, regs) (*REG_PTR(RVC_RS2S(insn), 0, regs)) argument
118 #define GET_RS2C(insn, regs) (*REG_PTR(insn, SH_RS2C, regs)) argument
120 #define SET_RD(insn, regs, val) (*REG_PTR(insn, SH_RD, regs) = (val)) argument
121 #define IMM_I(insn) ((s32)(insn) >> 20) argument
122 #define IMM_S(insn) (((s32)(insn) >> 25 << 5) | \ argument
123 (s32)(((insn) >> 7) & 0x1f))
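REG_OFFSET and REG_PTR turn an instruction's register field directly into a pointer inside the saved guest context: the register number is scaled by the word size (LOG_REGBYTES) and added to the base of the GPR array, so GET_RS1/SET_RD read and write vCPU registers without a lookup table. GET_RS1S/GET_RS2S (lines 116-117) pass an already-decoded register number with pos = 0, which is why SHIFT_RIGHT must also cope with a negative shift count by shifting left. IMM_I and IMM_S rely on an arithmetic right shift of a signed 32-bit value to sign-extend the I-type and S-type immediates. A self-contained sketch of the same pointer arithmetic, with a simplified context struct standing in for the kernel's kvm_cpu_context (REG_MASK here is a stand-in with the same value as the listed definition for 8-byte registers):

#include <stdio.h>

typedef unsigned long ulong;
typedef int s32;

/* Simplified stand-in for struct kvm_cpu_context: 32 GPRs, x0..x31 first. */
struct ctx { ulong gpr[32]; };

#define LOG_REGBYTES		3	/* 8-byte registers (RV64 assumed) */
#define REG_MASK		(0x1f << LOG_REGBYTES)
#define SHIFT_RIGHT(x, y)	((y) < 0 ? ((x) << -(y)) : ((x) >> (y)))
#define SH_RD			7
#define SH_RS1			15

#define REG_OFFSET(insn, pos)	(SHIFT_RIGHT((insn), (pos) - LOG_REGBYTES) & REG_MASK)
#define REG_PTR(insn, pos, regs) ((ulong *)((ulong)(regs) + REG_OFFSET(insn, pos)))
#define GET_RS1(insn, regs)	(*REG_PTR(insn, SH_RS1, regs))
#define SET_RD(insn, regs, val)	(*REG_PTR(insn, SH_RD, regs) = (val))
#define IMM_I(insn)		((s32)(insn) >> 20)

int main(void)
{
	struct ctx c = { .gpr = { 0 } };
	ulong insn = 0xfff5a503;	/* lw a0,-1(a1): rd=x10, rs1=x11, imm=-1 */

	c.gpr[11] = 0x1000;		/* pretend a1 holds an address */
	SET_RD(insn, &c, GET_RS1(insn, &c) + IMM_I(insn));
	printf("a0 = %#lx, imm = %d\n", c.gpr[10], IMM_I(insn));	/* 0xfff, -1 */
	return 0;
}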
128 ulong insn) in truly_illegal_insn() argument
135 utrap.stval = insn; in truly_illegal_insn()
143 ulong insn) in system_opcode_insn() argument
145 if ((insn & INSN_MASK_WFI) == INSN_MATCH_WFI) { in system_opcode_insn()
153 vcpu->arch.guest_context.sepc += INSN_LEN(insn); in system_opcode_insn()
157 return truly_illegal_insn(vcpu, run, insn); in system_opcode_insn()
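system_opcode_insn() emulates exactly one SYSTEM-opcode instruction here, WFI: the vCPU is blocked until an interrupt is pending and sepc is advanced by INSN_LEN(insn) (line 153) so the guest resumes after the WFI; anything else falls through to truly_illegal_insn() (lines 128-135), which reflects an illegal-instruction trap with stval set to the raw opcode back into the guest. A self-contained sketch of that control flow; the vcpu struct, the wait_for_interrupt() helper, and the WFI mask value are illustrative assumptions, while INSN_MATCH_WFI is the architectural "wfi" encoding:

#include <stdio.h>

#define INSN_16BIT_MASK	0x3
#define INSN_IS_16BIT(i)	(((i) & INSN_16BIT_MASK) != INSN_16BIT_MASK)
#define INSN_LEN(i)		(INSN_IS_16BIT(i) ? 2 : 4)
#define INSN_MASK_WFI	0xffffffff	/* assumed: wfi has no operand fields */
#define INSN_MATCH_WFI	0x10500073	/* wfi */

struct vcpu { unsigned long sepc; };

static void wait_for_interrupt(struct vcpu *v) { (void)v; /* block the vCPU here */ }

static int handle_system_insn(struct vcpu *v, unsigned long insn)
{
	if ((insn & INSN_MASK_WFI) == INSN_MATCH_WFI) {
		wait_for_interrupt(v);		/* sleep until an IRQ is pending */
		v->sepc += INSN_LEN(insn);	/* skip the wfi on resume */
		return 1;			/* handled in kernel */
	}
	return 0;				/* would be truly_illegal_insn() */
}

int main(void)
{
	struct vcpu v = { .sepc = 0x80000000UL };

	printf("handled=%d, new sepc=%#lx\n",
	       handle_system_insn(&v, 0x10500073), v.sepc);	/* 1, 0x80000004 */
	return 0;
}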
163 unsigned long insn = trap->stval; in virtual_inst_fault() local
167 if (unlikely(INSN_IS_16BIT(insn))) { in virtual_inst_fault()
168 if (insn == 0) { in virtual_inst_fault()
170 insn = kvm_riscv_vcpu_unpriv_read(vcpu, true, in virtual_inst_fault()
179 if (INSN_IS_16BIT(insn)) in virtual_inst_fault()
180 return truly_illegal_insn(vcpu, run, insn); in virtual_inst_fault()
183 switch ((insn & INSN_OPCODE_MASK) >> INSN_OPCODE_SHIFT) { in virtual_inst_fault()
185 return system_opcode_insn(vcpu, run, insn); in virtual_inst_fault()
187 return truly_illegal_insn(vcpu, run, insn); in virtual_inst_fault()
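virtual_inst_fault() starts from the trapping opcode in stval (line 163). A 16-bit value means the hardware did not capture a full instruction; if it is zero, the instruction is re-read from guest memory at sepc with kvm_riscv_vcpu_unpriv_read(), and if the result is still a compressed encoding this path does not emulate it and reflects it as illegal. The surviving 32-bit instruction is then dispatched on its major opcode (bits 6:2), with only the SYSTEM opcode handled. A small sketch of that opcode extraction; the mask, shift, and SYSTEM value follow the base ISA encoding and are assumptions about the kernel's constants:

#include <stdio.h>

/* Bits [6:2] of a 32-bit instruction select the major opcode;
 * SYSTEM is 0b11100 = 28. */
#define INSN_OPCODE_MASK	0x007c
#define INSN_OPCODE_SHIFT	2
#define INSN_OPCODE_SYSTEM	28

int main(void)
{
	unsigned long wfi = 0x10500073;	/* wfi:          SYSTEM opcode */
	unsigned long add = 0x00b50533;	/* add a0,a0,a1: OP opcode     */

	printf("wfi -> %lu, add -> %lu\n",
	       (wfi & INSN_OPCODE_MASK) >> INSN_OPCODE_SHIFT,	/* 28: emulated  */
	       (add & INSN_OPCODE_MASK) >> INSN_OPCODE_SHIFT);	/* 12: reflected */
	return 0;
}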
195 unsigned long insn; in emulate_load() local
206 insn = htinst | INSN_16BIT_MASK; in emulate_load()
207 insn_len = (htinst & BIT(1)) ? INSN_LEN(insn) : 2; in emulate_load()
213 insn = kvm_riscv_vcpu_unpriv_read(vcpu, true, ct->sepc, in emulate_load()
221 insn_len = INSN_LEN(insn); in emulate_load()
225 if ((insn & INSN_MASK_LW) == INSN_MATCH_LW) { in emulate_load()
228 } else if ((insn & INSN_MASK_LB) == INSN_MATCH_LB) { in emulate_load()
231 } else if ((insn & INSN_MASK_LBU) == INSN_MATCH_LBU) { in emulate_load()
235 } else if ((insn & INSN_MASK_LD) == INSN_MATCH_LD) { in emulate_load()
238 } else if ((insn & INSN_MASK_LWU) == INSN_MATCH_LWU) { in emulate_load()
241 } else if ((insn & INSN_MASK_LH) == INSN_MATCH_LH) { in emulate_load()
244 } else if ((insn & INSN_MASK_LHU) == INSN_MATCH_LHU) { in emulate_load()
247 } else if ((insn & INSN_MASK_C_LD) == INSN_MATCH_C_LD) { in emulate_load()
250 insn = RVC_RS2S(insn) << SH_RD; in emulate_load()
251 } else if ((insn & INSN_MASK_C_LDSP) == INSN_MATCH_C_LDSP && in emulate_load()
252 ((insn >> SH_RD) & 0x1f)) { in emulate_load()
256 } else if ((insn & INSN_MASK_C_LW) == INSN_MATCH_C_LW) { in emulate_load()
259 insn = RVC_RS2S(insn) << SH_RD; in emulate_load()
260 } else if ((insn & INSN_MASK_C_LWSP) == INSN_MATCH_C_LWSP && in emulate_load()
261 ((insn >> SH_RD) & 0x1f)) { in emulate_load()
273 vcpu->arch.mmio_decode.insn = insn; in emulate_load()
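emulate_load() first decides where the instruction comes from. If the hardware supplied a transformed instruction in htinst, it is used directly after ORing in INSN_16BIT_MASK so it decodes as a standard 32-bit encoding (line 206), and bit 1 of htinst says whether the original instruction was standard or compressed length (line 207); otherwise the opcode is re-read from guest memory at sepc (line 213). The mask/match chain then records the access length and a shift amount later used to extend the MMIO data into the full register, and the compressed loads (C.LD and C.LW, lines 250 and 259) rewrite insn so that its standard rd field holds the expanded register number, letting the shared SET_RD() path in kvm_riscv_vcpu_mmio_return() work unchanged. The decoded instruction is saved in vcpu->arch.mmio_decode (line 273) before exiting to user space for the MMIO access. A sketch of that rd rewrite; RV_X and the SH_* positions are illustrative re-definitions of the macros listed earlier:

#include <stdio.h>

#define RV_X(x, s, n)	(((x) >> (s)) & ((1 << (n)) - 1))
#define SH_RD		7
#define SH_RS2C		2
#define RVC_RS2S(insn)	(8 + RV_X(insn, SH_RS2C, 3))

int main(void)
{
	unsigned long insn = 0x4108;		/* c.lw a0,0(a0) */
	unsigned long reg  = RVC_RS2S(insn);	/* rd' field 010 -> x10 (a0) */

	insn = reg << SH_RD;			/* synthetic word: standard rd field = 10 */
	printf("reg = x%lu, rd field = %lu\n", reg, RV_X(insn, SH_RD, 5));
	return 0;
}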
308 unsigned long insn; in emulate_store() local
319 insn = htinst | INSN_16BIT_MASK; in emulate_store()
320 insn_len = (htinst & BIT(1)) ? INSN_LEN(insn) : 2; in emulate_store()
326 insn = kvm_riscv_vcpu_unpriv_read(vcpu, true, ct->sepc, in emulate_store()
334 insn_len = INSN_LEN(insn); in emulate_store()
337 data = GET_RS2(insn, &vcpu->arch.guest_context); in emulate_store()
340 if ((insn & INSN_MASK_SW) == INSN_MATCH_SW) { in emulate_store()
342 } else if ((insn & INSN_MASK_SB) == INSN_MATCH_SB) { in emulate_store()
345 } else if ((insn & INSN_MASK_SD) == INSN_MATCH_SD) { in emulate_store()
348 } else if ((insn & INSN_MASK_SH) == INSN_MATCH_SH) { in emulate_store()
351 } else if ((insn & INSN_MASK_C_SD) == INSN_MATCH_C_SD) { in emulate_store()
353 data64 = GET_RS2S(insn, &vcpu->arch.guest_context); in emulate_store()
354 } else if ((insn & INSN_MASK_C_SDSP) == INSN_MATCH_C_SDSP && in emulate_store()
355 ((insn >> SH_RD) & 0x1f)) { in emulate_store()
357 data64 = GET_RS2C(insn, &vcpu->arch.guest_context); in emulate_store()
359 } else if ((insn & INSN_MASK_C_SW) == INSN_MATCH_C_SW) { in emulate_store()
361 data32 = GET_RS2S(insn, &vcpu->arch.guest_context); in emulate_store()
362 } else if ((insn & INSN_MASK_C_SWSP) == INSN_MATCH_C_SWSP && in emulate_store()
363 ((insn >> SH_RD) & 0x1f)) { in emulate_store()
365 data32 = GET_RS2C(insn, &vcpu->arch.guest_context); in emulate_store()
375 vcpu->arch.mmio_decode.insn = insn; in emulate_store()
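emulate_store() obtains the instruction the same way (htinst when available, otherwise an unprivileged read at sepc) and pulls the value to be stored out of the source register before decoding the width (line 337): GET_RS2 for the standard S-type stores, GET_RS2S for the compressed C.SD/C.SW forms whose 3-bit rs2' names x8-x15 (lines 353 and 361), and GET_RS2C for the stack-pointer-relative C.SDSP/C.SWSP forms, which carry a full 5-bit rs2 (lines 357 and 365). The width and instruction are again recorded in mmio_decode (line 375) before the MMIO exit. A sketch of how the three rs2 accessors pick different bit-fields out of the same word; the helpers below are illustrative re-definitions, not the kernel macros:

#include <stdio.h>

typedef unsigned long ulong;
struct ctx { ulong gpr[32]; };

#define RV_X(x, s, n)	(((x) >> (s)) & ((1 << (n)) - 1))

static ulong get_rs2(ulong insn, struct ctx *c)  { return c->gpr[RV_X(insn, 20, 5)]; }		/* sw/sd         */
static ulong get_rs2s(ulong insn, struct ctx *c) { return c->gpr[8 + RV_X(insn, 2, 3)]; }	/* c.sw/c.sd     */
static ulong get_rs2c(ulong insn, struct ctx *c) { return c->gpr[RV_X(insn, 2, 5)]; }		/* c.swsp/c.sdsp */

int main(void)
{
	struct ctx c = { 0 };
	ulong sw, c_sw, c_swsp;

	c.gpr[11] = 0xdead;	/* a1 holds the data being stored */
	sw     = 0x00b52023;	/* sw a1,0(a0)     */
	c_sw   = 0x0000c10c;	/* c.sw a1,0(a0)   */
	c_swsp = 0x0000c02e;	/* c.swsp a1,0(sp) */

	printf("%#lx %#lx %#lx\n",
	       get_rs2(sw, &c), get_rs2s(c_sw, &c), get_rs2c(c_swsp, &c));	/* all 0xdead */
	return 0;
}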
607 ulong insn; in kvm_riscv_vcpu_mmio_return() local
614 insn = vcpu->arch.mmio_decode.insn; in kvm_riscv_vcpu_mmio_return()
625 SET_RD(insn, &vcpu->arch.guest_context, in kvm_riscv_vcpu_mmio_return()
630 SET_RD(insn, &vcpu->arch.guest_context, in kvm_riscv_vcpu_mmio_return()
635 SET_RD(insn, &vcpu->arch.guest_context, in kvm_riscv_vcpu_mmio_return()
640 SET_RD(insn, &vcpu->arch.guest_context, in kvm_riscv_vcpu_mmio_return()
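kvm_riscv_vcpu_mmio_return() finishes the emulation once user space has performed the MMIO access: it re-reads the instruction saved by the decode step (line 614) and, for loads, uses SET_RD (lines 625-640, one case per access width) to place the returned data into the destination register before advancing sepc past the emulated instruction. A simplified end-to-end sketch of that write-back; the structs and helper are stand-ins, a little-endian host is assumed, and the extension of narrower signed loads (which the real code handles via the shift recorded at decode time) is omitted:

#include <stdio.h>
#include <string.h>

typedef unsigned long ulong;
struct ctx { ulong gpr[32]; ulong sepc; };
struct mmio_decode { ulong insn; int insn_len; int len; };

#define RV_X(x, s, n)	(((x) >> (s)) & ((1 << (n)) - 1))
#define SH_RD		7
#define SET_RD(insn, c, val)	((c)->gpr[RV_X(insn, SH_RD, 5)] = (val))

static void mmio_return(struct ctx *c, struct mmio_decode *d, const void *mmio_data)
{
	ulong val = 0;

	memcpy(&val, mmio_data, d->len);	/* width recorded at decode time */
	SET_RD(d->insn, c, val);		/* write rd in the guest context */
	c->sepc += d->insn_len;			/* resume after the emulated load */
}

int main(void)
{
	struct ctx c = { .sepc = 0x80000000UL };
	struct mmio_decode d = { .insn = 0x0005a503, .insn_len = 4, .len = 4 };	/* lw a0,0(a1) */
	unsigned int device_word = 0x12345678;	/* data supplied by user space */

	mmio_return(&c, &d, &device_word);
	printf("a0 = %#lx, sepc = %#lx\n", c.gpr[10], c.sepc);	/* 0x12345678, 0x80000004 */
	return 0;
}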