Lines Matching refs:code
261 return insn->code == (BPF_JMP | BPF_CALL) && in bpf_helper_call()
267 return insn->code == (BPF_JMP | BPF_CALL) && in bpf_pseudo_call()
273 return insn->code == (BPF_JMP | BPF_CALL) && in bpf_pseudo_kfunc_call()
552 return insn->code == (BPF_JMP | BPF_JCOND) && insn->src_reg == BPF_MAY_GOTO; in is_may_goto_insn()
585 return BPF_CLASS(insn->code) == BPF_STX && in is_cmpxchg_insn()
586 BPF_MODE(insn->code) == BPF_ATOMIC && in is_cmpxchg_insn()
592 return BPF_CLASS(insn->code) == BPF_STX && in is_atomic_load_insn()
593 BPF_MODE(insn->code) == BPF_ATOMIC && in is_atomic_load_insn()
3499 u8 code = insn->code; in jmp_offset() local
3501 if (code == (BPF_JMP32 | BPF_JA)) in jmp_offset()
3517 u8 code = insn[i].code; in check_subprogs() local
3519 if (code == (BPF_JMP | BPF_CALL) && in check_subprogs()
3525 if (BPF_CLASS(code) == BPF_LD && in check_subprogs()
3526 (BPF_MODE(code) == BPF_ABS || BPF_MODE(code) == BPF_IND)) in check_subprogs()
3528 if (BPF_CLASS(code) != BPF_JMP && BPF_CLASS(code) != BPF_JMP32) in check_subprogs()
3530 if (BPF_OP(code) == BPF_EXIT || BPF_OP(code) == BPF_CALL) in check_subprogs()
3543 if (code != (BPF_JMP | BPF_EXIT) && in check_subprogs()
3544 code != (BPF_JMP32 | BPF_JA) && in check_subprogs()
3545 code != (BPF_JMP | BPF_JA)) { in check_subprogs()
3669 u8 code, class, op; in is_reg64() local
3671 code = insn->code; in is_reg64()
3672 class = BPF_CLASS(code); in is_reg64()
3673 op = BPF_OP(code); in is_reg64()
3710 return BPF_SIZE(code) == BPF_DW || BPF_MODE(code) == BPF_MEMSX; in is_reg64()
3722 return BPF_SIZE(code) == BPF_DW; in is_reg64()
3726 u8 mode = BPF_MODE(code); in is_reg64()
3755 switch (BPF_CLASS(insn->code)) { in insn_def_regno()
3761 if (BPF_MODE(insn->code) == BPF_ATOMIC || in insn_def_regno()
3762 BPF_MODE(insn->code) == BPF_PROBE_ATOMIC) { in insn_def_regno()
4268 u8 class = BPF_CLASS(insn->code); in backtrack_insn()
4269 u8 opcode = BPF_OP(insn->code); in backtrack_insn()
4270 u8 mode = BPF_MODE(insn->code); in backtrack_insn()
4275 if (insn->code == 0) in backtrack_insn()
4302 if (BPF_SRC(insn->code) == BPF_X) { in backtrack_insn()
4320 if (BPF_SRC(insn->code) == BPF_X) { in backtrack_insn()
4513 } else if (BPF_SRC(insn->code) == BPF_X) { in backtrack_insn()
4526 } else if (BPF_SRC(insn->code) == BPF_K) { in backtrack_insn()
5073 return BPF_CLASS(insn->code) == BPF_ST && BPF_MODE(insn->code) == BPF_MEM; in is_bpf_st_mem()
6016 int class = BPF_CLASS(insn->code); in check_map_kptr_access()
6027 if (BPF_MODE(insn->code) != BPF_MEM) { in check_map_kptr_access()
7817 BPF_SIZE(insn->code), BPF_READ, insn->dst_reg, in check_load_mem()
7847 BPF_SIZE(insn->code), BPF_WRITE, insn->src_reg, in check_store_reg()
7860 if (BPF_SIZE(insn->code) != BPF_W && BPF_SIZE(insn->code) != BPF_DW) { in check_atomic_rmw()
7922 BPF_SIZE(insn->code), BPF_READ, -1, true, false); in check_atomic_rmw()
7925 insn->off, BPF_SIZE(insn->code), in check_atomic_rmw()
7937 BPF_SIZE(insn->code), BPF_WRITE, -1, true, false); in check_atomic_rmw()
7996 if (BPF_SIZE(insn->code) == BPF_DW && BITS_PER_LONG != 64) { in check_atomic()
8003 if (BPF_SIZE(insn->code) == BPF_DW && BITS_PER_LONG != 64) { in check_atomic()
14169 BPF_SRC(insn->code) == BPF_K || in can_skip_alu_sanitation()
14222 if (BPF_SRC(insn->code) == BPF_K) { in sanitize_speculative_path()
14224 } else if (BPF_SRC(insn->code) == BPF_X) { in sanitize_speculative_path()
14245 u8 opcode = BPF_OP(insn->code); in sanitize_ptr_alu()
14347 const char *op = BPF_OP(insn->code) == BPF_ADD ? "add" : "sub"; in sanitize_err()
14464 u8 opcode = BPF_OP(insn->code); in adjust_ptr_min_max_vals()
14479 if (BPF_CLASS(insn->code) != BPF_ALU64) { in adjust_ptr_min_max_vals()
15260 u64 insn_bitness = (BPF_CLASS(insn->code) == BPF_ALU64) ? 64 : 32; in is_safe_to_compute_dst_reg_range()
15274 switch (BPF_OP(insn->code)) { in is_safe_to_compute_dst_reg_range()
15306 u8 opcode = BPF_OP(insn->code); in adjust_scalar_min_max_vals()
15307 bool alu32 = (BPF_CLASS(insn->code) != BPF_ALU64); in adjust_scalar_min_max_vals()
15412 bool alu32 = (BPF_CLASS(insn->code) != BPF_ALU64); in adjust_reg_min_max_vals()
15413 u8 opcode = BPF_OP(insn->code); in adjust_reg_min_max_vals()
15422 if (BPF_CLASS(insn->code) == BPF_ALU64) in adjust_reg_min_max_vals()
15436 if (BPF_SRC(insn->code) == BPF_X) { in adjust_reg_min_max_vals()
15512 BPF_OP(insn->code) == BPF_ADD && !alu32 && in adjust_reg_min_max_vals()
15543 u8 opcode = BPF_OP(insn->code); in check_alu_op()
15548 if (BPF_SRC(insn->code) != BPF_K || in check_alu_op()
15557 (BPF_CLASS(insn->code) == BPF_ALU64 && in check_alu_op()
15558 BPF_SRC(insn->code) != BPF_TO_LE)) { in check_alu_op()
15589 if (BPF_SRC(insn->code) == BPF_X) { in check_alu_op()
15590 if (BPF_CLASS(insn->code) == BPF_ALU) { in check_alu_op()
15629 if (BPF_SRC(insn->code) == BPF_X) { in check_alu_op()
15633 if (BPF_CLASS(insn->code) == BPF_ALU64) { in check_alu_op()
15722 if (BPF_CLASS(insn->code) == BPF_ALU64) { in check_alu_op()
15737 if (BPF_SRC(insn->code) == BPF_X) { in check_alu_op()
15761 BPF_SRC(insn->code) == BPF_K && insn->imm == 0) { in check_alu_op()
15767 opcode == BPF_ARSH) && BPF_SRC(insn->code) == BPF_K) { in check_alu_op()
15768 int size = BPF_CLASS(insn->code) == BPF_ALU64 ? 64 : 32; in check_alu_op()
16417 if (BPF_SRC(insn->code) != BPF_X) in try_match_pkt_pointers()
16421 if (BPF_CLASS(insn->code) == BPF_JMP32) in try_match_pkt_pointers()
16424 switch (BPF_OP(insn->code)) { in try_match_pkt_pointers()
16618 u8 opcode = BPF_OP(insn->code); in check_cond_jmp_op()
16634 if (insn->code != (BPF_JMP | BPF_JCOND) || in check_cond_jmp_op()
16660 if (BPF_SRC(insn->code) == BPF_X) { in check_cond_jmp_op()
16703 is_jmp32 = BPF_CLASS(insn->code) == BPF_JMP32; in check_cond_jmp_op()
16711 if (BPF_SRC(insn->code) == BPF_X && !err && in check_cond_jmp_op()
16751 if (BPF_SRC(insn->code) == BPF_X && src_reg->type == SCALAR_VALUE && src_reg->id) in check_cond_jmp_op()
16767 if (BPF_SRC(insn->code) == BPF_X) { in check_cond_jmp_op()
16788 if (BPF_SRC(insn->code) == BPF_X && in check_cond_jmp_op()
16812 if (!is_jmp32 && BPF_SRC(insn->code) == BPF_X && in check_cond_jmp_op()
16841 if (!is_jmp32 && BPF_SRC(insn->code) == BPF_K && in check_cond_jmp_op()
16872 if (BPF_SIZE(insn->code) != BPF_DW) { in check_ld_imm()
16991 u8 mode = BPF_MODE(insn->code); in check_ld_abs()
17005 BPF_SIZE(insn->code) == BPF_DW || in check_ld_abs()
17625 if (stx->code != (BPF_STX | BPF_MEM | BPF_DW) || in mark_fastcall_pattern_for_call()
17626 ldx->code != (BPF_LDX | BPF_MEM | BPF_DW) || in mark_fastcall_pattern_for_call()
17684 if (insn->code != (BPF_STX | BPF_MEM | BPF_DW) || in mark_fastcall_patterns()
17692 if (insn->code != (BPF_JMP | BPF_CALL)) in mark_fastcall_patterns()
17714 if (BPF_CLASS(insn->code) != BPF_JMP && in visit_insn()
17715 BPF_CLASS(insn->code) != BPF_JMP32) { in visit_insn()
17720 switch (BPF_OP(insn->code)) { in visit_insn()
17790 if (BPF_SRC(insn->code) != BPF_K) in visit_insn()
17793 if (BPF_CLASS(insn->code) == BPF_JMP) in visit_insn()
18212 if (!prog->insnsi[linfo[i].insn_off].code) { in check_btf_line()
19833 u8 class = BPF_CLASS(insn->code); in do_check_insn()
19841 bool is_ldsx = BPF_MODE(insn->code) == BPF_MEMSX; in do_check_insn()
19850 if (BPF_MODE(insn->code) == BPF_ATOMIC) { in do_check_insn()
19858 if (BPF_MODE(insn->code) != BPF_MEM || insn->imm != 0) { in do_check_insn()
19869 if (BPF_MODE(insn->code) != BPF_MEM || in do_check_insn()
19883 insn->off, BPF_SIZE(insn->code), in do_check_insn()
19892 u8 opcode = BPF_OP(insn->code); in do_check_insn()
19896 if (BPF_SRC(insn->code) != BPF_K || in do_check_insn()
19931 if (BPF_SRC(insn->code) != BPF_K || in do_check_insn()
19946 if (BPF_SRC(insn->code) != BPF_K || in do_check_insn()
19961 u8 mode = BPF_MODE(insn->code); in do_check_insn()
20125 if (verifier_bug_if(BPF_CLASS(insn->code) == BPF_JMP || in do_check()
20126 BPF_CLASS(insn->code) == BPF_JMP32, env, in do_check()
20537 if (BPF_CLASS(insn->code) == BPF_LDX && in resolve_pseudo_ldimm64()
20538 ((BPF_MODE(insn->code) != BPF_MEM && BPF_MODE(insn->code) != BPF_MEMSX) || in resolve_pseudo_ldimm64()
20544 if (insn[0].code == (BPF_LD | BPF_IMM | BPF_DW)) { in resolve_pseudo_ldimm64()
20551 if (i == insn_cnt - 1 || insn[1].code != 0 || in resolve_pseudo_ldimm64()
20655 if (!bpf_opcode_in_insntable(insn->code)) { in resolve_pseudo_ldimm64()
20656 verbose(env, "unknown opcode %02x\n", insn->code); in resolve_pseudo_ldimm64()
20689 if (insn->code != (BPF_LD | BPF_IMM | BPF_DW)) in convert_pseudo_ld_imm64()
20801 u8 code = insn->code; in adjust_jmp_off() local
20806 if ((BPF_CLASS(code) != BPF_JMP && BPF_CLASS(code) != BPF_JMP32) || in adjust_jmp_off()
20807 BPF_OP(code) == BPF_CALL || BPF_OP(code) == BPF_EXIT) in adjust_jmp_off()
20810 if (insn->code == (BPF_JMP32 | BPF_JA)) { in adjust_jmp_off()
21001 static bool insn_is_cond_jump(u8 code) in insn_is_cond_jump() argument
21005 op = BPF_OP(code); in insn_is_cond_jump()
21006 if (BPF_CLASS(code) == BPF_JMP32) in insn_is_cond_jump()
21009 if (BPF_CLASS(code) != BPF_JMP) in insn_is_cond_jump()
21024 if (!insn_is_cond_jump(insn->code)) in opt_hard_wire_dead_code_branches()
21119 u8 code, class; in opt_subreg_zext_lo32_rnd_hi32() local
21125 code = insn.code; in opt_subreg_zext_lo32_rnd_hi32()
21126 class = BPF_CLASS(code); in opt_subreg_zext_lo32_rnd_hi32()
21136 BPF_MODE(code) == BPF_IMM) in opt_subreg_zext_lo32_rnd_hi32()
21297 if (insn->code == (BPF_LDX | BPF_MEM | BPF_B) || in convert_ctx_accesses()
21298 insn->code == (BPF_LDX | BPF_MEM | BPF_H) || in convert_ctx_accesses()
21299 insn->code == (BPF_LDX | BPF_MEM | BPF_W) || in convert_ctx_accesses()
21300 insn->code == (BPF_LDX | BPF_MEM | BPF_DW) || in convert_ctx_accesses()
21301 insn->code == (BPF_LDX | BPF_MEMSX | BPF_B) || in convert_ctx_accesses()
21302 insn->code == (BPF_LDX | BPF_MEMSX | BPF_H) || in convert_ctx_accesses()
21303 insn->code == (BPF_LDX | BPF_MEMSX | BPF_W)) { in convert_ctx_accesses()
21305 } else if (insn->code == (BPF_STX | BPF_MEM | BPF_B) || in convert_ctx_accesses()
21306 insn->code == (BPF_STX | BPF_MEM | BPF_H) || in convert_ctx_accesses()
21307 insn->code == (BPF_STX | BPF_MEM | BPF_W) || in convert_ctx_accesses()
21308 insn->code == (BPF_STX | BPF_MEM | BPF_DW) || in convert_ctx_accesses()
21309 insn->code == (BPF_ST | BPF_MEM | BPF_B) || in convert_ctx_accesses()
21310 insn->code == (BPF_ST | BPF_MEM | BPF_H) || in convert_ctx_accesses()
21311 insn->code == (BPF_ST | BPF_MEM | BPF_W) || in convert_ctx_accesses()
21312 insn->code == (BPF_ST | BPF_MEM | BPF_DW)) { in convert_ctx_accesses()
21314 } else if ((insn->code == (BPF_STX | BPF_ATOMIC | BPF_B) || in convert_ctx_accesses()
21315 insn->code == (BPF_STX | BPF_ATOMIC | BPF_H) || in convert_ctx_accesses()
21316 insn->code == (BPF_STX | BPF_ATOMIC | BPF_W) || in convert_ctx_accesses()
21317 insn->code == (BPF_STX | BPF_ATOMIC | BPF_DW)) && in convert_ctx_accesses()
21319 insn->code = BPF_STX | BPF_PROBE_ATOMIC | BPF_SIZE(insn->code); in convert_ctx_accesses()
21322 } else if (insn->code == (BPF_JMP | BPF_EXIT) && in convert_ctx_accesses()
21392 if (BPF_MODE(insn->code) == BPF_MEM) in convert_ctx_accesses()
21393 insn->code = BPF_LDX | BPF_PROBE_MEM | in convert_ctx_accesses()
21394 BPF_SIZE((insn)->code); in convert_ctx_accesses()
21396 insn->code = BPF_LDX | BPF_PROBE_MEMSX | in convert_ctx_accesses()
21397 BPF_SIZE((insn)->code); in convert_ctx_accesses()
21402 if (BPF_MODE(insn->code) == BPF_MEMSX) { in convert_ctx_accesses()
21406 insn->code = BPF_CLASS(insn->code) | BPF_PROBE_MEM32 | BPF_SIZE(insn->code); in convert_ctx_accesses()
21415 mode = BPF_MODE(insn->code); in convert_ctx_accesses()
21440 insn->code = BPF_LDX | BPF_MEM | size_code; in convert_ctx_accesses()
21608 if (BPF_CLASS(insn->code) == BPF_LDX && in jit_subprogs()
21609 (BPF_MODE(insn->code) == BPF_PROBE_MEM || in jit_subprogs()
21610 BPF_MODE(insn->code) == BPF_PROBE_MEM32 || in jit_subprogs()
21611 BPF_MODE(insn->code) == BPF_PROBE_MEMSX)) in jit_subprogs()
21613 if ((BPF_CLASS(insn->code) == BPF_STX || in jit_subprogs()
21614 BPF_CLASS(insn->code) == BPF_ST) && in jit_subprogs()
21615 BPF_MODE(insn->code) == BPF_PROBE_MEM32) in jit_subprogs()
21617 if (BPF_CLASS(insn->code) == BPF_STX && in jit_subprogs()
21618 BPF_MODE(insn->code) == BPF_PROBE_ATOMIC) in jit_subprogs()
22042 if (insn->code == (BPF_ALU64 | BPF_MOV | BPF_X) && insn->imm) { in do_misc_fixups()
22046 insn->code = BPF_ALU | BPF_MOV | BPF_X; in do_misc_fixups()
22056 insn->code = BPF_ALU | BPF_OP(insn->code) | BPF_SRC(insn->code); in do_misc_fixups()
22059 if ((insn->code == (BPF_ALU64 | BPF_MOD | BPF_K) || in do_misc_fixups()
22060 insn->code == (BPF_ALU64 | BPF_DIV | BPF_K) || in do_misc_fixups()
22061 insn->code == (BPF_ALU | BPF_MOD | BPF_K) || in do_misc_fixups()
22062 insn->code == (BPF_ALU | BPF_DIV | BPF_K)) && in do_misc_fixups()
22064 bool is64 = BPF_CLASS(insn->code) == BPF_ALU64; in do_misc_fixups()
22065 bool isdiv = BPF_OP(insn->code) == BPF_DIV; in do_misc_fixups()
22088 if (insn->code == (BPF_ALU64 | BPF_MOD | BPF_X) || in do_misc_fixups()
22089 insn->code == (BPF_ALU64 | BPF_DIV | BPF_X) || in do_misc_fixups()
22090 insn->code == (BPF_ALU | BPF_MOD | BPF_X) || in do_misc_fixups()
22091 insn->code == (BPF_ALU | BPF_DIV | BPF_X)) { in do_misc_fixups()
22092 bool is64 = BPF_CLASS(insn->code) == BPF_ALU64; in do_misc_fixups()
22093 bool isdiv = BPF_OP(insn->code) == BPF_DIV; in do_misc_fixups()
22179 if (BPF_CLASS(insn->code) == BPF_LDX && in do_misc_fixups()
22180 (BPF_MODE(insn->code) == BPF_PROBE_MEM || in do_misc_fixups()
22181 BPF_MODE(insn->code) == BPF_PROBE_MEMSX)) { in do_misc_fixups()
22209 if (BPF_CLASS(insn->code) == BPF_LD && in do_misc_fixups()
22210 (BPF_MODE(insn->code) == BPF_ABS || in do_misc_fixups()
22211 BPF_MODE(insn->code) == BPF_IND)) { in do_misc_fixups()
22229 if (insn->code == (BPF_ALU64 | BPF_ADD | BPF_X) || in do_misc_fixups()
22230 insn->code == (BPF_ALU64 | BPF_SUB | BPF_X)) { in do_misc_fixups()
22264 insn->code = insn->code == code_add ? in do_misc_fixups()
22347 if (insn->code != (BPF_JMP | BPF_CALL)) in do_misc_fixups()
22395 insn->code = BPF_JMP | BPF_TAIL_CALL; in do_misc_fixups()
23004 return insn->code == (BPF_JMP | BPF_CALL) && in is_bpf_loop_call()
24089 u8 class = BPF_CLASS(insn->code); in can_fallthrough()
24090 u8 opcode = BPF_OP(insn->code); in can_fallthrough()
24103 u8 class = BPF_CLASS(insn->code); in can_jump()
24104 u8 opcode = BPF_OP(insn->code); in can_jump()
24165 u8 class = BPF_CLASS(insn->code); in compute_insn_live_regs()
24166 u8 code = BPF_OP(insn->code); in compute_insn_live_regs() local
24167 u8 mode = BPF_MODE(insn->code); in compute_insn_live_regs()
24178 if (BPF_SIZE(insn->code) == BPF_DW) { in compute_insn_live_regs()
24238 switch (code) { in compute_insn_live_regs()
24245 if (BPF_SRC(insn->code) == BPF_K) in compute_insn_live_regs()
24252 if (BPF_SRC(insn->code) == BPF_K) in compute_insn_live_regs()
24260 switch (code) { in compute_insn_live_regs()
24278 if (BPF_SRC(insn->code) == BPF_K) in compute_insn_live_regs()
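
A minimal userspace sketch (not the verifier's code) of the decoding pattern the call sites above share: insn->code is either compared whole against a full opcode, or split into fields with the BPF_* macros from the uapi headers. It restates only predicates that appear complete in the listing (lines 552 and 5073); the helper names are illustrative, and BPF_JCOND/BPF_MAY_GOTO assume uapi headers recent enough to define them.

	#include <stdbool.h>
	#include <stdio.h>
	#include <linux/bpf.h>		/* struct bpf_insn, BPF_* opcode macros */

	/* Whole-opcode match plus a src_reg qualifier, as at line 552. */
	static bool looks_like_may_goto(const struct bpf_insn *insn)
	{
		return insn->code == (BPF_JMP | BPF_JCOND) &&
		       insn->src_reg == BPF_MAY_GOTO;
	}

	/* Field-by-field match via BPF_CLASS()/BPF_MODE(), as at line 5073. */
	static bool looks_like_st_mem(const struct bpf_insn *insn)
	{
		return BPF_CLASS(insn->code) == BPF_ST &&
		       BPF_MODE(insn->code) == BPF_MEM;
	}

	int main(void)
	{
		/* BPF_ST | BPF_MEM | BPF_W: store a 32-bit immediate to memory. */
		struct bpf_insn st = { .code = BPF_ST | BPF_MEM | BPF_W };

		printf("st_mem=%d may_goto=%d\n",
		       looks_like_st_mem(&st), looks_like_may_goto(&st));
		return 0;
	}

The same split shows up in the multi-field sites above (e.g. lines 3671-3673 and 24165-24167), which pull class, op, and mode out of the one insn->code byte before dispatching.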