Lines Matching refs: src_reg
253 insn->src_reg == 0; in bpf_helper_call()
259 insn->src_reg == BPF_PSEUDO_CALL; in bpf_pseudo_call()
265 insn->src_reg == BPF_PSEUDO_KFUNC_CALL; in bpf_pseudo_kfunc_call()
540 return insn->code == (BPF_JMP | BPF_JCOND) && insn->src_reg == BPF_MAY_GOTO; in is_may_goto_insn()
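The first three predicates above differ only in the src_reg tag; a minimal reconstruction from these fragments (BPF_PSEUDO_CALL = 1 and BPF_PSEUDO_KFUNC_CALL = 2 in the UAPI header; the shared opcode check is not part of the matches but holds in the source):

static bool bpf_helper_call(const struct bpf_insn *insn)
{
	return insn->code == (BPF_JMP | BPF_CALL) &&
	       insn->src_reg == 0;
}

static bool bpf_pseudo_call(const struct bpf_insn *insn)
{
	return insn->code == (BPF_JMP | BPF_CALL) &&
	       insn->src_reg == BPF_PSEUDO_CALL;
}

static bool bpf_pseudo_kfunc_call(const struct bpf_insn *insn)
{
	return insn->code == (BPF_JMP | BPF_CALL) &&
	       insn->src_reg == BPF_PSEUDO_KFUNC_CALL;
}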
3044 insn[i].src_reg == 0 && in check_subprogs()
3205 if (insn->src_reg == BPF_PSEUDO_CALL) in is_reg64()
3286 return insn->src_reg; in insn_def_regno()
3566 if (insn->src_reg != BPF_PSEUDO_KFUNC_CALL) in disasm_kfunc_name()
3787 u32 sreg = insn->src_reg; in backtrack_insn()
3972 if (insn->src_reg == BPF_PSEUDO_KFUNC_CALL && insn->imm == 0) in backtrack_insn()
4522 struct bpf_reg_state *src_reg) in assign_scalar_id_before_mov() argument
4524 if (src_reg->type != SCALAR_VALUE) in assign_scalar_id_before_mov()
4527 if (src_reg->id & BPF_ADD_CONST) { in assign_scalar_id_before_mov()
4533 src_reg->id = 0; in assign_scalar_id_before_mov()
4534 src_reg->off = 0; in assign_scalar_id_before_mov()
4537 if (!src_reg->id && !tnum_is_const(src_reg->var_off)) in assign_scalar_id_before_mov()
4542 src_reg->id = ++env->id_gen; in assign_scalar_id_before_mov()
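Putting the assign_scalar_id_before_mov() fragments back together (a reconstruction, not verbatim): before copying a scalar on a register-to-register mov, the verifier makes sure it carries an id that sync_linked_regs() can later use to propagate learned bounds to every copy.

static void assign_scalar_id_before_mov(struct bpf_verifier_env *env,
					struct bpf_reg_state *src_reg)
{
	if (src_reg->type != SCALAR_VALUE)
		return;

	if (src_reg->id & BPF_ADD_CONST) {
		/* The id was tagged with a "+= const" delta; a plain mov
		 * must not inherit that relationship, so break the link. */
		src_reg->id = 0;
		src_reg->off = 0;
	}

	if (!src_reg->id && !tnum_is_const(src_reg->var_off))
		/* Constants don't need an id; everything else gets a
		 * fresh one so dst and src stay linked after the copy. */
		src_reg->id = ++env->id_gen;
}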
7183 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_atomic()
7206 if (is_pointer_value(env, insn->src_reg)) { in check_atomic()
7207 verbose(env, "R%d leaks addr into mem\n", insn->src_reg); in check_atomic()
7226 load_reg = insn->src_reg; in check_atomic()
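The check_atomic() matches end with the load_reg choice; a sketch of the surrounding logic (reconstructed, not verbatim): atomic read-modify-write ops that fetch the old value must also mark a destination register as written, and which register that is depends on the operation.

	if (insn->imm & BPF_FETCH) {
		if (insn->imm == BPF_CMPXCHG)
			/* old value lands in R0, which also supplied
			 * the compare value */
			load_reg = BPF_REG_0;
		else
			/* e.g. BPF_ADD | BPF_FETCH: old value replaces
			 * the source operand */
			load_reg = insn->src_reg;
	} else {
		/* plain atomic op: nothing is loaded back */
		load_reg = -1;
	}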
13028 mark_reg_unknown(env, regs, insn->src_reg); in sanitize_speculative_path()
13150 u32 dst = insn->dst_reg, src = insn->src_reg; in sanitize_err()
13482 struct bpf_reg_state *src_reg) in scalar32_min_max_add() argument
13489 if (check_add_overflow(*dst_smin, src_reg->s32_min_value, dst_smin) || in scalar32_min_max_add()
13490 check_add_overflow(*dst_smax, src_reg->s32_max_value, dst_smax)) { in scalar32_min_max_add()
13494 if (check_add_overflow(*dst_umin, src_reg->u32_min_value, dst_umin) || in scalar32_min_max_add()
13495 check_add_overflow(*dst_umax, src_reg->u32_max_value, dst_umax)) { in scalar32_min_max_add()
13502 struct bpf_reg_state *src_reg) in scalar_min_max_add() argument
13509 if (check_add_overflow(*dst_smin, src_reg->smin_value, dst_smin) || in scalar_min_max_add()
13510 check_add_overflow(*dst_smax, src_reg->smax_value, dst_smax)) { in scalar_min_max_add()
13514 if (check_add_overflow(*dst_umin, src_reg->umin_value, dst_umin) || in scalar_min_max_add()
13515 check_add_overflow(*dst_umax, src_reg->umax_value, dst_umax)) { in scalar_min_max_add()
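The add helpers lean entirely on check_add_overflow(), the <linux/overflow.h> primitive that stores the sum and returns true on overflow. A reconstruction of the 64-bit variant from the fragments above; the 32-bit one is identical with the s32/u32 fields:

static void scalar_min_max_add(struct bpf_reg_state *dst_reg,
			       struct bpf_reg_state *src_reg)
{
	s64 *dst_smin = &dst_reg->smin_value;
	s64 *dst_smax = &dst_reg->smax_value;
	u64 *dst_umin = &dst_reg->umin_value;
	u64 *dst_umax = &dst_reg->umax_value;

	/* If either signed bound can overflow, the signed range is
	 * unusable: collapse it to the full s64 range rather than
	 * letting it wrap. Same idea for the unsigned range below. */
	if (check_add_overflow(*dst_smin, src_reg->smin_value, dst_smin) ||
	    check_add_overflow(*dst_smax, src_reg->smax_value, dst_smax)) {
		*dst_smin = S64_MIN;
		*dst_smax = S64_MAX;
	}
	if (check_add_overflow(*dst_umin, src_reg->umin_value, dst_umin) ||
	    check_add_overflow(*dst_umax, src_reg->umax_value, dst_umax)) {
		*dst_umin = 0;
		*dst_umax = U64_MAX;
	}
}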
13522 struct bpf_reg_state *src_reg) in scalar32_min_max_sub() argument
13526 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_sub()
13527 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_sub()
13529 if (check_sub_overflow(*dst_smin, src_reg->s32_max_value, dst_smin) || in scalar32_min_max_sub()
13530 check_sub_overflow(*dst_smax, src_reg->s32_min_value, dst_smax)) { in scalar32_min_max_sub()
13547 struct bpf_reg_state *src_reg) in scalar_min_max_sub() argument
13551 u64 umin_val = src_reg->umin_value; in scalar_min_max_sub()
13552 u64 umax_val = src_reg->umax_value; in scalar_min_max_sub()
13554 if (check_sub_overflow(*dst_smin, src_reg->smax_value, dst_smin) || in scalar_min_max_sub()
13555 check_sub_overflow(*dst_smax, src_reg->smin_value, dst_smax)) { in scalar_min_max_sub()
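Only the signed half of the sub helpers names src_reg directly, so the search misses the unsigned half, which reads the umin_val/umax_val locals captured above. A sketch of that half (reconstructed): subtraction is anti-monotone in the source, and a possible underflow resets the range entirely.

	if (dst_reg->umin_value < umax_val) {
		/* Underflow possible: we know nothing */
		dst_reg->umin_value = 0;
		dst_reg->umax_value = U64_MAX;
	} else {
		/* Cannot underflow: the new min subtracts src's max,
		 * the new max subtracts src's min */
		dst_reg->umin_value -= umax_val;
		dst_reg->umax_value -= umin_val;
	}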
13572 struct bpf_reg_state *src_reg) in scalar32_min_max_mul() argument
13574 s32 smin_val = src_reg->s32_min_value; in scalar32_min_max_mul()
13575 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_mul()
13576 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_mul()
13604 struct bpf_reg_state *src_reg) in scalar_min_max_mul() argument
13606 s64 smin_val = src_reg->smin_value; in scalar_min_max_mul()
13607 u64 umin_val = src_reg->umin_value; in scalar_min_max_mul()
13608 u64 umax_val = src_reg->umax_value; in scalar_min_max_mul()
13636 struct bpf_reg_state *src_reg) in scalar32_min_max_and() argument
13638 bool src_known = tnum_subreg_is_const(src_reg->var_off); in scalar32_min_max_and()
13641 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_and()
13667 struct bpf_reg_state *src_reg) in scalar_min_max_and() argument
13669 bool src_known = tnum_is_const(src_reg->var_off); in scalar_min_max_and()
13671 u64 umax_val = src_reg->umax_value; in scalar_min_max_and()
13699 struct bpf_reg_state *src_reg) in scalar32_min_max_or() argument
13701 bool src_known = tnum_subreg_is_const(src_reg->var_off); in scalar32_min_max_or()
13704 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_or()
13730 struct bpf_reg_state *src_reg) in scalar_min_max_or() argument
13732 bool src_known = tnum_is_const(src_reg->var_off); in scalar_min_max_or()
13734 u64 umin_val = src_reg->umin_value; in scalar_min_max_or()
13762 struct bpf_reg_state *src_reg) in scalar32_min_max_xor() argument
13764 bool src_known = tnum_subreg_is_const(src_reg->var_off); in scalar32_min_max_xor()
13790 struct bpf_reg_state *src_reg) in scalar_min_max_xor() argument
13792 bool src_known = tnum_is_const(src_reg->var_off); in scalar_min_max_xor()
13838 struct bpf_reg_state *src_reg) in scalar32_min_max_lsh() argument
13840 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_lsh()
13841 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_lsh()
13886 struct bpf_reg_state *src_reg) in scalar_min_max_lsh() argument
13888 u64 umax_val = src_reg->umax_value; in scalar_min_max_lsh()
13889 u64 umin_val = src_reg->umin_value; in scalar_min_max_lsh()
13901 struct bpf_reg_state *src_reg) in scalar32_min_max_rsh() argument
13904 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_rsh()
13905 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_rsh()
13933 struct bpf_reg_state *src_reg) in scalar_min_max_rsh() argument
13935 u64 umax_val = src_reg->umax_value; in scalar_min_max_rsh()
13936 u64 umin_val = src_reg->umin_value; in scalar_min_max_rsh()
13967 struct bpf_reg_state *src_reg) in scalar32_min_max_arsh() argument
13969 u64 umin_val = src_reg->u32_min_value; in scalar32_min_max_arsh()
13990 struct bpf_reg_state *src_reg) in scalar_min_max_arsh() argument
13992 u64 umin_val = src_reg->umin_value; in scalar_min_max_arsh()
14017 const struct bpf_reg_state *src_reg) in is_safe_to_compute_dst_reg_range() argument
14023 if (tnum_subreg_is_const(src_reg->var_off) in is_safe_to_compute_dst_reg_range()
14024 && src_reg->s32_min_value == src_reg->s32_max_value in is_safe_to_compute_dst_reg_range()
14025 && src_reg->u32_min_value == src_reg->u32_max_value) in is_safe_to_compute_dst_reg_range()
14028 if (tnum_is_const(src_reg->var_off) in is_safe_to_compute_dst_reg_range()
14029 && src_reg->smin_value == src_reg->smax_value in is_safe_to_compute_dst_reg_range()
14030 && src_reg->umin_value == src_reg->umax_value) in is_safe_to_compute_dst_reg_range()
14050 return (src_is_const && src_reg->umax_value < insn_bitness); in is_safe_to_compute_dst_reg_range()
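Pulling the is_safe_to_compute_dst_reg_range() fragments together (a sketch under a hypothetical standalone name; the real function also switches on the ALU opcode): shift ranges are only computable when the shift amount is a provably constant value strictly below the operand width, since shifts by the width or more, or by a negative amount, are undefined in BPF.

static bool shift_range_is_computable(const struct bpf_insn *insn,
				      const struct bpf_reg_state *src_reg)
{
	u64 insn_bitness = (BPF_CLASS(insn->code) == BPF_ALU64) ? 64 : 32;
	bool src_is_const;

	if (insn_bitness == 32)
		src_is_const = tnum_subreg_is_const(src_reg->var_off) &&
			       src_reg->s32_min_value == src_reg->s32_max_value &&
			       src_reg->u32_min_value == src_reg->u32_max_value;
	else
		src_is_const = tnum_is_const(src_reg->var_off) &&
			       src_reg->smin_value == src_reg->smax_value &&
			       src_reg->umin_value == src_reg->umax_value;

	return src_is_const && src_reg->umax_value < insn_bitness;
}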
14063 struct bpf_reg_state src_reg) in adjust_scalar_min_max_vals() argument
14069 if (!is_safe_to_compute_dst_reg_range(insn, &src_reg)) { in adjust_scalar_min_max_vals()
14096 scalar32_min_max_add(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
14097 scalar_min_max_add(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
14098 dst_reg->var_off = tnum_add(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
14101 scalar32_min_max_sub(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
14102 scalar_min_max_sub(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
14103 dst_reg->var_off = tnum_sub(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
14106 dst_reg->var_off = tnum_mul(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
14107 scalar32_min_max_mul(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
14108 scalar_min_max_mul(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
14111 dst_reg->var_off = tnum_and(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
14112 scalar32_min_max_and(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
14113 scalar_min_max_and(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
14116 dst_reg->var_off = tnum_or(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
14117 scalar32_min_max_or(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
14118 scalar_min_max_or(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
14121 dst_reg->var_off = tnum_xor(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
14122 scalar32_min_max_xor(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
14123 scalar_min_max_xor(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
14127 scalar32_min_max_lsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
14129 scalar_min_max_lsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
14133 scalar32_min_max_rsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
14135 scalar_min_max_rsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
14139 scalar32_min_max_arsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
14141 scalar_min_max_arsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
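Each case in the adjust_scalar_min_max_vals() dispatch pairs a tnum transfer function (tnum_add, tnum_and, ...) with the 32- and 64-bit min/max helpers. A tnum tracks every bit as known-0, known-1, or unknown via a (value, mask) pair; this self-contained userspace sketch mirrors tnum_add() from kernel/bpf/tnum.c and shows how unknown bits propagate through possible carries:

#include <stdint.h>
#include <stdio.h>

/* bit i is known iff mask bit i is 0; value then holds the known bits */
struct tnum { uint64_t value; uint64_t mask; };

static struct tnum tnum_add(struct tnum a, struct tnum b)
{
	uint64_t sm = a.mask + b.mask;   /* sum if every unknown bit is 1 */
	uint64_t sv = a.value + b.value; /* sum if every unknown bit is 0 */
	uint64_t sigma = sm + sv;
	uint64_t chi = sigma ^ sv;       /* positions whose carry-in may differ */
	uint64_t mu = chi | a.mask | b.mask;

	return (struct tnum){ .value = sv & ~mu, .mask = mu };
}

int main(void)
{
	struct tnum a = { .value = 0x10, .mask = 0x01 }; /* {0x10, 0x11} */
	struct tnum b = { .value = 0x01, .mask = 0x00 }; /* exactly 1 */
	struct tnum r = tnum_add(a, b);

	/* prints value=0x10 mask=0x3: a sound over-approximation
	 * {0x10..0x13} of the exact result set {0x11, 0x12} */
	printf("value=%#llx mask=%#llx\n",
	       (unsigned long long)r.value, (unsigned long long)r.mask);
	return 0;
}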
14162 struct bpf_reg_state *regs = state->regs, *dst_reg, *src_reg; in adjust_reg_min_max_vals() local
14169 src_reg = NULL; in adjust_reg_min_max_vals()
14189  src_reg = &regs[insn->src_reg]; in adjust_reg_min_max_vals()
14190 if (src_reg->type != SCALAR_VALUE) { in adjust_reg_min_max_vals()
14213 src_reg, dst_reg); in adjust_reg_min_max_vals()
14217 err = mark_chain_precision(env, insn->src_reg); in adjust_reg_min_max_vals()
14221 dst_reg, src_reg); in adjust_reg_min_max_vals()
14224 err = mark_chain_precision(env, insn->src_reg); in adjust_reg_min_max_vals()
14234 src_reg = &off_reg; in adjust_reg_min_max_vals()
14237 ptr_reg, src_reg); in adjust_reg_min_max_vals()
14246 if (WARN_ON(!src_reg)) { in adjust_reg_min_max_vals()
14251 err = adjust_scalar_min_max_vals(env, insn, dst_reg, *src_reg); in adjust_reg_min_max_vals()
14265 dst_reg->id && is_reg_const(src_reg, false)) { in adjust_reg_min_max_vals()
14266 u64 val = reg_const_value(src_reg, false); in adjust_reg_min_max_vals()
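The two fragments just above are the tail of adjust_reg_min_max_vals() that maintains linked registers across constant adds (a sketch with details hedged from memory, not verbatim): after `w = v` both registers share an id; a later `w += const` keeps the link, tags the id with BPF_ADD_CONST and records the delta in off, so sync_linked_regs() can still transfer bounds learned about v onto w, shifted by the constant.

	if (dst_reg->id && is_reg_const(src_reg, false)) {
		u64 val = reg_const_value(src_reg, false);

		if ((dst_reg->id & BPF_ADD_CONST) || val > (u32)S32_MAX) {
			/* can't accumulate a second delta (or one large
			 * enough to overflow later): break the link */
			dst_reg->id = 0;
			dst_reg->off = 0;
		} else {
			dst_reg->id |= BPF_ADD_CONST;
			dst_reg->off = val;
		}
	}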
14301 insn->src_reg != BPF_REG_0 || in check_alu_op()
14307 if (insn->src_reg != BPF_REG_0 || insn->off != 0 || in check_alu_op()
14359 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_alu_op()
14363 if (insn->src_reg != BPF_REG_0 || insn->off != 0) { in check_alu_op()
14375 struct bpf_reg_state *src_reg = regs + insn->src_reg; in check_alu_op() local
14391 assign_scalar_id_before_mov(env, src_reg); in check_alu_op()
14392 copy_register_state(dst_reg, src_reg); in check_alu_op()
14397 if (is_pointer_value(env, insn->src_reg)) { in check_alu_op()
14400 insn->src_reg); in check_alu_op()
14402 } else if (src_reg->type == SCALAR_VALUE) { in check_alu_op()
14405 no_sext = src_reg->umax_value < (1ULL << (insn->off - 1)); in check_alu_op()
14407 assign_scalar_id_before_mov(env, src_reg); in check_alu_op()
14408 copy_register_state(dst_reg, src_reg); in check_alu_op()
14420 if (is_pointer_value(env, insn->src_reg)) { in check_alu_op()
14423 insn->src_reg); in check_alu_op()
14425 } else if (src_reg->type == SCALAR_VALUE) { in check_alu_op()
14427 bool is_src_reg_u32 = get_reg_width(src_reg) <= 32; in check_alu_op()
14430 assign_scalar_id_before_mov(env, src_reg); in check_alu_op()
14431 copy_register_state(dst_reg, src_reg); in check_alu_op()
14442 bool no_sext = src_reg->umax_value < (1ULL << (insn->off - 1)); in check_alu_op()
14445 assign_scalar_id_before_mov(env, src_reg); in check_alu_op()
14446 copy_register_state(dst_reg, src_reg); in check_alu_op()
14489 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_alu_op()
14493 if (insn->src_reg != BPF_REG_0 || insn->off > 1 || in check_alu_op()
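Several check_alu_op() fragments above test `src_reg->umax_value < (1ULL << (insn->off - 1))` before a sign-extending mov (BPF_MOVSX, where insn->off is 8, 16, or 32). The point: if every possible source value provably fits below the sign bit, sign extension is a no-op ("no_sext") and the known bounds survive the copy. A tiny userspace illustration for off = 8:

#include <stdint.h>
#include <assert.h>

static int64_t movsx8(uint64_t v) { return (int8_t)v; }

int main(void)
{
	/* umax < 0x80 (= 1 << 7): bit 7 is provably clear, so the
	 * sign-extending mov behaves like a plain copy */
	assert(movsx8(0x7f) == 0x7f);
	/* bit 7 set: sign extension changes the value, so the verifier
	 * must recompute bounds instead of copying them */
	assert(movsx8(0x80) == -128);
	return 0;
}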
14763 struct bpf_reg_state *src_reg, in is_pkt_ptr_branch_taken() argument
14768 if (src_reg->type == PTR_TO_PACKET_END) { in is_pkt_ptr_branch_taken()
14771 pkt = src_reg; in is_pkt_ptr_branch_taken()
15154 struct bpf_reg_state *src_reg, in try_match_pkt_pointers() argument
15168 src_reg->type == PTR_TO_PACKET_END) || in try_match_pkt_pointers()
15170 reg_is_init_pkt_pointer(src_reg, PTR_TO_PACKET))) { in try_match_pkt_pointers()
15176 src_reg->type == PTR_TO_PACKET) || in try_match_pkt_pointers()
15178 src_reg->type == PTR_TO_PACKET_META)) { in try_match_pkt_pointers()
15180 find_good_pkt_pointers(other_branch, src_reg, in try_match_pkt_pointers()
15181 src_reg->type, true); in try_match_pkt_pointers()
15182 mark_pkt_end(this_branch, insn->src_reg, false); in try_match_pkt_pointers()
15189 src_reg->type == PTR_TO_PACKET_END) || in try_match_pkt_pointers()
15191 reg_is_init_pkt_pointer(src_reg, PTR_TO_PACKET))) { in try_match_pkt_pointers()
15197 src_reg->type == PTR_TO_PACKET) || in try_match_pkt_pointers()
15199 src_reg->type == PTR_TO_PACKET_META)) { in try_match_pkt_pointers()
15201 find_good_pkt_pointers(this_branch, src_reg, in try_match_pkt_pointers()
15202 src_reg->type, false); in try_match_pkt_pointers()
15203 mark_pkt_end(other_branch, insn->src_reg, true); in try_match_pkt_pointers()
15210 src_reg->type == PTR_TO_PACKET_END) || in try_match_pkt_pointers()
15212 reg_is_init_pkt_pointer(src_reg, PTR_TO_PACKET))) { in try_match_pkt_pointers()
15218 src_reg->type == PTR_TO_PACKET) || in try_match_pkt_pointers()
15220 src_reg->type == PTR_TO_PACKET_META)) { in try_match_pkt_pointers()
15222 find_good_pkt_pointers(other_branch, src_reg, in try_match_pkt_pointers()
15223 src_reg->type, false); in try_match_pkt_pointers()
15224 mark_pkt_end(this_branch, insn->src_reg, true); in try_match_pkt_pointers()
15231 src_reg->type == PTR_TO_PACKET_END) || in try_match_pkt_pointers()
15233 reg_is_init_pkt_pointer(src_reg, PTR_TO_PACKET))) { in try_match_pkt_pointers()
15239 src_reg->type == PTR_TO_PACKET) || in try_match_pkt_pointers()
15241 src_reg->type == PTR_TO_PACKET_META)) { in try_match_pkt_pointers()
15243 find_good_pkt_pointers(this_branch, src_reg, in try_match_pkt_pointers()
15244 src_reg->type, true); in try_match_pkt_pointers()
15245 mark_pkt_end(other_branch, insn->src_reg, false); in try_match_pkt_pointers()
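try_match_pkt_pointers() covers the four comparison shapes (>, <, >=, <=) between a packet pointer and PTR_TO_PACKET_END, with dst and src in either position. The user-side pattern it recognizes is the classic XDP bounds check; an illustrative program (assumes libbpf's bpf_helpers.h; this is program-author code, not verifier code):

#include <linux/bpf.h>
#include <linux/if_ether.h>
#include <bpf/bpf_helpers.h>

SEC("xdp")
int parse_eth(struct xdp_md *ctx)
{
	void *data = (void *)(long)ctx->data;
	void *data_end = (void *)(long)ctx->data_end;
	struct ethhdr *eth = data;

	/* A JGT compare of PTR_TO_PACKET against PTR_TO_PACKET_END: on
	 * the fall-through branch find_good_pkt_pointers() records that
	 * sizeof(*eth) bytes past 'data' are readable, which is what
	 * legitimizes the eth->h_proto access below. */
	if ((void *)(eth + 1) > data_end)
		return XDP_DROP;

	return eth->h_proto ? XDP_PASS : XDP_DROP;
}

char _license[] SEC("license") = "GPL";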
15356 struct bpf_reg_state *dst_reg, *other_branch_regs, *src_reg = NULL; in check_cond_jmp_op() local
15375 insn->src_reg != BPF_MAY_GOTO || in check_cond_jmp_op()
15408 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_cond_jmp_op()
15412  src_reg = &regs[insn->src_reg]; in check_cond_jmp_op()
15413 if (!(reg_is_pkt_pointer_any(dst_reg) && reg_is_pkt_pointer_any(src_reg)) && in check_cond_jmp_op()
15414 is_pointer_value(env, insn->src_reg)) { in check_cond_jmp_op()
15416 insn->src_reg); in check_cond_jmp_op()
15420 if (insn->src_reg != BPF_REG_0) { in check_cond_jmp_op()
15424 src_reg = &env->fake_reg[0]; in check_cond_jmp_op()
15425 memset(src_reg, 0, sizeof(*src_reg)); in check_cond_jmp_op()
15426 src_reg->type = SCALAR_VALUE; in check_cond_jmp_op()
15427 __mark_reg_known(src_reg, insn->imm); in check_cond_jmp_op()
15431 pred = is_branch_taken(dst_reg, src_reg, opcode, is_jmp32); in check_cond_jmp_op()
15439 !__is_pointer_value(false, src_reg)) in check_cond_jmp_op()
15440 err = mark_chain_precision(env, insn->src_reg); in check_cond_jmp_op()
15478 if (BPF_SRC(insn->code) == BPF_X && src_reg->type == SCALAR_VALUE && src_reg->id) in check_cond_jmp_op()
15479 collect_linked_regs(this_branch, src_reg->id, &linked_regs); in check_cond_jmp_op()
15497 &other_branch_regs[insn->src_reg], in check_cond_jmp_op()
15498 dst_reg, src_reg, opcode, is_jmp32); in check_cond_jmp_op()
15516 src_reg->type == SCALAR_VALUE && src_reg->id && in check_cond_jmp_op()
15517 !WARN_ON_ONCE(src_reg->id != other_branch_regs[insn->src_reg].id)) { in check_cond_jmp_op()
15518 sync_linked_regs(this_branch, src_reg, &linked_regs); in check_cond_jmp_op()
15519 sync_linked_regs(other_branch, &other_branch_regs[insn->src_reg], &linked_regs); in check_cond_jmp_op()
15540 __is_pointer_value(false, src_reg) && __is_pointer_value(false, dst_reg) && in check_cond_jmp_op()
15541 type_may_be_null(src_reg->type) != type_may_be_null(dst_reg->type) && in check_cond_jmp_op()
15542 base_type(src_reg->type) != PTR_TO_BTF_ID && in check_cond_jmp_op()
15557 if (type_may_be_null(src_reg->type)) in check_cond_jmp_op()
15558 mark_ptr_not_null_reg(&eq_branch_regs[insn->src_reg]); in check_cond_jmp_op()
15578  } else if (!try_match_pkt_pointers(insn, dst_reg, &regs[insn->src_reg], in check_cond_jmp_op()
15613 if (insn->src_reg == 0) { in check_ld_imm()
15627 if (insn->src_reg == BPF_PSEUDO_BTF_ID) { in check_ld_imm()
15644 if (insn->src_reg == BPF_PSEUDO_FUNC) { in check_ld_imm()
15666 if (insn->src_reg == BPF_PSEUDO_MAP_VALUE || in check_ld_imm()
15667 insn->src_reg == BPF_PSEUDO_MAP_IDX_VALUE) { in check_ld_imm()
15676 } else if (insn->src_reg == BPF_PSEUDO_MAP_FD || in check_ld_imm()
15677 insn->src_reg == BPF_PSEUDO_MAP_IDX) { in check_ld_imm()
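check_ld_imm() is dispatching on the src_reg tag of the two-slot BPF_LD | BPF_IMM | BPF_DW instruction, where src_reg == 0 means a plain 64-bit constant and nonzero values say what the immediate encodes. The tag values, from include/uapi/linux/bpf.h:

#define BPF_PSEUDO_MAP_FD	1	/* imm: map file descriptor */
#define BPF_PSEUDO_MAP_VALUE	2	/* imm: map fd; second slot's imm:
					 * offset into the map value */
#define BPF_PSEUDO_BTF_ID	3	/* imm: BTF id of a kernel variable */
#define BPF_PSEUDO_FUNC		4	/* imm: insn offset of a BPF subprog */
#define BPF_PSEUDO_MAP_IDX	5	/* imm: index into the prog's fd_array */
#define BPF_PSEUDO_MAP_IDX_VALUE 6	/* fd_array index plus value offset */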
15733 (mode == BPF_ABS && insn->src_reg != BPF_REG_0)) { in check_ld_abs()
15776 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_ld_abs()
16317 ldx->src_reg != BPF_REG_10) in mark_fastcall_pattern_for_call()
16320 if (stx->src_reg != ldx->dst_reg) in mark_fastcall_pattern_for_call()
16323 if ((BIT(stx->src_reg) & expected_regs_mask) == 0) in mark_fastcall_pattern_for_call()
16331 expected_regs_mask &= ~BIT(stx->src_reg); in mark_fastcall_pattern_for_call()
16436 if (insn->src_reg == BPF_PSEUDO_KFUNC_CALL) { in visit_insn()
16456 return visit_func_call_insn(t, insns, env, insn->src_reg == BPF_PSEUDO_CALL); in visit_insn()
18409 err = check_reg_arg(env, insn->src_reg, SRC_OP); in do_check()
18417 src_reg_type = regs[insn->src_reg].type; in do_check()
18422 err = check_mem_access(env, env->insn_idx, insn->src_reg, in do_check()
18447 err = check_reg_arg(env, insn->src_reg, SRC_OP); in do_check()
18460 BPF_WRITE, insn->src_reg, false, false); in do_check()
18471 insn->src_reg != BPF_REG_0) { in do_check()
18498 (insn->src_reg != BPF_PSEUDO_KFUNC_CALL in do_check()
18500 (insn->src_reg != BPF_REG_0 && in do_check()
18501 insn->src_reg != BPF_PSEUDO_CALL && in do_check()
18502 insn->src_reg != BPF_PSEUDO_KFUNC_CALL) || in do_check()
18510 if ((insn->src_reg == BPF_REG_0 && insn->imm != BPF_FUNC_spin_unlock) || in do_check()
18511 (insn->src_reg == BPF_PSEUDO_KFUNC_CALL && in do_check()
18517 if (insn->src_reg == BPF_PSEUDO_CALL) { in do_check()
18519 } else if (insn->src_reg == BPF_PSEUDO_KFUNC_CALL) { in do_check()
18534 insn->src_reg != BPF_REG_0 || in do_check()
18551 insn->src_reg != BPF_REG_0 || in do_check()
19006 insn[1].dst_reg != 0 || insn[1].src_reg != 0 || in resolve_pseudo_ldimm64()
19012 if (insn[0].src_reg == 0) in resolve_pseudo_ldimm64()
19016 if (insn[0].src_reg == BPF_PSEUDO_BTF_ID) { in resolve_pseudo_ldimm64()
19024 if (insn[0].src_reg == BPF_PSEUDO_FUNC) { in resolve_pseudo_ldimm64()
19033 switch (insn[0].src_reg) { in resolve_pseudo_ldimm64()
19047 switch (insn[0].src_reg) { in resolve_pseudo_ldimm64()
19076 if (insn[0].src_reg == BPF_PSEUDO_MAP_FD || in resolve_pseudo_ldimm64()
19077 insn[0].src_reg == BPF_PSEUDO_MAP_IDX) { in resolve_pseudo_ldimm64()
19182 if (insn->src_reg == BPF_PSEUDO_FUNC) in convert_pseudo_ld_imm64()
19184 insn->src_reg = 0; in convert_pseudo_ld_imm64()
19662 zext_patch[1].src_reg = load_reg; in opt_subreg_zext_lo32_rnd_hi32()
20537 BPF_JNE | BPF_K, insn->src_reg, in do_misc_fixups()
20546 BPF_JEQ | BPF_K, insn->src_reg, in do_misc_fixups()
20557 BPF_MOV64_REG(BPF_REG_AX, insn->src_reg), in do_misc_fixups()
20580 BPF_MOV64_REG(BPF_REG_AX, insn->src_reg), in do_misc_fixups()
20629 *patch++ = BPF_MOV64_REG(BPF_REG_AX, insn->src_reg); in do_misc_fixups()
20688 off_reg = issrc ? insn->src_reg : insn->dst_reg; in do_misc_fixups()
20702 *patch++ = BPF_MOV64_REG(insn->dst_reg, insn->src_reg); in do_misc_fixups()
20703 insn->src_reg = BPF_REG_AX; in do_misc_fixups()
20747 if (insn->src_reg == BPF_PSEUDO_CALL) in do_misc_fixups()
20749 if (insn->src_reg == BPF_PSEUDO_KFUNC_CALL) { in do_misc_fixups()
21382 insn->src_reg == 0 && in is_bpf_loop_call()