Lines matching refs:smin_value — references to the smin_value bounds field in the BPF verifier (kernel/bpf/verifier.c). Each entry gives the source line number, the matching code, and the enclosing function.
375 if (reg->smin_value > S64_MIN) { in verbose_invalid_scalar()
376 verbose(env, " smin=%lld", reg->smin_value); in verbose_invalid_scalar()
1757 reg->smin_value = (s64)imm; in ___mark_reg_known()
1909 reg->smin_value = S64_MIN; in __mark_reg_unbounded()
1922 reg->smin_value = S64_MIN; in __mark_reg64_unbounded()
1954 reg->smin_value = max_t(s64, reg->smin_value, in __update_reg64_bounds()
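The ___mark_reg_known() and __mark_reg_unbounded() hits above are the two extremes of scalar tracking: a register holding a known immediate collapses every bound to that value, while an unknown scalar is widened to the full range. A minimal user-space sketch of that pattern (the struct and helper names are simplified stand-ins, not the kernel's struct bpf_reg_state):

#include <stdint.h>

typedef int64_t s64;
typedef uint64_t u64;

/* Simplified stand-in for the per-register bounds kept by the verifier. */
struct bounds {
	s64 smin_value, smax_value;
	u64 umin_value, umax_value;
};

/* Known constant: every bound collapses to the immediate
 * (cf. ___mark_reg_known() above). */
void mark_known(struct bounds *b, u64 imm)
{
	b->smin_value = b->smax_value = (s64)imm;
	b->umin_value = b->umax_value = imm;
}

/* Unknown scalar: widen to the full 64-bit range
 * (cf. __mark_reg_unbounded() above). */
void mark_unbounded(struct bounds *b)
{
	b->smin_value = INT64_MIN;
	b->smax_value = INT64_MAX;
	b->umin_value = 0;
	b->umax_value = UINT64_MAX;
}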
2006 if ((reg->smin_value >> 32) == (reg->smax_value >> 32)) { in __reg32_deduce_bounds()
2008 if ((u32)reg->smin_value <= (u32)reg->smax_value) { in __reg32_deduce_bounds()
2009 reg->u32_min_value = max_t(u32, reg->u32_min_value, (u32)reg->smin_value); in __reg32_deduce_bounds()
2013 if ((s32)reg->smin_value <= (s32)reg->smax_value) { in __reg32_deduce_bounds()
2014 reg->s32_min_value = max_t(s32, reg->s32_min_value, (s32)reg->smin_value); in __reg32_deduce_bounds()
2036 if ((u32)(reg->smin_value >> 32) + 1 == (u32)(reg->smax_value >> 32) && in __reg32_deduce_bounds()
2037 (s32)reg->smin_value < 0 && (s32)reg->smax_value >= 0) { in __reg32_deduce_bounds()
2038 reg->s32_min_value = max_t(s32, reg->s32_min_value, (s32)reg->smin_value); in __reg32_deduce_bounds()
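The __reg32_deduce_bounds() entries apply one idea: if the upper 32 bits of smin_value and smax_value agree, every value in the 64-bit range shares those upper bits, so the low 32 bits can tighten the 32-bit bounds. A stand-alone sketch of that rule (simplified; the real function also refines the u32 bounds and handles a wrap-around special case, and the helper name here is illustrative):

#include <stdint.h>

typedef int64_t s64;
typedef int32_t s32;

/* If smin and smax share their upper 32 bits, the low 32 bits of any value
 * in [smin, smax] fall between (s32)smin and (s32)smax, provided that
 * sub-range does not cross the 32-bit sign boundary
 * (cf. __reg32_deduce_bounds() above). */
void deduce_s32_bounds(s64 smin, s64 smax, s32 *s32_min, s32 *s32_max)
{
	if ((smin >> 32) != (smax >> 32))
		return;
	if ((s32)smin > (s32)smax)
		return;
	if (*s32_min < (s32)smin)
		*s32_min = (s32)smin;
	if (*s32_max > (s32)smax)
		*s32_max = (s32)smax;
}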
2128 reg->smin_value = max_t(s64, reg->smin_value, reg->umin_value); in __reg64_deduce_bounds()
2135 if ((u64)reg->smin_value <= (u64)reg->smax_value) { in __reg64_deduce_bounds()
2136 reg->umin_value = max_t(u64, reg->smin_value, reg->umin_value); in __reg64_deduce_bounds()
2164 new_smin = (reg->smin_value & ~0xffffffffULL) | reg->u32_min_value; in __reg_deduce_mixed_bounds()
2166 reg->smin_value = max_t(s64, reg->smin_value, new_smin); in __reg_deduce_mixed_bounds()
2177 new_smin = (reg->smin_value & ~0xffffffffULL) | (u32)reg->s32_min_value; in __reg_deduce_mixed_bounds()
2179 reg->smin_value = max_t(s64, reg->smin_value, new_smin); in __reg_deduce_mixed_bounds()
2212 if (reg->s32_min_value >= 0 && reg->smin_value >= S32_MIN && reg->smax_value <= S32_MAX) { in __reg_deduce_mixed_bounds()
2213 reg->smin_value = reg->s32_min_value; in __reg_deduce_mixed_bounds()
2218 tnum_range(reg->smin_value, reg->smax_value)); in __reg_deduce_mixed_bounds()
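The __reg64_deduce_bounds() and __reg_deduce_mixed_bounds() hits cross-pollinate the signed, unsigned, 32-bit and 64-bit views of the same register. One of the simpler rules, sketched below as a simplified variant of the check around line 2212 (assuming the s32 bounds describe the low 32 bits interpreted as s32; the helper name is illustrative): when the whole 64-bit range already fits in [S32_MIN, S32_MAX], each value equals its own 32-bit truncation, so the 32-bit signed bounds also bound the 64-bit value.

#include <stdint.h>

typedef int64_t s64;
typedef int32_t s32;

void mixed_deduce(s64 *smin, s64 *smax, s32 s32_min, s32 s32_max)
{
	/* Every value in [*smin, *smax] fits in 32 bits, so v == (s32)v and
	 * the 32-bit signed bounds are also valid 64-bit bounds. */
	if (*smin >= INT32_MIN && *smax <= INT32_MAX) {
		if (*smin < (s64)s32_min)
			*smin = s32_min;
		if (*smax > (s64)s32_max)
			*smax = s32_max;
	}
}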
2264 reg->smin_value > reg->smax_value || in reg_bounds_sanity_check()
2276 reg->smin_value != sval || reg->smax_value != sval) { in reg_bounds_sanity_check()
2298 reg->smin_value, reg->smax_value, in reg_bounds_sanity_check()
2324 reg->smin_value = reg->s32_min_value; in __reg_assign_32_into_64()
2327 reg->smin_value = 0; in __reg_assign_32_into_64()
2383 reg->smin_value = max_t(s64, reg->smin_value, s32_min); in __mark_reg_s32_range()
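The __reg_assign_32_into_64() and __mark_reg_s32_range() hits deal with results produced by 32-bit (ALU32) instructions, which zero-extend the destination, so the 64-bit value is exactly the u32 value. If the 32-bit signed bounds are non-negative they carry over directly; otherwise the safe fallback is the full zero-extended range. A simplified sketch (illustrative helper, not the kernel function):

#include <stdint.h>

typedef int64_t s64;
typedef int32_t s32;

/* After a 32-bit ALU op the destination's upper half is zeroed, so
 * non-negative s32 bounds are also valid s64 bounds; otherwise fall back
 * to [0, U32_MAX] (cf. __reg_assign_32_into_64() above). */
void assign_32_into_64(s64 *smin, s64 *smax, s32 s32_min, s32 s32_max)
{
	if (s32_min >= 0) {
		*smin = s32_min;
		*smax = s32_max;
	} else {
		*smin = 0;
		*smax = UINT32_MAX;
	}
}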
4776 min_off = ptr_reg->smin_value + off; in check_stack_write_var_off()
5090 min_off = reg->smin_value + off; in check_stack_read_var_off()
5264 if (reg->smin_value < 0 && in check_mem_region_access()
5265 (reg->smin_value == S64_MIN || in check_mem_region_access()
5266 (off + reg->smin_value != (s64)(s32)(off + reg->smin_value)) || in check_mem_region_access()
5267 reg->smin_value + off < 0)) { in check_mem_region_access()
5272 err = __check_mem_access(env, regno, reg->smin_value + off, size, in check_mem_region_access()
5576 if (reg->smin_value + off < p + field->size && in check_map_access()
5669 if (reg->smin_value < 0) { in check_packet_access()
5757 if (reg->smin_value < 0) { in check_sock_access()
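The check_stack_*_var_off(), check_mem_region_access(), check_map_access(), check_packet_access() and check_sock_access() hits all use smin_value the same way: for a variable-offset access the verifier reasons about the worst case, the smallest offset the register could hold, and rejects the access if that minimum cannot be computed safely or reaches below the object. A stand-alone sketch of the lower-bound part (helper name and signature are illustrative; the upper bound is checked separately against smax_value/umax_value):

#include <stdint.h>
#include <stdbool.h>

typedef int64_t s64;

/* The lowest effective offset reg+off can take must be computable without
 * s64 overflow and must not reach below the start of the object
 * (cf. check_mem_region_access() above; simplified). */
bool min_offset_ok(s64 smin, s64 off)
{
	s64 min_off;

	if (__builtin_add_overflow(smin, off, &min_off))
		return false;
	return min_off >= 0;
}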
6263 reg->smin_value = reg->umin_value; in coerce_reg_to_size()
6279 reg->smin_value = reg->s32_min_value = S8_MIN; in set_sext64_default_val()
6282 reg->smin_value = reg->s32_min_value = S16_MIN; in set_sext64_default_val()
6286 reg->smin_value = reg->s32_min_value = S32_MIN; in set_sext64_default_val()
6312 reg->smax_value = reg->smin_value = u64_cval; in coerce_reg_to_size_sx()
6320 top_smin_value = ((u64)reg->smin_value >> num_bits) << num_bits; in coerce_reg_to_size_sx()
6328 init_s64_min = (s8)reg->smin_value; in coerce_reg_to_size_sx()
6331 init_s64_min = (s16)reg->smin_value; in coerce_reg_to_size_sx()
6334 init_s64_min = (s32)reg->smin_value; in coerce_reg_to_size_sx()
6342 reg->s32_min_value = reg->smin_value = s64_min; in coerce_reg_to_size_sx()
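The coerce_reg_to_size() and coerce_reg_to_size_sx() hits cover narrowing moves: plain truncation recomputes bounds from the unsigned view (line 6263 sets smin_value from umin_value once the truncated value is known non-negative as s64), while the sign-extending variant either keeps precise bounds when the upper bits of smin and smax agree, or falls back to the full range of the narrow type. A sketch of that conservative fallback (illustrative helper, mirroring the idea behind set_sext64_default_val() above):

#include <stdint.h>

typedef int64_t s64;

/* Conservative bounds after sign-extending the low `size` bytes of a
 * register whose value is otherwise unknown: the result can be anything
 * representable in that width. */
void sext_default_bounds(int size, s64 *smin, s64 *smax)
{
	if (size == 1) {
		*smin = INT8_MIN;
		*smax = INT8_MAX;
	} else if (size == 2) {
		*smin = INT16_MIN;
		*smax = INT16_MAX;
	} else {
		*smin = INT32_MIN;
		*smax = INT32_MAX;
	}
}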
6849 reg->smin_value <= -BPF_MAX_VAR_OFF) { in check_stack_access_within_bounds()
6854 min_off = reg->smin_value + off; in check_stack_access_within_bounds()
7335 min_off = reg->smin_value + off; in check_stack_range_initialized()
7553 if (reg->smin_value < 0) { in check_mem_size_reg()
10072 return range.minval <= reg->smin_value && reg->smax_value <= range.maxval; in retval_range_within()
10196 ret_reg->smin_value = -MAX_ERRNO; in do_refine_retval_range()
10207 ret_reg->smin_value = 0; in do_refine_retval_range()
12901 s64 smin = reg->smin_value; in check_reg_sane_offset()
12956 ptr_reg->smin_value : in retrieve_ptr_limit()
13045 bool off_is_neg = off_reg->smin_value < 0; in sanitize_ptr_alu()
13065 (off_reg->smin_value < 0) != (off_reg->smax_value < 0)) in sanitize_ptr_alu()
13262 s64 smin_val = off_reg->smin_value, smax_val = off_reg->smax_value, in adjust_ptr_min_max_vals()
13263 smin_ptr = ptr_reg->smin_value, smax_ptr = ptr_reg->smax_value; in adjust_ptr_min_max_vals()
13358 dst_reg->smin_value = smin_ptr; in adjust_ptr_min_max_vals()
13376 if (check_add_overflow(smin_ptr, smin_val, &dst_reg->smin_value) || in adjust_ptr_min_max_vals()
13378 dst_reg->smin_value = S64_MIN; in adjust_ptr_min_max_vals()
13414 dst_reg->smin_value = smin_ptr; in adjust_ptr_min_max_vals()
13427 if (check_sub_overflow(smin_ptr, smax_val, &dst_reg->smin_value) || in adjust_ptr_min_max_vals()
13430 dst_reg->smin_value = S64_MIN; in adjust_ptr_min_max_vals()
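The check_add_overflow()/check_sub_overflow() hits in adjust_ptr_min_max_vals() (and the same shape in scalar_min_max_add()/scalar_min_max_sub() just below) show how bounds are propagated through pointer and scalar arithmetic: each end of the range is combined with an overflow check, and on s64 overflow the range is widened to unknown rather than allowed to wrap. A stand-alone sketch using the GCC/Clang builtin that the kernel's check_add_overflow() helper is built on (the function name here is illustrative):

#include <stdint.h>

typedef int64_t s64;

/* Add two tracked ranges end-to-end; if either end overflows s64, give up
 * and widen the result to the full range instead of wrapping. */
void range_add(s64 *dst_smin, s64 *dst_smax, s64 src_smin, s64 src_smax)
{
	if (__builtin_add_overflow(*dst_smin, src_smin, dst_smin) ||
	    __builtin_add_overflow(*dst_smax, src_smax, dst_smax)) {
		*dst_smin = INT64_MIN;
		*dst_smax = INT64_MAX;
	}
}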
13504 s64 *dst_smin = &dst_reg->smin_value; in scalar_min_max_add()
13509 if (check_add_overflow(*dst_smin, src_reg->smin_value, dst_smin) || in scalar_min_max_add()
13549 s64 *dst_smin = &dst_reg->smin_value; in scalar_min_max_sub()
13555 check_sub_overflow(*dst_smax, src_reg->smin_value, dst_smax)) { in scalar_min_max_sub()
13606 s64 smin_val = src_reg->smin_value; in scalar_min_max_mul()
13610 if (smin_val < 0 || dst_reg->smin_value < 0) { in scalar_min_max_mul()
13627 dst_reg->smin_value = S64_MIN; in scalar_min_max_mul()
13630 dst_reg->smin_value = dst_reg->umin_value; in scalar_min_max_mul()
13688 dst_reg->smin_value = dst_reg->umin_value; in scalar_min_max_and()
13691 dst_reg->smin_value = S64_MIN; in scalar_min_max_and()
13751 dst_reg->smin_value = dst_reg->umin_value; in scalar_min_max_or()
13754 dst_reg->smin_value = S64_MIN; in scalar_min_max_or()
13809 dst_reg->smin_value = dst_reg->umin_value; in scalar_min_max_xor()
13812 dst_reg->smin_value = S64_MIN; in scalar_min_max_xor()
13871 dst_reg->smin_value = (s64)dst_reg->s32_min_value << 32; in __scalar64_min_max_lsh()
13873 dst_reg->smin_value = S64_MIN; in __scalar64_min_max_lsh()
13952 dst_reg->smin_value = S64_MIN; in scalar_min_max_rsh()
13997 dst_reg->smin_value >>= umin_val; in scalar_min_max_arsh()
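The scalar_min_max_and()/_or()/_xor() entries share a single sign-transfer rule: bitwise results are bounded in the unsigned domain (via the tnum and umin/umax), and those unsigned bounds double as signed bounds only when both inputs are known non-negative, because then the sign bit is clear in both inputs and therefore in the result; otherwise the signed bounds are given up. A simplified sketch of that rule (illustrative helper, not the kernel code):

#include <stdint.h>

typedef int64_t s64;
typedef uint64_t u64;

/* Reuse freshly computed unsigned bounds as signed bounds only when both
 * inputs are known non-negative; otherwise drop to the full signed range
 * (cf. scalar_min_max_and() above). */
void transfer_unsigned_to_signed(s64 dst_smin_before, s64 src_smin,
				 u64 new_umin, u64 new_umax,
				 s64 *new_smin, s64 *new_smax)
{
	if (dst_smin_before < 0 || src_smin < 0) {
		*new_smin = INT64_MIN;
		*new_smax = INT64_MAX;
	} else {
		*new_smin = (s64)new_umin;
		*new_smax = (s64)new_umax;
	}
}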
14029 && src_reg->smin_value == src_reg->smax_value in is_safe_to_compute_dst_reg_range()
14620 s64 smin1 = is_jmp32 ? (s64)reg1->s32_min_value : reg1->smin_value; in is_scalar_branch_taken()
14624 s64 smin2 = is_jmp32 ? (s64)reg2->s32_min_value : reg2->smin_value; in is_scalar_branch_taken()
14914 reg1->smin_value = max(reg1->smin_value, reg2->smin_value); in regs_refine_cond_op()
14918 reg2->smin_value = reg1->smin_value; in regs_refine_cond_op()
14958 if (reg1->smin_value == (s64)val) in regs_refine_cond_op()
14959 reg1->smin_value++; in regs_refine_cond_op()
15026 reg2->smin_value = max(reg1->smin_value, reg2->smin_value); in regs_refine_cond_op()
15035 reg2->smin_value = max(reg1->smin_value + 1, reg2->smin_value); in regs_refine_cond_op()
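The is_scalar_branch_taken() and regs_refine_cond_op() hits are the branch side of bounds tracking: first decide whether a conditional jump's outcome is already fixed by the bounds, and if not, narrow both operands along each edge. A simplified sketch of one narrowing rule, for a signed "r1 <= r2" that is known to hold on the taken path (the helper name is illustrative; strict comparisons additionally add 1):

#include <stdint.h>

typedef int64_t s64;

/* If r1 <= r2 holds, r2 can be no smaller than r1's minimum and r1 can be
 * no larger than r2's maximum (cf. regs_refine_cond_op() above). */
void refine_jsle_true(s64 *r1_smin, s64 *r1_smax, s64 *r2_smin, s64 *r2_smax)
{
	if (*r2_smin < *r1_smin)
		*r2_smin = *r1_smin;
	if (*r1_smax > *r2_smax)
		*r1_smax = *r2_smax;
}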
15097 if (WARN_ON_ONCE(reg->smin_value || reg->smax_value || !tnum_equals_const(reg->var_off, 0))) in mark_ptr_or_null_reg()
17050 old->smin_value <= cur->smin_value && in range_within()
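The range_within() hit is the state-pruning side: an already-verified (old) state may stand in for the current one only if its tracked range covers every value the current range can take, the same subsumption check retval_range_within() applies to return values at line 10072. A tiny sketch of the predicate (illustrative name):

#include <stdint.h>
#include <stdbool.h>

typedef int64_t s64;

/* Pruning is safe only if the old range is a superset of the current one. */
bool range_within_sketch(s64 old_smin, s64 old_smax, s64 cur_smin, s64 cur_smax)
{
	return old_smin <= cur_smin && cur_smax <= old_smax;
}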