Lines Matching refs:u32_max_value (BPF verifier; each hit shows the source line number, the matching code, and the enclosing function)
1765 reg->u32_max_value = (u32)imm; in ___mark_reg_known()
1787 reg->u32_max_value = (u32)imm; in __mark_reg32_known()
1917 reg->u32_max_value = U32_MAX; in __mark_reg_unbounded()
1933 reg->u32_max_value = U32_MAX; in __mark_reg32_unbounded()
1947 reg->u32_max_value = min(reg->u32_max_value, in __update_reg32_bounds()
1999 reg->u32_max_value = min_t(u32, reg->u32_max_value, (u32)reg->umax_value); in __reg32_deduce_bounds()
2010 reg->u32_max_value = min_t(u32, reg->u32_max_value, (u32)reg->smax_value); in __reg32_deduce_bounds()
2044 if ((s32)reg->u32_min_value <= (s32)reg->u32_max_value) { in __reg32_deduce_bounds()
2046 reg->s32_max_value = min_t(s32, reg->s32_max_value, reg->u32_max_value); in __reg32_deduce_bounds()
2054 reg->u32_max_value = min_t(u32, reg->s32_max_value, reg->u32_max_value); in __reg32_deduce_bounds()
2160 new_umax = (reg->umax_value & ~0xffffffffULL) | reg->u32_max_value; in __reg_deduce_mixed_bounds()
2165 new_smax = (reg->smax_value & ~0xffffffffULL) | reg->u32_max_value; in __reg_deduce_mixed_bounds()
2237 reg->u32_max_value)); in __reg_bound_offset()
2265 reg->u32_min_value > reg->u32_max_value || in reg_bounds_sanity_check()
2286 if (reg->u32_min_value != uval32 || reg->u32_max_value != uval32 || in reg_bounds_sanity_check()
2299 reg->u32_min_value, reg->u32_max_value, in reg_bounds_sanity_check()
2316 reg->umax_value = reg->u32_max_value; in __reg_assign_32_into_64()
6291 reg->u32_max_value = U32_MAX; in set_sext64_default_val()
6315 reg->u32_max_value = reg->u32_min_value = u64_cval; in coerce_reg_to_size_sx()
6345 reg->u32_max_value = reg->umax_value = s64_max; in coerce_reg_to_size_sx()
6365 reg->u32_max_value = U32_MAX; in set_sext32_default_val()
6384 reg->u32_min_value = reg->u32_max_value = u32_val; in coerce_subreg_to_size_sx()
6410 reg->u32_max_value = (u32)s32_max; in coerce_subreg_to_size_sx()
10202 ret_reg->u32_max_value = nr_cpu_ids - 1; in do_refine_retval_range()
13487 u32 *dst_umax = &dst_reg->u32_max_value; in scalar32_min_max_add()
13495 check_add_overflow(*dst_umax, src_reg->u32_max_value, dst_umax)) { in scalar32_min_max_add()
13527 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_sub()
13538 dst_reg->u32_max_value = U32_MAX; in scalar32_min_max_sub()
13542 dst_reg->u32_max_value -= umin_val; in scalar32_min_max_sub()
13576 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_mul()
13586 if (umax_val > U16_MAX || dst_reg->u32_max_value > U16_MAX) { in scalar32_min_max_mul()
13592 dst_reg->u32_max_value *= umax_val; in scalar32_min_max_mul()
13593 if (dst_reg->u32_max_value > S32_MAX) { in scalar32_min_max_mul()
13599 dst_reg->s32_max_value = dst_reg->u32_max_value; in scalar32_min_max_mul()
13641 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_and()
13652 dst_reg->u32_max_value = min(dst_reg->u32_max_value, umax_val); in scalar32_min_max_and()
13657 if ((s32)dst_reg->u32_min_value <= (s32)dst_reg->u32_max_value) { in scalar32_min_max_and()
13659 dst_reg->s32_max_value = dst_reg->u32_max_value; in scalar32_min_max_and()
13715 dst_reg->u32_max_value = var32_off.value | var32_off.mask; in scalar32_min_max_or()
13720 if ((s32)dst_reg->u32_min_value <= (s32)dst_reg->u32_max_value) { in scalar32_min_max_or()
13722 dst_reg->s32_max_value = dst_reg->u32_max_value; in scalar32_min_max_or()
13775 dst_reg->u32_max_value = var32_off.value | var32_off.mask; in scalar32_min_max_xor()
13780 if ((s32)dst_reg->u32_min_value <= (s32)dst_reg->u32_max_value) { in scalar32_min_max_xor()
13782 dst_reg->s32_max_value = dst_reg->u32_max_value; in scalar32_min_max_xor()
13828 if (umax_val > 31 || dst_reg->u32_max_value > 1ULL << (31 - umax_val)) { in __scalar32_min_max_lsh()
13830 dst_reg->u32_max_value = U32_MAX; in __scalar32_min_max_lsh()
13833 dst_reg->u32_max_value <<= umax_val; in __scalar32_min_max_lsh()
13840 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_lsh()
13904 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_rsh()
13926 dst_reg->u32_max_value >>= umin_val; in scalar32_min_max_rsh()
13983 dst_reg->u32_max_value = U32_MAX; in scalar32_min_max_arsh()
14025 && src_reg->u32_min_value == src_reg->u32_max_value) in is_safe_to_compute_dst_reg_range()
14619 u64 umax1 = is_jmp32 ? (u64)reg1->u32_max_value : reg1->umax_value; in is_scalar_branch_taken()
14623 u64 umax2 = is_jmp32 ? (u64)reg2->u32_max_value : reg2->umax_value; in is_scalar_branch_taken()
14644 if (reg1->u32_min_value > reg2->u32_max_value || in is_scalar_branch_taken()
14645 reg1->u32_max_value < reg2->u32_min_value) in is_scalar_branch_taken()
14668 if (reg1->u32_min_value > reg2->u32_max_value || in is_scalar_branch_taken()
14669 reg1->u32_max_value < reg2->u32_min_value) in is_scalar_branch_taken()
14900 reg1->u32_max_value = min(reg1->u32_max_value, reg2->u32_max_value); in regs_refine_cond_op()
14904 reg2->u32_max_value = reg1->u32_max_value; in regs_refine_cond_op()
14947 if (reg1->u32_max_value == (u32)val) in regs_refine_cond_op()
14948 reg1->u32_max_value--; in regs_refine_cond_op()
15004 reg1->u32_max_value = min(reg1->u32_max_value, reg2->u32_max_value); in regs_refine_cond_op()
15013 reg1->u32_max_value = min(reg1->u32_max_value, reg2->u32_max_value - 1); in regs_refine_cond_op()
17053 old->u32_max_value >= cur->u32_max_value && in range_within()
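Taken together, the hits above follow one pattern: u32_max_value is initialised to U32_MAX when nothing is known about a register, pinned to a constant when the value is known exactly, and afterwards only ever tightened with min()-style updates as bounds are deduced from the 64-bit range or refined by conditionals. Below is a minimal standalone sketch of that pattern; the struct and helper names are hypothetical and the logic is heavily simplified, so treat it as an illustration rather than the verifier's actual implementation (the real code lives in the functions listed above in kernel/bpf/verifier.c).

/*
 * Illustrative sketch only: a simplified stand-in for the verifier's
 * 32-bit bounds tracking.  The struct and helpers are hypothetical;
 * the real logic lives in struct bpf_reg_state and the functions
 * listed above.
 */
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

struct toy_reg {
	uint64_t umin_value, umax_value;       /* full 64-bit unsigned range */
	uint32_t u32_min_value, u32_max_value; /* range of the low 32 bits */
};

static void toy_mark_unbounded(struct toy_reg *reg)
{
	reg->umin_value = 0;
	reg->umax_value = UINT64_MAX;
	reg->u32_min_value = 0;
	reg->u32_max_value = UINT32_MAX;       /* nothing known: widest range */
}

static void toy_mark_known(struct toy_reg *reg, uint64_t imm)
{
	reg->umin_value = reg->umax_value = imm;
	reg->u32_min_value = reg->u32_max_value = (uint32_t)imm; /* exact value */
}

static void toy_deduce_u32_bounds(struct toy_reg *reg)
{
	/*
	 * When the whole 64-bit range fits in 32 bits, the 32-bit bounds can
	 * only be tightened, never widened (the min()/min_t() pattern above).
	 */
	if (reg->umax_value <= UINT32_MAX) {
		if ((uint32_t)reg->umin_value > reg->u32_min_value)
			reg->u32_min_value = (uint32_t)reg->umin_value;
		if ((uint32_t)reg->umax_value < reg->u32_max_value)
			reg->u32_max_value = (uint32_t)reg->umax_value;
	}
}

int main(void)
{
	struct toy_reg r;

	toy_mark_unbounded(&r);
	r.umax_value = 1000;            /* pretend a 64-bit bound was learned */
	toy_deduce_u32_bounds(&r);
	printf("u32 range: [%" PRIu32 ", %" PRIu32 "]\n",
	       r.u32_min_value, r.u32_max_value);   /* [0, 1000] */

	toy_mark_known(&r, 42);
	printf("u32 range: [%" PRIu32 ", %" PRIu32 "]\n",
	       r.u32_min_value, r.u32_max_value);   /* [42, 42] */
	return 0;
}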