Lines matching references to the local variable ir in lzo1x_1_do_compress() (the zero-run scan; non-contiguous line numbers mark source lines elided from the listing):
 52  const unsigned char *ir = ip + 4;           /* local declaration */

 60  for (; (ir + 32) <= limit; ir += 32) {
 61          dv64 = get_unaligned((u64 *)ir);
 62          dv64 |= get_unaligned((u64 *)ir + 1);
 63          dv64 |= get_unaligned((u64 *)ir + 2);
 64          dv64 |= get_unaligned((u64 *)ir + 3);
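
The loop at lines 60-64 ORs four unaligned 64-bit loads together so that a whole 32-byte window can be dismissed with a single test when every byte in it is zero. Below is a minimal self-contained sketch of that idea in plain C, with a memcpy-based load standing in for the kernel's get_unaligned(); the helper names are illustrative, not from the source.

#include <stdint.h>
#include <string.h>

/* Unaligned 64-bit load; compilers typically lower this memcpy to one load. */
static uint64_t load64(const unsigned char *p)
{
        uint64_t v;
        memcpy(&v, p, sizeof(v));
        return v;
}

/* Skip 32-byte blocks that are entirely zero: OR four words together and
 * test once, stopping at the first block that contains a nonzero byte. */
static const unsigned char *skip_zero_blocks(const unsigned char *ir,
                                             const unsigned char *limit)
{
        for (; ir + 32 <= limit; ir += 32) {
                uint64_t dv64 = load64(ir) | load64(ir + 8) |
                                load64(ir + 16) | load64(ir + 24);
                if (dv64)
                        break;
        }
        return ir;
}
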
 68  for (; (ir + 8) <= limit; ir += 8) {
 69          dv64 = get_unaligned((u64 *)ir);
 72                  ir += __builtin_ctzll(dv64) >> 3;
 74                  ir += __builtin_clzll(dv64) >> 3;
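
Once an 8-byte word containing a nonzero byte turns up, __builtin_ctzll(dv64) >> 3 (line 72) converts the bit index of the lowest set bit into a byte index, which on a little-endian machine is exactly the offset of the first nonzero byte; the big-endian variant on line 74 uses __builtin_clzll for the same reason. A small sketch of the little-endian case, assuming the GCC/Clang builtins and a hypothetical helper name:

#include <stdint.h>

/* dv64 is a nonzero 64-bit word loaded from ir on a little-endian machine.
 * ctzll counts trailing zero bits; >> 3 converts that to a byte index,
 * because byte 0 of the word is its least significant byte. */
static const unsigned char *first_nonzero_byte_le(const unsigned char *ir,
                                                  uint64_t dv64)
{
        return ir + (__builtin_ctzll(dv64) >> 3);
}
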
 82  while ((ir < (const unsigned char *)
 83                  ALIGN((uintptr_t)ir, 4)) &&
 84          (ir < limit) && (*ir == 0))
 85          ir++;
 86  if (IS_ALIGNED((uintptr_t)ir, 4)) {
 87          for (; (ir + 4) <= limit; ir += 4) {
 88                  dv = *((u32 *)ir);
 91                          ir += __builtin_ctz(dv) >> 3;
 93                          ir += __builtin_clz(dv) >> 3;
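
Lines 82-93 are the fallback for targets without cheap unaligned 64-bit loads: step byte by byte until ir reaches a 4-byte boundary (as long as the data is still zero), then scan aligned 32-bit words, again using ctz/clz to land on the first nonzero byte. A rough little-endian equivalent with the kernel's ALIGN/IS_ALIGNED macros open-coded, for illustration only:

#include <stdint.h>

/* Fallback without unaligned 64-bit loads: byte-step to a 4-byte boundary
 * while the data is still zero, then scan aligned 32-bit words. */
static const unsigned char *skip_zero_words(const unsigned char *ir,
                                            const unsigned char *limit)
{
        while (((uintptr_t)ir & 3) && ir < limit && *ir == 0)
                ir++;
        if (((uintptr_t)ir & 3) == 0) {
                for (; ir + 4 <= limit; ir += 4) {
                        uint32_t dv = *(const uint32_t *)ir;
                        if (dv) {
                                /* little-endian: lowest byte is first in memory */
                                ir += __builtin_ctz(dv) >> 3;
                                break;
                        }
                }
        }
        return ir;
}
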
102  while (likely(ir < limit) && unlikely(*ir == 0))
103          ir++;
104  run_length = ir - ip;
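
The word-wide passes can stop a few bytes short of the end of the run (for instance when fewer than a full word remains before limit), so the byte loop at lines 102-103 finishes the scan and line 104 takes the run length as the distance from the original input pointer ip; since ir starts at ip + 4 (line 52), the 4 bytes between ip and the initial ir are counted as well. As a way of tying the fragments together, here is a minimal sketch of the whole measurement; note that the kernel selects one word-wide path at compile time, whereas this sketch simply chains the illustrative helpers from above:

#include <stddef.h>

/* Length of the zero run starting at ip, where ir begins at ip + 4
 * (as on line 52) and the scan never looks past limit. */
static size_t zero_run_length(const unsigned char *ip,
                              const unsigned char *limit)
{
        const unsigned char *ir = ip + 4;

        ir = skip_zero_blocks(ir, limit);   /* 32 bytes at a time   */
        ir = skip_zero_words(ir, limit);    /* one word at a time   */
        while (ir < limit && *ir == 0)      /* remaining tail bytes */
                ir++;
        return (size_t)(ir - ip);           /* counted from ip, like line 104 */
}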