Lines matching refs:o3 (lines that reference register %o3)
96 andcc %o1, 0xffffff80, %o3
103 andcc %o1, 0xffffff80, %o3
114 andcc %o1, 0xffffff80, %o3 ! num loop iterations
122 subcc %o3, 128, %o3 ! detract from loop iters
224 andcc %o3, 4, %g0
232 andcc %o3, 4, %g0
235 andcc %o3, 3, %o3
241 andcc %o3, 3, %g0
244 addcc %o3, -1, %g0
246 subcc %o3, 2, %o3
267 srl %g1, 1, %o3
268 2: cmp %o3, 0
270 andcc %g1, 0xf, %o3
271 andcc %o3, %o0, %g0 ! Check %o0 only (%o1 has the same last 2 bits)
273 srl %o3, 1, %o3
323 5: CSUMCOPY_BIGCHUNK(%o0,%o1,%g7,0x00,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
324 CSUMCOPY_BIGCHUNK(%o0,%o1,%g7,0x20,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
325 CSUMCOPY_BIGCHUNK(%o0,%o1,%g7,0x40,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
326 CSUMCOPY_BIGCHUNK(%o0,%o1,%g7,0x60,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
335 andcc %g1, 0xf, %o3 ! get low bits of length (clears carry btw)
352 andcc %o3, 0xf, %g0 ! check for low bits set
354 andcc %o3, 8, %g0 ! begin checks for that code
357 ccdbl: CSUMCOPY_BIGCHUNK_ALIGNED(%o0,%o1,%g7,0x00,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
358 CSUMCOPY_BIGCHUNK_ALIGNED(%o0,%o1,%g7,0x20,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
359 CSUMCOPY_BIGCHUNK_ALIGNED(%o0,%o1,%g7,0x40,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
360 CSUMCOPY_BIGCHUNK_ALIGNED(%o0,%o1,%g7,0x60,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
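These matches appear to come from 32-bit SPARC checksum-and-copy assembly (the %o-register syntax, the carry-folding hinted at by the comments, and the CSUMCOPY_BIGCHUNK macros all point that way): %o3 first holds the length rounded down to a multiple of 128 bytes (the 0xffffff80 mask) and is counted down by 128 per pass of the unrolled big-chunk loop, then later carries the low bits of the length for the tail paths. The C sketch below is a minimal model of that structure under stated assumptions: csum_copy_sketch is a hypothetical name, its interface is not the kernel's, the tail bytes are copied but not checksummed, and a 64-bit end-around-carry fold stands in for the add-with-carry chains the macros expand to.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Minimal sketch, NOT the kernel implementation: copy len bytes from src to
 * dst while accumulating a 32-bit checksum with end-around carry, mirroring
 * how %o3 counts down 128-byte chunks in the listing above. */
static uint32_t csum_copy_sketch(const uint8_t *src, uint8_t *dst,
                                 size_t len, uint32_t sum)
{
	size_t big = len & ~(size_t)0x7f;	/* like: andcc %o1, 0xffffff80, %o3 */
	size_t off = 0;

	while (off < big) {			/* one pass ~ four big-chunk macros */
		uint64_t acc = sum;
		for (int i = 0; i < 128; i += 4) {
			uint32_t w;
			memcpy(&w, src + off + i, sizeof(w));	/* load word  */
			memcpy(dst + off + i, &w, sizeof(w));	/* store word */
			acc += w;				/* add to sum */
		}
		/* fold carries back into 32 bits (end-around carry) */
		acc = (acc & 0xffffffffu) + (acc >> 32);
		acc = (acc & 0xffffffffu) + (acc >> 32);
		sum = (uint32_t)acc;
		off += 128;			/* like: subcc %o3, 128, %o3 */
	}

	/* Tail (len & 0x7f bytes): only copied here; the real code also folds
	 * these into the checksum via its word/halfword/byte paths. */
	for (size_t i = off; i < len; i++)
		dst[i] = src[i];

	return sum;
}

The 0x7f split and the 128-byte stride mirror the andcc %o1, 0xffffff80, %o3 and subcc %o3, 128, %o3 lines above; the real assembly additionally checksums the tail and overlaps loads, stores, and adds across the four back-to-back macro invocations, which the sketch does not attempt.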