/* Copyright (C) 1991,1993,1995,1997,1998,2003,2004
   Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Torbjorn Granlund (tege@sics.se).

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#include <string.h>
#include "memcopy.h"


#include <endian.h>

#if __BYTE_ORDER == __BIG_ENDIAN
# define CMP_LT_OR_GT(a, b) ((a) > (b) ? 1 : -1)
#else
# define CMP_LT_OR_GT(a, b) memcmp_bytes ((a), (b))
#endif

/* BE VERY CAREFUL IF YOU CHANGE THIS CODE!  */

/* The strategy of this memcmp is:

   1. Compare bytes until one of the block pointers is aligned.

   2. Compare using memcmp_common_alignment or
      memcmp_not_common_alignment, depending on whether the other block
      is also aligned after the initial byte operations.  As many full
      words (of type op_t) as possible are compared this way.

   3. Compare the few remaining bytes.  */
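
/* A worked example of the strategy above (illustrative only, assuming
   OPSIZ == 4 and len >= OP_T_THRES so the word path is taken): comparing
   len == 23 bytes when S2 starts 3 bytes into an op_t means step 1
   compares 1 byte to align S2, step 2 compares 22 / 4 == 5 full words
   (20 bytes), and step 3 compares the remaining 22 % 4 == 2 bytes.  */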

#if __BYTE_ORDER != __BIG_ENDIAN
/* memcmp_bytes -- Compare A and B bytewise in the byte order of the machine.
   A and B are known to be different.
   This is needed only on little-endian machines.  */

static __inline__ int
memcmp_bytes (op_t a, op_t b)
{
  long int srcp1 = (long int) &a;
  long int srcp2 = (long int) &b;
  op_t a0, b0;

  do
    {
      a0 = ((byte *) srcp1)[0];
      b0 = ((byte *) srcp2)[0];
      srcp1 += 1;
      srcp2 += 1;
    }
  while (a0 == b0);
  return a0 - b0;
}
#endif
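
/* A worked example for memcmp_bytes (illustrative only, assuming 32-bit
   little-endian words): let A hold 0x01000002 and B hold 0x02000001.
   Numerically A < B, but the bytes at the lowest address are 0x02 in A
   and 0x01 in B, so memcmp must report A greater -- which is why
   CMP_LT_OR_GT cannot simply compare the words with `>' here.  The loop
   above scans the local copies in address order, finds 0x02 vs 0x01 at
   the first byte and returns a positive value.  */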

/* memcmp_common_alignment -- Compare blocks at SRCP1 and SRCP2 with LEN `op_t'
   objects (not LEN bytes!).  Both SRCP1 and SRCP2 should be aligned for
   memory operations on `op_t's.  */
static int
memcmp_common_alignment (long int srcp1, long int srcp2, size_t len)
{
  op_t a0, a1;
  op_t b0, b1;

  switch (len % 4)
    {
    default: /* Avoid warning about uninitialized local variables.  */
    case 2:
      a0 = ((op_t *) srcp1)[0];
      b0 = ((op_t *) srcp2)[0];
      srcp1 -= 2 * OPSIZ;
      srcp2 -= 2 * OPSIZ;
      len += 2;
      goto do1;
    case 3:
      a1 = ((op_t *) srcp1)[0];
      b1 = ((op_t *) srcp2)[0];
      srcp1 -= OPSIZ;
      srcp2 -= OPSIZ;
      len += 1;
      goto do2;
    case 0:
      if (OP_T_THRES <= 3 * OPSIZ && len == 0)
        return 0;
      a0 = ((op_t *) srcp1)[0];
      b0 = ((op_t *) srcp2)[0];
      goto do3;
    case 1:
      a1 = ((op_t *) srcp1)[0];
      b1 = ((op_t *) srcp2)[0];
      srcp1 += OPSIZ;
      srcp2 += OPSIZ;
      len -= 1;
      if (OP_T_THRES <= 3 * OPSIZ && len == 0)
        goto do0;
      /* Fall through.  */
    }
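
  /* An illustrative trace (not part of the original code): with len == 7,
     len % 4 == 3, so case 3 above preloads word 0 into a1/b1, backs both
     pointers up one word, bumps len to 8 and jumps to do2 below.  That
     partial pass compares words 0 and 1, the next full pass compares
     words 2..5, and do0 after the loop compares word 6.  Each case label
     enters the 4-way unrolled loop at the phase that matches its residue,
     so len is a multiple of 4 whenever the bottom of the loop is
     reached.  */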

  do
    {
      a0 = ((op_t *) srcp1)[0];
      b0 = ((op_t *) srcp2)[0];
      if (a1 != b1)
        return CMP_LT_OR_GT (a1, b1);

    do3:
      a1 = ((op_t *) srcp1)[1];
      b1 = ((op_t *) srcp2)[1];
      if (a0 != b0)
        return CMP_LT_OR_GT (a0, b0);

    do2:
      a0 = ((op_t *) srcp1)[2];
      b0 = ((op_t *) srcp2)[2];
      if (a1 != b1)
        return CMP_LT_OR_GT (a1, b1);

    do1:
      a1 = ((op_t *) srcp1)[3];
      b1 = ((op_t *) srcp2)[3];
      if (a0 != b0)
        return CMP_LT_OR_GT (a0, b0);

      srcp1 += 4 * OPSIZ;
      srcp2 += 4 * OPSIZ;
      len -= 4;
    }
  while (len != 0);

  /* This is the right position for do0.  Please don't move
     it into the loop.  */
 do0:
  if (a1 != b1)
    return CMP_LT_OR_GT (a1, b1);
  return 0;
}

/* memcmp_not_common_alignment -- Compare blocks at SRCP1 and SRCP2 with LEN
   `op_t' objects (not LEN bytes!).  SRCP2 should be aligned for memory
   operations on `op_t', but SRCP1 *should be unaligned*.  */
static int
memcmp_not_common_alignment (long int srcp1, long int srcp2, size_t len)
{
  op_t a0, a1, a2, a3;
  op_t b0, b1, b2, b3;
  op_t x;
  int shl, shr;

  /* Calculate how to shift a word read at the memory operation
     aligned srcp1 to make it aligned for comparison.  */

  shl = 8 * (srcp1 % OPSIZ);
  shr = 8 * OPSIZ - shl;

  /* Make SRCP1 aligned by rounding it down to the beginning of the `op_t'
     it points in the middle of.  */
  srcp1 &= -OPSIZ;
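
  /* Illustrative note (not from the original sources): MERGE (w0, shl, w1,
     shr) below reconstructs the unaligned op_t that straddles the two
     aligned words w0 and w1.  Assuming the usual little-endian definition
     of MERGE in memcopy.h, ((w0) >> (shl)) | ((w1) << (shr)), and
     OPSIZ == 4 with srcp1 % OPSIZ originally 1, we get shl == 8 and
     shr == 24, so the merged word holds the upper three bytes of w0 in its
     low positions and the low byte of w1 on top -- exactly the four bytes
     that start at the original unaligned address.  */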

  switch (len % 4)
    {
    default: /* Avoid warning about uninitialized local variables.  */
    case 2:
      a1 = ((op_t *) srcp1)[0];
      a2 = ((op_t *) srcp1)[1];
      b2 = ((op_t *) srcp2)[0];
      srcp1 -= 1 * OPSIZ;
      srcp2 -= 2 * OPSIZ;
      len += 2;
      goto do1;
    case 3:
      a0 = ((op_t *) srcp1)[0];
      a1 = ((op_t *) srcp1)[1];
      b1 = ((op_t *) srcp2)[0];
      srcp2 -= 1 * OPSIZ;
      len += 1;
      goto do2;
    case 0:
      if (OP_T_THRES <= 3 * OPSIZ && len == 0)
        return 0;
      a3 = ((op_t *) srcp1)[0];
      a0 = ((op_t *) srcp1)[1];
      b0 = ((op_t *) srcp2)[0];
      srcp1 += 1 * OPSIZ;
      goto do3;
    case 1:
      a2 = ((op_t *) srcp1)[0];
      a3 = ((op_t *) srcp1)[1];
      b3 = ((op_t *) srcp2)[0];
      srcp1 += 2 * OPSIZ;
      srcp2 += 1 * OPSIZ;
      len -= 1;
      if (OP_T_THRES <= 3 * OPSIZ && len == 0)
        goto do0;
      /* Fall through.  */
    }

  do
    {
      a0 = ((op_t *) srcp1)[0];
      b0 = ((op_t *) srcp2)[0];
      x = MERGE (a2, shl, a3, shr);
      if (x != b3)
        return CMP_LT_OR_GT (x, b3);

    do3:
      a1 = ((op_t *) srcp1)[1];
      b1 = ((op_t *) srcp2)[1];
      x = MERGE (a3, shl, a0, shr);
      if (x != b0)
        return CMP_LT_OR_GT (x, b0);

    do2:
      a2 = ((op_t *) srcp1)[2];
      b2 = ((op_t *) srcp2)[2];
      x = MERGE (a0, shl, a1, shr);
      if (x != b1)
        return CMP_LT_OR_GT (x, b1);

    do1:
      a3 = ((op_t *) srcp1)[3];
      b3 = ((op_t *) srcp2)[3];
      x = MERGE (a1, shl, a2, shr);
      if (x != b2)
        return CMP_LT_OR_GT (x, b2);

      srcp1 += 4 * OPSIZ;
      srcp2 += 4 * OPSIZ;
      len -= 4;
    }
  while (len != 0);

  /* This is the right position for do0.  Please don't move
     it into the loop.  */
 do0:
  x = MERGE (a2, shl, a3, shr);
  if (x != b3)
    return CMP_LT_OR_GT (x, b3);
  return 0;
}

int
memcmp (const __ptr_t s1, const __ptr_t s2, size_t len)
{
  op_t a0;
  op_t b0;
  long int srcp1 = (long int) s1;
  long int srcp2 = (long int) s2;
  op_t res;

  if (len >= OP_T_THRES)
    {
      /* There are at least some bytes to compare.  No need to test
         for LEN == 0 in this alignment loop.  */
      while (srcp2 % OPSIZ != 0)
        {
          a0 = ((byte *) srcp1)[0];
          b0 = ((byte *) srcp2)[0];
          srcp1 += 1;
          srcp2 += 1;
          res = a0 - b0;
          if (res != 0)
            return res;
          len -= 1;
        }

      /* SRCP2 is now aligned for memory operations on `op_t'.
         SRCP1 alignment determines if we can do a simple,
         aligned compare or need to shuffle bits.  */

      if (srcp1 % OPSIZ == 0)
        res = memcmp_common_alignment (srcp1, srcp2, len / OPSIZ);
      else
        res = memcmp_not_common_alignment (srcp1, srcp2, len / OPSIZ);
      if (res != 0)
        return res;

      /* Number of bytes remaining in the interval [0..OPSIZ-1].  */
      srcp1 += len & -OPSIZ;
      srcp2 += len & -OPSIZ;
      len %= OPSIZ;
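
      /* Worked example (illustrative only, assuming OPSIZ == 4): if len
         was 22 here, the word routine above compared 22 / 4 == 5 words,
         i.e. 20 bytes, so len & -OPSIZ == 20 advances both pointers past
         the compared words and len %= OPSIZ leaves 2 trailing bytes for
         the byte loop below.  */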
    }

  /* There are just a few bytes to compare.  Use byte memory operations.  */
  while (len != 0)
    {
      a0 = ((byte *) srcp1)[0];
      b0 = ((byte *) srcp2)[0];
      srcp1 += 1;
      srcp2 += 1;
      res = a0 - b0;
      if (res != 0)
        return res;
      len -= 1;
    }

  return 0;
}
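
/* Note on the return value (illustrative, not from the original sources):
   the byte loops and memcmp_bytes effectively return the difference of the
   first mismatching bytes, while big-endian builds return exactly +1 or -1
   from CMP_LT_OR_GT.  For example, a first mismatch of bytes 0x41 vs 0x61
   yields -0x20 from the byte paths but -1 from the big-endian word path;
   both mean S1 < S2, so portable callers should rely only on the sign, as
   the C standard requires.  */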
libc_hidden_weak(memcmp)
#ifdef __UCLIBC_SUSV3_LEGACY__
strong_alias(memcmp,bcmp)
#endif