#include "asan_impl.h"
#include "libc.h"
#include <stdint.h>
#include <string.h>
5 
memset(void * dest,int c,size_t n)6 NO_ASAN void* memset(void* dest, int c, size_t n) {
7     unsigned char* s = dest;
8     size_t k;
9 
10     /* Fill head and tail with minimal branching. Each
11      * conditional ensures that all the subsequently used
12      * offsets are well-defined and in the dest region. */
13 
14     if (!n)
15         return dest;
16     s[0] = s[n - 1] = c;
17     if (n <= 2)
18         return dest;
19     s[1] = s[n - 2] = c;
20     s[2] = s[n - 3] = c;
21     if (n <= 6)
22         return dest;
23     s[3] = s[n - 4] = c;
24     if (n <= 8)
25         return dest;
26 
27     /* Advance pointer to align it at a 4-byte boundary,
28      * and truncate n to a multiple of 4. The previous code
29      * already took care of any head/tail that get cut off
30      * by the alignment. */
31 
32     k = -(uintptr_t)s & 3;
33     s += k;
34     n -= k;
35     n &= -4;
36 
37 #ifdef __GNUC__
38     typedef uint32_t __attribute__((__may_alias__)) u32;
39     typedef uint64_t __attribute__((__may_alias__)) u64;
40 
41     u32 c32 = ((u32)-1) / 255 * (unsigned char)c;
42 
43     /* In preparation to copy 32 bytes at a time, aligned on
44      * an 8-byte bounary, fill head/tail up to 28 bytes each.
45      * As in the initial byte-based head/tail fill, each
46      * conditional below ensures that the subsequent offsets
47      * are valid (e.g. !(n<=24) implies n>=28). */
48 
49     *(u32*)(s + 0) = c32;
50     *(u32*)(s + n - 4) = c32;
51     if (n <= 8)
52         return dest;
53     *(u32*)(s + 4) = c32;
54     *(u32*)(s + 8) = c32;
55     *(u32*)(s + n - 12) = c32;
56     *(u32*)(s + n - 8) = c32;
57     if (n <= 24)
58         return dest;
59     *(u32*)(s + 12) = c32;
60     *(u32*)(s + 16) = c32;
61     *(u32*)(s + 20) = c32;
62     *(u32*)(s + 24) = c32;
63     *(u32*)(s + n - 28) = c32;
64     *(u32*)(s + n - 24) = c32;
65     *(u32*)(s + n - 20) = c32;
66     *(u32*)(s + n - 16) = c32;
67 
68     /* Align to a multiple of 8 so we can fill 64 bits at a time,
69      * and avoid writing the same bytes twice as much as is
70      * practical without introducing additional branching. */
71 
72     k = 24 + ((uintptr_t)s & 4);
73     s += k;
74     n -= k;
75 
76     /* If this loop is reached, 28 tail bytes have already been
77      * filled, so any remainder when n drops below 32 can be
78      * safely ignored. */
79 
80     u64 c64 = c32 | ((u64)c32 << 32);
81     for (; n >= 32; n -= 32, s += 32) {
82         *(u64*)(s + 0) = c64;
83         *(u64*)(s + 8) = c64;
84         *(u64*)(s + 16) = c64;
85         *(u64*)(s + 24) = c64;
86     }
87 #else
88     /* Pure C fallback with no aliasing violations. */
89     for (; n; n--, s++)
90         *s = c;
91 #endif
92 
93     return dest;
94 }
95 
96 __typeof(memset) __unsanitized_memset __attribute__((alias("memset")));
97 __asan_weak_alias(memset)
98