| OLD | NEW |
| 1 #include <string.h> | 1 #include <string.h> |
| 2 #include <stdint.h> | 2 #include <stdint.h> |
| 3 | 3 |
| 4 void *memset(void *dest, int c, size_t n) | 4 void* memset(void* dest, int c, size_t n) { |
| 5 { | 5 unsigned char* s = dest; |
| 6 » unsigned char *s = dest; | 6 size_t k; |
| 7 » size_t k; | |
| 8 | 7 |
| 9 » /* Fill head and tail with minimal branching. Each | 8 /* Fill head and tail with minimal branching. Each |
| 10 » * conditional ensures that all the subsequently used | 9 * conditional ensures that all the subsequently used |
| 11 » * offsets are well-defined and in the dest region. */ | 10 * offsets are well-defined and in the dest region. */ |
| 12 | 11 |
| 13 » if (!n) return dest; | 12 if (!n) |
| 14 » s[0] = s[n-1] = c; | 13 return dest; |
| 15 » if (n <= 2) return dest; | 14 s[0] = s[n - 1] = c; |
| 16 » s[1] = s[n-2] = c; | 15 if (n <= 2) |
| 17 » s[2] = s[n-3] = c; | 16 return dest; |
| 18 » if (n <= 6) return dest; | 17 s[1] = s[n - 2] = c; |
| 19 » s[3] = s[n-4] = c; | 18 s[2] = s[n - 3] = c; |
| 20 » if (n <= 8) return dest; | 19 if (n <= 6) |
| | 20 return dest; |
| | 21 s[3] = s[n - 4] = c; |
| | 22 if (n <= 8) |
| | 23 return dest; |
| 21 | 24 |
| 22 » /* Advance pointer to align it at a 4-byte boundary, | 25 /* Advance pointer to align it at a 4-byte boundary, |
| 23 » * and truncate n to a multiple of 4. The previous code | 26 * and truncate n to a multiple of 4. The previous code |
| 24 » * already took care of any head/tail that get cut off | 27 * already took care of any head/tail that get cut off |
| 25 » * by the alignment. */ | 28 * by the alignment. */ |
| 26 | 29 |
| 27 » k = -(uintptr_t)s & 3; | 30 k = -(uintptr_t)s & 3; |
| 28 » s += k; | 31 s += k; |
| 29 » n -= k; | 32 n -= k; |
| 30 » n &= -4; | 33 n &= -4; |
| 31 | 34 |
| 32 #ifdef __GNUC__ | 35 #ifdef __GNUC__ |
| 33 » typedef uint32_t __attribute__((__may_alias__)) u32; | 36 typedef uint32_t __attribute__((__may_alias__)) u32; |
| 34 » typedef uint64_t __attribute__((__may_alias__)) u64; | 37 typedef uint64_t __attribute__((__may_alias__)) u64; |
| 35 | 38 |
| 36 » u32 c32 = ((u32)-1)/255 * (unsigned char)c; | 39 u32 c32 = ((u32)-1) / 255 * (unsigned char)c; |
| 37 | 40 |
| 38 » /* In preparation to copy 32 bytes at a time, aligned on | 41 /* In preparation to copy 32 bytes at a time, aligned on |
| 39 » * an 8-byte bounary, fill head/tail up to 28 bytes each. | 42 * an 8-byte boundary, fill head/tail up to 28 bytes each. |
| 40 » * As in the initial byte-based head/tail fill, each | 43 * As in the initial byte-based head/tail fill, each |
| 41 » * conditional below ensures that the subsequent offsets | 44 * conditional below ensures that the subsequent offsets |
| 42 » * are valid (e.g. !(n<=24) implies n>=28). */ | 45 * are valid (e.g. !(n<=24) implies n>=28). */ |
| 43 | 46 |
| 44 » *(u32 *)(s+0) = c32; | 47 *(u32*)(s + 0) = c32; |
| 45 » *(u32 *)(s+n-4) = c32; | 48 *(u32*)(s + n - 4) = c32; |
| 46 » if (n <= 8) return dest; | 49 if (n <= 8) |
| 47 » *(u32 *)(s+4) = c32; | 50 return dest; |
| 48 » *(u32 *)(s+8) = c32; | 51 *(u32*)(s + 4) = c32; |
| 49 » *(u32 *)(s+n-12) = c32; | 52 *(u32*)(s + 8) = c32; |
| 50 » *(u32 *)(s+n-8) = c32; | 53 *(u32*)(s + n - 12) = c32; |
| 51 » if (n <= 24) return dest; | 54 *(u32*)(s + n - 8) = c32; |
| 52 » *(u32 *)(s+12) = c32; | 55 if (n <= 24) |
| 53 » *(u32 *)(s+16) = c32; | 56 return dest; |
| 54 » *(u32 *)(s+20) = c32; | 57 *(u32*)(s + 12) = c32; |
| 55 » *(u32 *)(s+24) = c32; | 58 *(u32*)(s + 16) = c32; |
| 56 » *(u32 *)(s+n-28) = c32; | 59 *(u32*)(s + 20) = c32; |
| 57 » *(u32 *)(s+n-24) = c32; | 60 *(u32*)(s + 24) = c32; |
| 58 » *(u32 *)(s+n-20) = c32; | 61 *(u32*)(s + n - 28) = c32; |
| 59 » *(u32 *)(s+n-16) = c32; | 62 *(u32*)(s + n - 24) = c32; |
| | 63 *(u32*)(s + n - 20) = c32; |
| | 64 *(u32*)(s + n - 16) = c32; |
| 60 | 65 |
| 61 » /* Align to a multiple of 8 so we can fill 64 bits at a time, | 66 /* Align to a multiple of 8 so we can fill 64 bits at a time, |
| 62 » * and avoid writing the same bytes twice as much as is | 67 * and avoid writing the same bytes twice as much as is |
| 63 » * practical without introducing additional branching. */ | 68 * practical without introducing additional branching. */ |
| 64 | 69 |
| 65 » k = 24 + ((uintptr_t)s & 4); | 70 k = 24 + ((uintptr_t)s & 4); |
| 66 » s += k; | 71 s += k; |
| 67 » n -= k; | 72 n -= k; |
| 68 | 73 |
| 69 » /* If this loop is reached, 28 tail bytes have already been | 74 /* If this loop is reached, 28 tail bytes have already been |
| 70 » * filled, so any remainder when n drops below 32 can be | 75 * filled, so any remainder when n drops below 32 can be |
| 71 » * safely ignored. */ | 76 * safely ignored. */ |
| 72 | 77 |
| 73 » u64 c64 = c32 | ((u64)c32 << 32); | 78 u64 c64 = c32 | ((u64)c32 << 32); |
| 74 » for (; n >= 32; n-=32, s+=32) { | 79 for (; n >= 32; n -= 32, s += 32) { |
| 75 » » *(u64 *)(s+0) = c64; | 80 *(u64*)(s + 0) = c64; |
| 76 » » *(u64 *)(s+8) = c64; | 81 *(u64*)(s + 8) = c64; |
| 77 » » *(u64 *)(s+16) = c64; | 82 *(u64*)(s + 16) = c64; |
| 78 » » *(u64 *)(s+24) = c64; | 83 *(u64*)(s + 24) = c64; |
| 79 » } | 84 } |
| 80 #else | 85 #else |
| 81 » /* Pure C fallback with no aliasing violations. */ | 86 /* Pure C fallback with no aliasing violations. */ |
| 82 » for (; n; n--, s++) *s = c; | 87 for (; n; n--, s++) |
| | 88 *s = c; |
| 83 #endif | 89 #endif |
| 84 | 90 |
| 85 » return dest; | 91 return dest; |
| 86 } | 92 } |
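Two expressions in the routine are worth a note for reviewers who haven't seen them before: `k = -(uintptr_t)s & 3` is a branch-free count of the bytes needed to reach the next 4-byte boundary, and `((u32)-1)/255 * (unsigned char)c` broadcasts the fill byte into every byte of a 32-bit word (`0x01010101 * c`). A minimal standalone sketch, separate from the patch, that prints both values:

```c
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* -(uintptr_t)p & 3 == number of bytes to advance p so that it
     * becomes 4-byte aligned, computed without a branch. */
    for (uintptr_t p = 16; p < 20; p++)
        printf("p %% 4 = %u -> advance %u byte(s)\n",
               (unsigned)(p & 3), (unsigned)(-p & 3));

    /* ((u32)-1)/255 == 0x01010101, so multiplying by the fill byte
     * replicates it into all four byte lanes. */
    unsigned char c = 0xa7;
    uint32_t c32 = ((uint32_t)-1) / 255 * c;
    printf("c32 = 0x%08x\n", (unsigned)c32); /* prints 0xa7a7a7a7 */
    return 0;
}
```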
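To check that the reformat did not change behavior, a rough test harness along the lines below can be used. It assumes the routine above is compiled into the test under the hypothetical name `my_memset` (so it does not collide with the libc symbol) and compares it against the libc `memset` across a range of sizes and alignments:

```c
#include <assert.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical name for the routine above when it is built as part of
 * the test instead of replacing the libc symbol. */
void *my_memset(void *dest, int c, size_t n);

int main(void)
{
    static unsigned char buf[256], ref[256];

    for (size_t off = 0; off < 16; off++) {
        for (size_t n = 0; off + n <= sizeof buf; n++) {
            memset(buf, 0x55, sizeof buf);   /* poison both buffers */
            memset(ref, 0x55, sizeof ref);

            my_memset(buf + off, 0xa7, n);   /* routine under test */
            memset(ref + off, 0xa7, n);      /* libc reference */

            assert(memcmp(buf, ref, sizeof buf) == 0);
        }
    }
    puts("all size/alignment combinations match");
    return 0;
}
```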