| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2003 Apple Computer, Inc. | 2 * Copyright (C) 2003 Apple Computer, Inc. |
| 3 * Copyright (C) Research In Motion Limited 2010. All rights reserved. | 3 * Copyright (C) Research In Motion Limited 2010. All rights reserved. |
| 4 * | 4 * |
| 5 * Portions are Copyright (C) 1998 Netscape Communications Corporation. | 5 * Portions are Copyright (C) 1998 Netscape Communications Corporation. |
| 6 * | 6 * |
| 7 * This library is free software; you can redistribute it and/or | 7 * This library is free software; you can redistribute it and/or |
| 8 * modify it under the terms of the GNU Lesser General Public | 8 * modify it under the terms of the GNU Lesser General Public |
| 9 * License as published by the Free Software Foundation; either | 9 * License as published by the Free Software Foundation; either |
| 10 * version 2.1 of the License, or (at your option) any later version. | 10 * version 2.1 of the License, or (at your option) any later version. |
| (...skipping 24 matching lines...) Expand all Loading... |
| 35 | 35 |
| 36 #include "config.h" | 36 #include "config.h" |
| 37 #include "RenderArena.h" | 37 #include "RenderArena.h" |
| 38 | 38 |
| 39 #include <stdlib.h> | 39 #include <stdlib.h> |
| 40 #include <string.h> | 40 #include <string.h> |
| 41 #include <wtf/Assertions.h> | 41 #include <wtf/Assertions.h> |
| 42 | 42 |
| 43 #define ROUNDUP(x, y) ((((x)+((y)-1))/(y))*(y)) | 43 #define ROUNDUP(x, y) ((((x)+((y)-1))/(y))*(y)) |
| 44 | 44 |
| 45 #ifdef NDEBUG |
| 46 // Mask freelist pointers to detect corruption and prevent freelist spraying. |
| 47 // We use an arbitrary function and rely on ASLR to randomize it. |
| 48 // The first value in RenderObject (or any class) is a vtable pointer, which alw
ays |
| 49 // overlaps with the next pointer. This change guarantees that the masked vtable
/next |
| 50 // pointer will never point to valid memory. So, we should immediately crash on
the |
| 51 // first invalid vtable access for a stale RenderObject pointer. |
| 52 // See http://download.crowdstrike.com/papers/hes-exploiting-a-coalmine.pdf. |
| 53 static void* MaskPtr(void* p) |
| 54 { |
| 55 // The bottom bits are predictable because the binary is loaded on a boundar
y. |
| 56 // This just shifts most of those predictable bits out. |
| 57 const uintptr_t mask = ~(reinterpret_cast<uintptr_t>(WTF::fastMalloc) >> 13)
; |
| 58 return reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(p) ^ mask); |
| 59 } |
| 60 #endif |
| 61 |
| 45 namespace WebCore { | 62 namespace WebCore { |
| 46 | 63 |
| 47 #ifndef NDEBUG | 64 #ifndef NDEBUG |
| 48 | 65 |
| 49 const int signature = 0xDBA00AEA; | 66 const int signature = 0xDBA00AEA; |
| 50 const int signatureDead = 0xDBA00AED; | 67 const int signatureDead = 0xDBA00AED; |
| 51 | 68 |
| 52 typedef struct { | 69 typedef struct { |
| 53 RenderArena* arena; | 70 RenderArena* arena; |
| 54 size_t size; | 71 size_t size; |
| (...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 96 // Ensure we have correct alignment for pointers. Important for Tru64 | 113 // Ensure we have correct alignment for pointers. Important for Tru64 |
| 97 size = ROUNDUP(size, sizeof(void*)); | 114 size = ROUNDUP(size, sizeof(void*)); |
| 98 | 115 |
| 99 // Check recyclers first | 116 // Check recyclers first |
| 100 if (size < gMaxRecycledSize) { | 117 if (size < gMaxRecycledSize) { |
| 101 const int index = size >> 2; | 118 const int index = size >> 2; |
| 102 | 119 |
| 103 result = m_recyclers[index]; | 120 result = m_recyclers[index]; |
| 104 if (result) { | 121 if (result) { |
| 105 // Need to move to the next object | 122 // Need to move to the next object |
| 106 void* next = *((void**)result); | 123 void* next = MaskPtr(*((void**)result)); |
| 107 m_recyclers[index] = next; | 124 m_recyclers[index] = next; |
| 108 } | 125 } |
| 109 } | 126 } |
| 110 | 127 |
| 111 if (!result) { | 128 if (!result) { |
| 112 // Allocate a new chunk from the arena | 129 // Allocate a new chunk from the arena |
| 113 unsigned bytesAllocated = 0; | 130 unsigned bytesAllocated = 0; |
| 114 ARENA_ALLOCATE(result, &m_pool, size, &bytesAllocated); | 131 ARENA_ALLOCATE(result, &m_pool, size, &bytesAllocated); |
| 115 m_totalAllocated += bytesAllocated; | 132 m_totalAllocated += bytesAllocated; |
| 116 } | 133 } |
| (...skipping 19 matching lines...) Expand all Loading... |
| 136 ::free(block); | 153 ::free(block); |
| 137 #else | 154 #else |
| 138 // Ensure we have correct alignment for pointers. Important for Tru64 | 155 // Ensure we have correct alignment for pointers. Important for Tru64 |
| 139 size = ROUNDUP(size, sizeof(void*)); | 156 size = ROUNDUP(size, sizeof(void*)); |
| 140 | 157 |
| 141 // See if it's a size that we recycle | 158 // See if it's a size that we recycle |
| 142 if (size < gMaxRecycledSize) { | 159 if (size < gMaxRecycledSize) { |
| 143 const int index = size >> 2; | 160 const int index = size >> 2; |
| 144 void* currentTop = m_recyclers[index]; | 161 void* currentTop = m_recyclers[index]; |
| 145 m_recyclers[index] = ptr; | 162 m_recyclers[index] = ptr; |
| 146 *((void**)ptr) = currentTop; | 163 *((void**)ptr) = MaskPtr(currentTop); |
| 147 } | 164 } |
| 148 #endif | 165 #endif |
| 149 } | 166 } |
| 150 | 167 |
| 151 } // namespace WebCore | 168 } // namespace WebCore |
| OLD | NEW |