OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 12 matching lines...) |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #ifndef V8_ZONE_INL_H_ | 28 #ifndef V8_ZONE_INL_H_ |
29 #define V8_ZONE_INL_H_ | 29 #define V8_ZONE_INL_H_ |
30 | 30 |
31 #include "zone.h" | 31 #include "zone.h" |
32 | 32 |
| 33 #ifdef ADDRESS_SANITIZER |
| 34 #include <sanitizer/asan_interface.h> |
| 35 #else |
| 36 #define ASAN_UNPOISON_MEMORY_REGION(start, size) ((void) 0) |
| 37 #endif |
| 38 |
33 #include "counters.h" | 39 #include "counters.h" |
34 #include "isolate.h" | 40 #include "isolate.h" |
35 #include "utils.h" | 41 #include "utils.h" |
36 #include "v8-counters.h" | 42 #include "v8-counters.h" |
37 | 43 |
38 namespace v8 { | 44 namespace v8 { |
39 namespace internal { | 45 namespace internal { |
40 | 46 |
41 | 47 |
| 48 static const int kASanRedzoneBytes = 24; // Must be a multiple of 8. |
| 49 |
| 50 |
42 inline void* Zone::New(int size) { | 51 inline void* Zone::New(int size) { |
43 // Round up the requested size to fit the alignment. | 52 // Round up the requested size to fit the alignment. |
44 size = RoundUp(size, kAlignment); | 53 size = RoundUp(size, kAlignment); |
45 | 54 |
46 // If the allocation size is divisible by 8 then we return an 8-byte aligned | 55 // If the allocation size is divisible by 8 then we return an 8-byte aligned |
47 // address. | 56 // address. |
48 if (kPointerSize == 4 && kAlignment == 4) { | 57 if (kPointerSize == 4 && kAlignment == 4) { |
49 position_ += ((~size) & 4) & (reinterpret_cast<intptr_t>(position_) & 4); | 58 position_ += ((~size) & 4) & (reinterpret_cast<intptr_t>(position_) & 4); |
50 } else { | 59 } else { |
51 ASSERT(kAlignment >= kPointerSize); | 60 ASSERT(kAlignment >= kPointerSize); |
52 } | 61 } |
53 | 62 |
54 // Check if the requested size is available without expanding. | 63 // Check if the requested size is available without expanding. |
55 Address result = position_; | 64 Address result = position_; |
56 | 65 |
57 if (size > limit_ - position_) { | 66 int size_with_redzone = |
58 result = NewExpand(size); | 67 #ifdef ADDRESS_SANITIZER |
| 68 size + kASanRedzoneBytes; |
| 69 #else |
| 70 size; |
| 71 #endif |
| 72 |
| 73 if (size_with_redzone > limit_ - position_) { |
| 74 result = NewExpand(size_with_redzone); |
59 } else { | 75 } else { |
60 position_ += size; | 76 position_ += size_with_redzone; |
61 } | 77 } |
62 | 78 |
| 79 #ifdef ADDRESS_SANITIZER |
| 80 Address redzone_position = result + size; |
| 81 ASSERT(redzone_position + kASanRedzoneBytes == position_); |
| 82 ASAN_POISON_MEMORY_REGION(redzone_position, kASanRedzoneBytes); |
| 83 #endif |
| 84 |
63 // Check that the result has the proper alignment and return it. | 85 // Check that the result has the proper alignment and return it. |
64 ASSERT(IsAddressAligned(result, kAlignment, 0)); | 86 ASSERT(IsAddressAligned(result, kAlignment, 0)); |
65 allocation_size_ += size; | 87 allocation_size_ += size; |
66 return reinterpret_cast<void*>(result); | 88 return reinterpret_cast<void*>(result); |
67 } | 89 } |
68 | 90 |
69 | 91 |
70 template <typename T> | 92 template <typename T> |
71 T* Zone::NewArray(int length) { | 93 T* Zone::NewArray(int length) { |
| 94 CHECK(std::numeric_limits<int>::max() / static_cast<int>(sizeof(T)) > length); |
72 return static_cast<T*>(New(length * sizeof(T))); | 95 return static_cast<T*>(New(length * sizeof(T))); |
73 } | 96 } |
74 | 97 |
75 | 98 |
76 bool Zone::excess_allocation() { | 99 bool Zone::excess_allocation() { |
77 return segment_bytes_allocated_ > kExcessLimit; | 100 return segment_bytes_allocated_ > kExcessLimit; |
78 } | 101 } |
79 | 102 |
80 | 103 |
81 void Zone::adjust_segment_bytes_allocated(int delta) { | 104 void Zone::adjust_segment_bytes_allocated(int delta) { |
(...skipping 29 matching lines...) |
111 | 134 |
112 template <typename T> | 135 template <typename T> |
113 void* ZoneSplayTree<T>::operator new(size_t size, Zone* zone) { | 136 void* ZoneSplayTree<T>::operator new(size_t size, Zone* zone) { |
114 return zone->New(static_cast<int>(size)); | 137 return zone->New(static_cast<int>(size)); |
115 } | 138 } |
116 | 139 |
117 | 140 |
118 } } // namespace v8::internal | 141 } } // namespace v8::internal |
119 | 142 |
120 #endif // V8_ZONE_INL_H_ | 143 #endif // V8_ZONE_INL_H_ |
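For reference, a minimal standalone sketch (not part of the patch) of the redzone technique the new Zone::New uses: each bump-pointer allocation is followed by a poisoned gap, so an out-of-bounds access past a zone object is reported by AddressSanitizer instead of silently touching the next object. The names below (BumpArena, kRedzoneBytes) are illustrative only, and the macros degrade to no-ops when ADDRESS_SANITIZER is not defined, mirroring the #else branch added at the top of the header.

    #include <cstddef>
    #include <cstdlib>

    #ifdef ADDRESS_SANITIZER
    #include <sanitizer/asan_interface.h>
    #else
    #define ASAN_POISON_MEMORY_REGION(addr, size) ((void) 0)
    #define ASAN_UNPOISON_MEMORY_REGION(addr, size) ((void) 0)
    #endif

    class BumpArena {
     public:
      explicit BumpArena(size_t capacity)
          : buffer_(static_cast<char*>(malloc(capacity))),
            position_(buffer_),
            limit_(buffer_ + capacity) {}

      ~BumpArena() {
        // Unpoison before freeing so the C runtime may touch the memory again.
        ASAN_UNPOISON_MEMORY_REGION(buffer_,
                                    static_cast<size_t>(limit_ - buffer_));
        free(buffer_);
      }

      void* New(size_t size) {
        static const size_t kRedzoneBytes = 24;  // multiple of the alignment
        size = (size + 7) & ~static_cast<size_t>(7);  // round up to 8 bytes
        if (size + kRedzoneBytes > static_cast<size_t>(limit_ - position_)) {
          return NULL;  // a real zone would grow a new segment here
        }
        char* result = position_;
        position_ += size + kRedzoneBytes;
        // Poison the gap between this allocation and the next one; any access
        // into it is reported by ASan as a use of poisoned memory.
        ASAN_POISON_MEMORY_REGION(result + size, kRedzoneBytes);
        return result;
      }

     private:
      char* buffer_;
      char* position_;
      char* limit_;
    };

Note that in the patch itself the poison call only appears inside an #ifdef ADDRESS_SANITIZER block, so non-ASan builds compile unchanged and pay no cost for the redzone bookkeeping beyond the zero-sized fallback macro.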
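The CHECK added to Zone::NewArray guards the multiplication length * sizeof(T) against signed int overflow by evaluating the division first, which cannot overflow. A hypothetical standalone sketch of the same guard, assuming a 24-byte element type (Element and ArraySizeFits are illustrative names, not part of the patch):

    #include <cassert>
    #include <limits>

    struct Element { char payload[24]; };  // hypothetical 24-byte element type

    // Same predicate as the CHECK in NewArray: reject any length for which
    // length * sizeof(Element) would wrap around a 32-bit int.
    static bool ArraySizeFits(int length) {
      return std::numeric_limits<int>::max() /
                 static_cast<int>(sizeof(Element)) > length;
    }

    int main() {
      assert(ArraySizeFits(1000));  // small arrays are fine
      // INT_MAX / 24 is roughly 89 million; anything above that would
      // overflow the byte count, so it must be rejected up front.
      assert(!ArraySizeFits(200 * 1000 * 1000));
      return 0;
    }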