| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_ZONE_INL_H_ | 5 #ifndef V8_ZONE_INL_H_ |
| 6 #define V8_ZONE_INL_H_ | 6 #define V8_ZONE_INL_H_ |
| 7 | 7 |
| 8 #include "src/zone.h" | 8 #include "src/zone.h" |
| 9 | 9 |
| 10 #ifdef V8_USE_ADDRESS_SANITIZER | 10 #ifdef V8_USE_ADDRESS_SANITIZER |
| (...skipping 15 matching lines...) |
| 26 | 26 |
| 27 inline void* Zone::New(int size) { | 27 inline void* Zone::New(int size) { |
| 28 // Round up the requested size to fit the alignment. | 28 // Round up the requested size to fit the alignment. |
| 29 size = RoundUp(size, kAlignment); | 29 size = RoundUp(size, kAlignment); |
| 30 | 30 |
| 31 // If the allocation size is divisible by 8 then we return an 8-byte aligned | 31 // If the allocation size is divisible by 8 then we return an 8-byte aligned |
| 32 // address. | 32 // address. |
| 33 if (kPointerSize == 4 && kAlignment == 4) { | 33 if (kPointerSize == 4 && kAlignment == 4) { |
| 34 position_ += ((~size) & 4) & (reinterpret_cast<intptr_t>(position_) & 4); | 34 position_ += ((~size) & 4) & (reinterpret_cast<intptr_t>(position_) & 4); |
| 35 } else { | 35 } else { |
| 36 ASSERT(kAlignment >= kPointerSize); | 36 DCHECK(kAlignment >= kPointerSize); |
| 37 } | 37 } |
| 38 | 38 |
| 39 // Check if the requested size is available without expanding. | 39 // Check if the requested size is available without expanding. |
| 40 Address result = position_; | 40 Address result = position_; |
| 41 | 41 |
| 42 int size_with_redzone = | 42 int size_with_redzone = |
| 43 #ifdef V8_USE_ADDRESS_SANITIZER | 43 #ifdef V8_USE_ADDRESS_SANITIZER |
| 44 size + kASanRedzoneBytes; | 44 size + kASanRedzoneBytes; |
| 45 #else | 45 #else |
| 46 size; | 46 size; |
| 47 #endif | 47 #endif |
| 48 | 48 |
| 49 if (size_with_redzone > limit_ - position_) { | 49 if (size_with_redzone > limit_ - position_) { |
| 50 result = NewExpand(size_with_redzone); | 50 result = NewExpand(size_with_redzone); |
| 51 } else { | 51 } else { |
| 52 position_ += size_with_redzone; | 52 position_ += size_with_redzone; |
| 53 } | 53 } |
| 54 | 54 |
| 55 #ifdef V8_USE_ADDRESS_SANITIZER | 55 #ifdef V8_USE_ADDRESS_SANITIZER |
| 56 Address redzone_position = result + size; | 56 Address redzone_position = result + size; |
| 57 ASSERT(redzone_position + kASanRedzoneBytes == position_); | 57 DCHECK(redzone_position + kASanRedzoneBytes == position_); |
| 58 ASAN_POISON_MEMORY_REGION(redzone_position, kASanRedzoneBytes); | 58 ASAN_POISON_MEMORY_REGION(redzone_position, kASanRedzoneBytes); |
| 59 #endif | 59 #endif |
| 60 | 60 |
| 61 // Check that the result has the proper alignment and return it. | 61 // Check that the result has the proper alignment and return it. |
| 62 ASSERT(IsAddressAligned(result, kAlignment, 0)); | 62 DCHECK(IsAddressAligned(result, kAlignment, 0)); |
| 63 allocation_size_ += size; | 63 allocation_size_ += size; |
| 64 return reinterpret_cast<void*>(result); | 64 return reinterpret_cast<void*>(result); |
| 65 } | 65 } |
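Note on the branchless bump at line 34 (not touched by this change): a minimal standalone sketch, using plain C++ and illustrative values rather than V8's `position_`/`kAlignment`, of why `((~size) & 4) & (position & 4)` advances a 4-byte-aligned cursor to an 8-byte boundary exactly when the rounded-up size is a multiple of 8.

```cpp
#include <cassert>
#include <cstdint>

// Sketch of the bump used on 32-bit targets (kPointerSize == kAlignment == 4).
// `size` is assumed to be already rounded up to a multiple of 4.
std::uintptr_t AlignedStart(std::uintptr_t position, std::uintptr_t size) {
  // Adds 4 only when size is divisible by 8 AND position is 4 mod 8, so
  // allocations whose size is a multiple of 8 start on an 8-byte boundary.
  return position + (((~size) & 4) & (position & 4));
}

int main() {
  assert(AlignedStart(100, 16) == 104);  // 100 % 8 == 4 -> bumped to 104
  assert(AlignedStart(104, 16) == 104);  // already 8-byte aligned -> unchanged
  assert(AlignedStart(100, 12) == 100);  // size only 4-aligned -> no bump
  return 0;
}
```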
| 66 | 66 |
| 67 | 67 |
| 68 template <typename T> | 68 template <typename T> |
| 69 T* Zone::NewArray(int length) { | 69 T* Zone::NewArray(int length) { |
| 70 CHECK(std::numeric_limits<int>::max() / static_cast<int>(sizeof(T)) > length); | 70 CHECK(std::numeric_limits<int>::max() / static_cast<int>(sizeof(T)) > length); |
| 71 return static_cast<T*>(New(length * sizeof(T))); | 71 return static_cast<T*>(New(length * sizeof(T))); |
| 72 } | 72 } |
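For the CHECK on line 70: a standalone sketch of the same division-based overflow guard (plain C++; `GuardedByteCount` is a hypothetical helper, not part of V8), showing how the check rejects lengths for which `length * sizeof(T)` would overflow `int` before the multiplication is ever performed.

```cpp
#include <cstdio>
#include <limits>

// Hypothetical helper mirroring the check in Zone::NewArray: writes the byte
// count and returns true only when length * sizeof(T) fits in an int.
template <typename T>
bool GuardedByteCount(int length, int* bytes) {
  if (length < 0) return false;  // slightly stricter than the original CHECK
  if (std::numeric_limits<int>::max() / static_cast<int>(sizeof(T)) <= length) {
    return false;  // length * sizeof(T) would overflow int
  }
  *bytes = length * static_cast<int>(sizeof(T));
  return true;
}

int main() {
  int bytes = 0;
  std::printf("%d\n", GuardedByteCount<double>(10, &bytes));       // 1, bytes == 80
  std::printf("%d\n", GuardedByteCount<double>(1 << 30, &bytes));  // 0, would overflow
  return 0;
}
```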
| (...skipping 17 matching lines...) |
| 90 // freed by the Zone. | 90 // freed by the Zone. |
| 91 SplayTree<Config, ZoneAllocationPolicy>::ResetRoot(); | 91 SplayTree<Config, ZoneAllocationPolicy>::ResetRoot(); |
| 92 } | 92 } |
| 93 | 93 |
| 94 | 94 |
| 95 void* ZoneObject::operator new(size_t size, Zone* zone) { | 95 void* ZoneObject::operator new(size_t size, Zone* zone) { |
| 96 return zone->New(static_cast<int>(size)); | 96 return zone->New(static_cast<int>(size)); |
| 97 } | 97 } |
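The `operator new(size_t, Zone*)` overloads here and below enable V8's zone placement-new idiom (`new (zone) T(...)`). A minimal self-contained sketch, with a toy `Arena` standing in for `Zone` (all names illustrative, not V8 API):

```cpp
#include <cstddef>
#include <cstdio>
#include <vector>

// Toy allocator standing in for Zone; it owns all memory it hands out, so
// objects allocated from it are never individually deleted.
class Arena {
 public:
  void* New(std::size_t size) {
    storage_.push_back(new char[size]);
    return storage_.back();
  }
  ~Arena() {
    for (char* block : storage_) delete[] block;
  }

 private:
  std::vector<char*> storage_;
};

class Node {
 public:
  explicit Node(int value) : value_(value) {}
  // Placement form routes allocation into the arena, mirroring ZoneObject.
  void* operator new(std::size_t size, Arena* arena) { return arena->New(size); }
  int value() const { return value_; }

 private:
  int value_;
};

int main() {
  Arena arena;
  Node* node = new (&arena) Node(42);  // lives until the arena is destroyed
  std::printf("%d\n", node->value());
  return 0;
}
```

Because the zone releases everything at once and never runs destructors, zone-allocated containers such as the splay tree below only reset their root on destruction rather than deleting nodes.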
| 98 | 98 |
| 99 inline void* ZoneAllocationPolicy::New(size_t size) { | 99 inline void* ZoneAllocationPolicy::New(size_t size) { |
| 100 ASSERT(zone_); | 100 DCHECK(zone_); |
| 101 return zone_->New(static_cast<int>(size)); | 101 return zone_->New(static_cast<int>(size)); |
| 102 } | 102 } |
| 103 | 103 |
| 104 | 104 |
| 105 template <typename T> | 105 template <typename T> |
| 106 void* ZoneList<T>::operator new(size_t size, Zone* zone) { | 106 void* ZoneList<T>::operator new(size_t size, Zone* zone) { |
| 107 return zone->New(static_cast<int>(size)); | 107 return zone->New(static_cast<int>(size)); |
| 108 } | 108 } |
| 109 | 109 |
| 110 | 110 |
| 111 template <typename T> | 111 template <typename T> |
| 112 void* ZoneSplayTree<T>::operator new(size_t size, Zone* zone) { | 112 void* ZoneSplayTree<T>::operator new(size_t size, Zone* zone) { |
| 113 return zone->New(static_cast<int>(size)); | 113 return zone->New(static_cast<int>(size)); |
| 114 } | 114 } |
| 115 | 115 |
| 116 | 116 |
| 117 } } // namespace v8::internal | 117 } } // namespace v8::internal |
| 118 | 118 |
| 119 #endif // V8_ZONE_INL_H_ | 119 #endif // V8_ZONE_INL_H_ |