| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 10 matching lines...) |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #ifndef V8_ZONE_INL_H_ | 28 #ifndef V8_ZONE_INL_H_ |
| 29 #define V8_ZONE_INL_H_ | 29 #define V8_ZONE_INL_H_ |
| 30 | 30 |
| 31 #include "isolate.h" |
| 31 #include "zone.h" | 32 #include "zone.h" |
| 32 #include "v8-counters.h" | 33 #include "v8-counters.h" |
| 33 | 34 |
| 34 namespace v8 { | 35 namespace v8 { |
| 35 namespace internal { | 36 namespace internal { |
| 36 | 37 |
| 37 | 38 |
| 39 AssertNoZoneAllocation::AssertNoZoneAllocation() |
| 40 : prev_(Isolate::Current()->zone_allow_allocation()) { |
| 41 Isolate::Current()->set_zone_allow_allocation(false); |
| 42 } |
| 43 |
| 44 |
| 45 AssertNoZoneAllocation::~AssertNoZoneAllocation() { |
| 46 Isolate::Current()->set_zone_allow_allocation(prev_); |
| 47 } |
| 48 |
| 49 |
| 38 inline void* Zone::New(int size) { | 50 inline void* Zone::New(int size) { |
| 39 ASSERT(AssertNoZoneAllocation::allow_allocation()); | 51 ASSERT(Isolate::Current()->zone_allow_allocation()); |
| 40 ASSERT(ZoneScope::nesting() > 0); | 52 ASSERT(ZoneScope::nesting() > 0); |
| 41 // Round up the requested size to fit the alignment. | 53 // Round up the requested size to fit the alignment. |
| 42 size = RoundUp(size, kAlignment); | 54 size = RoundUp(size, kAlignment); |
| 43 | 55 |
| 44 // Check if the requested size is available without expanding. | 56 // Check if the requested size is available without expanding. |
| 45 Address result = position_; | 57 Address result = position_; |
| 46 if ((position_ += size) > limit_) result = NewExpand(size); | 58 if ((position_ += size) > limit_) result = NewExpand(size); |
| 47 | 59 |
| 48 // Check that the result has the proper alignment and return it. | 60 // Check that the result has the proper alignment and return it. |
| 49 ASSERT(IsAddressAligned(result, kAlignment, 0)); | 61 ASSERT(IsAddressAligned(result, kAlignment, 0)); |
| 50 allocation_size_ += size; | 62 allocation_size_ += size; |
| 51 return reinterpret_cast<void*>(result); | 63 return reinterpret_cast<void*>(result); |
| 52 } | 64 } |
| 53 | 65 |
| 54 | 66 |
| 55 template <typename T> | 67 template <typename T> |
| 56 T* Zone::NewArray(int length) { | 68 T* Zone::NewArray(int length) { |
| 57 return static_cast<T*>(Zone::New(length * sizeof(T))); | 69 return static_cast<T*>(New(length * sizeof(T))); |
| 58 } | 70 } |
| 59 | 71 |
| 60 | 72 |
| 61 bool Zone::excess_allocation() { | 73 bool Zone::excess_allocation() { |
| 62 return segment_bytes_allocated_ > zone_excess_limit_; | 74 return segment_bytes_allocated_ > zone_excess_limit_; |
| 63 } | 75 } |
| 64 | 76 |
| 65 | 77 |
| 66 void Zone::adjust_segment_bytes_allocated(int delta) { | 78 void Zone::adjust_segment_bytes_allocated(int delta) { |
| 67 segment_bytes_allocated_ += delta; | 79 segment_bytes_allocated_ += delta; |
| 68 Counters::zone_segment_bytes.Set(segment_bytes_allocated_); | 80 isolate_->counters()->zone_segment_bytes()->Set(segment_bytes_allocated_); |
| 69 } | 81 } |
| 70 | 82 |
| 71 | 83 |
| 72 template <typename Config> | 84 template <typename Config> |
| 73 ZoneSplayTree<Config>::~ZoneSplayTree() { | 85 ZoneSplayTree<Config>::~ZoneSplayTree() { |
| 74 // Reset the root to avoid unneeded iteration over all tree nodes | 86 // Reset the root to avoid unneeded iteration over all tree nodes |
| 75 // in the destructor. For a zone-allocated tree, nodes will be | 87 // in the destructor. For a zone-allocated tree, nodes will be |
| 76 // freed by the Zone. | 88 // freed by the Zone. |
| 77 SplayTree<Config, ZoneListAllocationPolicy>::ResetRoot(); | 89 SplayTree<Config, ZoneListAllocationPolicy>::ResetRoot(); |
| 78 } | 90 } |
| 79 | 91 |
| 80 | 92 |
| 93 // TODO(isolates): for performance reasons, this should be replaced with a new |
| 94 // operator that takes the zone in which the object should be |
| 95 // allocated. |
| 96 void* ZoneObject::operator new(size_t size) { |
| 97 return ZONE->New(static_cast<int>(size)); |
| 98 } |
| 99 |
| 100 |
| 101 inline void* ZoneListAllocationPolicy::New(int size) { |
| 102 return ZONE->New(size); |
| 103 } |
| 104 |
| 105 |
| 106 ZoneScope::ZoneScope(ZoneScopeMode mode) |
| 107 : isolate_(Isolate::Current()), |
| 108 mode_(mode) { |
| 109 isolate_->zone()->scope_nesting_++; |
| 110 } |
| 111 |
| 112 |
| 113 bool ZoneScope::ShouldDeleteOnExit() { |
| 114 return isolate_->zone()->scope_nesting_ == 1 && mode_ == DELETE_ON_EXIT; |
| 115 } |
| 116 |
| 117 |
| 118 int ZoneScope::nesting() { |
| 119 return Isolate::Current()->zone()->scope_nesting_; |
| 120 } |
| 121 |
| 122 |
| 81 } } // namespace v8::internal | 123 } } // namespace v8::internal |
| 82 | 124 |
| 83 #endif // V8_ZONE_INL_H_ | 125 #endif // V8_ZONE_INL_H_ |
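
Note on the allocation pattern: the change above threads the Isolate through the zone machinery (the allocation-guard flag, the ZONE macro, and the zone_segment_bytes counter all move onto the isolate), but the core allocator is untouched. Zone::New is a bump-pointer allocator: it rounds the request up to kAlignment, advances position_, and falls back to NewExpand() when the current segment is exhausted. The following is a minimal standalone sketch of that pattern for illustration only; SimpleZone and its members are hypothetical names, and segment chaining, the per-isolate counters, and the ZoneScope nesting checks from the real code are omitted.

    // Minimal sketch of the bump-pointer allocation pattern used by Zone::New
    // above. SimpleZone is a hypothetical stand-in, not V8's Zone: the real
    // Zone grows by chaining segments (NewExpand) and updates per-isolate
    // counters, which this sketch leaves out.
    #include <cassert>
    #include <cstdint>
    #include <cstdlib>

    class SimpleZone {
     public:
      static const size_t kAlignment = 8;

      explicit SimpleZone(size_t capacity)
          : buffer_(static_cast<uint8_t*>(std::malloc(capacity))),
            position_(buffer_),
            limit_(buffer_ + capacity) {}

      // Individual objects are never freed; everything dies with the zone.
      ~SimpleZone() { std::free(buffer_); }

      void* New(size_t size) {
        // Round the request up so every returned pointer stays aligned.
        size = (size + kAlignment - 1) & ~(kAlignment - 1);
        uint8_t* result = position_;
        position_ += size;
        // The real Zone would call NewExpand() here to chain a new segment;
        // this sketch simply refuses oversized requests.
        assert(position_ <= limit_ && "zone exhausted");
        return result;
      }

      template <typename T>
      T* NewArray(int length) {
        return static_cast<T*>(New(length * sizeof(T)));
      }

     private:
      uint8_t* buffer_;
      uint8_t* position_;  // bump pointer: next free byte
      uint8_t* limit_;     // end of the backing buffer
    };

    int main() {
      SimpleZone zone(1024);
      int* numbers = zone.NewArray<int>(16);  // no per-object free() needed
      numbers[0] = 42;
      return 0;
      // The whole arena is released in one step in ~SimpleZone().
    }

The appeal of the pattern, and the reason ZoneScope has a DELETE_ON_EXIT mode, is that nothing is freed object by object: when the outermost scope unwinds, the entire zone is released at once.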