| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_HEAP_INL_H_ | 5 #ifndef V8_HEAP_HEAP_INL_H_ |
| 6 #define V8_HEAP_HEAP_INL_H_ | 6 #define V8_HEAP_HEAP_INL_H_ |
| 7 | 7 |
| 8 #include <cmath> | 8 #include <cmath> |
| 9 | 9 |
| 10 #include "src/base/platform/platform.h" | 10 #include "src/base/platform/platform.h" |
| (...skipping 139 matching lines...) |
| 150 | 150 |
| 151 | 151 |
| 152 AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) { | 152 AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) { |
| 153 if (src->length() == 0) return src; | 153 if (src->length() == 0) return src; |
| 154 return CopyConstantPoolArrayWithMap(src, src->map()); | 154 return CopyConstantPoolArrayWithMap(src, src->map()); |
| 155 } | 155 } |
| 156 | 156 |
| 157 | 157 |
| 158 AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space, | 158 AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space, |
| 159 AllocationSpace retry_space, | 159 AllocationSpace retry_space, |
| 160 Alignment alignment) { | 160 AllocationAlignment alignment) { |
| 161 DCHECK(AllowHandleAllocation::IsAllowed()); | 161 DCHECK(AllowHandleAllocation::IsAllowed()); |
| 162 DCHECK(AllowHeapAllocation::IsAllowed()); | 162 DCHECK(AllowHeapAllocation::IsAllowed()); |
| 163 DCHECK(gc_state_ == NOT_IN_GC); | 163 DCHECK(gc_state_ == NOT_IN_GC); |
| 164 #ifdef DEBUG | 164 #ifdef DEBUG |
| 165 if (FLAG_gc_interval >= 0 && AllowAllocationFailure::IsAllowed(isolate_) && | 165 if (FLAG_gc_interval >= 0 && AllowAllocationFailure::IsAllowed(isolate_) && |
| 166 Heap::allocation_timeout_-- <= 0) { | 166 Heap::allocation_timeout_-- <= 0) { |
| 167 return AllocationResult::Retry(space); | 167 return AllocationResult::Retry(space); |
| 168 } | 168 } |
| 169 isolate_->counters()->objs_since_last_full()->Increment(); | 169 isolate_->counters()->objs_since_last_full()->Increment(); |
| 170 isolate_->counters()->objs_since_last_young()->Increment(); | 170 isolate_->counters()->objs_since_last_young()->Increment(); |
| 171 #endif | 171 #endif |
| 172 | 172 |
| 173 HeapObject* object; | 173 HeapObject* object; |
| 174 AllocationResult allocation; | 174 AllocationResult allocation; |
| 175 if (NEW_SPACE == space) { | 175 if (NEW_SPACE == space) { |
| 176 #ifndef V8_HOST_ARCH_64_BIT | 176 #ifndef V8_HOST_ARCH_64_BIT |
| 177 if (alignment == kDoubleAligned) { | 177 if (alignment == kWordAligned) { |
| 178 allocation = new_space_.AllocateRawDoubleAligned(size_in_bytes); | 178 allocation = new_space_.AllocateRaw(size_in_bytes); |
| 179 } else { | 179 } else { |
| 180 allocation = new_space_.AllocateRaw(size_in_bytes); | 180 allocation = new_space_.AllocateRawAligned(size_in_bytes, alignment); |
| 181 } | 181 } |
| 182 #else | 182 #else |
| 183 allocation = new_space_.AllocateRaw(size_in_bytes); | 183 allocation = new_space_.AllocateRaw(size_in_bytes); |
| 184 #endif | 184 #endif |
| 185 if (always_allocate() && allocation.IsRetry() && retry_space != NEW_SPACE) { | 185 if (always_allocate() && allocation.IsRetry() && retry_space != NEW_SPACE) { |
| 186 space = retry_space; | 186 space = retry_space; |
| 187 } else { | 187 } else { |
| 188 if (allocation.To(&object)) { | 188 if (allocation.To(&object)) { |
| 189 OnAllocationEvent(object, size_in_bytes); | 189 OnAllocationEvent(object, size_in_bytes); |
| 190 } | 190 } |
| 191 return allocation; | 191 return allocation; |
| 192 } | 192 } |
| 193 } | 193 } |
| 194 | 194 |
| 195 if (OLD_SPACE == space) { | 195 if (OLD_SPACE == space) { |
| 196 #ifndef V8_HOST_ARCH_64_BIT | 196 #ifndef V8_HOST_ARCH_64_BIT |
| 197 if (alignment == kDoubleAligned) { | 197 if (alignment == kWordAligned) { |
| 198 allocation = old_space_->AllocateRawDoubleAligned(size_in_bytes); | 198 allocation = old_space_->AllocateRaw(size_in_bytes); |
| 199 } else { | 199 } else { |
| 200 allocation = old_space_->AllocateRaw(size_in_bytes); | 200 allocation = old_space_->AllocateRawAligned(size_in_bytes, alignment); |
| 201 } | 201 } |
| 202 #else | 202 #else |
| 203 allocation = old_space_->AllocateRaw(size_in_bytes); | 203 allocation = old_space_->AllocateRaw(size_in_bytes); |
| 204 #endif | 204 #endif |
| 205 } else if (CODE_SPACE == space) { | 205 } else if (CODE_SPACE == space) { |
| 206 if (size_in_bytes <= code_space()->AreaSize()) { | 206 if (size_in_bytes <= code_space()->AreaSize()) { |
| 207 allocation = code_space_->AllocateRaw(size_in_bytes); | 207 allocation = code_space_->AllocateRaw(size_in_bytes); |
| 208 } else { | 208 } else { |
| 209 // Large code objects are allocated in large object space. | 209 // Large code objects are allocated in large object space. |
| 210 allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE); | 210 allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE); |
| (...skipping 493 matching lines...) |
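The hunk above renames `Alignment` to `AllocationAlignment` and replaces the dedicated `AllocateRawDoubleAligned` with a general `AllocateRawAligned(size_in_bytes, alignment)`; the dispatch only exists on non-64-bit hosts, since on a 64-bit host every word-aligned allocation is already double-aligned. As a minimal sketch of what an alignment-aware bump allocation can look like, the snippet below pads the allocation top up to an 8-byte boundary for `kDoubleAligned` requests. The `kDoubleAlignment` constant, the `AlignmentFill` helper, and the raw-pointer bump interface are assumptions for illustration, not V8's actual implementation (which lives in its space classes and inserts filler objects into the padding).

```cpp
#include <cstddef>
#include <cstdint>

// Names taken from the diff; values are illustrative.
enum AllocationAlignment { kWordAligned, kDoubleAligned };

constexpr std::size_t kDoubleAlignment = 8;  // assumption: double = 8 bytes

// Hypothetical helper: bytes of padding needed so `address` becomes
// double-aligned. kWordAligned requests never need padding.
inline std::size_t AlignmentFill(std::uintptr_t address,
                                 AllocationAlignment alignment) {
  std::uintptr_t misalign = address & (kDoubleAlignment - 1);
  if (alignment == kDoubleAligned && misalign != 0) {
    return static_cast<std::size_t>(kDoubleAlignment - misalign);
  }
  return 0;
}

// Bump-pointer allocation with optional padding; returns nullptr when the
// request does not fit, which corresponds to the retry path in the diff.
inline void* AllocateRawAligned(std::uintptr_t* top, std::uintptr_t limit,
                                std::size_t size_in_bytes,
                                AllocationAlignment alignment) {
  std::size_t fill = AlignmentFill(*top, alignment);
  if (limit - *top < fill + size_in_bytes) return nullptr;
  *top += fill;  // in V8 the skipped bytes would become a filler object
  void* result = reinterpret_cast<void*>(*top);
  *top += size_in_bytes;
  return result;
}
```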
| 704 | 704 |
| 705 void VerifySmisVisitor::VisitPointers(Object** start, Object** end) { | 705 void VerifySmisVisitor::VisitPointers(Object** start, Object** end) { |
| 706 for (Object** current = start; current < end; current++) { | 706 for (Object** current = start; current < end; current++) { |
| 707 CHECK((*current)->IsSmi()); | 707 CHECK((*current)->IsSmi()); |
| 708 } | 708 } |
| 709 } | 709 } |
| 710 } | 710 } |
| 711 } // namespace v8::internal | 711 } // namespace v8::internal |
| 712 | 712 |
| 713 #endif // V8_HEAP_HEAP_INL_H_ | 713 #endif // V8_HEAP_HEAP_INL_H_ |
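Throughout the diff, `AllocateRaw` communicates success or failure via `AllocationResult`: `AllocationResult::Retry(space)` records the space a caller should garbage-collect and retry in, `IsRetry()` tests for that case, and `To(&object)` extracts the object on success. The sketch below is a simplified stand-in matching only those three calls; the field layout and the `RetrySpace()` accessor are assumptions, not V8's real class.

```cpp
#include <cassert>

class HeapObject;  // opaque here; only used through pointers

enum AllocationSpace { NEW_SPACE, OLD_SPACE, CODE_SPACE, LO_SPACE };

// Simplified stand-in for v8::internal::AllocationResult, modeling the
// Retry / IsRetry / To protocol visible in heap-inl.h.
class AllocationResult {
 public:
  static AllocationResult Retry(AllocationSpace space) {
    return AllocationResult(nullptr, space, /*retry=*/true);
  }
  explicit AllocationResult(HeapObject* object)
      : object_(object), retry_space_(NEW_SPACE), is_retry_(false) {}

  bool IsRetry() const { return is_retry_; }
  AllocationSpace RetrySpace() const {  // hypothetical accessor
    assert(is_retry_);
    return retry_space_;
  }
  // Writes the object through `out` and returns true unless this is a retry.
  bool To(HeapObject** out) const {
    if (is_retry_) return false;
    *out = object_;
    return true;
  }

 private:
  AllocationResult(HeapObject* object, AllocationSpace space, bool retry)
      : object_(object), retry_space_(space), is_retry_(retry) {}
  HeapObject* object_;
  AllocationSpace retry_space_;
  bool is_retry_;
};

// Typical caller pattern, as in the diff:
//   HeapObject* object;
//   if (allocation.To(&object)) { /* success */ } else { /* GC and retry */ }
```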