OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_HEAP_INL_H_ | 5 #ifndef V8_HEAP_HEAP_INL_H_ |
6 #define V8_HEAP_HEAP_INL_H_ | 6 #define V8_HEAP_HEAP_INL_H_ |
7 | 7 |
8 #include <cmath> | 8 #include <cmath> |
9 | 9 |
10 #include "src/base/platform/platform.h" | 10 #include "src/base/platform/platform.h" |
(...skipping 138 matching lines...)
149 } | 149 } |
150 | 150 |
151 | 151 |
152 AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) { | 152 AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) { |
153 if (src->length() == 0) return src; | 153 if (src->length() == 0) return src; |
154 return CopyConstantPoolArrayWithMap(src, src->map()); | 154 return CopyConstantPoolArrayWithMap(src, src->map()); |
155 } | 155 } |
156 | 156 |
157 | 157 |
158 AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space, | 158 AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space, |
159 AllocationSpace retry_space) { | 159 AllocationSpace retry_space, |
| 160 Alignment alignment) { |
160 DCHECK(AllowHandleAllocation::IsAllowed()); | 161 DCHECK(AllowHandleAllocation::IsAllowed()); |
161 DCHECK(AllowHeapAllocation::IsAllowed()); | 162 DCHECK(AllowHeapAllocation::IsAllowed()); |
162 DCHECK(gc_state_ == NOT_IN_GC); | 163 DCHECK(gc_state_ == NOT_IN_GC); |
163 #ifdef DEBUG | 164 #ifdef DEBUG |
164 if (FLAG_gc_interval >= 0 && AllowAllocationFailure::IsAllowed(isolate_) && | 165 if (FLAG_gc_interval >= 0 && AllowAllocationFailure::IsAllowed(isolate_) && |
165 Heap::allocation_timeout_-- <= 0) { | 166 Heap::allocation_timeout_-- <= 0) { |
166 return AllocationResult::Retry(space); | 167 return AllocationResult::Retry(space); |
167 } | 168 } |
168 isolate_->counters()->objs_since_last_full()->Increment(); | 169 isolate_->counters()->objs_since_last_full()->Increment(); |
169 isolate_->counters()->objs_since_last_young()->Increment(); | 170 isolate_->counters()->objs_since_last_young()->Increment(); |
170 #endif | 171 #endif |
171 | 172 |
172 HeapObject* object; | 173 HeapObject* object; |
173 AllocationResult allocation; | 174 AllocationResult allocation; |
174 if (NEW_SPACE == space) { | 175 if (NEW_SPACE == space) { |
| 176 #ifndef V8_HOST_ARCH_64_BIT |
| 177 if (alignment == kDoubleAligned) { |
| 178 allocation = new_space_.AllocateRawDoubleAligned(size_in_bytes); |
| 179 } else { |
| 180 allocation = new_space_.AllocateRaw(size_in_bytes); |
| 181 } |
| 182 #else |
175 allocation = new_space_.AllocateRaw(size_in_bytes); | 183 allocation = new_space_.AllocateRaw(size_in_bytes); |
| 184 #endif |
176 if (always_allocate() && allocation.IsRetry() && retry_space != NEW_SPACE) { | 185 if (always_allocate() && allocation.IsRetry() && retry_space != NEW_SPACE) { |
177 space = retry_space; | 186 space = retry_space; |
178 } else { | 187 } else { |
179 if (allocation.To(&object)) { | 188 if (allocation.To(&object)) { |
180 OnAllocationEvent(object, size_in_bytes); | 189 OnAllocationEvent(object, size_in_bytes); |
181 } | 190 } |
182 return allocation; | 191 return allocation; |
183 } | 192 } |
184 } | 193 } |
185 | 194 |
186 if (OLD_SPACE == space) { | 195 if (OLD_SPACE == space) { |
| 196 #ifndef V8_HOST_ARCH_64_BIT |
| 197 if (alignment == kDoubleAligned) { |
| 198 allocation = old_space_->AllocateRawDoubleAligned(size_in_bytes); |
| 199 } else { |
| 200 allocation = old_space_->AllocateRaw(size_in_bytes); |
| 201 } |
| 202 #else |
187 allocation = old_space_->AllocateRaw(size_in_bytes); | 203 allocation = old_space_->AllocateRaw(size_in_bytes); |
| 204 #endif |
188 } else if (CODE_SPACE == space) { | 205 } else if (CODE_SPACE == space) { |
189 if (size_in_bytes <= code_space()->AreaSize()) { | 206 if (size_in_bytes <= code_space()->AreaSize()) { |
190 allocation = code_space_->AllocateRaw(size_in_bytes); | 207 allocation = code_space_->AllocateRaw(size_in_bytes); |
191 } else { | 208 } else { |
192 // Large code objects are allocated in large object space. | 209 // Large code objects are allocated in large object space. |
193 allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE); | 210 allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE); |
194 } | 211 } |
195 } else if (LO_SPACE == space) { | 212 } else if (LO_SPACE == space) { |
196 allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE); | 213 allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE); |
197 } else { | 214 } else { |
(...skipping 489 matching lines...)
687 | 704 |
688 void VerifySmisVisitor::VisitPointers(Object** start, Object** end) { | 705 void VerifySmisVisitor::VisitPointers(Object** start, Object** end) { |
689 for (Object** current = start; current < end; current++) { | 706 for (Object** current = start; current < end; current++) { |
690 CHECK((*current)->IsSmi()); | 707 CHECK((*current)->IsSmi()); |
691 } | 708 } |
692 } | 709 } |
693 } | 710 } |
694 } // namespace v8::internal | 711 } // namespace v8::internal |
695 | 712 |
696 #endif // V8_HEAP_HEAP_INL_H_ | 713 #endif // V8_HEAP_HEAP_INL_H_ |
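
The new Alignment parameter only has an effect on 32-bit hosts (the #ifndef V8_HOST_ARCH_64_BIT blocks above): there a word is 4 bytes, so a raw allocation is only word aligned, and an object holding an unboxed double needs an explicitly 8-byte-aligned block. On 64-bit hosts every allocation is already double aligned, which is why the #else branches fall back to plain AllocateRaw. Below is a minimal call-site sketch assuming the kDoubleAligned value this CL passes through; the function name AllocateHeapNumberSketch and its body are illustrative, not code from this patch.

// A minimal sketch of a caller requesting double alignment, assuming the
// kDoubleAligned enum value from this CL. The function itself is
// hypothetical and not part of the patch.
AllocationResult Heap::AllocateHeapNumberSketch(double value) {
  const int size_in_bytes = HeapNumber::kSize;
  HeapObject* result = NULL;
  // Ask for double alignment so the 8-byte double payload can be read and
  // written with aligned accesses on 32-bit targets; on 64-bit hosts the
  // request is a no-op, matching the #else branches in AllocateRaw above.
  AllocationResult allocation =
      AllocateRaw(size_in_bytes, OLD_SPACE, OLD_SPACE, kDoubleAligned);
  if (!allocation.To(&result)) return allocation;  // propagate retry/failure
  result->set_map_no_write_barrier(heap_number_map());
  HeapNumber::cast(result)->set_value(value);
  return result;
}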