Index: src/heap/heap-inl.h
diff --git a/src/heap/heap-inl.h b/src/heap/heap-inl.h
index 6f7c060d70c9e53f41fd9802f21389920d2aec2b..ca9812aaf8033f55aca767d3496840c2b54b5f67 100644
--- a/src/heap/heap-inl.h
+++ b/src/heap/heap-inl.h
@@ -123,12 +123,13 @@ AllocationResult Heap::AllocateOneByteInternalizedString(
   // Compute map and object size.
   Map* map = one_byte_internalized_string_map();
   int size = SeqOneByteString::SizeFor(str.length());
-  AllocationSpace space = SelectSpace(size, TENURED);
 
   // Allocate string.
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
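+    // Internalized strings are always tenured; AllocateRaw takes care of
+    // redirecting over-sized strings to the large object space.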
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -155,12 +156,12 @@ AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
   // Compute map and object size.
   Map* map = internalized_string_map();
   int size = SeqTwoByteString::SizeFor(str.length());
-  AllocationSpace space = SelectSpace(size, TENURED);
 
   // Allocate string.
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
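+    // Also tenured; AllocateRaw handles the over-sized case.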
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -206,33 +207,48 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
   isolate_->counters()->objs_since_last_young()->Increment();
 #endif
 
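+  // Objects bigger than Page::kMaxRegularHeapObjectSize cannot be allocated
+  // on a regular page and have to take the large object space path.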
+  bool large_object = size_in_bytes > Page::kMaxRegularHeapObjectSize;
   HeapObject* object = nullptr;
   AllocationResult allocation;
   if (NEW_SPACE == space) {
-    allocation = new_space_.AllocateRaw(size_in_bytes, alignment);
-    if (always_allocate() && allocation.IsRetry() && retry_space != NEW_SPACE) {
-      space = retry_space;
-    } else {
-      if (allocation.To(&object)) {
-        OnAllocationEvent(object, size_in_bytes);
+    if (!large_object) {
+      allocation = new_space_.AllocateRaw(size_in_bytes, alignment);
+      if (always_allocate() && allocation.IsRetry() &&
+          retry_space != NEW_SPACE) {
+        space = retry_space;
+      } else {
+        if (allocation.To(&object)) {
+          OnAllocationEvent(object, size_in_bytes);
+        }
+        return allocation;
       }
-      return allocation;
+    } else {
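+      // New space cannot hold large objects; take the LO_SPACE path below.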
+      space = LO_SPACE;
     }
   }
 
   if (OLD_SPACE == space) {
-    allocation = old_space_->AllocateRaw(size_in_bytes, alignment);
+    if (!large_object) {
+      allocation = old_space_->AllocateRaw(size_in_bytes, alignment);
+    } else {
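+      // Too large for a regular old space page; use the large object space.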
+      allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
+    }
   } else if (CODE_SPACE == space) {
-    if (size_in_bytes <= code_space()->AreaSize()) {
+    if (!large_object) {
       allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
     } else {
-      // Large code objects are allocated in large object space.
       allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE);
     }
   } else if (LO_SPACE == space) {
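+    // Direct LO_SPACE requests are only valid for genuinely large objects.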
+    DCHECK(large_object);
     allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
-  } else {
-    DCHECK(MAP_SPACE == space);
+  } else if (MAP_SPACE == space) {
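+    // Maps are small fixed-size objects and never need the large object path.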
     allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
   }
   if (allocation.To(&object)) {