Index: src/heap/heap-inl.h
diff --git a/src/heap/heap-inl.h b/src/heap/heap-inl.h
index 6c2e36294a4460462ff470bb8d86a73302871794..bd42ddd047ca7a4625d1ea90363c185f16154b0a 100644
--- a/src/heap/heap-inl.h
+++ b/src/heap/heap-inl.h
@@ -127,7 +127,7 @@ AllocationResult Heap::AllocateOneByteInternalizedString(
   // Allocate string.
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
@@ -158,7 +158,7 @@ AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
   // Allocate string.
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
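
With retry_space gone, both internalized-string allocators pass only the size and the target space. A minimal sketch of the trimmed declaration as call sites now see it; the kWordAligned default is an assumption, since the diff only shows that alignment remains the final parameter:

    // Hypothetical post-patch declaration (default value assumed, not shown
    // in this diff):
    AllocationResult AllocateRaw(int size_in_bytes, AllocationSpace space,
                                 AllocationAlignment alignment = kWordAligned);

    // Call sites shrink from three arguments to two:
    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);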
@@ -190,7 +190,6 @@ AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
 AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
-                                   AllocationSpace retry_space,
                                    AllocationAlignment alignment) {
   DCHECK(AllowHandleAllocation::IsAllowed());
   DCHECK(AllowHeapAllocation::IsAllowed());
@@ -208,19 +207,14 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
   HeapObject* object = nullptr;
   AllocationResult allocation;
   if (NEW_SPACE == space) {
-    if (!large_object) {
+    if (large_object) {
+      space = LO_SPACE;
+    } else {
       allocation = new_space_.AllocateRaw(size_in_bytes, alignment);
-      if (always_allocate() && allocation.IsRetry() &&
-          retry_space != NEW_SPACE) {
-        space = retry_space;
-      } else {
-        if (allocation.To(&object)) {
-          OnAllocationEvent(object, size_in_bytes);
-        }
-        return allocation;
+      if (allocation.To(&object)) {
+        OnAllocationEvent(object, size_in_bytes);
       }
-    } else {
-      space = LO_SPACE;
+      return allocation;
     }
   }
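
Read as straight-line code, the new-space path after this patch comes out roughly as sketched below (all names are taken from the diff itself):

    HeapObject* object = nullptr;
    AllocationResult allocation;
    if (NEW_SPACE == space) {
      if (large_object) {
        // Oversized requests fall through to the large-object space.
        space = LO_SPACE;
      } else {
        allocation = new_space_.AllocateRaw(size_in_bytes, alignment);
        if (allocation.To(&object)) {
          OnAllocationEvent(object, size_in_bytes);
        }
        // A failed new-space allocation is returned as-is instead of being
        // redirected to retry_space; retry policy now lives with the caller.
        return allocation;
      }
    }

The design point: AllocateRaw no longer second-guesses a failed new-space allocation via always_allocate() and retry_space, so its behavior depends only on its explicit arguments.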
@@ -570,10 +564,7 @@ Isolate* Heap::isolate() {
     }                                                                      \
     (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();     \
     (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");       \
-    {                                                                      \
-      AlwaysAllocateScope __scope__(ISOLATE);                              \
-      __allocation__ = FUNCTION_CALL;                                      \
-    }                                                                      \
+    __allocation__ = FUNCTION_CALL;                                        \
Michael Starzinger
2015/09/28 16:07:50
As discussed offline: I think we should still keep
Hannes Payer (out of office)
2015/09/28 16:11:49
Done.
     RETURN_OBJECT_UNLESS_RETRY(ISOLATE, TYPE)                              \
     /* TODO(1181417): Fix this. */                                         \
     v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \
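
The truncated comment above appears to refer to the AlwaysAllocateScope removed in this hunk, and the "Done." suggests a later patch set restored it. Assuming so, a minimal sketch of the last-resort tail with the scope kept; everything here comes from the removed lines and surrounding context, only the comments are added:

    (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();     \
    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");       \
    {                                                                      \
      /* While this scope is alive the heap treats allocations as     */   \
      /* must-succeed, so the final FUNCTION_CALL retry is not        */   \
      /* refused for soft reasons before declaring OOM.               */   \
      AlwaysAllocateScope __scope__(ISOLATE);                              \
      __allocation__ = FUNCTION_CALL;                                      \
    }                                                                      \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, TYPE)                              \
    /* TODO(1181417): Fix this. */                                         \
    v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \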