Index: test/cctest/cctest.h |
diff --git a/test/cctest/cctest.h b/test/cctest/cctest.h |
index b609036f56a139f4617c53e5d46b4c207b74bc6d..a0bc09d95062cbbc9a8b12799b4fb37a2106acf5 100644 |
--- a/test/cctest/cctest.h |
+++ b/test/cctest/cctest.h |
@@ -530,16 +530,61 @@ static inline void DisableInlineAllocationSteps(v8::internal::NewSpace* space) { |
} |
+static inline int LenFromSize(int size) { |
+ return (size - i::FixedArray::kHeaderSize) / i::kPointerSize; |
+} |
+ |
+ |
+static inline void CreatePadding(i::Heap* heap, int padding_size, |
+ i::PretenureFlag tenure) { |
+ const int max_number_of_objects = 20; |
+ v8::internal::Handle<v8::internal::FixedArray> |
+ big_objects[max_number_of_objects]; |
+ i::Isolate* isolate = heap->isolate(); |
+ int allocate_memory; |
+ int length; |
+ int free_memory = padding_size; |
+ if (tenure == i::TENURED) { |
+ int current_free_memory = |
+ static_cast<int>(*heap->old_space()->allocation_limit_address() - |
+ *heap->old_space()->allocation_top_address()); |
+ CHECK(padding_size <= current_free_memory || current_free_memory == 0); |
+ } else { |
+ DisableInlineAllocationSteps(heap->new_space()); |
+ int current_free_memory = |
+ static_cast<int>(*heap->new_space()->allocation_limit_address() - |
+ *heap->new_space()->allocation_top_address()); |
+ CHECK(padding_size <= current_free_memory || current_free_memory == 0); |
+ } |
+ for (int i = 0; i < max_number_of_objects && free_memory > 0; i++) { |
+ if (free_memory > i::Page::kMaxRegularHeapObjectSize) { |
+ allocate_memory = i::Page::kMaxRegularHeapObjectSize; |
+ length = LenFromSize(allocate_memory); |
+ } else { |
+ allocate_memory = free_memory; |
+ length = LenFromSize(allocate_memory); |
+ if (length <= 0) { |
+        // Not enough room to create another fixed array. Let's create a filler. |
+        i::Address top = tenure == i::TENURED ? *heap->old_space()->allocation_top_address() : *heap->new_space()->allocation_top_address(); |
+        heap->CreateFillerObjectAt(top, free_memory); |
+ break; |
+ } |
+ } |
+ big_objects[i] = isolate->factory()->NewFixedArray(length, tenure); |
+ CHECK((tenure == i::NOT_TENURED && heap->InNewSpace(*big_objects[i])) || |
+ (tenure == i::TENURED && heap->InOldSpace(*big_objects[i]))); |
+ free_memory -= allocate_memory; |
+ } |
+} |
+ |
+ |
// Helper function that simulates a full new-space in the heap. |
static inline bool FillUpOnePage(v8::internal::NewSpace* space) { |
DisableInlineAllocationSteps(space); |
- v8::internal::AllocationResult allocation = space->AllocateRawUnaligned( |
- v8::internal::Page::kMaxRegularHeapObjectSize); |
- if (allocation.IsRetry()) return false; |
- v8::internal::HeapObject* free_space = NULL; |
- CHECK(allocation.To(&free_space)); |
- space->heap()->CreateFillerObjectAt( |
- free_space->address(), v8::internal::Page::kMaxRegularHeapObjectSize); |
+ int space_remaining = static_cast<int>(*space->allocation_limit_address() - |
+ *space->allocation_top_address()); |
+ if (space_remaining == 0) return false; |
+ CreatePadding(space->heap(), space_remaining, i::NOT_TENURED); |
return true; |
} |
@@ -553,11 +598,7 @@ static inline void AllocateAllButNBytes(v8::internal::NewSpace* space, |
CHECK(space_remaining >= extra_bytes); |
int new_linear_size = space_remaining - extra_bytes; |
if (new_linear_size == 0) return; |
- v8::internal::AllocationResult allocation = |
- space->AllocateRawUnaligned(new_linear_size); |
- v8::internal::HeapObject* free_space = NULL; |
- CHECK(allocation.To(&free_space)); |
- space->heap()->CreateFillerObjectAt(free_space->address(), new_linear_size); |
+ CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED); |
} |