Index: test/cctest/test-heap.cc
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index 38e5a471ee941c71f4aafc605325ce3bf5350712..2c85596090bd6e4f858336be88b464ed766ef64b 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -435,7 +435,7 @@
HandleScope sc(isolate);
// Check GC.
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
Handle<String> name = factory->InternalizeUtf8String("theFunction");
@@ -461,7 +461,7 @@
*Object::GetProperty(obj, prop_namex).ToHandleChecked());
}
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
// Function should be alive.
CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
@@ -551,7 +551,7 @@
}
// after gc, it should survive
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
CHECK((*h1)->IsString());
CHECK((*h2)->IsHeapNumber());
@@ -609,7 +609,7 @@
&TestWeakGlobalHandleCallback);
// Scavenge treats weak pointers as normal roots.
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
CHECK((*h1)->IsString());
CHECK((*h2)->IsHeapNumber());
@@ -647,7 +647,7 @@
// Make sure the objects are promoted.
heap->CollectGarbage(OLD_SPACE);
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
@@ -694,7 +694,7 @@
&TestWeakGlobalHandleCallback);
// Scavenge does not recognize weak references.
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
CHECK(!WeakPointerCleared);
@@ -1513,7 +1513,7 @@
// perform a scavenge while incremental marking is still running.
SimulateIncrementalMarking(CcTest::heap());
*function2.location() = NULL;
- CcTest::heap()->CollectGarbageNewSpace("test scavenge while marking");
+ CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
// Simulate one final GC to make sure the candidate queue is sane.
CcTest::heap()->CollectAllGarbage();
@@ -1787,7 +1787,7 @@
// Scavenge treats these references as strong.
for (int j = 0; j < 10; j++) {
- CcTest::heap()->CollectGarbageNewSpace();
+ CcTest::heap()->CollectGarbage(NEW_SPACE);
CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
}
@@ -1799,7 +1799,7 @@
// Get rid of f3 and f5 in the same way.
CompileRun("f3=null");
for (int j = 0; j < 10; j++) {
- CcTest::heap()->CollectGarbageNewSpace();
+ CcTest::heap()->CollectGarbage(NEW_SPACE);
CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
}
CcTest::heap()->CollectAllGarbage();
@@ -2382,7 +2382,7 @@
CHECK(old_capacity == new_capacity);
// Let the scavenger empty the new space.
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
CHECK_LE(new_space->Size(), old_capacity);
// Explicitly shrinking should halve the space capacity.
@@ -2837,7 +2837,7 @@
// Set the flags to check whether we appropriately reset them after the GC.
heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
- heap->CollectAllGarbage("GCFlags", Heap::kReduceMemoryFootprintMask);
+ heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
MarkCompactCollector* collector = heap->mark_compact_collector();
@@ -2850,11 +2850,11 @@
heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask);
CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
- heap->CollectGarbageNewSpace();
+ heap->CollectGarbage(NEW_SPACE);
// NewSpace scavenges should not overwrite the flags.
CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
- heap->CollectAllGarbage("GCFlags", Heap::kAbortIncrementalMarkingMask);
+ heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
}
@@ -3740,18 +3740,17 @@
// Triggering one GC will cause a lot of garbage to be discovered but
// evenly spread across all allocated pages.
- heap->CollectAllGarbage("triggered for preparation", |
- Heap::kFinalizeIncrementalMarkingMask); |
+ heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, |
+ "triggered for preparation"); |
CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages()); |
// Triggering subsequent GCs should cause at least half of the pages |
// to be released to the OS after at most two cycles. |
- heap->CollectAllGarbage("triggered by test 1", |
- Heap::kFinalizeIncrementalMarkingMask); |
- ; |
+ heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, |
+ "triggered by test 1"); |
CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages()); |
- heap->CollectAllGarbage("triggered by test 2", |
- Heap::kFinalizeIncrementalMarkingMask); |
+ heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, |
+ "triggered by test 2"); |
CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2); |
// Triggering a last-resort GC should cause all pages to be released to the |
@@ -4449,7 +4448,7 @@ |
CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0)); |
// First make sure we flip spaces |
- CcTest::heap()->CollectGarbageNewSpace(); |
+ CcTest::heap()->CollectGarbage(NEW_SPACE); |
// Allocate the object. |
Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED); |
@@ -5519,10 +5518,10 @@ |
} |
CHECK(weak_cell1->value()->IsFixedArray()); |
CHECK_EQ(*survivor, weak_cell2->value()); |
- heap->CollectGarbageNewSpace(); |
+ heap->CollectGarbage(NEW_SPACE); |
CHECK(weak_cell1->value()->IsFixedArray()); |
CHECK_EQ(*survivor, weak_cell2->value()); |
- heap->CollectGarbageNewSpace(); |
+ heap->CollectGarbage(NEW_SPACE); |
CHECK(weak_cell1->value()->IsFixedArray()); |
CHECK_EQ(*survivor, weak_cell2->value()); |
heap->CollectAllAvailableGarbage(); |
@@ -5553,7 +5552,7 @@ |
heap->StartIncrementalMarking(); |
} |
marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD); |
- heap->CollectGarbageNewSpace(); |
+ heap->CollectGarbage(NEW_SPACE); |
CHECK(weak_cell->value()->IsFixedArray()); |
weak_cells[i] = inner_scope.CloseAndEscape(weak_cell); |
} |
@@ -5759,8 +5758,8 @@ |
CHECK(2 * old_capacity == new_space->TotalCapacity()); |
// Call the scavenger two times to get an empty new space |
- heap->CollectGarbageNewSpace(); |
- heap->CollectGarbageNewSpace(); |
+ heap->CollectGarbage(NEW_SPACE); |
+ heap->CollectGarbage(NEW_SPACE); |
// First create a few objects which will survive a scavenge, and will get |
// promoted to the old generation later on. These objects will create |
@@ -5770,7 +5769,7 @@ |
for (int i = 0; i < number_handles; i++) { |
handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED); |
} |
- heap->CollectGarbageNewSpace(); |
+ heap->CollectGarbage(NEW_SPACE); |
// Create the first huge object which will exactly fit the first semi-space |
// page. |
@@ -5795,7 +5794,7 @@ |
// This scavenge will corrupt memory if the promotion queue is not |
// evacuated. |
- heap->CollectGarbageNewSpace(); |
+ heap->CollectGarbage(NEW_SPACE); |
} |
isolate->Dispose(); |
} |
@@ -6220,12 +6219,12 @@ |
Isolate* isolate = CcTest::i_isolate(); |
Heap* heap = isolate->heap(); |
size_t counter1 = heap->NewSpaceAllocationCounter(); |
- heap->CollectGarbageNewSpace(); |
+ heap->CollectGarbage(NEW_SPACE); |
const size_t kSize = 1024; |
AllocateInSpace(isolate, kSize, NEW_SPACE); |
size_t counter2 = heap->NewSpaceAllocationCounter(); |
CHECK_EQ(kSize, counter2 - counter1); |
- heap->CollectGarbageNewSpace(); |
+ heap->CollectGarbage(NEW_SPACE); |
size_t counter3 = heap->NewSpaceAllocationCounter(); |
CHECK_EQ(0U, counter3 - counter2); |
// Test counter overflow. |
@@ -6247,14 +6246,14 @@ |
Isolate* isolate = CcTest::i_isolate(); |
Heap* heap = isolate->heap(); |
size_t counter1 = heap->OldGenerationAllocationCounter(); |
- heap->CollectGarbageNewSpace(); |
- heap->CollectGarbageNewSpace(); |
+ heap->CollectGarbage(NEW_SPACE); |
+ heap->CollectGarbage(NEW_SPACE); |
const size_t kSize = 1024; |
AllocateInSpace(isolate, kSize, OLD_SPACE); |
size_t counter2 = heap->OldGenerationAllocationCounter(); |
// TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed. |
CHECK_LE(kSize, counter2 - counter1); |
- heap->CollectGarbageNewSpace(); |
+ heap->CollectGarbage(NEW_SPACE); |
size_t counter3 = heap->OldGenerationAllocationCounter(); |
CHECK_EQ(0u, counter3 - counter2); |
AllocateInSpace(isolate, kSize, OLD_SPACE); |