Index: test/cctest/test-heap.cc
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index 0c79e6535e0a1bce5fe6fa09ec2c930b79a14e66..e6f0ec38bdc8283576c9562c2c86babb6641935d 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -40,25 +40,6 @@
using namespace v8::internal;
-// Go through all incremental marking steps in one swoop.
-static void SimulateIncrementalMarking() {
- MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
- IncrementalMarking* marking = CcTest::heap()->incremental_marking();
- if (collector->sweeping_in_progress()) {
- collector->EnsureSweepingCompleted();
- }
- CHECK(marking->IsMarking() || marking->IsStopped());
- if (marking->IsStopped()) {
- marking->Start();
- }
- CHECK(marking->IsMarking());
- while (!marking->IsComplete()) {
- marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
- }
- CHECK(marking->IsComplete());
-}
-
-
static void CheckMap(Map* map, int type, int instance_size) {
CHECK(map->IsHeapObject());
#ifdef DEBUG
@@ -1223,7 +1204,7 @@ TEST(TestCodeFlushingIncremental) {
// Simulate several GCs that use incremental marking.
const int kAgingThreshold = 6;
for (int i = 0; i < kAgingThreshold; i++) {
- SimulateIncrementalMarking();
+ SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
}
CHECK(!function->shared()->is_compiled() || function->IsOptimized());
@@ -1237,7 +1218,7 @@ TEST(TestCodeFlushingIncremental) {
// Simulate several GCs that use incremental marking but make sure
// the loop breaks once the function is enqueued as a candidate.
for (int i = 0; i < kAgingThreshold; i++) {
- SimulateIncrementalMarking();
+ SimulateIncrementalMarking(CcTest::heap());
if (!function->next_function_link()->IsUndefined()) break;
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
}
@@ -1313,7 +1294,7 @@ TEST(TestCodeFlushingIncrementalScavenge) {
// Simulate incremental marking so that the functions are enqueued as
// code flushing candidates. Then kill one of the functions. Finally
// perform a scavenge while incremental marking is still running.
- SimulateIncrementalMarking();
+ SimulateIncrementalMarking(CcTest::heap());
*function2.location() = NULL;
CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
@@ -1367,7 +1348,7 @@ TEST(TestCodeFlushingIncrementalAbort) {
// Simulate incremental marking so that the function is enqueued as
// code flushing candidate.
- SimulateIncrementalMarking();
+ SimulateIncrementalMarking(heap);
// Enable the debugger and add a breakpoint while incremental marking
// is running so that incremental marking aborts and code flushing is
@@ -2758,7 +2739,7 @@ TEST(Regress1465) {
CompileRun("%DebugPrint(root);");
CHECK_EQ(transitions_count, transitions_before);
- SimulateIncrementalMarking();
+ SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
// Count number of live transitions after marking. Note that one transition
@@ -2923,7 +2904,7 @@ TEST(Regress2143a) {
"root.foo = 0;"
"root = new Object;");
- SimulateIncrementalMarking();
+ SimulateIncrementalMarking(CcTest::heap());
// Compile a StoreIC that performs the prepared map transition. This
// will restart incremental marking and should make sure the root is
@@ -2964,7 +2945,7 @@ TEST(Regress2143b) {
"root.foo = 0;"
"root = new Object;");
- SimulateIncrementalMarking();
+ SimulateIncrementalMarking(CcTest::heap());
// Compile an optimized LStoreNamedField that performs the prepared
// map transition. This will restart incremental marking and should
@@ -3167,7 +3148,7 @@ TEST(IncrementalMarkingClearsTypeFeedbackInfo) {
}
}
- SimulateIncrementalMarking();
+ SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(expected_length, feedback_vector->length());
@@ -3210,7 +3191,7 @@ TEST(IncrementalMarkingPreservesMonomorphicIC) {
Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
CHECK(ic_before->ic_state() == MONOMORPHIC);
- SimulateIncrementalMarking();
+ SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
@@ -3244,7 +3225,7 @@ TEST(IncrementalMarkingClearsMonomorphicIC) {
// Fire context dispose notification.
CcTest::isolate()->ContextDisposedNotification();
- SimulateIncrementalMarking();
+ SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
@@ -3285,7 +3266,7 @@ TEST(IncrementalMarkingClearsPolymorphicIC) {
// Fire context dispose notification.
CcTest::isolate()->ContextDisposedNotification();
- SimulateIncrementalMarking();
+ SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
@@ -3446,7 +3427,7 @@ TEST(Regress159140) {
// Simulate incremental marking so that the functions are enqueued as
// code flushing candidates. Then optimize one function. Finally
// finish the GC to complete code flushing.
- SimulateIncrementalMarking();
+ SimulateIncrementalMarking(heap);
CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
heap->CollectAllGarbage(Heap::kNoGCFlags);
@@ -3493,7 +3474,7 @@ TEST(Regress165495) {
// Simulate incremental marking so that unoptimized code is flushed
// even though it still is cached in the optimized code map.
- SimulateIncrementalMarking();
+ SimulateIncrementalMarking(heap);
heap->CollectAllGarbage(Heap::kNoGCFlags);
// Make a new closure that will get code installed from the code map.
@@ -3561,7 +3542,7 @@ TEST(Regress169209) {
}
// Simulate incremental marking and collect code flushing candidates.
- SimulateIncrementalMarking();
+ SimulateIncrementalMarking(heap);
CHECK(shared1->code()->gc_metadata() != NULL);
// Optimize function and make sure the unoptimized code is replaced.
@@ -3707,7 +3688,7 @@ TEST(Regress168801) {
// Simulate incremental marking so that unoptimized function is enqueued as a
// candidate for code flushing. The shared function info however will not be
// explicitly enqueued.
- SimulateIncrementalMarking();
+ SimulateIncrementalMarking(heap);
// Now optimize the function so that it is taken off the candidate list.
{
@@ -3764,7 +3745,7 @@ TEST(Regress173458) {
// Simulate incremental marking so that unoptimized function is enqueued as a
// candidate for code flushing. The shared function info however will not be
// explicitly enqueued.
- SimulateIncrementalMarking();
+ SimulateIncrementalMarking(heap);
// Now enable the debugger which in turn will disable code flushing.
CHECK(isolate->debug()->Load());
@@ -4012,7 +3993,7 @@ TEST(NoWeakHashTableLeakWithIncrementalMarking) {
if (!isolate->use_crankshaft()) return;
HandleScope outer_scope(heap->isolate());
for (int i = 0; i < 3; i++) {
- SimulateIncrementalMarking();
+ SimulateIncrementalMarking(heap);
{
LocalContext context;
HandleScope scope(heap->isolate());
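
Note: the removed helper is presumably relocated to a shared test utility (outside this hunk) and now takes the target heap explicitly, matching the updated call sites such as SimulateIncrementalMarking(CcTest::heap()) and SimulateIncrementalMarking(heap). Below is a minimal sketch of that parameterized helper, reconstructed from the body removed above; its actual declaration and location are assumptions and not part of this patch.

// Sketch (assumption): the helper parameterized over the target heap, with
// the same body as the version removed from test-heap.cc above.
static void SimulateIncrementalMarking(Heap* heap) {
  MarkCompactCollector* collector = heap->mark_compact_collector();
  IncrementalMarking* marking = heap->incremental_marking();
  // Finish any pending sweeping so incremental marking can (re)start cleanly.
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    marking->Start();
  }
  CHECK(marking->IsMarking());
  // Drive marking to completion in large (1 MB) steps without a stack guard.
  while (!marking->IsComplete()) {
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }
  CHECK(marking->IsComplete());
}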