Index: test/cctest/heap/test-heap.cc
diff --git a/test/cctest/heap/test-heap.cc b/test/cctest/heap/test-heap.cc
index 81c4aed7036e82edd3f322dc03532b9c4e07bca0..cf897b0b1a41db6e868fd68ad4c4c38b3c2f7993 100644
--- a/test/cctest/heap/test-heap.cc
+++ b/test/cctest/heap/test-heap.cc
@@ -138,7 +138,7 @@ HEAP_TEST(TestNewSpaceRefsInCopiedCode) {
Handle<Code> copy(tmp);
CheckEmbeddedObjectsAreEqual(code, copy);
- heap->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
CheckEmbeddedObjectsAreEqual(code, copy);
}
@@ -478,12 +478,11 @@ TEST(Tagging) {
TEST(GarbageCollection) {
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
- Heap* heap = isolate->heap();
Factory* factory = isolate->factory();
HandleScope sc(isolate);
// Check GC.
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
Handle<JSGlobalObject> global(
CcTest::i_isolate()->context()->global_object());
@@ -510,7 +509,7 @@ TEST(GarbageCollection) {
*Object::GetProperty(obj, prop_namex).ToHandleChecked());
}
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
// Function should be alive.
CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
@@ -529,7 +528,7 @@ TEST(GarbageCollection) {
}
// After gc, it should survive.
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
Handle<Object> obj =
@@ -578,7 +577,6 @@ TEST(LocalHandles) {
TEST(GlobalHandles) {
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
- Heap* heap = isolate->heap();
Factory* factory = isolate->factory();
GlobalHandles* global_handles = isolate->global_handles();
@@ -600,7 +598,7 @@ TEST(GlobalHandles) {
}
// after gc, it should survive
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
CHECK((*h1)->IsString());
CHECK((*h2)->IsHeapNumber());
@@ -633,7 +631,6 @@ TEST(WeakGlobalHandlesScavenge) {
i::FLAG_stress_compaction = false;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
- Heap* heap = isolate->heap();
Factory* factory = isolate->factory();
GlobalHandles* global_handles = isolate->global_handles();
@@ -658,7 +655,7 @@ TEST(WeakGlobalHandlesScavenge) {
&TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
// Scavenge treats weak pointers as normal roots.
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
CHECK((*h1)->IsString());
CHECK((*h2)->IsHeapNumber());
@@ -695,8 +692,8 @@ TEST(WeakGlobalHandlesMark) {
}
// Make sure the objects are promoted.
- heap->CollectGarbage(OLD_SPACE);
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
@@ -707,7 +704,7 @@ TEST(WeakGlobalHandlesMark) {
CHECK(!GlobalHandles::IsNearDeath(h2.location()));
// Incremental marking potentially marked handles before they turned weak.
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK((*h1)->IsString());
@@ -722,7 +719,6 @@ TEST(DeleteWeakGlobalHandle) {
i::FLAG_stress_compaction = false;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
- Heap* heap = isolate->heap();
Factory* factory = isolate->factory();
GlobalHandles* global_handles = isolate->global_handles();
@@ -743,12 +739,12 @@ TEST(DeleteWeakGlobalHandle) {
v8::WeakCallbackType::kParameter);
// Scanvenge does not recognize weak reference.
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
CHECK(!WeakPointerCleared);
// Mark-compact treats weak reference properly.
- heap->CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
CHECK(WeakPointerCleared);
}
@@ -764,7 +760,7 @@ TEST(DoNotPromoteWhiteObjectsOnScavenge) {
CHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(HeapObject::cast(*white))));
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
CHECK(heap->InNewSpace(*white));
}
@@ -780,14 +776,15 @@ TEST(PromoteGreyOrBlackObjectsOnScavenge) {
IncrementalMarking* marking = heap->incremental_marking();
marking->Stop();
- heap->StartIncrementalMarking();
+ heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
+ i::GarbageCollectionReason::kTesting);
while (
Marking::IsWhite(ObjectMarking::MarkBitFrom(HeapObject::cast(*marked)))) {
marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
IncrementalMarking::DO_NOT_FORCE_COMPLETION);
}
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
CHECK(!heap->InNewSpace(*marked));
}
@@ -835,7 +832,7 @@ TEST(BytecodeArray) {
// evacuation candidate.
Page* evac_page = Page::FromAddress(constant_pool->address());
evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// BytecodeArray should survive.
CHECK_EQ(array->length(), kRawBytesSize);
@@ -1305,14 +1302,20 @@ UNINITIALIZED_TEST(TestCodeFlushing) {
CHECK(function->shared()->is_compiled());
// The code will survive at least two GCs.
- i_isolate->heap()->CollectAllGarbage();
- i_isolate->heap()->CollectAllGarbage();
+ i_isolate->heap()->CollectAllGarbage(
+ i::Heap::kFinalizeIncrementalMarkingMask,
+ i::GarbageCollectionReason::kTesting);
+ i_isolate->heap()->CollectAllGarbage(
+ i::Heap::kFinalizeIncrementalMarkingMask,
+ i::GarbageCollectionReason::kTesting);
CHECK(function->shared()->is_compiled());
// Simulate several GCs that use full marking.
const int kAgingThreshold = 6;
for (int i = 0; i < kAgingThreshold; i++) {
- i_isolate->heap()->CollectAllGarbage();
+ i_isolate->heap()->CollectAllGarbage(
+ i::Heap::kFinalizeIncrementalMarkingMask,
+ i::GarbageCollectionReason::kTesting);
}
// foo should no longer be in the compilation cache
@@ -1358,12 +1361,12 @@ TEST(TestCodeFlushingPreAged) {
CHECK(function->shared()->is_compiled());
// The code has been run so will survive at least one GC.
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK(function->shared()->is_compiled());
// The code was only run once, so it should be pre-aged and collected on the
// next GC.
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK(!function->shared()->is_compiled() || function->IsOptimized());
// Execute the function again twice, and ensure it is reset to the young age.
@@ -1373,14 +1376,14 @@ TEST(TestCodeFlushingPreAged) {
}
// The code will survive at least two GC now that it is young again.
- CcTest::heap()->CollectAllGarbage();
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK(function->shared()->is_compiled());
// Simulate several GCs that use full marking.
const int kAgingThreshold = 6;
for (int i = 0; i < kAgingThreshold; i++) {
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}
// foo should no longer be in the compilation cache
@@ -1423,15 +1426,15 @@ TEST(TestCodeFlushingIncremental) {
CHECK(function->shared()->is_compiled());
// The code will survive at least two GCs.
- CcTest::heap()->CollectAllGarbage();
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK(function->shared()->is_compiled());
// Simulate several GCs that use incremental marking.
const int kAgingThreshold = 6;
for (int i = 0; i < kAgingThreshold; i++) {
heap::SimulateIncrementalMarking(CcTest::heap());
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}
CHECK(!function->shared()->is_compiled() || function->IsOptimized());
CHECK(!function->is_compiled() || function->IsOptimized());
@@ -1447,7 +1450,7 @@ TEST(TestCodeFlushingIncremental) {
heap::SimulateIncrementalMarking(CcTest::heap());
if (!function->next_function_link()->IsUndefined(CcTest::i_isolate()))
break;
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}
// Force optimization while incremental marking is active and while
@@ -1457,7 +1460,7 @@ TEST(TestCodeFlushingIncremental) {
}
// Simulate one final GC to make sure the candidate queue is sane.
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK(function->shared()->is_compiled() || !function->IsOptimized());
CHECK(function->is_compiled() || !function->IsOptimized());
}
@@ -1486,7 +1489,7 @@ TEST(TestCodeFlushingIncrementalScavenge) {
Handle<String> bar_name = factory->InternalizeUtf8String("bar");
// Perfrom one initial GC to enable code flushing.
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// This compile will add the code to the compilation cache.
{ v8::HandleScope scope(CcTest::isolate());
@@ -1523,10 +1526,10 @@ TEST(TestCodeFlushingIncrementalScavenge) {
// perform a scavenge while incremental marking is still running.
heap::SimulateIncrementalMarking(CcTest::heap(), false);
*function2.location() = NULL;
- CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
+ CcTest::CollectGarbage(NEW_SPACE);
// Simulate one final GC to make sure the candidate queue is sane.
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK(!function->shared()->is_compiled() || function->IsOptimized());
CHECK(!function->is_compiled() || function->IsOptimized());
}
@@ -1563,8 +1566,8 @@ TEST(TestCodeFlushingIncrementalAbort) {
CHECK(function->shared()->is_compiled());
// The code will survive at least two GCs.
- heap->CollectAllGarbage();
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK(function->shared()->is_compiled());
// Bump the code age so that flushing is triggered.
@@ -1593,7 +1596,7 @@ TEST(TestCodeFlushingIncrementalAbort) {
}
// Simulate one final GC to make sure the candidate queue is sane.
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK(function->shared()->is_compiled() || !function->IsOptimized());
CHECK(function->is_compiled() || !function->IsOptimized());
}
@@ -1649,7 +1652,6 @@ TEST(CompilationCacheCachingBehavior) {
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
- Heap* heap = isolate->heap();
CompilationCache* compilation_cache = isolate->compilation_cache();
LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
@@ -1680,7 +1682,7 @@ TEST(CompilationCacheCachingBehavior) {
// (Unless --optimize-for-size, in which case it might get collected
// immediately.)
if (!FLAG_optimize_for_size) {
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
info = compilation_cache->LookupScript(
source, Handle<Object>(), 0, 0,
v8::ScriptOriginOptions(false, true, false), native_context,
@@ -1698,7 +1700,7 @@ TEST(CompilationCacheCachingBehavior) {
info.ToHandleChecked()->code()->MakeOlder(EVEN_MARKING_PARITY);
}
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// Ensure code aging cleared the entry from the cache.
info = compilation_cache->LookupScript(
source, Handle<Object>(), 0, 0,
@@ -1761,7 +1763,6 @@ TEST(TestInternalWeakLists) {
static const int kNumTestContexts = 10;
Isolate* isolate = CcTest::i_isolate();
- Heap* heap = isolate->heap();
HandleScope scope(isolate);
v8::Local<v8::Context> ctx[kNumTestContexts];
if (!isolate->use_crankshaft()) return;
@@ -1775,7 +1776,7 @@ TEST(TestInternalWeakLists) {
// Collect garbage that might have been created by one of the
// installed extensions.
isolate->compilation_cache()->Clear();
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(i + 1, CountNativeContexts());
@@ -1801,29 +1802,29 @@ TEST(TestInternalWeakLists) {
// Scavenge treats these references as strong.
for (int j = 0; j < 10; j++) {
- CcTest::heap()->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
}
// Mark compact handles the weak references.
isolate->compilation_cache()->Clear();
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
// Get rid of f3 and f5 in the same way.
CompileRun("f3=null");
for (int j = 0; j < 10; j++) {
- CcTest::heap()->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
}
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
CompileRun("f5=null");
for (int j = 0; j < 10; j++) {
- CcTest::heap()->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
}
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
ctx[i]->Exit();
@@ -1831,7 +1832,7 @@ TEST(TestInternalWeakLists) {
// Force compilation cache cleanup.
CcTest::heap()->NotifyContextDisposed(true);
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// Dispose the native contexts one by one.
for (int i = 0; i < kNumTestContexts; i++) {
@@ -1842,12 +1843,12 @@ TEST(TestInternalWeakLists) {
// Scavenge treats these references as strong.
for (int j = 0; j < 10; j++) {
- CcTest::heap()->CollectGarbage(i::NEW_SPACE);
+ CcTest::CollectGarbage(i::NEW_SPACE);
CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
}
// Mark compact handles the weak references.
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
}
@@ -1863,7 +1864,8 @@ static int CountNativeContextsWithGC(Isolate* isolate, int n) {
Handle<Object> object(heap->native_contexts_list(), isolate);
while (!object->IsUndefined(isolate)) {
count++;
- if (count == n) heap->CollectAllGarbage();
+ if (count == n)
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
object =
Handle<Object>(Context::cast(*object)->next_context_link(), isolate);
}
@@ -1884,7 +1886,10 @@ static int CountOptimizedUserFunctionsWithGC(v8::Local<v8::Context> context,
while (object->IsJSFunction() &&
!Handle<JSFunction>::cast(object)->shared()->IsBuiltin()) {
count++;
- if (count == n) isolate->heap()->CollectAllGarbage();
+ if (count == n)
+ isolate->heap()->CollectAllGarbage(
+ i::Heap::kFinalizeIncrementalMarkingMask,
+ i::GarbageCollectionReason::kTesting);
object = Handle<Object>(
Object::cast(JSFunction::cast(*object)->next_function_link()),
isolate);
@@ -1967,7 +1972,7 @@ TEST(TestSizeOfRegExpCode) {
// Get initial heap size after several full GCs, which will stabilize
// the heap size and return with sweeping finished completely.
- CcTest::heap()->CollectAllAvailableGarbage("initial cleanup");
+ CcTest::CollectAllAvailableGarbage();
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
@@ -1975,11 +1980,11 @@ TEST(TestSizeOfRegExpCode) {
int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
CompileRun("'foo'.match(reg_exp_source);");
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());
CompileRun("'foo'.match(half_size_reg_exp);");
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
int size_with_optimized_regexp =
static_cast<int>(CcTest::heap()->SizeOfObjects());
@@ -2005,7 +2010,7 @@ HEAP_TEST(TestSizeOfObjects) {
// Get initial heap size after several full GCs, which will stabilize
// the heap size and return with sweeping finished completely.
- heap->CollectAllAvailableGarbage("initial cleanup");
+ CcTest::CollectAllAvailableGarbage();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
}
@@ -2026,7 +2031,7 @@ HEAP_TEST(TestSizeOfObjects) {
// The heap size should go back to initial size after a full GC, even
// though sweeping didn't finish yet.
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// Normally sweeping would not be complete here, but no guarantees.
CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
// Waiting for sweeper threads should not change heap size.
@@ -2400,7 +2405,7 @@ TEST(GrowAndShrinkNewSpace) {
CHECK(old_capacity == new_capacity);
// Let the scavenger empty the new space.
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
CHECK_LE(new_space->Size(), old_capacity);
// Explicitly shrinking should halve the space capacity.
@@ -2434,7 +2439,7 @@ TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
new_capacity = new_space->TotalCapacity();
CHECK(2 * old_capacity == new_capacity);
FillUpNewSpace(new_space);
- heap->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
new_capacity = new_space->TotalCapacity();
CHECK(old_capacity == new_capacity);
}
@@ -2465,7 +2470,7 @@ TEST(LeakNativeContextViaMap) {
v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
}
- CcTest::heap()->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
CHECK_EQ(2, NumberOfGlobalObjects());
{
@@ -2491,10 +2496,10 @@ TEST(LeakNativeContextViaMap) {
ctx1p.Reset();
isolate->ContextDisposedNotification();
}
- CcTest::heap()->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
CHECK_EQ(1, NumberOfGlobalObjects());
ctx2p.Reset();
- CcTest::heap()->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
CHECK_EQ(0, NumberOfGlobalObjects());
}
@@ -2514,7 +2519,7 @@ TEST(LeakNativeContextViaFunction) {
v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
}
- CcTest::heap()->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
CHECK_EQ(2, NumberOfGlobalObjects());
{
@@ -2540,10 +2545,10 @@ TEST(LeakNativeContextViaFunction) {
ctx1p.Reset();
isolate->ContextDisposedNotification();
}
- CcTest::heap()->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
CHECK_EQ(1, NumberOfGlobalObjects());
ctx2p.Reset();
- CcTest::heap()->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
CHECK_EQ(0, NumberOfGlobalObjects());
}
@@ -2561,7 +2566,7 @@ TEST(LeakNativeContextViaMapKeyed) {
v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
}
- CcTest::heap()->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
CHECK_EQ(2, NumberOfGlobalObjects());
{
@@ -2587,10 +2592,10 @@ TEST(LeakNativeContextViaMapKeyed) {
ctx1p.Reset();
isolate->ContextDisposedNotification();
}
- CcTest::heap()->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
CHECK_EQ(1, NumberOfGlobalObjects());
ctx2p.Reset();
- CcTest::heap()->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
CHECK_EQ(0, NumberOfGlobalObjects());
}
@@ -2608,7 +2613,7 @@ TEST(LeakNativeContextViaMapProto) {
v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
}
- CcTest::heap()->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
CHECK_EQ(2, NumberOfGlobalObjects());
{
@@ -2638,10 +2643,10 @@ TEST(LeakNativeContextViaMapProto) {
ctx1p.Reset();
isolate->ContextDisposedNotification();
}
- CcTest::heap()->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
CHECK_EQ(1, NumberOfGlobalObjects());
ctx2p.Reset();
- CcTest::heap()->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
CHECK_EQ(0, NumberOfGlobalObjects());
}
@@ -2672,7 +2677,8 @@ TEST(InstanceOfStubWriteBarrier) {
IncrementalMarking* marking = CcTest::heap()->incremental_marking();
marking->Stop();
- CcTest::heap()->StartIncrementalMarking();
+ CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
+ i::GarbageCollectionReason::kTesting);
i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
@@ -2699,7 +2705,7 @@ TEST(InstanceOfStubWriteBarrier) {
}
CcTest::heap()->incremental_marking()->set_should_hurry(true);
- CcTest::heap()->CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
}
namespace {
@@ -2745,11 +2751,12 @@ TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
// Make sure incremental marking it not running.
CcTest::heap()->incremental_marking()->Stop();
- CcTest::heap()->StartIncrementalMarking();
+ CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
+ i::GarbageCollectionReason::kTesting);
// The following calls will increment CcTest::heap()->global_ic_age().
CcTest::isolate()->ContextDisposedNotification();
heap::SimulateIncrementalMarking(CcTest::heap());
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
CHECK_EQ(0, f->shared()->opt_count());
@@ -2793,7 +2800,7 @@ TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
// The following two calls will increment CcTest::heap()->global_ic_age().
CcTest::isolate()->ContextDisposedNotification();
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
CHECK_EQ(0, f->shared()->opt_count());
@@ -2810,7 +2817,7 @@ HEAP_TEST(GCFlags) {
// Set the flags to check whether we appropriately resets them after the GC.
heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
- heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
+ CcTest::CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
MarkCompactCollector* collector = heap->mark_compact_collector();
@@ -2820,14 +2827,15 @@ HEAP_TEST(GCFlags) {
IncrementalMarking* marking = heap->incremental_marking();
marking->Stop();
- heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask);
+ heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask,
+ i::GarbageCollectionReason::kTesting);
CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
// NewSpace scavenges should not overwrite the flags.
CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
- heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
}
@@ -2839,7 +2847,8 @@ TEST(IdleNotificationFinishMarking) {
heap::SimulateFullSpace(CcTest::heap()->old_space());
IncrementalMarking* marking = CcTest::heap()->incremental_marking();
marking->Stop();
- CcTest::heap()->StartIncrementalMarking();
+ CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
+ i::GarbageCollectionReason::kTesting);
CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count);
@@ -3361,7 +3370,7 @@ TEST(Regress1465) {
CHECK_EQ(transitions_count, transitions_before);
heap::SimulateIncrementalMarking(CcTest::heap());
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// Count number of live transitions after marking. Note that one transition
// is left, because 'o' still holds an instance of one transition target.
@@ -3425,7 +3434,7 @@ TEST(TransitionArrayShrinksDuringAllocToZero) {
"root = new F");
root = GetByName("root");
AddPropertyTo(2, root, "funny");
- CcTest::heap()->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
// Count number of live transitions after marking. Note that one transition
// is left, because 'o' still holds an instance of one transition target.
@@ -3452,7 +3461,7 @@ TEST(TransitionArrayShrinksDuringAllocToOne) {
root = GetByName("root");
AddPropertyTo(2, root, "funny");
- CcTest::heap()->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
// Count number of live transitions after marking. Note that one transition
// is left, because 'o' still holds an instance of one transition target.
@@ -3479,7 +3488,7 @@ TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
root = GetByName("root");
AddPropertyTo(0, root, "prop9");
- CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
// Count number of live transitions after marking. Note that one transition
// is left, because 'o' still holds an instance of one transition target.
@@ -3545,7 +3554,7 @@ TEST(Regress2143a) {
CcTest::heap()->AgeInlineCaches();
// Explicitly request GC to perform final marking step and sweeping.
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
CcTest::global()
@@ -3588,7 +3597,7 @@ TEST(Regress2143b) {
CcTest::heap()->AgeInlineCaches();
// Explicitly request GC to perform final marking step and sweeping.
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
CcTest::global()
@@ -3639,17 +3648,14 @@ TEST(ReleaseOverReservedPages) {
// Triggering one GC will cause a lot of garbage to be discovered but
// even spread across all allocated pages.
- heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
- "triggered for preparation");
+ CcTest::CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask);
CHECK_GE(overall_page_count, old_space->CountTotalPages());
// Triggering subsequent GCs should cause at least half of the pages
// to be released to the OS after at most two cycles.
- heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
- "triggered by test 1");
+ CcTest::CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask);
CHECK_GE(overall_page_count, old_space->CountTotalPages());
- heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
- "triggered by test 2");
+ CcTest::CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask);
CHECK_GE(overall_page_count, old_space->CountTotalPages() * 2);
// Triggering a last-resort GC should cause all pages to be released to the
@@ -3659,7 +3665,7 @@ TEST(ReleaseOverReservedPages) {
// first page should be small in order to reduce memory used when the VM
// boots, but if the 20 small arrays don't fit on the first page then that's
// an indication that it is too small.
- heap->CollectAllAvailableGarbage("triggered really hard");
+ CcTest::CollectAllAvailableGarbage();
CHECK_EQ(initial_page_count, old_space->CountTotalPages());
}
@@ -3744,7 +3750,7 @@ TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeakCell());
heap::SimulateIncrementalMarking(CcTest::heap());
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot1)))
->cleared());
@@ -3801,7 +3807,7 @@ TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
heap::SimulateIncrementalMarking(CcTest::heap());
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
}
@@ -3822,7 +3828,7 @@ TEST(IncrementalMarkingPreservesMonomorphicIC) {
CheckVectorIC(f, 0, MONOMORPHIC);
heap::SimulateIncrementalMarking(CcTest::heap());
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CheckVectorIC(f, 0, MONOMORPHIC);
}
@@ -3859,7 +3865,7 @@ TEST(IncrementalMarkingPreservesPolymorphicIC) {
// Fire context dispose notification.
heap::SimulateIncrementalMarking(CcTest::heap());
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CheckVectorIC(f, 0, POLYMORPHIC);
}
@@ -3897,7 +3903,7 @@ TEST(ContextDisposeDoesntClearPolymorphicIC) {
// Fire context dispose notification.
CcTest::isolate()->ContextDisposedNotification();
heap::SimulateIncrementalMarking(CcTest::heap());
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CheckVectorIC(f, 0, POLYMORPHIC);
}
@@ -3939,7 +3945,8 @@ void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
v8::Local<v8::String> source_string =
v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked();
- i_isolate->heap()->CollectAllAvailableGarbage();
+ i_isolate->heap()->CollectAllAvailableGarbage(
+ i::GarbageCollectionReason::kTesting);
v8::Script::Compile(ctx, source_string)
.ToLocalChecked()
->Run(ctx)
@@ -3950,7 +3957,8 @@ void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
CHECK(!resource->IsDisposed());
CompileRun(accessor);
- i_isolate->heap()->CollectAllAvailableGarbage();
+ i_isolate->heap()->CollectAllAvailableGarbage(
+ i::GarbageCollectionReason::kTesting);
// External source has been released.
CHECK(resource->IsDisposed());
@@ -4026,7 +4034,7 @@ TEST(Regress159140) {
HandleScope scope(isolate);
// Perform one initial GC to enable code flushing.
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// Prepare several closures that are all eligible for code flushing
// because all reachable ones are not optimized. Make sure that the
@@ -4070,7 +4078,7 @@ TEST(Regress159140) {
// finish the GC to complete code flushing.
heap::SimulateIncrementalMarking(heap);
CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// Unoptimized code is missing and the deoptimizer will go ballistic.
CompileRun("g('bozo');");
@@ -4085,7 +4093,7 @@ TEST(Regress165495) {
HandleScope scope(isolate);
// Perform one initial GC to enable code flushing.
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// Prepare an optimized closure that the optimized code map will get
// populated. Then age the unoptimized code to trigger code flushing
@@ -4115,7 +4123,7 @@ TEST(Regress165495) {
// Simulate incremental marking so that unoptimized code is flushed
// even though it still is cached in the optimized code map.
heap::SimulateIncrementalMarking(heap);
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// Make a new closure that will get code installed from the code map.
// Unoptimized code is missing and the deoptimizer will go ballistic.
@@ -4133,7 +4141,7 @@ TEST(Regress169209) {
HandleScope scope(isolate);
// Perform one initial GC to enable code flushing.
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// Prepare a shared function info eligible for code flushing for which
// the unoptimized code will be replaced during optimization.
@@ -4190,7 +4198,7 @@ TEST(Regress169209) {
"g(false);");
// Finish garbage collection cycle.
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK(shared1->code()->gc_metadata() == NULL);
}
@@ -4231,7 +4239,7 @@ TEST(Regress169928) {
.FromJust());
// First make sure we flip spaces
- CcTest::heap()->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
// Allocate the object.
Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
@@ -4341,7 +4349,7 @@ TEST(Regress514122) {
HandleScope scope(isolate);
// Perfrom one initial GC to enable code flushing.
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// Prepare function whose optimized code map we can use.
Handle<SharedFunctionInfo> shared;
@@ -4419,7 +4427,7 @@ TEST(Regress514122) {
}
// Trigger a GC to flush out the bug.
- heap->CollectGarbage(i::OLD_SPACE, "fire in the hole");
+ CcTest::CollectGarbage(i::OLD_SPACE);
boomer->Print();
}
@@ -4433,7 +4441,6 @@ TEST(OptimizedCodeMapReuseEntries) {
CcTest::InitializeVM();
v8::Isolate* v8_isolate = CcTest::isolate();
Isolate* isolate = CcTest::i_isolate();
- Heap* heap = isolate->heap();
HandleScope scope(isolate);
// Create 3 contexts, allow the 2nd one to be disposed, and verify that
@@ -4449,7 +4456,7 @@ TEST(OptimizedCodeMapReuseEntries) {
.ToLocalChecked();
const char* toplevel = "foo(3); %OptimizeFunctionOnNextCall(foo); foo(3);";
// Perfrom one initial GC to enable code flushing.
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
c1->Enter();
indep->BindToCurrentContext()->Run(c1).ToLocalChecked();
@@ -4483,7 +4490,7 @@ TEST(OptimizedCodeMapReuseEntries) {
// Now, collect garbage. Context c2 should have no roots to it, and it's
// entry in the optimized code map should be free for a new context.
for (int i = 0; i < 4; i++) {
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}
Handle<FixedArray> optimized_code_map =
@@ -4538,11 +4545,10 @@ TEST(Regress513496) {
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
- Heap* heap = isolate->heap();
HandleScope scope(isolate);
// Perfrom one initial GC to enable code flushing.
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// Prepare an optimized closure with containing an inlined function. Then age
// the inlined unoptimized code to trigger code flushing but make sure the
@@ -4586,7 +4592,7 @@ TEST(Regress513496) {
// Finish a full GC cycle so that the unoptimized code of 'g' is flushed even
// though the optimized code for 'f' is reachable via the optimized code map.
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// Make a new closure that will get code installed from the code map.
// Unoptimized code is missing and the deoptimizer will go ballistic.
@@ -4629,7 +4635,7 @@ TEST(LargeObjectSlotRecording) {
}
// Move the evaucation candidate object.
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// Verify that the pointers in the large object got updated.
for (int i = 0; i < size; i += kStep) {
@@ -4676,7 +4682,8 @@ TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
"f(10 * 1024 * 1024);");
IncrementalMarking* marking = CcTest::heap()->incremental_marking();
if (marking->IsStopped()) {
- CcTest::heap()->StartIncrementalMarking();
+ CcTest::heap()->StartIncrementalMarking(
+ i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
}
// This big step should be sufficient to mark the whole array.
marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
@@ -4787,7 +4794,7 @@ TEST(EnsureAllocationSiteDependentCodesProcessed) {
// Now make sure that a gc should get rid of the function, even though we
// still have the allocation site alive.
for (int i = 0; i < 4; i++) {
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}
// The site still exists because of our global handle, but the code is no
@@ -4835,7 +4842,7 @@ TEST(CellsInOptimizedCodeAreWeak) {
// Now make sure that a gc should get rid of the function
for (int i = 0; i < 4; i++) {
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}
CHECK(code->marked_for_deoptimization());
@@ -4878,7 +4885,7 @@ TEST(ObjectsInOptimizedCodeAreWeak) {
// Now make sure that a gc should get rid of the function
for (int i = 0; i < 4; i++) {
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}
CHECK(code->marked_for_deoptimization());
@@ -4928,8 +4935,8 @@ TEST(NewSpaceObjectsInOptimizedCode) {
.ToLocalChecked())));
CHECK(heap->InNewSpace(*foo));
- heap->CollectGarbage(NEW_SPACE);
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
CHECK(!heap->InNewSpace(*foo));
#ifdef VERIFY_HEAP
heap->Verify();
@@ -4940,7 +4947,7 @@ TEST(NewSpaceObjectsInOptimizedCode) {
// Now make sure that a gc should get rid of the function
for (int i = 0; i < 4; i++) {
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}
CHECK(code->marked_for_deoptimization());
@@ -4963,7 +4970,7 @@ TEST(NoWeakHashTableLeakWithIncrementalMarking) {
// Get a clean slate regarding optimized functions on the heap.
i::Deoptimizer::DeoptimizeAll(isolate);
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
if (!isolate->use_crankshaft()) return;
HandleScope outer_scope(heap->isolate());
@@ -4987,7 +4994,7 @@ TEST(NoWeakHashTableLeakWithIncrementalMarking) {
CompileRun(source.start());
}
// We have to abort incremental marking here to abandon black pages.
- heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
}
int elements = 0;
if (heap->weak_object_to_code_table()->IsHashTable()) {
@@ -5036,7 +5043,7 @@ TEST(NextCodeLinkIsWeak) {
if (!isolate->use_crankshaft()) return;
HandleScope outer_scope(heap->isolate());
Handle<Code> code;
- heap->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
int code_chain_length_before, code_chain_length_after;
{
HandleScope scope(heap->isolate());
@@ -5050,7 +5057,7 @@ TEST(NextCodeLinkIsWeak) {
code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
CompileRun("mortal = null; immortal = null;");
}
- heap->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
// Now mortal code should be dead.
code_chain_length_after = GetCodeChainLength(*code);
CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
@@ -5081,7 +5088,7 @@ TEST(NextCodeLinkIsWeak2) {
if (!isolate->use_crankshaft()) return;
HandleScope outer_scope(heap->isolate());
- heap->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
Handle<Code> new_head;
Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
@@ -5094,7 +5101,7 @@ TEST(NextCodeLinkIsWeak2) {
context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
new_head = scope.CloseAndEscape(immortal);
}
- heap->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
// Now mortal code should be dead.
CHECK_EQ(*old_head, new_head->next_code_link());
}
@@ -5144,8 +5151,7 @@ TEST(WeakFunctionInConstructor) {
}
weak_ic_cleared = false;
garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
- Heap* heap = CcTest::i_isolate()->heap();
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK(weak_ic_cleared);
// We've determined the constructor in createObj has had it's weak cell
@@ -5157,7 +5163,7 @@ TEST(WeakFunctionInConstructor) {
Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
CHECK(slot_value->IsWeakCell());
if (WeakCell::cast(slot_value)->cleared()) break;
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}
Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
@@ -5187,8 +5193,7 @@ void CheckWeakness(const char* source) {
}
weak_ic_cleared = false;
garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
- Heap* heap = CcTest::i_isolate()->heap();
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK(weak_ic_cleared);
}
@@ -5382,7 +5387,6 @@ TEST(MonomorphicStaysMonomorphicAfterGC) {
if (FLAG_always_opt) return;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
- Heap* heap = isolate->heap();
v8::HandleScope scope(CcTest::isolate());
CompileRun(
"function loadIC(obj) {"
@@ -5401,7 +5405,7 @@ TEST(MonomorphicStaysMonomorphicAfterGC) {
v8::HandleScope scope(CcTest::isolate());
CompileRun("(testIC())");
}
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CheckIC(loadIC, Code::LOAD_IC, 0, MONOMORPHIC);
{
v8::HandleScope scope(CcTest::isolate());
@@ -5415,7 +5419,6 @@ TEST(PolymorphicStaysPolymorphicAfterGC) {
if (FLAG_always_opt) return;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
- Heap* heap = isolate->heap();
v8::HandleScope scope(CcTest::isolate());
CompileRun(
"function loadIC(obj) {"
@@ -5437,7 +5440,7 @@ TEST(PolymorphicStaysPolymorphicAfterGC) {
v8::HandleScope scope(CcTest::isolate());
CompileRun("(testIC())");
}
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CheckIC(loadIC, Code::LOAD_IC, 0, POLYMORPHIC);
{
v8::HandleScope scope(CcTest::isolate());
@@ -5450,7 +5453,6 @@ TEST(PolymorphicStaysPolymorphicAfterGC) {
TEST(WeakCell) {
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
- v8::internal::Heap* heap = CcTest::heap();
v8::internal::Factory* factory = isolate->factory();
HandleScope outer_scope(isolate);
@@ -5469,13 +5471,13 @@ TEST(WeakCell) {
}
CHECK(weak_cell1->value()->IsFixedArray());
CHECK_EQ(*survivor, weak_cell2->value());
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
CHECK(weak_cell1->value()->IsFixedArray());
CHECK_EQ(*survivor, weak_cell2->value());
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
CHECK(weak_cell1->value()->IsFixedArray());
CHECK_EQ(*survivor, weak_cell2->value());
- heap->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
CHECK(weak_cell1->cleared());
CHECK_EQ(*survivor, weak_cell2->value());
}
@@ -5500,18 +5502,19 @@ TEST(WeakCellsWithIncrementalMarking) {
CHECK(weak_cell->value()->IsFixedArray());
IncrementalMarking* marking = heap->incremental_marking();
if (marking->IsStopped()) {
- heap->StartIncrementalMarking();
+ heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
+ i::GarbageCollectionReason::kTesting);
}
marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
IncrementalMarking::FORCE_COMPLETION);
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
CHECK(weak_cell->value()->IsFixedArray());
weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
}
// Call collect all twice to make sure that we also cleared
// weak cells that were allocated on black pages.
- heap->CollectAllGarbage();
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(*survivor, weak_cells[0]->value());
for (int i = 1; i < N; i++) {
CHECK(weak_cells[i]->cleared());
@@ -5558,7 +5561,7 @@ TEST(AddInstructionChangesNewSpacePromotion) {
heap->DisableInlineAllocation();
heap->set_allocation_timeout(1);
g->Call(env.local(), global, 1, args1).ToLocalChecked();
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}
@@ -5619,7 +5622,7 @@ UNINITIALIZED_TEST(Regress538257) {
->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
}
heap::SimulateFullSpace(old_space);
- heap->CollectGarbage(OLD_SPACE);
+ heap->CollectGarbage(OLD_SPACE, i::GarbageCollectionReason::kTesting);
// If we get this far, we've successfully aborted compaction. Any further
// allocations might trigger OOM.
}
@@ -5715,8 +5718,8 @@ UNINITIALIZED_TEST(PromotionQueue) {
CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());
// Call the scavenger two times to get an empty new space
- heap->CollectGarbage(NEW_SPACE);
- heap->CollectGarbage(NEW_SPACE);
+ heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
+ heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
// First create a few objects which will survive a scavenge, and will get
// promoted to the old generation later on. These objects will create
@@ -5727,7 +5730,7 @@ UNINITIALIZED_TEST(PromotionQueue) {
handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
}
- heap->CollectGarbage(NEW_SPACE);
+ heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());
// Fill-up the first semi-space page.
@@ -5744,7 +5747,7 @@ UNINITIALIZED_TEST(PromotionQueue) {
// This scavenge will corrupt memory if the promotion queue is not
// evacuated.
- heap->CollectGarbage(NEW_SPACE);
+ heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
}
isolate->Dispose();
}
@@ -5787,7 +5790,8 @@ TEST(Regress388880) {
// that would cause crash.
IncrementalMarking* marking = CcTest::heap()->incremental_marking();
marking->Stop();
- CcTest::heap()->StartIncrementalMarking();
+ CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
+ i::GarbageCollectionReason::kTesting);
CHECK(marking->IsMarking());
// Now everything is set up for crashing in JSObject::MigrateFastToFast()
@@ -5813,7 +5817,8 @@ TEST(Regress3631) {
"}"
"weak_map");
if (marking->IsStopped()) {
- CcTest::heap()->StartIncrementalMarking();
+ CcTest::heap()->StartIncrementalMarking(
+ i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
}
// Incrementally mark the backing store.
Handle<JSReceiver> obj =
@@ -5833,14 +5838,13 @@ TEST(Regress3631) {
" weak_map.set(future_keys[i], i);"
"}");
heap->incremental_marking()->set_should_hurry(true);
- heap->CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
}
TEST(Regress442710) {
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
- Heap* heap = isolate->heap();
Factory* factory = isolate->factory();
HandleScope sc(isolate);
@@ -5851,7 +5855,7 @@ TEST(Regress442710) {
Handle<String> name = factory->InternalizeUtf8String("testArray");
JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
- heap->CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
}
@@ -5869,7 +5873,6 @@ HEAP_TEST(NumberStringCacheSize) {
TEST(Regress3877) {
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
- Heap* heap = isolate->heap();
Factory* factory = isolate->factory();
HandleScope scope(isolate);
CompileRun("function cls() { this.x = 10; }");
@@ -5887,14 +5890,14 @@ TEST(Regress3877) {
"a.x = new cls();"
"cls.prototype = null;");
for (int i = 0; i < 4; i++) {
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}
// The map of a.x keeps prototype alive
CHECK(!weak_prototype->cleared());
// Change the map of a.x and make the previous map garbage collectable.
CompileRun("a.x.__proto__ = {};");
for (int i = 0; i < 4; i++) {
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}
CHECK(weak_prototype->cleared());
}
@@ -5921,11 +5924,11 @@ void CheckMapRetainingFor(int n) {
CHECK(!weak_cell->cleared());
for (int i = 0; i < n; i++) {
heap::SimulateIncrementalMarking(heap);
- heap->CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
}
CHECK(!weak_cell->cleared());
heap::SimulateIncrementalMarking(heap);
- heap->CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
CHECK(weak_cell->cleared());
}
@@ -5950,14 +5953,14 @@ TEST(RegressArrayListGC) {
Heap* heap = isolate->heap();
AddRetainedMap(isolate, heap);
Handle<Map> map = Map::Create(isolate, 1);
- heap->CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
// Force GC in old space on next addition of retained map.
Map::WeakCellForMap(map);
heap::SimulateFullSpace(CcTest::heap()->new_space());
for (int i = 0; i < 10; i++) {
heap->AddRetainedMap(map);
}
- heap->CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
}
@@ -6008,7 +6011,7 @@ static void TestRightTrimFixedTypedArray(i::ExternalArrayType type,
Address next_obj_address = array->address() + array->size();
CHECK(HeapObject::FromAddress(next_obj_address)->IsFiller());
}
- heap->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
}
@@ -6065,7 +6068,7 @@ TEST(PreprocessStackTrace) {
Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
CHECK(code->IsCode());
- isolate->heap()->CollectAllAvailableGarbage("stack trace preprocessing");
+ CcTest::CollectAllAvailableGarbage();
Handle<Object> pos =
Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
@@ -6118,7 +6121,7 @@ TEST(BootstrappingExports) {
utils.SetWeak(&utils, UtilsHasBeenCollected,
v8::WeakCallbackType::kParameter);
- CcTest::heap()->CollectAllAvailableGarbage("fire weak callbacks");
+ CcTest::CollectAllAvailableGarbage();
CHECK(utils_has_been_collected);
}
@@ -6176,12 +6179,12 @@ TEST(NewSpaceAllocationCounter) {
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
size_t counter1 = heap->NewSpaceAllocationCounter();
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
const size_t kSize = 1024;
AllocateInSpace(isolate, kSize, NEW_SPACE);
size_t counter2 = heap->NewSpaceAllocationCounter();
CHECK_EQ(kSize, counter2 - counter1);
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
size_t counter3 = heap->NewSpaceAllocationCounter();
CHECK_EQ(0U, counter3 - counter2);
// Test counter overflow.
@@ -6203,18 +6206,18 @@ TEST(OldSpaceAllocationCounter) {
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
size_t counter1 = heap->OldGenerationAllocationCounter();
- heap->CollectGarbage(NEW_SPACE);
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
const size_t kSize = 1024;
AllocateInSpace(isolate, kSize, OLD_SPACE);
size_t counter2 = heap->OldGenerationAllocationCounter();
// TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
CHECK_LE(kSize, counter2 - counter1);
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
size_t counter3 = heap->OldGenerationAllocationCounter();
CHECK_EQ(0u, counter3 - counter2);
AllocateInSpace(isolate, kSize, OLD_SPACE);
- heap->CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
size_t counter4 = heap->OldGenerationAllocationCounter();
CHECK_LE(kSize, counter4 - counter3);
// Test counter overflow.
@@ -6289,7 +6292,7 @@ static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
fun->ReplaceCode(*isolate->builtins()->CompileLazy());
fun->shared()->ReplaceCode(*isolate->builtins()->CompileLazy());
fun->shared()->ClearBytecodeArray();  // Bytecode is code too.
- isolate->heap()->CollectAllAvailableGarbage("remove code and gc");
+ CcTest::CollectAllAvailableGarbage();
}
@@ -6376,7 +6379,7 @@ TEST(ScriptIterator) {
Heap* heap = CcTest::heap();
LocalContext context;
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
int script_count = 0;
{
@@ -6402,8 +6405,8 @@ TEST(SharedFunctionInfoIterator) {
Heap* heap = CcTest::heap();
LocalContext context;
- heap->CollectAllGarbage();
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
int sfi_count = 0;
{
@@ -6443,7 +6446,7 @@ TEST(Regress519319) {
child.Reset(isolate, v8::Object::New(isolate));
heap::SimulateFullSpace(heap->old_space());
- heap->CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
{
UniqueId id = MakeUniqueId(parent);
isolate->SetObjectGroupId(parent, id);
@@ -6452,8 +6455,10 @@ TEST(Regress519319) {
// The CollectGarbage call above starts sweeper threads.
// The crash will happen if the following two functions
// are called before sweeping finishes.
- heap->StartIncrementalMarking();
- heap->FinalizeIncrementalMarkingIfComplete("test");
+ heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
+ i::GarbageCollectionReason::kTesting);
+ heap->FinalizeIncrementalMarkingIfComplete(
+ i::GarbageCollectionReason::kTesting);
}
@@ -6500,7 +6505,7 @@ HEAP_TEST(Regress587004) {
for (int i = 0; i < N; i++) {
array->set(i, *number);
}
- heap->CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
heap::SimulateFullSpace(heap->old_space());
heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array, N - 1);
heap->mark_compact_collector()->EnsureSweepingCompleted();
@@ -6516,7 +6521,7 @@ HEAP_TEST(Regress587004) {
}
// Re-enable old space expansion to avoid OOM crash.
heap->set_force_oom(false);
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
}
HEAP_TEST(Regress589413) {
@@ -6528,8 +6533,8 @@ HEAP_TEST(Regress589413) {
v8::HandleScope scope(CcTest::isolate());
Heap* heap = CcTest::heap();
// Get the heap in clean state.
- heap->CollectGarbage(OLD_SPACE);
- heap->CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
// Fill the new space with byte arrays with elements looking like pointers.
@@ -6543,7 +6548,7 @@ HEAP_TEST(Regress589413) {
handle(byte_array);
}
// Make sure the byte arrays will be promoted on the next GC.
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
// This number is close to large free list category threshold.
const int N = 0x3eee;
{
@@ -6591,7 +6596,7 @@ HEAP_TEST(Regress589413) {
}
// Force allocation from the free list.
heap->set_force_oom(true);
- heap->CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
}
TEST(Regress598319) {
@@ -6632,7 +6637,7 @@ TEST(Regress598319) {
CHECK_NOT_NULL(page);
// GC to cleanup state
- heap->CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
MarkCompactCollector* collector = heap->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
@@ -6649,7 +6654,8 @@ TEST(Regress598319) {
IncrementalMarking* marking = heap->incremental_marking();
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
- heap->StartIncrementalMarking();
+ heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
+ i::GarbageCollectionReason::kTesting);
}
CHECK(marking->IsMarking());
@@ -6715,7 +6721,7 @@ TEST(Regress615489) {
v8::HandleScope scope(CcTest::isolate());
Heap* heap = CcTest::heap();
Isolate* isolate = heap->isolate();
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
@@ -6724,7 +6730,8 @@ TEST(Regress615489) {
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
- heap->StartIncrementalMarking();
+ heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
+ i::GarbageCollectionReason::kTesting);
}
CHECK(marking->IsMarking());
marking->StartBlackAllocationForTesting();
@@ -6742,7 +6749,7 @@ TEST(Regress615489) {
}
CHECK(marking->IsComplete());
intptr_t size_before = heap->SizeOfObjects();
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
intptr_t size_after = heap->SizeOfObjects();
// Live size does not increase after garbage collection.
CHECK_LE(size_after, size_before);
@@ -6770,8 +6777,8 @@ TEST(Regress631969) {
v8::HandleScope scope(CcTest::isolate());
Heap* heap = CcTest::heap();
// Get the heap in clean state.
- heap->CollectGarbage(OLD_SPACE);
- heap->CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
// Allocate two strings in a fresh page and mark the page as evacuation
@@ -6788,8 +6795,8 @@ TEST(Regress631969) {
heap::SimulateFullSpace(heap->old_space());
Handle<String> s3;
factory->NewConsString(s1, s2).ToHandle(&s3);
- heap->CollectGarbage(NEW_SPACE);
- heap->CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
// Finish incremental marking.
IncrementalMarking* marking = heap->incremental_marking();
@@ -6804,7 +6811,7 @@ TEST(Regress631969) {
{
StaticOneByteResource external_string("12345678901234");
s3->MakeExternal(&external_string);
- heap->CollectGarbage(OLD_SPACE);
+ CcTest::CollectGarbage(OLD_SPACE);
}
}
@@ -6814,7 +6821,7 @@ TEST(LeftTrimFixedArrayInBlackArea) {
v8::HandleScope scope(CcTest::isolate());
Heap* heap = CcTest::heap();
Isolate* isolate = heap->isolate();
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
@@ -6823,7 +6830,8 @@ TEST(LeftTrimFixedArrayInBlackArea) {
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
- heap->StartIncrementalMarking();
+ heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
+ i::GarbageCollectionReason::kTesting);
}
CHECK(marking->IsMarking());
marking->StartBlackAllocationForTesting();
@@ -6851,7 +6859,7 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
v8::HandleScope scope(CcTest::isolate());
Heap* heap = CcTest::heap();
Isolate* isolate = heap->isolate();
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
@@ -6860,7 +6868,8 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
- heap->StartIncrementalMarking();
+ heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
+ i::GarbageCollectionReason::kTesting);
}
CHECK(marking->IsMarking());
marking->StartBlackAllocationForTesting();
@@ -6915,7 +6924,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
v8::HandleScope scope(CcTest::isolate());
Heap* heap = CcTest::heap();
Isolate* isolate = heap->isolate();
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
@@ -6924,7 +6933,8 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
- heap->StartIncrementalMarking();
+ heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
+ i::GarbageCollectionReason::kTesting);
}
CHECK(marking->IsMarking());
marking->StartBlackAllocationForTesting();
@@ -6973,7 +6983,7 @@ TEST(SlotFilteringAfterBlackAreas) {
Heap* heap = CcTest::heap();
Isolate* isolate = heap->isolate();
MarkCompactCollector* mark_compact_collector = heap->mark_compact_collector();
- heap->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
@@ -6982,7 +6992,8 @@ TEST(SlotFilteringAfterBlackAreas) {
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
- heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
+ i::GarbageCollectionReason::kTesting);
}
CHECK(marking->IsMarking());
marking->StartBlackAllocationForTesting();
@@ -7042,7 +7053,7 @@ TEST(UncommitUnusedLargeObjectMemory) {
array->Shrink(1);
CHECK(array->Size() < size_before);
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK(chunk->CommittedPhysicalMemory() < committed_memory_before);
size_t shrinked_size =
RoundUp((array->address() - chunk->address()) + array->Size(),
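Note on the pattern above: the patch mechanically replaces direct `heap->CollectGarbage(...)`, `heap->CollectAllGarbage(...)`, and `heap->CollectAllAvailableGarbage(...)` calls with static `CcTest::` helpers, and the direct `Heap` calls that remain (the `UNINITIALIZED_TEST`s running their own isolate) now pass an explicit `i::GarbageCollectionReason::kTesting`. The diff assumes forwarding wrappers on `CcTest` roughly like the sketch below (declared in test/cctest/cctest.h; this is an assumed shape for illustration, not the verbatim declarations):

// Sketch of the GC helpers this diff calls into. Each wrapper forwards to
// the default test isolate's heap and pins the reason argument to
// GarbageCollectionReason::kTesting, so individual tests no longer have to
// spell out the reason (or a human-readable string) at every call site.
class CcTest {
 public:
  static i::Heap* heap();  // heap of the default test isolate

  // Scavenge (NEW_SPACE) or mark-compact (OLD_SPACE) over a single space.
  static void CollectGarbage(i::AllocationSpace space) {
    heap()->CollectGarbage(space, i::GarbageCollectionReason::kTesting);
  }

  // Full GC; callers now pass flags such as
  // i::Heap::kFinalizeIncrementalMarkingMask explicitly instead of relying
  // on a default argument.
  static void CollectAllGarbage(int flags) {
    heap()->CollectAllGarbage(flags, i::GarbageCollectionReason::kTesting);
  }

  // Last-resort GC that keeps collecting until no further memory is freed.
  static void CollectAllAvailableGarbage() {
    heap()->CollectAllAvailableGarbage(i::GarbageCollectionReason::kTesting);
  }
};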