Chromium Code Reviews
| Index: src/heap/mark-compact.cc |
| diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc |
| index fcc99c3b6bee028963e3692d9a4304073d7a9aa2..5d561b20803ad2fbe428ca0eefb8af265d89d5c9 100644 |
| --- a/src/heap/mark-compact.cc |
| +++ b/src/heap/mark-compact.cc |
| @@ -311,13 +311,13 @@ bool MarkCompactCollector::StartCompaction(CompactionMode mode) { |
| void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() { |
| { |
| GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_STORE_BUFFER_CLEAR); |
| + GCTracer::Scope::MC_CLEAR_STORE_BUFFER); |
| heap_->store_buffer()->ClearInvalidStoreBufferEntries(); |
| } |
| { |
| GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_SLOTS_BUFFER_CLEAR); |
| + GCTracer::Scope::MC_CLEAR_SLOTS_BUFFER); |
| int number_of_pages = evacuation_candidates_.length(); |
| for (int i = 0; i < number_of_pages; i++) { |
| Page* p = evacuation_candidates_[i]; |
| @@ -364,24 +364,18 @@ void MarkCompactCollector::CollectGarbage() { |
| ClearNonLiveReferences(); |
| - ClearWeakCollections(); |
| - |
| #ifdef VERIFY_HEAP |
| if (FLAG_verify_heap) { |
| VerifyMarking(heap_); |
| } |
| #endif |
| - ClearInvalidStoreAndSlotsBufferEntries(); |
| - |
| -#ifdef VERIFY_HEAP |
| - if (FLAG_verify_heap) { |
| - VerifyValidStoreAndSlotsBufferEntries(); |
| - } |
| -#endif |
| + MoveEvacuationCandidatesToEndOfPagesList(); |
| SweepSpaces(); |
| + EvacuateNewSpaceAndCandidates(); |
| + |
| Finish(); |
| if (marking_parity_ == EVEN_MARKING_PARITY) { |
| @@ -867,6 +861,7 @@ void MarkCompactCollector::Prepare() { |
| void MarkCompactCollector::Finish() { |
| + GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_FINISH); |
| #ifdef DEBUG |
| DCHECK(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); |
| state_ = IDLE; |
| @@ -1969,12 +1964,8 @@ void MarkCompactCollector::MarkLiveObjects() { |
| RootMarkingVisitor root_visitor(heap()); |
| { |
| - GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK_ROOT); |
| + GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK_ROOTS); |
| MarkRoots(&root_visitor); |
| - } |
| - |
| - { |
| - GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK_TOPOPT); |
| ProcessTopOptimizedFrame(&root_visitor); |
| } |
| @@ -2008,19 +1999,25 @@ void MarkCompactCollector::MarkLiveObjects() { |
| ProcessEphemeralMarking(&root_visitor, true); |
| } |
| - AfterMarking(); |
| - |
| if (FLAG_print_cumulative_gc_stat) { |
| heap_->tracer()->AddMarkingTime(heap_->MonotonicallyIncreasingTimeInMs() - |
| start_time); |
| } |
| + if (FLAG_track_gc_object_stats) { |
| + if (FLAG_trace_gc_object_stats) { |
| + heap()->object_stats_->TraceObjectStats(); |
| + } |
| + heap()->object_stats_->CheckpointObjectStats(); |
| + } |
| } |
| -void MarkCompactCollector::AfterMarking() { |
| +void MarkCompactCollector::ClearNonLiveReferences() { |
| + GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR); |
| + |
| { |
| GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_MARK_STRING_TABLE); |
| + GCTracer::Scope::MC_CLEAR_STRING_TABLE); |
| // Prune the string table removing all strings only pointed to by the |
| // string table. Cannot use string_table() here because the string |
| @@ -2037,8 +2034,7 @@ void MarkCompactCollector::AfterMarking() { |
| { |
| GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_MARK_WEAK_REFERENCES); |
| - |
| + GCTracer::Scope::MC_CLEAR_WEAK_LISTS); |
| // Process the weak references. |
| MarkCompactWeakObjectRetainer mark_compact_object_retainer; |
| heap()->ProcessAllWeakReferences(&mark_compact_object_retainer); |
| @@ -2046,7 +2042,7 @@ void MarkCompactCollector::AfterMarking() { |
| { |
| GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_MARK_GLOBAL_HANDLES); |
| + GCTracer::Scope::MC_CLEAR_GLOBAL_HANDLES); |
| // Remove object groups after marking phase. |
| heap()->isolate()->global_handles()->RemoveObjectGroups(); |
| @@ -2056,33 +2052,31 @@ void MarkCompactCollector::AfterMarking() { |
| // Flush code from collected candidates. |
| if (is_code_flushing_enabled()) { |
| GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_MARK_CODE_FLUSH); |
| + GCTracer::Scope::MC_CLEAR_CODE_FLUSH); |
| code_flusher_->ProcessCandidates(); |
| } |
| - if (FLAG_track_gc_object_stats) { |
| - if (FLAG_trace_gc_object_stats) { |
| - heap()->object_stats_->TraceObjectStats(); |
| - } |
| - heap()->object_stats_->CheckpointObjectStats(); |
| - } |
| -} |
| - |
| - |
| -void MarkCompactCollector::ClearNonLiveReferences() { |
| - GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR); |
| DependentCode* dependent_code_list; |
| Object* non_live_map_list; |
| ClearWeakCells(&non_live_map_list, &dependent_code_list); |
| { |
| - GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAP); |
| + GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAPS); |
| ClearSimpleMapTransitions(non_live_map_list); |
| ClearFullMapTransitions(); |
| } |
| MarkDependentCodeForDeoptimization(dependent_code_list); |
| + |
| + ClearWeakCollections(); |
| + |
| + ClearInvalidStoreAndSlotsBufferEntries(); |
| +#ifdef VERIFY_HEAP |
| + if (FLAG_verify_heap) { |
| + VerifyValidStoreAndSlotsBufferEntries(); |
| + } |
| +#endif |
| } |
| @@ -2090,7 +2084,6 @@ void MarkCompactCollector::MarkDependentCodeForDeoptimization( |
| DependentCode* list_head) { |
| GCTracer::Scope gc_scope(heap()->tracer(), |
| GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE); |
| - |
| Isolate* isolate = this->isolate(); |
| DependentCode* current = list_head; |
| while (current->length() > 0) { |
| @@ -2286,8 +2279,6 @@ void MarkCompactCollector::TrimEnumCache(Map* map, |
| void MarkCompactCollector::ProcessWeakCollections() { |
| - GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS); |
| Object* weak_collection_obj = heap()->encountered_weak_collections(); |
| while (weak_collection_obj != Smi::FromInt(0)) { |
| JSWeakCollection* weak_collection = |
| @@ -2314,7 +2305,7 @@ void MarkCompactCollector::ProcessWeakCollections() { |
| void MarkCompactCollector::ClearWeakCollections() { |
| GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_WEAKCOLLECTION_CLEAR); |
| + GCTracer::Scope::MC_CLEAR_WEAK_COLLECTIONS); |
| Object* weak_collection_obj = heap()->encountered_weak_collections(); |
| while (weak_collection_obj != Smi::FromInt(0)) { |
| JSWeakCollection* weak_collection = |
| @@ -2337,8 +2328,6 @@ void MarkCompactCollector::ClearWeakCollections() { |
| void MarkCompactCollector::AbortWeakCollections() { |
| - GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_WEAKCOLLECTION_ABORT); |
| Object* weak_collection_obj = heap()->encountered_weak_collections(); |
| while (weak_collection_obj != Smi::FromInt(0)) { |
| JSWeakCollection* weak_collection = |
| @@ -2353,7 +2342,8 @@ void MarkCompactCollector::AbortWeakCollections() { |
| void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list, |
| DependentCode** dependent_code_list) { |
| Heap* heap = this->heap(); |
| - GCTracer::Scope gc_scope(heap->tracer(), GCTracer::Scope::MC_CLEAR_WEAKCELL); |
| + GCTracer::Scope gc_scope(heap->tracer(), |
| + GCTracer::Scope::MC_CLEAR_WEAK_CELLS); |
| Object* weak_cell_obj = heap->encountered_weak_cells(); |
| Object* the_hole_value = heap->the_hole_value(); |
| DependentCode* dependent_code_head = |
| @@ -3449,25 +3439,65 @@ void MarkCompactCollector::SweepAbortedPages() { |
| void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { |
| + GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_EVACUATE); |
| Heap::RelocationLock relocation_lock(heap()); |
| { |
| GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_SWEEP_NEWSPACE); |
| + GCTracer::Scope::MC_EVACUATE_NEW_SPACE); |
| EvacuationScope evacuation_scope(this); |
| EvacuateNewSpace(); |
| } |
| { |
| GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_EVACUATE_PAGES); |
| + GCTracer::Scope::MC_EVACUATE_CANDIDATES); |
| EvacuationScope evacuation_scope(this); |
| EvacuatePagesInParallel(); |
| } |
| + UpdatePointersAfterEvacuation(); |
| + |
| { |
| GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED); |
| + GCTracer::Scope::MC_EVACUATE_CLEAN_UP); |
|
Michael Lippautz
2015/12/10 16:25:53
Not all items here are related to evacuation candidates.
ulan
2015/12/10 18:51:06
Done. Moved those to Finish phase.
|
| + // After updating all pointers, we can finally sweep the aborted pages, |
| + // effectively overriding any forward pointers. |
| + SweepAbortedPages(); |
| + |
| + heap_->isolate()->inner_pointer_to_code_cache()->Flush(); |
| + |
| + // The hashing of weak_object_to_code_table is no longer valid. |
| + heap()->weak_object_to_code_table()->Rehash( |
| + heap()->isolate()->factory()->undefined_value()); |
| + |
| + // EvacuateNewSpaceAndCandidates iterates over new space objects and for |
| + // ArrayBuffers either re-registers them as live or promotes them. This is |
| + // needed to properly free them. |
| + heap()->array_buffer_tracker()->FreeDead(false); |
| + |
| + // Clear the marking state of live large objects. |
| + heap_->lo_space()->ClearMarkingStateOfLiveObjects(); |
| + |
| + // Deallocate evacuated candidate pages. |
| + ReleaseEvacuationCandidates(); |
| + } |
| + |
| +#ifdef VERIFY_HEAP |
| + if (FLAG_verify_heap && !sweeping_in_progress_) { |
| + VerifyEvacuation(heap()); |
| + } |
| +#endif |
| +} |
| + |
| + |
| +void MarkCompactCollector::UpdatePointersAfterEvacuation() { |
| + GCTracer::Scope gc_scope(heap()->tracer(), |
| + GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS); |
| + { |
| + GCTracer::Scope gc_scope( |
| + heap()->tracer(), |
| + GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED); |
| UpdateSlotsRecordedIn(migration_slots_buffer_); |
| if (FLAG_trace_fragmentation_verbose) { |
| PrintF(" migration slots buffer: %d\n", |
| @@ -3491,8 +3521,8 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { |
| PointersUpdatingVisitor updating_visitor(heap()); |
| { |
| - GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS); |
| + GCTracer::Scope gc_scope( |
| + heap()->tracer(), GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW); |
| // Update pointers in to space. |
| SemiSpaceIterator to_it(heap()->new_space()); |
| for (HeapObject* object = to_it.Next(); object != NULL; |
| @@ -3501,18 +3531,9 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { |
| object->IterateBody(map->instance_type(), object->SizeFromMap(map), |
| &updating_visitor); |
| } |
| - } |
| - |
| - { |
| - GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS); |
| // Update roots. |
| heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE); |
| - } |
| - { |
| - GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS); |
| StoreBufferRebuildScope scope(heap_, heap_->store_buffer(), |
| &Heap::ScavengeStoreBufferCallback); |
| heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer); |
| @@ -3522,7 +3543,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { |
| { |
| GCTracer::Scope gc_scope( |
| heap()->tracer(), |
| - GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED); |
| + GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_BETWEEN_EVACUATED); |
| for (int i = 0; i < npages; i++) { |
| Page* p = evacuation_candidates_[i]; |
| DCHECK(p->IsEvacuationCandidate() || |
| @@ -3584,7 +3605,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { |
| { |
| GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_UPDATE_MISC_POINTERS); |
| + GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK); |
| heap_->string_table()->Iterate(&updating_visitor); |
| // Update pointers from external string table. |
| @@ -3594,20 +3615,6 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { |
| EvacuationWeakObjectRetainer evacuation_object_retainer; |
| heap()->ProcessAllWeakReferences(&evacuation_object_retainer); |
| } |
| - |
| - { |
| - GCTracer::Scope gc_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_SWEEP_ABORTED); |
| - // After updating all pointers, we can finally sweep the aborted pages, |
| - // effectively overriding any forward pointers. |
| - SweepAbortedPages(); |
| - } |
| - |
| - heap_->isolate()->inner_pointer_to_code_cache()->Flush(); |
| - |
| - // The hashing of weak_object_to_code_table is no longer valid. |
| - heap()->weak_object_to_code_table()->Rehash( |
| - heap()->isolate()->factory()->undefined_value()); |
| } |
| @@ -3829,12 +3836,10 @@ void MarkCompactCollector::SweepSpaces() { |
| state_ = SWEEP_SPACES; |
| #endif |
| - MoveEvacuationCandidatesToEndOfPagesList(); |
| - |
| { |
| { |
| GCTracer::Scope sweep_scope(heap()->tracer(), |
| - GCTracer::Scope::MC_SWEEP_OLDSPACE); |
| + GCTracer::Scope::MC_SWEEP_OLD); |
| SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING); |
| } |
| { |
| @@ -3860,29 +3865,10 @@ void MarkCompactCollector::SweepSpaces() { |
| // buffer entries are already filter out. We can just release the memory. |
| heap()->FreeQueuedChunks(); |
| - EvacuateNewSpaceAndCandidates(); |
| - |
| - // EvacuateNewSpaceAndCandidates iterates over new space objects and for |
| - // ArrayBuffers either re-registers them as live or promotes them. This is |
| - // needed to properly free them. |
| - heap()->array_buffer_tracker()->FreeDead(false); |
| - |
| - // Clear the marking state of live large objects. |
| - heap_->lo_space()->ClearMarkingStateOfLiveObjects(); |
| - |
| - // Deallocate evacuated candidate pages. |
| - ReleaseEvacuationCandidates(); |
| - |
| if (FLAG_print_cumulative_gc_stat) { |
| heap_->tracer()->AddSweepingTime(heap_->MonotonicallyIncreasingTimeInMs() - |
| start_time); |
| } |
| - |
| -#ifdef VERIFY_HEAP |
| - if (FLAG_verify_heap && !sweeping_in_progress_) { |
| - VerifyEvacuation(heap()); |
| - } |
| -#endif |
| } |