Chromium Code Reviews

Side by Side Diff: src/heap/mark-compact.cc

Issue 1515503006: Clean up mark-compact phases and GC counter names. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix regexes Created 5 years ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/heap/mark-compact.h" 5 #include "src/heap/mark-compact.h"
6 6
7 #include "src/base/atomicops.h" 7 #include "src/base/atomicops.h"
8 #include "src/base/bits.h" 8 #include "src/base/bits.h"
9 #include "src/base/sys-info.h" 9 #include "src/base/sys-info.h"
10 #include "src/code-stubs.h" 10 #include "src/code-stubs.h"
(...skipping 293 matching lines...)
304 compacting_ = evacuation_candidates_.length() > 0; 304 compacting_ = evacuation_candidates_.length() > 0;
305 } 305 }
306 306
307 return compacting_; 307 return compacting_;
308 } 308 }
309 309
310 310
311 void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() { 311 void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() {
312 { 312 {
313 GCTracer::Scope gc_scope(heap()->tracer(), 313 GCTracer::Scope gc_scope(heap()->tracer(),
314 GCTracer::Scope::MC_STORE_BUFFER_CLEAR); 314 GCTracer::Scope::MC_CLEAR_STORE_BUFFER);
315 heap_->store_buffer()->ClearInvalidStoreBufferEntries(); 315 heap_->store_buffer()->ClearInvalidStoreBufferEntries();
316 } 316 }
317 317
318 { 318 {
319 GCTracer::Scope gc_scope(heap()->tracer(), 319 GCTracer::Scope gc_scope(heap()->tracer(),
320 GCTracer::Scope::MC_SLOTS_BUFFER_CLEAR); 320 GCTracer::Scope::MC_CLEAR_SLOTS_BUFFER);
321 int number_of_pages = evacuation_candidates_.length(); 321 int number_of_pages = evacuation_candidates_.length();
322 for (int i = 0; i < number_of_pages; i++) { 322 for (int i = 0; i < number_of_pages; i++) {
323 Page* p = evacuation_candidates_[i]; 323 Page* p = evacuation_candidates_[i];
324 SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer()); 324 SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer());
325 } 325 }
326 } 326 }
327 } 327 }
328 328
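
GCTracer::Scope, used throughout these chunks, is a plain RAII timer: constructing one starts a clock against a named counter, and the destructor charges the elapsed time when the block closes, which is why each sub-phase above gets its own brace block. A minimal sketch of the idea; Tracer, ScopeId, and AddScopeTime are hypothetical stand-ins for illustration, not V8's real API:

```cpp
#include <chrono>
#include <cstdio>

// Hypothetical stand-ins for V8's GCTracer and its scope ids.
enum class ScopeId { MC_CLEAR_STORE_BUFFER, MC_CLEAR_SLOTS_BUFFER };

class Tracer {
 public:
  // Charge |ms| to the counter for |id|; a real tracer would aggregate.
  void AddScopeTime(ScopeId id, double ms) {
    std::printf("scope %d: %.3f ms\n", static_cast<int>(id), ms);
  }

  // RAII helper: times the enclosing block, reports on destruction.
  class Scope {
   public:
    Scope(Tracer* tracer, ScopeId id)
        : tracer_(tracer), id_(id), start_(std::chrono::steady_clock::now()) {}
    ~Scope() {
      auto elapsed = std::chrono::steady_clock::now() - start_;
      tracer_->AddScopeTime(
          id_, std::chrono::duration<double, std::milli>(elapsed).count());
    }

   private:
    Tracer* tracer_;
    ScopeId id_;
    std::chrono::steady_clock::time_point start_;
  };
};

void ClearStoreBuffer(Tracer* tracer) {
  Tracer::Scope scope(tracer, ScopeId::MC_CLEAR_STORE_BUFFER);
  // ... the work being timed ...
}
```

The extra brace blocks in ClearInvalidStoreAndSlotsBufferEntries exist purely to bound these scopes' lifetimes so each buffer gets its own counter.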
329 329
330 #ifdef VERIFY_HEAP 330 #ifdef VERIFY_HEAP
(...skipping 26 matching lines...)
357 // Make sure that Prepare() has been called. The individual steps below will 357 // Make sure that Prepare() has been called. The individual steps below will
358 // update the state as they proceed. 358 // update the state as they proceed.
359 DCHECK(state_ == PREPARE_GC); 359 DCHECK(state_ == PREPARE_GC);
360 360
361 MarkLiveObjects(); 361 MarkLiveObjects();
362 362
363 DCHECK(heap_->incremental_marking()->IsStopped()); 363 DCHECK(heap_->incremental_marking()->IsStopped());
364 364
365 ClearNonLiveReferences(); 365 ClearNonLiveReferences();
366 366
367 ClearWeakCollections();
368
369 #ifdef VERIFY_HEAP 367 #ifdef VERIFY_HEAP
Hannes Payer (out of office) 2015/12/11 09:27:31 It would be nice to move this one into MarkLiveObjects.
ulan 2015/12/11 09:35:32 It depends on ClearNonLiveReferences, which removes references to dead objects.
370 if (FLAG_verify_heap) { 368 if (FLAG_verify_heap) {
371 VerifyMarking(heap_); 369 VerifyMarking(heap_);
372 } 370 }
373 #endif 371 #endif
374 372
375 ClearInvalidStoreAndSlotsBufferEntries(); 373 MoveEvacuationCandidatesToEndOfPagesList();
376
377 #ifdef VERIFY_HEAP
378 if (FLAG_verify_heap) {
379 VerifyValidStoreAndSlotsBufferEntries();
380 }
381 #endif
382 374
383 SweepSpaces(); 375 SweepSpaces();
384 376
377 EvacuateNewSpaceAndCandidates();
378
385 Finish(); 379 Finish();
386 380
387 if (marking_parity_ == EVEN_MARKING_PARITY) { 381 if (marking_parity_ == EVEN_MARKING_PARITY) {
Hannes Payer (out of office) 2015/12/11 09:27:31 Move that into Finish().
ulan 2015/12/11 09:35:32 Done.
388 marking_parity_ = ODD_MARKING_PARITY; 382 marking_parity_ = ODD_MARKING_PARITY;
389 } else { 383 } else {
390 DCHECK(marking_parity_ == ODD_MARKING_PARITY); 384 DCHECK(marking_parity_ == ODD_MARKING_PARITY);
391 marking_parity_ = EVEN_MARKING_PARITY; 385 marking_parity_ = EVEN_MARKING_PARITY;
392 } 386 }
393 } 387 }
394 388
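
The net effect of this CL on CollectGarbage is a fixed phase order (mark, clear, evacuate, sweep, finish) with every tracer counter renamed to carry its phase as a prefix, which is what lets tools such as tools/eval_gc_time.sh group sub-phases under their parent. A sketch of the renamed scope identifiers visible in this diff; the real enum (presumably in gc-tracer.h) contains more entries:

```cpp
// GC tracer scope names after this patch, grouped by top-level phase.
// Only names that appear in this diff are listed.
enum Scope {
  // MarkLiveObjects()
  MC_MARK_PREPARE_CODE_FLUSH,
  MC_MARK_ROOTS,
  MC_MARK_WEAK_CLOSURE,
  // ClearNonLiveReferences()
  MC_CLEAR,
  MC_CLEAR_CODE_FLUSH,
  MC_CLEAR_DEPENDENT_CODE,
  MC_CLEAR_GLOBAL_HANDLES,
  MC_CLEAR_MAPS,
  MC_CLEAR_SLOTS_BUFFER,
  MC_CLEAR_STORE_BUFFER,
  MC_CLEAR_STRING_TABLE,
  MC_CLEAR_WEAK_CELLS,
  MC_CLEAR_WEAK_COLLECTIONS,
  MC_CLEAR_WEAK_LISTS,
  // EvacuateNewSpaceAndCandidates()
  MC_EVACUATE,
  MC_EVACUATE_CANDIDATES,
  MC_EVACUATE_CLEAN_UP,
  MC_EVACUATE_NEW_SPACE,
  MC_EVACUATE_UPDATE_POINTERS,
  MC_EVACUATE_UPDATE_POINTERS_BETWEEN_EVACUATED,
  MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED,
  MC_EVACUATE_UPDATE_POINTERS_TO_NEW,
  MC_EVACUATE_UPDATE_POINTERS_WEAK,
  // SweepSpaces()
  MC_SWEEP,
  MC_SWEEP_CODE,
  MC_SWEEP_MAP,
  MC_SWEEP_OLD,
  // Finish()
  MC_FINISH
};
```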
395 389
396 #ifdef VERIFY_HEAP 390 #ifdef VERIFY_HEAP
397 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { 391 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
(...skipping 462 matching lines...)
860 854
861 #ifdef VERIFY_HEAP 855 #ifdef VERIFY_HEAP
862 if (!was_marked_incrementally_ && FLAG_verify_heap) { 856 if (!was_marked_incrementally_ && FLAG_verify_heap) {
863 VerifyMarkbitsAreClean(); 857 VerifyMarkbitsAreClean();
864 } 858 }
865 #endif 859 #endif
866 } 860 }
867 861
868 862
869 void MarkCompactCollector::Finish() { 863 void MarkCompactCollector::Finish() {
864 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_FINISH);
865
866 // The hashing of weak_object_to_code_table is no longer valid.
867 heap()->weak_object_to_code_table()->Rehash(
868 heap()->isolate()->factory()->undefined_value());
869
870 // Clear the marking state of live large objects.
871 heap_->lo_space()->ClearMarkingStateOfLiveObjects();
872
870 #ifdef DEBUG 873 #ifdef DEBUG
871 DCHECK(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); 874 DCHECK(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
872 state_ = IDLE; 875 state_ = IDLE;
873 #endif 876 #endif
877 heap_->isolate()->inner_pointer_to_code_cache()->Flush();
878
874 // The stub cache is not traversed during GC; clear the cache to 879 // The stub cache is not traversed during GC; clear the cache to
875 // force lazy re-initialization of it. This must be done after the 880 // force lazy re-initialization of it. This must be done after the
876 // GC, because it relies on the new address of certain old space 881 // GC, because it relies on the new address of certain old space
877 // objects (empty string, illegal builtin). 882 // objects (empty string, illegal builtin).
878 isolate()->stub_cache()->Clear(); 883 isolate()->stub_cache()->Clear();
879 884
880 if (have_code_to_deoptimize_) { 885 if (have_code_to_deoptimize_) {
881 // Some code objects were marked for deoptimization during the GC. 886 // Some code objects were marked for deoptimization during the GC.
882 Deoptimizer::DeoptimizeMarkedCode(isolate()); 887 Deoptimizer::DeoptimizeMarkedCode(isolate());
883 have_code_to_deoptimize_ = false; 888 have_code_to_deoptimize_ = false;
(...skipping 1078 matching lines...)
1962 1967
1963 { 1968 {
1964 GCTracer::Scope gc_scope(heap()->tracer(), 1969 GCTracer::Scope gc_scope(heap()->tracer(),
1965 GCTracer::Scope::MC_MARK_PREPARE_CODE_FLUSH); 1970 GCTracer::Scope::MC_MARK_PREPARE_CODE_FLUSH);
1966 PrepareForCodeFlushing(); 1971 PrepareForCodeFlushing();
1967 } 1972 }
1968 1973
1969 RootMarkingVisitor root_visitor(heap()); 1974 RootMarkingVisitor root_visitor(heap());
1970 1975
1971 { 1976 {
1972 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK_ROOT); 1977 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK_ROOTS);
1973 MarkRoots(&root_visitor); 1978 MarkRoots(&root_visitor);
1974 }
1975
1976 {
1977 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK_TOPOPT);
1978 ProcessTopOptimizedFrame(&root_visitor); 1979 ProcessTopOptimizedFrame(&root_visitor);
1979 } 1980 }
1980 1981
1981 { 1982 {
1982 GCTracer::Scope gc_scope(heap()->tracer(), 1983 GCTracer::Scope gc_scope(heap()->tracer(),
1983 GCTracer::Scope::MC_MARK_WEAK_CLOSURE); 1984 GCTracer::Scope::MC_MARK_WEAK_CLOSURE);
1984 1985
1985 // The objects reachable from the roots are marked, yet unreachable 1986 // The objects reachable from the roots are marked, yet unreachable
1986 // objects are unmarked. Mark objects reachable due to host 1987 // objects are unmarked. Mark objects reachable due to host
1987 // application specific logic or through Harmony weak maps. 1988 // application specific logic or through Harmony weak maps.
(...skipping 13 matching lines...) Expand all
2001 ProcessMarkingDeque(); 2002 ProcessMarkingDeque();
2002 2003
2003 // Repeat Harmony weak maps marking to mark unmarked objects reachable from 2004 // Repeat Harmony weak maps marking to mark unmarked objects reachable from
2004 // the weak roots we just marked as pending destruction. 2005 // the weak roots we just marked as pending destruction.
2005 // 2006 //
2006 // We only process harmony collections, as all object groups have been fully 2007 // We only process harmony collections, as all object groups have been fully
2007 // processed and no weakly reachable node can discover new object groups. 2008 // processed and no weakly reachable node can discover new object groups.
2008 ProcessEphemeralMarking(&root_visitor, true); 2009 ProcessEphemeralMarking(&root_visitor, true);
2009 } 2010 }
2010 2011
2011 AfterMarking();
2012
2013 if (FLAG_print_cumulative_gc_stat) { 2012 if (FLAG_print_cumulative_gc_stat) {
2014 heap_->tracer()->AddMarkingTime(heap_->MonotonicallyIncreasingTimeInMs() - 2013 heap_->tracer()->AddMarkingTime(heap_->MonotonicallyIncreasingTimeInMs() -
2015 start_time); 2014 start_time);
2016 } 2015 }
2016 if (FLAG_track_gc_object_stats) {
2017 if (FLAG_trace_gc_object_stats) {
2018 heap()->object_stats_->TraceObjectStats();
2019 }
2020 heap()->object_stats_->CheckpointObjectStats();
2021 }
2017 } 2022 }
2018 2023
2019 2024
2020 void MarkCompactCollector::AfterMarking() { 2025 void MarkCompactCollector::ClearNonLiveReferences() {
2026 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR);
2027
2021 { 2028 {
2022 GCTracer::Scope gc_scope(heap()->tracer(), 2029 GCTracer::Scope gc_scope(heap()->tracer(),
2023 GCTracer::Scope::MC_MARK_STRING_TABLE); 2030 GCTracer::Scope::MC_CLEAR_STRING_TABLE);
2024 2031
2025 // Prune the string table removing all strings only pointed to by the 2032 // Prune the string table removing all strings only pointed to by the
2026 // string table. Cannot use string_table() here because the string 2033 // string table. Cannot use string_table() here because the string
2027 // table is marked. 2034 // table is marked.
2028 StringTable* string_table = heap()->string_table(); 2035 StringTable* string_table = heap()->string_table();
2029 InternalizedStringTableCleaner internalized_visitor(heap()); 2036 InternalizedStringTableCleaner internalized_visitor(heap());
2030 string_table->IterateElements(&internalized_visitor); 2037 string_table->IterateElements(&internalized_visitor);
2031 string_table->ElementsRemoved(internalized_visitor.PointersRemoved()); 2038 string_table->ElementsRemoved(internalized_visitor.PointersRemoved());
2032 2039
2033 ExternalStringTableCleaner external_visitor(heap()); 2040 ExternalStringTableCleaner external_visitor(heap());
2034 heap()->external_string_table_.Iterate(&external_visitor); 2041 heap()->external_string_table_.Iterate(&external_visitor);
2035 heap()->external_string_table_.CleanUp(); 2042 heap()->external_string_table_.CleanUp();
2036 } 2043 }
2037 2044
2038 { 2045 {
2039 GCTracer::Scope gc_scope(heap()->tracer(), 2046 GCTracer::Scope gc_scope(heap()->tracer(),
2040 GCTracer::Scope::MC_MARK_WEAK_REFERENCES); 2047 GCTracer::Scope::MC_CLEAR_WEAK_LISTS);
2041
2042 // Process the weak references. 2048 // Process the weak references.
2043 MarkCompactWeakObjectRetainer mark_compact_object_retainer; 2049 MarkCompactWeakObjectRetainer mark_compact_object_retainer;
2044 heap()->ProcessAllWeakReferences(&mark_compact_object_retainer); 2050 heap()->ProcessAllWeakReferences(&mark_compact_object_retainer);
2045 } 2051 }
2046 2052
2047 { 2053 {
2048 GCTracer::Scope gc_scope(heap()->tracer(), 2054 GCTracer::Scope gc_scope(heap()->tracer(),
2049 GCTracer::Scope::MC_MARK_GLOBAL_HANDLES); 2055 GCTracer::Scope::MC_CLEAR_GLOBAL_HANDLES);
2050 2056
2051 // Remove object groups after marking phase. 2057 // Remove object groups after marking phase.
2052 heap()->isolate()->global_handles()->RemoveObjectGroups(); 2058 heap()->isolate()->global_handles()->RemoveObjectGroups();
2053 heap()->isolate()->global_handles()->RemoveImplicitRefGroups(); 2059 heap()->isolate()->global_handles()->RemoveImplicitRefGroups();
2054 } 2060 }
2055 2061
2056 // Flush code from collected candidates. 2062 // Flush code from collected candidates.
2057 if (is_code_flushing_enabled()) { 2063 if (is_code_flushing_enabled()) {
2058 GCTracer::Scope gc_scope(heap()->tracer(), 2064 GCTracer::Scope gc_scope(heap()->tracer(),
2059 GCTracer::Scope::MC_MARK_CODE_FLUSH); 2065 GCTracer::Scope::MC_CLEAR_CODE_FLUSH);
2060 code_flusher_->ProcessCandidates(); 2066 code_flusher_->ProcessCandidates();
2061 } 2067 }
2062 2068
2063 if (FLAG_track_gc_object_stats) {
2064 if (FLAG_trace_gc_object_stats) {
2065 heap()->object_stats_->TraceObjectStats();
2066 }
2067 heap()->object_stats_->CheckpointObjectStats();
2068 }
2069 }
2070
2071
2072 void MarkCompactCollector::ClearNonLiveReferences() {
2073 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR);
2074 2069
2075 DependentCode* dependent_code_list; 2070 DependentCode* dependent_code_list;
2076 Object* non_live_map_list; 2071 Object* non_live_map_list;
2077 ClearWeakCells(&non_live_map_list, &dependent_code_list); 2072 ClearWeakCells(&non_live_map_list, &dependent_code_list);
2078 2073
2079 { 2074 {
2080 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAP); 2075 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAPS);
2081 ClearSimpleMapTransitions(non_live_map_list); 2076 ClearSimpleMapTransitions(non_live_map_list);
2082 ClearFullMapTransitions(); 2077 ClearFullMapTransitions();
2083 } 2078 }
2084 2079
2085 MarkDependentCodeForDeoptimization(dependent_code_list); 2080 MarkDependentCodeForDeoptimization(dependent_code_list);
2081
2082 ClearWeakCollections();
2083
2084 ClearInvalidStoreAndSlotsBufferEntries();
2085 #ifdef VERIFY_HEAP
Hannes Payer (out of office) 2015/12/11 09:27:31 We could move the debugging code into ClearInvalidStoreAndSlotsBufferEntries.
ulan 2015/12/11 09:35:32 Done.
2086 if (FLAG_verify_heap) {
2087 VerifyValidStoreAndSlotsBufferEntries();
2088 }
2089 #endif
2086 } 2090 }
2087 2091
2088 2092
2089 void MarkCompactCollector::MarkDependentCodeForDeoptimization( 2093 void MarkCompactCollector::MarkDependentCodeForDeoptimization(
2090 DependentCode* list_head) { 2094 DependentCode* list_head) {
2091 GCTracer::Scope gc_scope(heap()->tracer(), 2095 GCTracer::Scope gc_scope(heap()->tracer(),
2092 GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE); 2096 GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE);
2093
2094 Isolate* isolate = this->isolate(); 2097 Isolate* isolate = this->isolate();
2095 DependentCode* current = list_head; 2098 DependentCode* current = list_head;
2096 while (current->length() > 0) { 2099 while (current->length() > 0) {
2097 have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization( 2100 have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization(
2098 isolate, DependentCode::kWeakCodeGroup); 2101 isolate, DependentCode::kWeakCodeGroup);
2099 current = current->next_link(); 2102 current = current->next_link();
2100 } 2103 }
2101 2104
2102 WeakHashTable* table = heap_->weak_object_to_code_table(); 2105 WeakHashTable* table = heap_->weak_object_to_code_table();
2103 uint32_t capacity = table->Capacity(); 2106 uint32_t capacity = table->Capacity();
(...skipping 175 matching lines...)
2279 descriptors->GetEnumCache(), to_trim); 2282 descriptors->GetEnumCache(), to_trim);
2280 2283
2281 if (!descriptors->HasEnumIndicesCache()) return; 2284 if (!descriptors->HasEnumIndicesCache()) return;
2282 FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache(); 2285 FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache();
2283 heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(enum_indices_cache, 2286 heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(enum_indices_cache,
2284 to_trim); 2287 to_trim);
2285 } 2288 }
2286 2289
2287 2290
2288 void MarkCompactCollector::ProcessWeakCollections() { 2291 void MarkCompactCollector::ProcessWeakCollections() {
2289 GCTracer::Scope gc_scope(heap()->tracer(),
2290 GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS);
2291 Object* weak_collection_obj = heap()->encountered_weak_collections(); 2292 Object* weak_collection_obj = heap()->encountered_weak_collections();
2292 while (weak_collection_obj != Smi::FromInt(0)) { 2293 while (weak_collection_obj != Smi::FromInt(0)) {
2293 JSWeakCollection* weak_collection = 2294 JSWeakCollection* weak_collection =
2294 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); 2295 reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
2295 DCHECK(MarkCompactCollector::IsMarked(weak_collection)); 2296 DCHECK(MarkCompactCollector::IsMarked(weak_collection));
2296 if (weak_collection->table()->IsHashTable()) { 2297 if (weak_collection->table()->IsHashTable()) {
2297 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); 2298 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
2298 for (int i = 0; i < table->Capacity(); i++) { 2299 for (int i = 0; i < table->Capacity(); i++) {
2299 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { 2300 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) {
2300 Object** key_slot = 2301 Object** key_slot =
2301 table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i)); 2302 table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i));
2302 RecordSlot(table, key_slot, *key_slot); 2303 RecordSlot(table, key_slot, *key_slot);
2303 Object** value_slot = 2304 Object** value_slot =
2304 table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i)); 2305 table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i));
2305 MarkCompactMarkingVisitor::MarkObjectByPointer(this, table, 2306 MarkCompactMarkingVisitor::MarkObjectByPointer(this, table,
2306 value_slot); 2307 value_slot);
2307 } 2308 }
2308 } 2309 }
2309 } 2310 }
2310 weak_collection_obj = weak_collection->next(); 2311 weak_collection_obj = weak_collection->next();
2311 } 2312 }
2312 } 2313 }
2313 2314
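
ProcessWeakCollections above is one round of ephemeron marking: a weak-map value is marked only if its key is already marked, and since newly marked values can make further keys reachable, ProcessEphemeralMarking repeats the round until nothing changes. A hedged sketch of that fixed-point loop over a toy model (objects are ints; the real collector additionally pushes newly marked values onto the marking deque and traces through their bodies):

```cpp
#include <unordered_map>
#include <unordered_set>
#include <vector>

// Toy model: objects are ints, a weak map is a set of key -> value edges.
using WeakMap = std::unordered_map<int, int>;

// One ephemeron round: mark values whose keys are marked. Returns whether
// anything new was marked, so the caller can iterate to a fixed point.
bool EphemeronRound(const std::vector<WeakMap>& weak_maps,
                    std::unordered_set<int>* marked) {
  bool progress = false;
  for (const WeakMap& map : weak_maps) {
    for (const auto& [key, value] : map) {
      if (marked->count(key) && !marked->count(value)) {
        marked->insert(value);  // the value is live because its key is live
        progress = true;
      }
    }
  }
  return progress;
}

void MarkEphemerons(const std::vector<WeakMap>& weak_maps,
                    std::unordered_set<int>* marked) {
  while (EphemeronRound(weak_maps, marked)) {
    // Repeat until no weak-map entry can mark anything new.
  }
}
```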
2314 2315
2315 void MarkCompactCollector::ClearWeakCollections() { 2316 void MarkCompactCollector::ClearWeakCollections() {
2316 GCTracer::Scope gc_scope(heap()->tracer(), 2317 GCTracer::Scope gc_scope(heap()->tracer(),
2317 GCTracer::Scope::MC_WEAKCOLLECTION_CLEAR); 2318 GCTracer::Scope::MC_CLEAR_WEAK_COLLECTIONS);
2318 Object* weak_collection_obj = heap()->encountered_weak_collections(); 2319 Object* weak_collection_obj = heap()->encountered_weak_collections();
2319 while (weak_collection_obj != Smi::FromInt(0)) { 2320 while (weak_collection_obj != Smi::FromInt(0)) {
2320 JSWeakCollection* weak_collection = 2321 JSWeakCollection* weak_collection =
2321 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); 2322 reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
2322 DCHECK(MarkCompactCollector::IsMarked(weak_collection)); 2323 DCHECK(MarkCompactCollector::IsMarked(weak_collection));
2323 if (weak_collection->table()->IsHashTable()) { 2324 if (weak_collection->table()->IsHashTable()) {
2324 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); 2325 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
2325 for (int i = 0; i < table->Capacity(); i++) { 2326 for (int i = 0; i < table->Capacity(); i++) {
2326 HeapObject* key = HeapObject::cast(table->KeyAt(i)); 2327 HeapObject* key = HeapObject::cast(table->KeyAt(i));
2327 if (!MarkCompactCollector::IsMarked(key)) { 2328 if (!MarkCompactCollector::IsMarked(key)) {
2328 table->RemoveEntry(i); 2329 table->RemoveEntry(i);
2329 } 2330 }
2330 } 2331 }
2331 } 2332 }
2332 weak_collection_obj = weak_collection->next(); 2333 weak_collection_obj = weak_collection->next();
2333 weak_collection->set_next(heap()->undefined_value()); 2334 weak_collection->set_next(heap()->undefined_value());
2334 } 2335 }
2335 heap()->set_encountered_weak_collections(Smi::FromInt(0)); 2336 heap()->set_encountered_weak_collections(Smi::FromInt(0));
2336 } 2337 }
2337 2338
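
ClearWeakCollections is the destructive counterpart: once marking has reached its fixed point, an unmarked key can never become live, so its entry is dead and gets removed, as table->RemoveEntry(i) does above. The same toy model, kept self-contained:

```cpp
#include <iterator>
#include <unordered_map>
#include <unordered_set>

// Toy model as before: objects are ints, a weak map is a key -> value table.
using WeakMap = std::unordered_map<int, int>;

// Drop every entry whose key did not survive marking.
void ClearDeadEntries(WeakMap* map, const std::unordered_set<int>& marked) {
  for (auto it = map->begin(); it != map->end();) {
    it = marked.count(it->first) ? std::next(it) : map->erase(it);
  }
}
```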
2338 2339
2339 void MarkCompactCollector::AbortWeakCollections() { 2340 void MarkCompactCollector::AbortWeakCollections() {
2340 GCTracer::Scope gc_scope(heap()->tracer(),
2341 GCTracer::Scope::MC_WEAKCOLLECTION_ABORT);
2342 Object* weak_collection_obj = heap()->encountered_weak_collections(); 2341 Object* weak_collection_obj = heap()->encountered_weak_collections();
2343 while (weak_collection_obj != Smi::FromInt(0)) { 2342 while (weak_collection_obj != Smi::FromInt(0)) {
2344 JSWeakCollection* weak_collection = 2343 JSWeakCollection* weak_collection =
2345 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); 2344 reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
2346 weak_collection_obj = weak_collection->next(); 2345 weak_collection_obj = weak_collection->next();
2347 weak_collection->set_next(heap()->undefined_value()); 2346 weak_collection->set_next(heap()->undefined_value());
2348 } 2347 }
2349 heap()->set_encountered_weak_collections(Smi::FromInt(0)); 2348 heap()->set_encountered_weak_collections(Smi::FromInt(0));
2350 } 2349 }
2351 2350
2352 2351
2353 void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list, 2352 void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list,
2354 DependentCode** dependent_code_list) { 2353 DependentCode** dependent_code_list) {
2355 Heap* heap = this->heap(); 2354 Heap* heap = this->heap();
2356 GCTracer::Scope gc_scope(heap->tracer(), GCTracer::Scope::MC_CLEAR_WEAKCELL); 2355 GCTracer::Scope gc_scope(heap->tracer(),
2356 GCTracer::Scope::MC_CLEAR_WEAK_CELLS);
2357 Object* weak_cell_obj = heap->encountered_weak_cells(); 2357 Object* weak_cell_obj = heap->encountered_weak_cells();
2358 Object* the_hole_value = heap->the_hole_value(); 2358 Object* the_hole_value = heap->the_hole_value();
2359 DependentCode* dependent_code_head = 2359 DependentCode* dependent_code_head =
2360 DependentCode::cast(heap->empty_fixed_array()); 2360 DependentCode::cast(heap->empty_fixed_array());
2361 Object* non_live_map_head = Smi::FromInt(0); 2361 Object* non_live_map_head = Smi::FromInt(0);
2362 while (weak_cell_obj != Smi::FromInt(0)) { 2362 while (weak_cell_obj != Smi::FromInt(0)) {
2363 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); 2363 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
2364 Object* next_weak_cell = weak_cell->next(); 2364 Object* next_weak_cell = weak_cell->next();
2365 bool clear_value = true; 2365 bool clear_value = true;
2366 bool clear_next = true; 2366 bool clear_next = true;
(...skipping 1075 matching lines...)
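
Both the weak collection and weak cell code walk the same shape of data structure: an intrusive singly linked list threaded through a next field on the heap objects themselves, terminated by the Smi::FromInt(0) sentinel and detached link by link during the walk. A sketch of that traverse-and-unlink pattern with a hypothetical node type, nullptr standing in for the Smi zero sentinel:

```cpp
// Hypothetical stand-in for an object with an intrusive 'next' field.
struct WeakNode {
  WeakNode* next = nullptr;
  bool live = false;
};

// Walk a sentinel-terminated intrusive list, clearing each link as we go,
// mirroring weak_collection->set_next(heap()->undefined_value()) above.
// Returns a new list holding only the nodes that survived.
WeakNode* DrainAndFilter(WeakNode* head) {
  WeakNode* live_head = nullptr;
  while (head != nullptr) {
    WeakNode* next = head->next;
    head->next = nullptr;  // detach, like set_next(undefined_value())
    if (head->live) {
      head->next = live_head;  // relink survivors into the output list
      live_head = head;
    }
    head = next;
  }
  return live_head;
}
```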
3442 default: 3442 default:
3443 UNREACHABLE(); 3443 UNREACHABLE();
3444 break; 3444 break;
3445 } 3445 }
3446 } 3446 }
3447 } 3447 }
3448 } 3448 }
3449 3449
3450 3450
3451 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { 3451 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
3452 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_EVACUATE);
3452 Heap::RelocationLock relocation_lock(heap()); 3453 Heap::RelocationLock relocation_lock(heap());
3453 3454
3454 { 3455 {
3455 GCTracer::Scope gc_scope(heap()->tracer(), 3456 GCTracer::Scope gc_scope(heap()->tracer(),
3456 GCTracer::Scope::MC_SWEEP_NEWSPACE); 3457 GCTracer::Scope::MC_EVACUATE_NEW_SPACE);
3457 EvacuationScope evacuation_scope(this); 3458 EvacuationScope evacuation_scope(this);
3458 EvacuateNewSpace(); 3459 EvacuateNewSpace();
3459 } 3460 }
3460 3461
3461 { 3462 {
3462 GCTracer::Scope gc_scope(heap()->tracer(), 3463 GCTracer::Scope gc_scope(heap()->tracer(),
3463 GCTracer::Scope::MC_EVACUATE_PAGES); 3464 GCTracer::Scope::MC_EVACUATE_CANDIDATES);
3464 EvacuationScope evacuation_scope(this); 3465 EvacuationScope evacuation_scope(this);
3465 EvacuatePagesInParallel(); 3466 EvacuatePagesInParallel();
3466 } 3467 }
3467 3468
3469 UpdatePointersAfterEvacuation();
3470
3468 { 3471 {
3469 GCTracer::Scope gc_scope(heap()->tracer(), 3472 GCTracer::Scope gc_scope(heap()->tracer(),
3470 GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED); 3473 GCTracer::Scope::MC_EVACUATE_CLEAN_UP);
3474 // After updating all pointers, we can finally sweep the aborted pages,
3475 // effectively overriding any forward pointers.
3476 SweepAbortedPages();
3477
3478 // EvacuateNewSpaceAndCandidates iterates over new space objects and for
3479 // ArrayBuffers either re-registers them as live or promotes them. This is
3480 // needed to properly free them.
3481 heap()->array_buffer_tracker()->FreeDead(false);
3482
3483 // Deallocate evacuated candidate pages.
3484 ReleaseEvacuationCandidates();
3485 }
3486
3487 #ifdef VERIFY_HEAP
3488 if (FLAG_verify_heap && !sweeping_in_progress_) {
3489 VerifyEvacuation(heap());
3490 }
3491 #endif
3492 }
3493
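
UpdatePointersAfterEvacuation, defined below, exists because evacuation copies objects but leaves every recorded slot pointing at the stale copy; the old location holds a forwarding pointer to the new copy (V8 stashes it in the evacuated object's first word, roughly speaking), and the update pass rewrites each slot through that indirection. A small sketch with an explicit forwarding field instead of the in-place encoding; the types are hypothetical:

```cpp
#include <vector>

// Hypothetical object: 'forwarding' is non-null iff the object was moved.
struct Obj {
  Obj* forwarding = nullptr;
  int payload = 0;
};

// Rewrite one recorded slot so it points at the relocated copy; this is
// the per-slot job behind UpdateSlotsRecordedIn() in the chunks below.
void UpdateSlot(Obj** slot) {
  Obj* target = *slot;
  if (target != nullptr && target->forwarding != nullptr) {
    *slot = target->forwarding;
  }
}

void UpdateRecordedSlots(const std::vector<Obj**>& slots_buffer) {
  for (Obj** slot : slots_buffer) UpdateSlot(slot);
}
```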
3494
3495 void MarkCompactCollector::UpdatePointersAfterEvacuation() {
3496 GCTracer::Scope gc_scope(heap()->tracer(),
3497 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS);
3498 {
3499 GCTracer::Scope gc_scope(
3500 heap()->tracer(),
3501 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED);
3471 UpdateSlotsRecordedIn(migration_slots_buffer_); 3502 UpdateSlotsRecordedIn(migration_slots_buffer_);
3472 if (FLAG_trace_fragmentation_verbose) { 3503 if (FLAG_trace_fragmentation_verbose) {
3473 PrintF(" migration slots buffer: %d\n", 3504 PrintF(" migration slots buffer: %d\n",
3474 SlotsBuffer::SizeOfChain(migration_slots_buffer_)); 3505 SlotsBuffer::SizeOfChain(migration_slots_buffer_));
3475 } 3506 }
3476 slots_buffer_allocator_->DeallocateChain(&migration_slots_buffer_); 3507 slots_buffer_allocator_->DeallocateChain(&migration_slots_buffer_);
3477 DCHECK(migration_slots_buffer_ == NULL); 3508 DCHECK(migration_slots_buffer_ == NULL);
3478 3509
3479 // TODO(hpayer): Process the slots buffers in parallel. This has to be done 3510 // TODO(hpayer): Process the slots buffers in parallel. This has to be done
3480 // after evacuation of all pages finishes. 3511 // after evacuation of all pages finishes.
3481 int buffers = evacuation_slots_buffers_.length(); 3512 int buffers = evacuation_slots_buffers_.length();
3482 for (int i = 0; i < buffers; i++) { 3513 for (int i = 0; i < buffers; i++) {
3483 SlotsBuffer* buffer = evacuation_slots_buffers_[i]; 3514 SlotsBuffer* buffer = evacuation_slots_buffers_[i];
3484 UpdateSlotsRecordedIn(buffer); 3515 UpdateSlotsRecordedIn(buffer);
3485 slots_buffer_allocator_->DeallocateChain(&buffer); 3516 slots_buffer_allocator_->DeallocateChain(&buffer);
3486 } 3517 }
3487 evacuation_slots_buffers_.Rewind(0); 3518 evacuation_slots_buffers_.Rewind(0);
3488 } 3519 }
3489 3520
3490 // Second pass: find pointers to new space and update them. 3521 // Second pass: find pointers to new space and update them.
3491 PointersUpdatingVisitor updating_visitor(heap()); 3522 PointersUpdatingVisitor updating_visitor(heap());
3492 3523
3493 { 3524 {
3494 GCTracer::Scope gc_scope(heap()->tracer(), 3525 GCTracer::Scope gc_scope(
3495 GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS); 3526 heap()->tracer(), GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW);
3496 // Update pointers in to space. 3527 // Update pointers in to space.
3497 SemiSpaceIterator to_it(heap()->new_space()); 3528 SemiSpaceIterator to_it(heap()->new_space());
3498 for (HeapObject* object = to_it.Next(); object != NULL; 3529 for (HeapObject* object = to_it.Next(); object != NULL;
3499 object = to_it.Next()) { 3530 object = to_it.Next()) {
3500 Map* map = object->map(); 3531 Map* map = object->map();
3501 object->IterateBody(map->instance_type(), object->SizeFromMap(map), 3532 object->IterateBody(map->instance_type(), object->SizeFromMap(map),
3502 &updating_visitor); 3533 &updating_visitor);
3503 } 3534 }
3504 }
3505
3506 {
3507 GCTracer::Scope gc_scope(heap()->tracer(),
3508 GCTracer::Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS);
3509 // Update roots. 3535 // Update roots.
3510 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE); 3536 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE);
3511 }
3512 3537
3513 {
3514 GCTracer::Scope gc_scope(heap()->tracer(),
3515 GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS);
3516 StoreBufferRebuildScope scope(heap_, heap_->store_buffer(), 3538 StoreBufferRebuildScope scope(heap_, heap_->store_buffer(),
3517 &Heap::ScavengeStoreBufferCallback); 3539 &Heap::ScavengeStoreBufferCallback);
3518 heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer); 3540 heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer);
3519 } 3541 }
3520 3542
3521 int npages = evacuation_candidates_.length(); 3543 int npages = evacuation_candidates_.length();
3522 { 3544 {
3523 GCTracer::Scope gc_scope( 3545 GCTracer::Scope gc_scope(
3524 heap()->tracer(), 3546 heap()->tracer(),
3525 GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED); 3547 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_BETWEEN_EVACUATED);
3526 for (int i = 0; i < npages; i++) { 3548 for (int i = 0; i < npages; i++) {
3527 Page* p = evacuation_candidates_[i]; 3549 Page* p = evacuation_candidates_[i];
3528 DCHECK(p->IsEvacuationCandidate() || 3550 DCHECK(p->IsEvacuationCandidate() ||
3529 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); 3551 p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
3530 3552
3531 if (p->IsEvacuationCandidate()) { 3553 if (p->IsEvacuationCandidate()) {
3532 UpdateSlotsRecordedIn(p->slots_buffer()); 3554 UpdateSlotsRecordedIn(p->slots_buffer());
3533 if (FLAG_trace_fragmentation_verbose) { 3555 if (FLAG_trace_fragmentation_verbose) {
3534 PrintF(" page %p slots buffer: %d\n", reinterpret_cast<void*>(p), 3556 PrintF(" page %p slots buffer: %d\n", reinterpret_cast<void*>(p),
3535 SlotsBuffer::SizeOfChain(p->slots_buffer())); 3557 SlotsBuffer::SizeOfChain(p->slots_buffer()));
(...skipping 41 matching lines...)
3577 default: 3599 default:
3578 UNREACHABLE(); 3600 UNREACHABLE();
3579 break; 3601 break;
3580 } 3602 }
3581 } 3603 }
3582 } 3604 }
3583 } 3605 }
3584 3606
3585 { 3607 {
3586 GCTracer::Scope gc_scope(heap()->tracer(), 3608 GCTracer::Scope gc_scope(heap()->tracer(),
3587 GCTracer::Scope::MC_UPDATE_MISC_POINTERS); 3609 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK);
3588 heap_->string_table()->Iterate(&updating_visitor); 3610 heap_->string_table()->Iterate(&updating_visitor);
3589 3611
3590 // Update pointers from external string table. 3612 // Update pointers from external string table.
3591 heap_->UpdateReferencesInExternalStringTable( 3613 heap_->UpdateReferencesInExternalStringTable(
3592 &UpdateReferenceInExternalStringTableEntry); 3614 &UpdateReferenceInExternalStringTableEntry);
3593 3615
3594 EvacuationWeakObjectRetainer evacuation_object_retainer; 3616 EvacuationWeakObjectRetainer evacuation_object_retainer;
3595 heap()->ProcessAllWeakReferences(&evacuation_object_retainer); 3617 heap()->ProcessAllWeakReferences(&evacuation_object_retainer);
3596 } 3618 }
3597
3598 {
3599 GCTracer::Scope gc_scope(heap()->tracer(),
3600 GCTracer::Scope::MC_SWEEP_ABORTED);
3601 // After updating all pointers, we can finally sweep the aborted pages,
3602 // effectively overriding any forward pointers.
3603 SweepAbortedPages();
3604 }
3605
3606 heap_->isolate()->inner_pointer_to_code_cache()->Flush();
3607
3608 // The hashing of weak_object_to_code_table is no longer valid.
3609 heap()->weak_object_to_code_table()->Rehash(
3610 heap()->isolate()->factory()->undefined_value());
3611 } 3619 }
3612 3620
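
The "second pass" above is classic double dispatch: IterateBody derives an object's pointer fields from its map and instance type and hands each slot to the visitor, so one PointersUpdatingVisitor serves to-space objects, roots, and store-buffer entries alike. A minimal sketch assuming a flat field list rather than V8's map-described layouts:

```cpp
#include <vector>

struct Obj;

// Visitor interface: invoked once per pointer slot in an object body.
struct SlotVisitor {
  virtual ~SlotVisitor() = default;
  virtual void VisitPointer(Obj** slot) = 0;
};

struct Obj {
  std::vector<Obj*> fields;  // stand-in for map-described pointer fields

  void IterateBody(SlotVisitor* visitor) {
    for (Obj*& field : fields) visitor->VisitPointer(&field);
  }
};

// An updating visitor rewrites each slot it is handed, e.g. by chasing a
// forwarding pointer as in the evacuation sketch earlier.
struct UpdatingVisitor : SlotVisitor {
  void VisitPointer(Obj** slot) override {
    // Here a real visitor would rewrite *slot if its target moved.
    (void)slot;
  }
};
```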
3613 3621
3614 void MarkCompactCollector::MoveEvacuationCandidatesToEndOfPagesList() { 3622 void MarkCompactCollector::MoveEvacuationCandidatesToEndOfPagesList() {
3615 int npages = evacuation_candidates_.length(); 3623 int npages = evacuation_candidates_.length();
3616 for (int i = 0; i < npages; i++) { 3624 for (int i = 0; i < npages; i++) {
3617 Page* p = evacuation_candidates_[i]; 3625 Page* p = evacuation_candidates_[i];
3618 if (!p->IsEvacuationCandidate()) continue; 3626 if (!p->IsEvacuationCandidate()) continue;
3619 p->Unlink(); 3627 p->Unlink();
3620 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); 3628 PagedSpace* space = static_cast<PagedSpace*>(p->owner());
(...skipping 201 matching lines...)
3822 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_SWEEP); 3830 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_SWEEP);
3823 double start_time = 0.0; 3831 double start_time = 0.0;
3824 if (FLAG_print_cumulative_gc_stat) { 3832 if (FLAG_print_cumulative_gc_stat) {
3825 start_time = heap_->MonotonicallyIncreasingTimeInMs(); 3833 start_time = heap_->MonotonicallyIncreasingTimeInMs();
3826 } 3834 }
3827 3835
3828 #ifdef DEBUG 3836 #ifdef DEBUG
3829 state_ = SWEEP_SPACES; 3837 state_ = SWEEP_SPACES;
3830 #endif 3838 #endif
3831 3839
3832 MoveEvacuationCandidatesToEndOfPagesList();
Hannes Payer (out of office) 2015/12/11 09:27:31 I would leave this call in here, move them there to the end of the pages list right before sweeping.
ulan 2015/12/11 09:35:32 Done.
3833
3834 { 3840 {
3835 { 3841 {
3836 GCTracer::Scope sweep_scope(heap()->tracer(), 3842 GCTracer::Scope sweep_scope(heap()->tracer(),
3837 GCTracer::Scope::MC_SWEEP_OLDSPACE); 3843 GCTracer::Scope::MC_SWEEP_OLD);
3838 SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING); 3844 SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING);
3839 } 3845 }
3840 { 3846 {
3841 GCTracer::Scope sweep_scope(heap()->tracer(), 3847 GCTracer::Scope sweep_scope(heap()->tracer(),
3842 GCTracer::Scope::MC_SWEEP_CODE); 3848 GCTracer::Scope::MC_SWEEP_CODE);
3843 SweepSpace(heap()->code_space(), CONCURRENT_SWEEPING); 3849 SweepSpace(heap()->code_space(), CONCURRENT_SWEEPING);
3844 } 3850 }
3845 { 3851 {
3846 GCTracer::Scope sweep_scope(heap()->tracer(), 3852 GCTracer::Scope sweep_scope(heap()->tracer(),
3847 GCTracer::Scope::MC_SWEEP_MAP); 3853 GCTracer::Scope::MC_SWEEP_MAP);
3848 SweepSpace(heap()->map_space(), CONCURRENT_SWEEPING); 3854 SweepSpace(heap()->map_space(), CONCURRENT_SWEEPING);
3849 } 3855 }
3850 sweeping_in_progress_ = true; 3856 sweeping_in_progress_ = true;
3851 if (heap()->concurrent_sweeping_enabled()) { 3857 if (heap()->concurrent_sweeping_enabled()) {
3852 StartSweeperThreads(); 3858 StartSweeperThreads();
3853 } 3859 }
3854 } 3860 }
3855 3861
3856 // Deallocate unmarked large objects. 3862 // Deallocate unmarked large objects.
3857 heap_->lo_space()->FreeUnmarkedObjects(); 3863 heap_->lo_space()->FreeUnmarkedObjects();
3858 3864
3859 // Give pages that are queued to be freed back to the OS. Invalid store 3865 // Give pages that are queued to be freed back to the OS. Invalid store
3860 // buffer entries are already filtered out. We can just release the memory. 3866 // buffer entries are already filtered out. We can just release the memory.
3861 heap()->FreeQueuedChunks(); 3867 heap()->FreeQueuedChunks();
3862 3868
3863 EvacuateNewSpaceAndCandidates();
3864
3865 // EvacuateNewSpaceAndCandidates iterates over new space objects and for
3866 // ArrayBuffers either re-registers them as live or promotes them. This is
3867 // needed to properly free them.
3868 heap()->array_buffer_tracker()->FreeDead(false);
3869
3870 // Clear the marking state of live large objects.
3871 heap_->lo_space()->ClearMarkingStateOfLiveObjects();
3872
3873 // Deallocate evacuated candidate pages.
3874 ReleaseEvacuationCandidates();
3875
3876 if (FLAG_print_cumulative_gc_stat) { 3869 if (FLAG_print_cumulative_gc_stat) {
3877 heap_->tracer()->AddSweepingTime(heap_->MonotonicallyIncreasingTimeInMs() - 3870 heap_->tracer()->AddSweepingTime(heap_->MonotonicallyIncreasingTimeInMs() -
3878 start_time); 3871 start_time);
3879 } 3872 }
3880
3881 #ifdef VERIFY_HEAP
3882 if (FLAG_verify_heap && !sweeping_in_progress_) {
3883 VerifyEvacuation(heap());
3884 }
3885 #endif
3886 } 3873 }
3887 3874
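
SweepSpaces flips sweeping_in_progress_ before handing pages to the background sweepers, and anything that later touches swept pages (including the VerifyEvacuation guard, now moved into EvacuateNewSpaceAndCandidates) must consult that flag. A hedged sketch of the handoff using std::thread in place of V8's platform threads; Sweeper and its methods are illustrative, not the real interface:

```cpp
#include <atomic>
#include <thread>
#include <vector>

// Hypothetical sweeper: each task sweeps one space's pages off-thread.
class Sweeper {
 public:
  void StartSweeping(int num_spaces) {
    sweeping_in_progress_.store(true);
    for (int i = 0; i < num_spaces; i++) {
      threads_.emplace_back([this, i] { SweepSpace(i); });
    }
  }

  // The main thread joins before doing anything that assumes swept pages.
  void EnsureSweepingCompleted() {
    for (std::thread& t : threads_) t.join();
    threads_.clear();
    sweeping_in_progress_.store(false);
  }

  bool sweeping_in_progress() const { return sweeping_in_progress_.load(); }

 private:
  void SweepSpace(int /*space*/) {
    // Free dead objects and rebuild the space's free lists.
  }

  std::atomic<bool> sweeping_in_progress_{false};
  std::vector<std::thread> threads_;
};
```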
3888 3875
3889 void MarkCompactCollector::ParallelSweepSpaceComplete(PagedSpace* space) { 3876 void MarkCompactCollector::ParallelSweepSpaceComplete(PagedSpace* space) {
3890 PageIterator it(space); 3877 PageIterator it(space);
3891 while (it.has_next()) { 3878 while (it.has_next()) {
3892 Page* p = it.next(); 3879 Page* p = it.next();
3893 if (p->parallel_sweeping_state().Value() == 3880 if (p->parallel_sweeping_state().Value() ==
3894 MemoryChunk::kSweepingFinalize) { 3881 MemoryChunk::kSweepingFinalize) {
3895 p->parallel_sweeping_state().SetValue(MemoryChunk::kSweepingDone); 3882 p->parallel_sweeping_state().SetValue(MemoryChunk::kSweepingDone);
(...skipping 78 matching lines...)
3974 MarkBit mark_bit = Marking::MarkBitFrom(host); 3961 MarkBit mark_bit = Marking::MarkBitFrom(host);
3975 if (Marking::IsBlack(mark_bit)) { 3962 if (Marking::IsBlack(mark_bit)) {
3976 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); 3963 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host);
3977 RecordRelocSlot(&rinfo, target); 3964 RecordRelocSlot(&rinfo, target);
3978 } 3965 }
3979 } 3966 }
3980 } 3967 }
3981 3968
3982 } // namespace internal 3969 } // namespace internal
3983 } // namespace v8 3970 } // namespace v8