Chromium Code Reviews

Issue 1515503006: Clean up mark-compact phases and GC counter names. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix order (created 5 years ago)

Unified diff: src/heap/mark-compact.cc
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "src/heap/mark-compact.h"
 
 #include "src/base/atomicops.h"
 #include "src/base/bits.h"
 #include "src/base/sys-info.h"
 #include "src/code-stubs.h"
(...skipping 293 matching lines...)
     compacting_ = evacuation_candidates_.length() > 0;
   }
 
   return compacting_;
 }
 
 
 void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() {
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
-                             GCTracer::Scope::MC_STORE_BUFFER_CLEAR);
+                             GCTracer::Scope::MC_CLEAR_STORE_BUFFER);
     heap_->store_buffer()->ClearInvalidStoreBufferEntries();
   }
 
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
-                             GCTracer::Scope::MC_SLOTS_BUFFER_CLEAR);
+                             GCTracer::Scope::MC_CLEAR_SLOTS_BUFFER);
     int number_of_pages = evacuation_candidates_.length();
     for (int i = 0; i < number_of_pages; i++) {
       Page* p = evacuation_candidates_[i];
       SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer());
     }
   }
 }
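
For readers new to this file: GCTracer::Scope is an RAII timer, so each constant renamed above is just the bucket that the scope's destructor charges with the elapsed wall time of its block. A minimal sketch of the pattern, with Tracer, ScopeId and AddScopeSample as illustrative stand-ins rather than the real gc-tracer.h interface:

// Minimal sketch of the RAII timing-scope pattern behind GCTracer::Scope.
// Tracer, ScopeId and AddScopeSample are illustrative stand-ins, not the
// actual gc-tracer.h interface.
#include <chrono>

class Tracer {
 public:
  enum ScopeId {
    MC_CLEAR_STORE_BUFFER,
    MC_CLEAR_SLOTS_BUFFER,
    NUMBER_OF_SCOPES
  };

  class Scope {
   public:
    Scope(Tracer* tracer, ScopeId id)
        : tracer_(tracer), id_(id), start_(std::chrono::steady_clock::now()) {}
    ~Scope() {
      // Charge the elapsed time of the enclosing block to the counter.
      double ms = std::chrono::duration<double, std::milli>(
                      std::chrono::steady_clock::now() - start_)
                      .count();
      tracer_->AddScopeSample(id_, ms);
    }

   private:
    Tracer* tracer_;
    ScopeId id_;
    std::chrono::steady_clock::time_point start_;
  };

  void AddScopeSample(ScopeId id, double ms) { total_ms_[id] += ms; }

 private:
  double total_ms_[NUMBER_OF_SCOPES] = {};
};

Usage then mirrors the diff: declaring Tracer::Scope scope(tracer, Tracer::MC_CLEAR_STORE_BUFFER) at the top of a block times exactly that block.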
 
 
 #ifdef VERIFY_HEAP
(...skipping 26 matching lines...)
   // Make sure that Prepare() has been called. The individual steps below will
   // update the state as they proceed.
   DCHECK(state_ == PREPARE_GC);
 
   MarkLiveObjects();
 
   DCHECK(heap_->incremental_marking()->IsStopped());
 
   ClearNonLiveReferences();
 
-  ClearWeakCollections();
-
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     VerifyMarking(heap_);
   }
 #endif
 
-  ClearInvalidStoreAndSlotsBufferEntries();
-
-#ifdef VERIFY_HEAP
-  if (FLAG_verify_heap) {
-    VerifyValidStoreAndSlotsBufferEntries();
-  }
-#endif
+  MoveEvacuationCandidatesToEndOfPagesList();
 
   SweepSpaces();
 
+  EvacuateNewSpaceAndCandidates();
+
   Finish();
 
   if (marking_parity_ == EVEN_MARKING_PARITY) {
     marking_parity_ = ODD_MARKING_PARITY;
   } else {
     DCHECK(marking_parity_ == ODD_MARKING_PARITY);
     marking_parity_ = EVEN_MARKING_PARITY;
   }
 }
 
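The intent of the CL is easiest to see as a before/after map of the counter names: every counter now starts with its top-level phase (MC_MARK_*, MC_CLEAR_*, MC_SWEEP_*, MC_EVACUATE_*, MC_FINISH). An abbreviated list compiled from the hunks in this file; the full enum lives in src/heap/gc-tracer.h:

// Old name                     New name (this CL)
// MC_MARK_ROOT              -> MC_MARK_ROOTS (also absorbs MC_MARK_TOPOPT)
// MC_STORE_BUFFER_CLEAR     -> MC_CLEAR_STORE_BUFFER
// MC_SLOTS_BUFFER_CLEAR     -> MC_CLEAR_SLOTS_BUFFER
// MC_WEAKCOLLECTION_CLEAR   -> MC_CLEAR_WEAK_COLLECTIONS
// MC_CLEAR_WEAKCELL         -> MC_CLEAR_WEAK_CELLS
// MC_SWEEP_OLDSPACE         -> MC_SWEEP_OLD
// MC_SWEEP_NEWSPACE         -> MC_EVACUATE_NEW_SPACE
// MC_EVACUATE_PAGES         -> MC_EVACUATE_CANDIDATES
// MC_UPDATE_MISC_POINTERS   -> MC_EVACUATE_UPDATE_POINTERS_WEAK
// (new)                        MC_FINISH
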
(...skipping 465 matching lines...)
 
 #ifdef VERIFY_HEAP
   if (!was_marked_incrementally_ && FLAG_verify_heap) {
     VerifyMarkbitsAreClean();
   }
 #endif
 }
 
 
 void MarkCompactCollector::Finish() {
+  GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_FINISH);
 #ifdef DEBUG
   DCHECK(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
   state_ = IDLE;
 #endif
   // The stub cache is not traversed during GC; clear the cache to
   // force lazy re-initialization of it. This must be done after the
   // GC, because it relies on the new address of certain old space
   // objects (empty string, illegal builtin).
   isolate()->stub_cache()->Clear();
 
(...skipping 1082 matching lines...)
 
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_MARK_PREPARE_CODE_FLUSH);
     PrepareForCodeFlushing();
   }
 
   RootMarkingVisitor root_visitor(heap());
 
   {
-    GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK_ROOT);
+    GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK_ROOTS);
     MarkRoots(&root_visitor);
-  }
-
-  {
-    GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK_TOPOPT);
     ProcessTopOptimizedFrame(&root_visitor);
   }
 
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_MARK_WEAK_CLOSURE);
 
     // The objects reachable from the roots are marked, yet unreachable
     // objects are unmarked. Mark objects reachable due to host
     // application specific logic or through Harmony weak maps.
(...skipping 13 matching lines...)
     ProcessMarkingDeque();
 
     // Repeat Harmony weak maps marking to mark unmarked objects reachable from
     // the weak roots we just marked as pending destruction.
     //
     // We only process harmony collections, as all object groups have been fully
     // processed and no weakly reachable node can discover new object groups.
     ProcessEphemeralMarking(&root_visitor, true);
   }
 
-  AfterMarking();
-
   if (FLAG_print_cumulative_gc_stat) {
     heap_->tracer()->AddMarkingTime(heap_->MonotonicallyIncreasingTimeInMs() -
                                     start_time);
   }
+  if (FLAG_track_gc_object_stats) {
+    if (FLAG_trace_gc_object_stats) {
+      heap()->object_stats_->TraceObjectStats();
+    }
+    heap()->object_stats_->CheckpointObjectStats();
+  }
 }
 
 
-void MarkCompactCollector::AfterMarking() {
+void MarkCompactCollector::ClearNonLiveReferences() {
+  GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR);
+
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
-                             GCTracer::Scope::MC_MARK_STRING_TABLE);
+                             GCTracer::Scope::MC_CLEAR_STRING_TABLE);
 
     // Prune the string table removing all strings only pointed to by the
     // string table. Cannot use string_table() here because the string
     // table is marked.
     StringTable* string_table = heap()->string_table();
     InternalizedStringTableCleaner internalized_visitor(heap());
     string_table->IterateElements(&internalized_visitor);
     string_table->ElementsRemoved(internalized_visitor.PointersRemoved());
 
     ExternalStringTableCleaner external_visitor(heap());
     heap()->external_string_table_.Iterate(&external_visitor);
     heap()->external_string_table_.CleanUp();
   }
 
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
-                             GCTracer::Scope::MC_MARK_WEAK_REFERENCES);
-
+                             GCTracer::Scope::MC_CLEAR_WEAK_LISTS);
     // Process the weak references.
     MarkCompactWeakObjectRetainer mark_compact_object_retainer;
     heap()->ProcessAllWeakReferences(&mark_compact_object_retainer);
   }
 
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
-                             GCTracer::Scope::MC_MARK_GLOBAL_HANDLES);
+                             GCTracer::Scope::MC_CLEAR_GLOBAL_HANDLES);
 
     // Remove object groups after marking phase.
     heap()->isolate()->global_handles()->RemoveObjectGroups();
     heap()->isolate()->global_handles()->RemoveImplicitRefGroups();
   }
 
   // Flush code from collected candidates.
   if (is_code_flushing_enabled()) {
     GCTracer::Scope gc_scope(heap()->tracer(),
-                             GCTracer::Scope::MC_MARK_CODE_FLUSH);
+                             GCTracer::Scope::MC_CLEAR_CODE_FLUSH);
     code_flusher_->ProcessCandidates();
   }
 
-  if (FLAG_track_gc_object_stats) {
-    if (FLAG_trace_gc_object_stats) {
-      heap()->object_stats_->TraceObjectStats();
-    }
-    heap()->object_stats_->CheckpointObjectStats();
-  }
-}
-
-
-void MarkCompactCollector::ClearNonLiveReferences() {
-  GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR);
 
   DependentCode* dependent_code_list;
   Object* non_live_map_list;
   ClearWeakCells(&non_live_map_list, &dependent_code_list);
 
   {
-    GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAP);
+    GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAPS);
     ClearSimpleMapTransitions(non_live_map_list);
     ClearFullMapTransitions();
   }
 
   MarkDependentCodeForDeoptimization(dependent_code_list);
+
+  ClearWeakCollections();
+
+  ClearInvalidStoreAndSlotsBufferEntries();
+#ifdef VERIFY_HEAP
+  if (FLAG_verify_heap) {
+    VerifyValidStoreAndSlotsBufferEntries();
+  }
+#endif
 }
 
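With AfterMarking() folded into ClearNonLiveReferences(), the clearing phase now has exactly one parent scope (MC_CLEAR) with nested MC_CLEAR_* sub-scopes, so the name prefixes mirror the actual nesting in trace output. A self-contained sketch of that composition, with Phase standing in for GCTracer::Scope:

#include <chrono>
#include <cstdio>

// Stand-in for GCTracer::Scope: prints elapsed wall time on block exit.
struct Phase {
  const char* name;
  std::chrono::steady_clock::time_point start;
  explicit Phase(const char* n)
      : name(n), start(std::chrono::steady_clock::now()) {}
  ~Phase() {
    double ms = std::chrono::duration<double, std::milli>(
                    std::chrono::steady_clock::now() - start)
                    .count();
    std::printf("%s: %.3f ms\n", name, ms);
  }
};

void ClearNonLiveReferences() {
  Phase clear("MC_CLEAR");  // Parent scope spans the whole phase.
  { Phase p("MC_CLEAR_STRING_TABLE");   /* prune string tables */ }
  { Phase p("MC_CLEAR_WEAK_LISTS");     /* process weak references */ }
  { Phase p("MC_CLEAR_GLOBAL_HANDLES"); /* drop object groups */ }
  // MC_CLEAR's total includes the time of all three sub-scopes.
}
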
 
 void MarkCompactCollector::MarkDependentCodeForDeoptimization(
     DependentCode* list_head) {
   GCTracer::Scope gc_scope(heap()->tracer(),
                            GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE);
-
   Isolate* isolate = this->isolate();
   DependentCode* current = list_head;
   while (current->length() > 0) {
     have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization(
         isolate, DependentCode::kWeakCodeGroup);
     current = current->next_link();
   }
 
   WeakHashTable* table = heap_->weak_object_to_code_table();
   uint32_t capacity = table->Capacity();
(...skipping 175 matching lines...)
       descriptors->GetEnumCache(), to_trim);
 
   if (!descriptors->HasEnumIndicesCache()) return;
   FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache();
   heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(enum_indices_cache,
                                                           to_trim);
 }
 
 
 void MarkCompactCollector::ProcessWeakCollections() {
-  GCTracer::Scope gc_scope(heap()->tracer(),
-                           GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS);
   Object* weak_collection_obj = heap()->encountered_weak_collections();
   while (weak_collection_obj != Smi::FromInt(0)) {
     JSWeakCollection* weak_collection =
         reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
     DCHECK(MarkCompactCollector::IsMarked(weak_collection));
     if (weak_collection->table()->IsHashTable()) {
       ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
       for (int i = 0; i < table->Capacity(); i++) {
         if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) {
           Object** key_slot =
               table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i));
           RecordSlot(table, key_slot, *key_slot);
           Object** value_slot =
               table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i));
           MarkCompactMarkingVisitor::MarkObjectByPointer(this, table,
                                                          value_slot);
         }
       }
     }
     weak_collection_obj = weak_collection->next();
   }
 }
 
 
 void MarkCompactCollector::ClearWeakCollections() {
   GCTracer::Scope gc_scope(heap()->tracer(),
-                           GCTracer::Scope::MC_WEAKCOLLECTION_CLEAR);
+                           GCTracer::Scope::MC_CLEAR_WEAK_COLLECTIONS);
   Object* weak_collection_obj = heap()->encountered_weak_collections();
   while (weak_collection_obj != Smi::FromInt(0)) {
     JSWeakCollection* weak_collection =
         reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
     DCHECK(MarkCompactCollector::IsMarked(weak_collection));
     if (weak_collection->table()->IsHashTable()) {
       ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
       for (int i = 0; i < table->Capacity(); i++) {
         HeapObject* key = HeapObject::cast(table->KeyAt(i));
         if (!MarkCompactCollector::IsMarked(key)) {
           table->RemoveEntry(i);
         }
       }
     }
     weak_collection_obj = weak_collection->next();
     weak_collection->set_next(heap()->undefined_value());
   }
   heap()->set_encountered_weak_collections(Smi::FromInt(0));
 }
 
 
 void MarkCompactCollector::AbortWeakCollections() {
-  GCTracer::Scope gc_scope(heap()->tracer(),
-                           GCTracer::Scope::MC_WEAKCOLLECTION_ABORT);
   Object* weak_collection_obj = heap()->encountered_weak_collections();
   while (weak_collection_obj != Smi::FromInt(0)) {
     JSWeakCollection* weak_collection =
         reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
     weak_collection_obj = weak_collection->next();
     weak_collection->set_next(heap()->undefined_value());
   }
   heap()->set_encountered_weak_collections(Smi::FromInt(0));
 }
 
 
 void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list,
                                           DependentCode** dependent_code_list) {
   Heap* heap = this->heap();
-  GCTracer::Scope gc_scope(heap->tracer(), GCTracer::Scope::MC_CLEAR_WEAKCELL);
+  GCTracer::Scope gc_scope(heap->tracer(),
+                           GCTracer::Scope::MC_CLEAR_WEAK_CELLS);
   Object* weak_cell_obj = heap->encountered_weak_cells();
   Object* the_hole_value = heap->the_hole_value();
   DependentCode* dependent_code_head =
       DependentCode::cast(heap->empty_fixed_array());
   Object* non_live_map_head = Smi::FromInt(0);
   while (weak_cell_obj != Smi::FromInt(0)) {
     WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
     Object* next_weak_cell = weak_cell->next();
     bool clear_value = true;
     bool clear_next = true;
(...skipping 1075 matching lines...)
       default:
         UNREACHABLE();
         break;
     }
   }
 }
 
 
 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
+  GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_EVACUATE);
   Heap::RelocationLock relocation_lock(heap());
 
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
-                             GCTracer::Scope::MC_SWEEP_NEWSPACE);
+                             GCTracer::Scope::MC_EVACUATE_NEW_SPACE);
     EvacuationScope evacuation_scope(this);
     EvacuateNewSpace();
   }
 
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
-                             GCTracer::Scope::MC_EVACUATE_PAGES);
+                             GCTracer::Scope::MC_EVACUATE_CANDIDATES);
     EvacuationScope evacuation_scope(this);
     EvacuatePagesInParallel();
   }
 
+  UpdatePointersAfterEvacuation();
+
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
-                             GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED);
+                             GCTracer::Scope::MC_EVACUATE_CLEAN_UP);
 
 Michael Lippautz 2015/12/10 16:25:53:
   Not all items here are related to evacuation candidates.
 
 ulan 2015/12/10 18:51:06:
   Done. Moved those to Finish phase.
 
+    // After updating all pointers, we can finally sweep the aborted pages,
+    // effectively overriding any forward pointers.
+    SweepAbortedPages();
+
+    heap_->isolate()->inner_pointer_to_code_cache()->Flush();
+
+    // The hashing of weak_object_to_code_table is no longer valid.
+    heap()->weak_object_to_code_table()->Rehash(
+        heap()->isolate()->factory()->undefined_value());
+
+    // EvacuateNewSpaceAndCandidates iterates over new space objects and for
+    // ArrayBuffers either re-registers them as live or promotes them. This is
+    // needed to properly free them.
+    heap()->array_buffer_tracker()->FreeDead(false);
+
+    // Clear the marking state of live large objects.
+    heap_->lo_space()->ClearMarkingStateOfLiveObjects();
+
+    // Deallocate evacuated candidate pages.
+    ReleaseEvacuationCandidates();
+  }
+
+#ifdef VERIFY_HEAP
+  if (FLAG_verify_heap && !sweeping_in_progress_) {
+    VerifyEvacuation(heap());
+  }
+#endif
+}
+
+
+void MarkCompactCollector::UpdatePointersAfterEvacuation() {
+  GCTracer::Scope gc_scope(heap()->tracer(),
+                           GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS);
+  {
+    GCTracer::Scope gc_scope(
+        heap()->tracer(),
+        GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED);
     UpdateSlotsRecordedIn(migration_slots_buffer_);
     if (FLAG_trace_fragmentation_verbose) {
       PrintF("  migration slots buffer: %d\n",
              SlotsBuffer::SizeOfChain(migration_slots_buffer_));
     }
     slots_buffer_allocator_->DeallocateChain(&migration_slots_buffer_);
     DCHECK(migration_slots_buffer_ == NULL);
 
     // TODO(hpayer): Process the slots buffers in parallel. This has to be done
     // after evacuation of all pages finishes.
     int buffers = evacuation_slots_buffers_.length();
     for (int i = 0; i < buffers; i++) {
       SlotsBuffer* buffer = evacuation_slots_buffers_[i];
       UpdateSlotsRecordedIn(buffer);
       slots_buffer_allocator_->DeallocateChain(&buffer);
     }
     evacuation_slots_buffers_.Rewind(0);
   }
 
   // Second pass: find pointers to new space and update them.
   PointersUpdatingVisitor updating_visitor(heap());
 
   {
-    GCTracer::Scope gc_scope(heap()->tracer(),
-                             GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS);
+    GCTracer::Scope gc_scope(
+        heap()->tracer(), GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW);
     // Update pointers in to space.
     SemiSpaceIterator to_it(heap()->new_space());
     for (HeapObject* object = to_it.Next(); object != NULL;
          object = to_it.Next()) {
       Map* map = object->map();
       object->IterateBody(map->instance_type(), object->SizeFromMap(map),
                           &updating_visitor);
     }
-  }
-
-  {
-    GCTracer::Scope gc_scope(heap()->tracer(),
-                             GCTracer::Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS);
     // Update roots.
     heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE);
-  }
 
-  {
-    GCTracer::Scope gc_scope(heap()->tracer(),
-                             GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS);
     StoreBufferRebuildScope scope(heap_, heap_->store_buffer(),
                                   &Heap::ScavengeStoreBufferCallback);
     heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer);
   }
 
   int npages = evacuation_candidates_.length();
   {
     GCTracer::Scope gc_scope(
         heap()->tracer(),
-        GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED);
+        GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_BETWEEN_EVACUATED);
     for (int i = 0; i < npages; i++) {
       Page* p = evacuation_candidates_[i];
       DCHECK(p->IsEvacuationCandidate() ||
              p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
 
       if (p->IsEvacuationCandidate()) {
         UpdateSlotsRecordedIn(p->slots_buffer());
         if (FLAG_trace_fragmentation_verbose) {
           PrintF("  page %p slots buffer: %d\n", reinterpret_cast<void*>(p),
                  SlotsBuffer::SizeOfChain(p->slots_buffer()));
(...skipping 41 matching lines...)
           default:
             UNREACHABLE();
             break;
         }
       }
     }
   }
 
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
-                             GCTracer::Scope::MC_UPDATE_MISC_POINTERS);
+                             GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK);
     heap_->string_table()->Iterate(&updating_visitor);
 
     // Update pointers from external string table.
     heap_->UpdateReferencesInExternalStringTable(
         &UpdateReferenceInExternalStringTableEntry);
 
     EvacuationWeakObjectRetainer evacuation_object_retainer;
     heap()->ProcessAllWeakReferences(&evacuation_object_retainer);
   }
-
-  {
-    GCTracer::Scope gc_scope(heap()->tracer(),
-                             GCTracer::Scope::MC_SWEEP_ABORTED);
-    // After updating all pointers, we can finally sweep the aborted pages,
-    // effectively overriding any forward pointers.
-    SweepAbortedPages();
-  }
-
-  heap_->isolate()->inner_pointer_to_code_cache()->Flush();
-
-  // The hashing of weak_object_to_code_table is no longer valid.
-  heap()->weak_object_to_code_table()->Rehash(
-      heap()->isolate()->factory()->undefined_value());
 }
 
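For context on what the renamed MC_EVACUATE_UPDATE_POINTERS_* scopes are timing: evacuated objects leave forwarding information behind, and every recorded slot is then rewritten to point at the new copy. A toy sketch of that idea; the one-field object layout below is invented for illustration and is not V8's real forwarding encoding:

#include <vector>

// Toy object: when evacuated, `forwarded` points at the relocated copy.
struct HeapObject {
  HeapObject* forwarded = nullptr;
};

using Slot = HeapObject**;  // A slot is a location holding an object pointer.

void UpdateRecordedSlots(const std::vector<Slot>& slots) {
  for (Slot slot : slots) {
    HeapObject* target = *slot;
    if (target != nullptr && target->forwarded != nullptr) {
      *slot = target->forwarded;  // Redirect the stale pointer in place.
    }
  }
}
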
 
 void MarkCompactCollector::MoveEvacuationCandidatesToEndOfPagesList() {
   int npages = evacuation_candidates_.length();
   for (int i = 0; i < npages; i++) {
     Page* p = evacuation_candidates_[i];
     if (!p->IsEvacuationCandidate()) continue;
     p->Unlink();
     PagedSpace* space = static_cast<PagedSpace*>(p->owner());
(...skipping 201 matching lines...)
   GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_SWEEP);
   double start_time = 0.0;
   if (FLAG_print_cumulative_gc_stat) {
     start_time = heap_->MonotonicallyIncreasingTimeInMs();
   }
 
 #ifdef DEBUG
   state_ = SWEEP_SPACES;
 #endif
 
-  MoveEvacuationCandidatesToEndOfPagesList();
-
   {
     {
       GCTracer::Scope sweep_scope(heap()->tracer(),
-                                  GCTracer::Scope::MC_SWEEP_OLDSPACE);
+                                  GCTracer::Scope::MC_SWEEP_OLD);
       SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING);
     }
     {
       GCTracer::Scope sweep_scope(heap()->tracer(),
                                   GCTracer::Scope::MC_SWEEP_CODE);
       SweepSpace(heap()->code_space(), CONCURRENT_SWEEPING);
     }
     {
       GCTracer::Scope sweep_scope(heap()->tracer(),
                                   GCTracer::Scope::MC_SWEEP_MAP);
       SweepSpace(heap()->map_space(), CONCURRENT_SWEEPING);
     }
     sweeping_in_progress_ = true;
     if (heap()->concurrent_sweeping_enabled()) {
       StartSweeperThreads();
     }
   }
 
   // Deallocate unmarked large objects.
   heap_->lo_space()->FreeUnmarkedObjects();
 
   // Give pages that are queued to be freed back to the OS. Invalid store
   // buffer entries are already filtered out. We can just release the memory.
   heap()->FreeQueuedChunks();
 
-  EvacuateNewSpaceAndCandidates();
-
-  // EvacuateNewSpaceAndCandidates iterates over new space objects and for
-  // ArrayBuffers either re-registers them as live or promotes them. This is
-  // needed to properly free them.
-  heap()->array_buffer_tracker()->FreeDead(false);
-
-  // Clear the marking state of live large objects.
-  heap_->lo_space()->ClearMarkingStateOfLiveObjects();
-
-  // Deallocate evacuated candidate pages.
-  ReleaseEvacuationCandidates();
-
   if (FLAG_print_cumulative_gc_stat) {
     heap_->tracer()->AddSweepingTime(heap_->MonotonicallyIncreasingTimeInMs() -
                                      start_time);
   }
-
-#ifdef VERIFY_HEAP
-  if (FLAG_verify_heap && !sweeping_in_progress_) {
-    VerifyEvacuation(heap());
-  }
-#endif
 }
 
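SweepSpaces() brackets itself with manual timestamps for the cumulative-stats flag rather than a scope, since concurrent sweeping keeps running after the function returns. A sketch of the kind of monotonic millisecond clock MonotonicallyIncreasingTimeInMs() provides, assuming std::chrono as the underlying time source:

#include <chrono>

// Monotonic wall clock in milliseconds; it never goes backwards, so it is
// safe for computing durations the way SweepSpaces() does above.
static double MonotonicallyIncreasingTimeInMs() {
  return std::chrono::duration<double, std::milli>(
             std::chrono::steady_clock::now().time_since_epoch())
      .count();
}

// Usage mirroring the diff:
//   double start = MonotonicallyIncreasingTimeInMs();
//   ... sweep ...
//   tracer->AddSweepingTime(MonotonicallyIncreasingTimeInMs() - start);
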
 void MarkCompactCollector::ParallelSweepSpaceComplete(PagedSpace* space) {
   PageIterator it(space);
   while (it.has_next()) {
     Page* p = it.next();
     if (p->parallel_sweeping_state().Value() ==
         MemoryChunk::kSweepingFinalize) {
       p->parallel_sweeping_state().SetValue(MemoryChunk::kSweepingDone);
(...skipping 78 matching lines...)
     MarkBit mark_bit = Marking::MarkBitFrom(host);
     if (Marking::IsBlack(mark_bit)) {
       RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host);
       RecordRelocSlot(&rinfo, target);
     }
   }
 }
 
 }  // namespace internal
 }  // namespace v8