Chromium Code Reviews

Side by Side Diff: src/heap/mark-compact.cc

Issue 1515503006: Clean up mark-compact phases and GC counter names. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Address comments from Hannes (created 5 years ago)
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/heap/mark-compact.h" 5 #include "src/heap/mark-compact.h"
6 6
7 #include "src/base/atomicops.h" 7 #include "src/base/atomicops.h"
8 #include "src/base/bits.h" 8 #include "src/base/bits.h"
9 #include "src/base/sys-info.h" 9 #include "src/base/sys-info.h"
10 #include "src/code-stubs.h" 10 #include "src/code-stubs.h"
(...skipping 293 matching lines...)
304 compacting_ = evacuation_candidates_.length() > 0; 304 compacting_ = evacuation_candidates_.length() > 0;
305 } 305 }
306 306
307 return compacting_; 307 return compacting_;
308 } 308 }
309 309
310 310
311 void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() { 311 void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() {
312 { 312 {
313 GCTracer::Scope gc_scope(heap()->tracer(), 313 GCTracer::Scope gc_scope(heap()->tracer(),
314 GCTracer::Scope::MC_STORE_BUFFER_CLEAR); 314 GCTracer::Scope::MC_CLEAR_STORE_BUFFER);
315 heap_->store_buffer()->ClearInvalidStoreBufferEntries(); 315 heap_->store_buffer()->ClearInvalidStoreBufferEntries();
316 } 316 }
317 317
318 { 318 {
319 GCTracer::Scope gc_scope(heap()->tracer(), 319 GCTracer::Scope gc_scope(heap()->tracer(),
320 GCTracer::Scope::MC_SLOTS_BUFFER_CLEAR); 320 GCTracer::Scope::MC_CLEAR_SLOTS_BUFFER);
321 int number_of_pages = evacuation_candidates_.length(); 321 int number_of_pages = evacuation_candidates_.length();
322 for (int i = 0; i < number_of_pages; i++) { 322 for (int i = 0; i < number_of_pages; i++) {
323 Page* p = evacuation_candidates_[i]; 323 Page* p = evacuation_candidates_[i];
324 SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer()); 324 SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer());
325 } 325 }
326 } 326 }
327 #ifdef VERIFY_HEAP
328 if (FLAG_verify_heap) {
329 VerifyValidStoreAndSlotsBufferEntries();
330 }
331 #endif
327 } 332 }
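
The GCTracer::Scope objects above are RAII timers: constructing one starts a clock against a named counter, and the destructor attributes the elapsed time when the block exits, which is what makes the nested scoping pattern cheap and exception-safe. A minimal self-contained sketch of the idea (hypothetical stand-in types, not V8's implementation):

    #include <chrono>

    struct Tracer {  // stand-in for GCTracer (hypothetical)
      void AddTime(int counter_id, double ms) { /* accumulate per counter */ }
    };

    static double NowMs() {
      using namespace std::chrono;
      return duration<double, std::milli>(
                 steady_clock::now().time_since_epoch()).count();
    }

    class TraceScope {
     public:
      TraceScope(Tracer* tracer, int counter_id)
          : tracer_(tracer), counter_id_(counter_id), start_ms_(NowMs()) {}
      ~TraceScope() { tracer_->AddTime(counter_id_, NowMs() - start_ms_); }

     private:
      Tracer* tracer_;
      int counter_id_;
      double start_ms_;
    };
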
328 333
329 334
330 #ifdef VERIFY_HEAP 335 #ifdef VERIFY_HEAP
331 static void VerifyValidSlotsBufferEntries(Heap* heap, PagedSpace* space) { 336 static void VerifyValidSlotsBufferEntries(Heap* heap, PagedSpace* space) {
332 PageIterator it(space); 337 PageIterator it(space);
333 while (it.has_next()) { 338 while (it.has_next()) {
334 Page* p = it.next(); 339 Page* p = it.next();
335 SlotsBuffer::VerifySlots(heap, p->slots_buffer()); 340 SlotsBuffer::VerifySlots(heap, p->slots_buffer());
336 } 341 }
(...skipping 20 matching lines...)
357 // Make sure that Prepare() has been called. The individual steps below will 362 // Make sure that Prepare() has been called. The individual steps below will
358 // update the state as they proceed. 363 // update the state as they proceed.
359 DCHECK(state_ == PREPARE_GC); 364 DCHECK(state_ == PREPARE_GC);
360 365
361 MarkLiveObjects(); 366 MarkLiveObjects();
362 367
363 DCHECK(heap_->incremental_marking()->IsStopped()); 368 DCHECK(heap_->incremental_marking()->IsStopped());
364 369
365 ClearNonLiveReferences(); 370 ClearNonLiveReferences();
366 371
367 ClearWeakCollections();
368
369 #ifdef VERIFY_HEAP 372 #ifdef VERIFY_HEAP
370 if (FLAG_verify_heap) { 373 if (FLAG_verify_heap) {
371 VerifyMarking(heap_); 374 VerifyMarking(heap_);
372 } 375 }
373 #endif 376 #endif
374 377
375 ClearInvalidStoreAndSlotsBufferEntries();
376
377 #ifdef VERIFY_HEAP
378 if (FLAG_verify_heap) {
379 VerifyValidStoreAndSlotsBufferEntries();
380 }
381 #endif
382
383 SweepSpaces(); 378 SweepSpaces();
384 379
380 EvacuateNewSpaceAndCandidates();
381
385 Finish(); 382 Finish();
386
387 if (marking_parity_ == EVEN_MARKING_PARITY) {
388 marking_parity_ = ODD_MARKING_PARITY;
389 } else {
390 DCHECK(marking_parity_ == ODD_MARKING_PARITY);
391 marking_parity_ = EVEN_MARKING_PARITY;
392 }
393 } 383 }
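
With the hunks above applied, the top-level mark-compact driver is a linear sequence of phases, each timed under a correspondingly named top-level counter (reconstructed from this diff; surrounding code elided):

    MarkLiveObjects();               // MC_MARK_* sub-scopes
    ClearNonLiveReferences();        // MC_CLEAR and MC_CLEAR_* sub-scopes
    SweepSpaces();                   // MC_SWEEP_* sub-scopes
    EvacuateNewSpaceAndCandidates(); // MC_EVACUATE and MC_EVACUATE_* sub-scopes
    Finish();                        // MC_FINISH
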
394 384
395 385
396 #ifdef VERIFY_HEAP 386 #ifdef VERIFY_HEAP
397 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { 387 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
398 PageIterator it(space); 388 PageIterator it(space);
399 389
400 while (it.has_next()) { 390 while (it.has_next()) {
401 Page* p = it.next(); 391 Page* p = it.next();
402 CHECK(p->markbits()->IsClean()); 392 CHECK(p->markbits()->IsClean());
(...skipping 457 matching lines...)
860 850
861 #ifdef VERIFY_HEAP 851 #ifdef VERIFY_HEAP
862 if (!was_marked_incrementally_ && FLAG_verify_heap) { 852 if (!was_marked_incrementally_ && FLAG_verify_heap) {
863 VerifyMarkbitsAreClean(); 853 VerifyMarkbitsAreClean();
864 } 854 }
865 #endif 855 #endif
866 } 856 }
867 857
868 858
869 void MarkCompactCollector::Finish() { 859 void MarkCompactCollector::Finish() {
860 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_FINISH);
861
862 // The hashing of weak_object_to_code_table is no longer valid.
863 heap()->weak_object_to_code_table()->Rehash(
864 heap()->isolate()->factory()->undefined_value());
865
866 // Clear the marking state of live large objects.
867 heap_->lo_space()->ClearMarkingStateOfLiveObjects();
868
870 #ifdef DEBUG 869 #ifdef DEBUG
871 DCHECK(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); 870 DCHECK(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
872 state_ = IDLE; 871 state_ = IDLE;
873 #endif 872 #endif
873 heap_->isolate()->inner_pointer_to_code_cache()->Flush();
874
874 // The stub cache is not traversed during GC; clear the cache to 875 // The stub cache is not traversed during GC; clear the cache to
875 // force lazy re-initialization of it. This must be done after the 876 // force lazy re-initialization of it. This must be done after the
876 // GC, because it relies on the new address of certain old space 877 // GC, because it relies on the new address of certain old space
877 // objects (empty string, illegal builtin). 878 // objects (empty string, illegal builtin).
878 isolate()->stub_cache()->Clear(); 879 isolate()->stub_cache()->Clear();
879 880
880 if (have_code_to_deoptimize_) { 881 if (have_code_to_deoptimize_) {
881 // Some code objects were marked for deoptimization during the GC. 882 // Some code objects were marked for deoptimization during the GC.
882 Deoptimizer::DeoptimizeMarkedCode(isolate()); 883 Deoptimizer::DeoptimizeMarkedCode(isolate());
883 have_code_to_deoptimize_ = false; 884 have_code_to_deoptimize_ = false;
884 } 885 }
885 886
886 heap_->incremental_marking()->ClearIdleMarkingDelayCounter(); 887 heap_->incremental_marking()->ClearIdleMarkingDelayCounter();
888
889 if (marking_parity_ == EVEN_MARKING_PARITY) {
890 marking_parity_ = ODD_MARKING_PARITY;
891 } else {
892 DCHECK(marking_parity_ == ODD_MARKING_PARITY);
893 marking_parity_ = EVEN_MARKING_PARITY;
894 }
887 } 895 }
888 896
889 897
890 // ------------------------------------------------------------------------- 898 // -------------------------------------------------------------------------
891 // Phase 1: tracing and marking live objects. 899 // Phase 1: tracing and marking live objects.
892 // before: all objects are in normal state. 900 // before: all objects are in normal state.
893 // after: a live object's map pointer is marked as '00'. 901 // after: a live object's map pointer is marked as '00'.
894 902
895 // Marking all live objects in the heap as part of mark-sweep or mark-compact 903 // Marking all live objects in the heap as part of mark-sweep or mark-compact
896 // collection. Before marking, all objects are in their normal state. After 904 // collection. Before marking, all objects are in their normal state. After
(...skipping 1065 matching lines...)
1962 1970
1963 { 1971 {
1964 GCTracer::Scope gc_scope(heap()->tracer(), 1972 GCTracer::Scope gc_scope(heap()->tracer(),
1965 GCTracer::Scope::MC_MARK_PREPARE_CODE_FLUSH); 1973 GCTracer::Scope::MC_MARK_PREPARE_CODE_FLUSH);
1966 PrepareForCodeFlushing(); 1974 PrepareForCodeFlushing();
1967 } 1975 }
1968 1976
1969 RootMarkingVisitor root_visitor(heap()); 1977 RootMarkingVisitor root_visitor(heap());
1970 1978
1971 { 1979 {
1972 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK_ROOT); 1980 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK_ROOTS);
1973 MarkRoots(&root_visitor); 1981 MarkRoots(&root_visitor);
1974 }
1975
1976 {
1977 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK_TOPOPT);
1978 ProcessTopOptimizedFrame(&root_visitor); 1982 ProcessTopOptimizedFrame(&root_visitor);
1979 } 1983 }
1980 1984
1981 { 1985 {
1982 GCTracer::Scope gc_scope(heap()->tracer(), 1986 GCTracer::Scope gc_scope(heap()->tracer(),
1983 GCTracer::Scope::MC_MARK_WEAK_CLOSURE); 1987 GCTracer::Scope::MC_MARK_WEAK_CLOSURE);
1984 1988
1985 // The objects reachable from the roots are marked, yet unreachable 1989 // The objects reachable from the roots are marked, yet unreachable
1986 // objects are unmarked. Mark objects reachable due to host 1990 // objects are unmarked. Mark objects reachable due to host
1987 // application specific logic or through Harmony weak maps. 1991 // application specific logic or through Harmony weak maps.
(...skipping 13 matching lines...) Expand all
2001 ProcessMarkingDeque(); 2005 ProcessMarkingDeque();
2002 2006
2003 // Repeat Harmony weak maps marking to mark unmarked objects reachable from 2007 // Repeat Harmony weak maps marking to mark unmarked objects reachable from
2004 // the weak roots we just marked as pending destruction. 2008 // the weak roots we just marked as pending destruction.
2005 // 2009 //
2006 // We only process harmony collections, as all object groups have been fully 2010 // We only process harmony collections, as all object groups have been fully
 2007 // processed and no weakly reachable node can discover new object groups. 2011 // processed and no weakly reachable node can discover new object groups.
2008 ProcessEphemeralMarking(&root_visitor, true); 2012 ProcessEphemeralMarking(&root_visitor, true);
2009 } 2013 }
2010 2014
2011 AfterMarking();
2012
2013 if (FLAG_print_cumulative_gc_stat) { 2015 if (FLAG_print_cumulative_gc_stat) {
2014 heap_->tracer()->AddMarkingTime(heap_->MonotonicallyIncreasingTimeInMs() - 2016 heap_->tracer()->AddMarkingTime(heap_->MonotonicallyIncreasingTimeInMs() -
2015 start_time); 2017 start_time);
2016 } 2018 }
2019 if (FLAG_track_gc_object_stats) {
2020 if (FLAG_trace_gc_object_stats) {
2021 heap()->object_stats_->TraceObjectStats();
2022 }
2023 heap()->object_stats_->CheckpointObjectStats();
2024 }
2017 } 2025 }
2018 2026
2019 2027
2020 void MarkCompactCollector::AfterMarking() { 2028 void MarkCompactCollector::ClearNonLiveReferences() {
2029 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR);
2030
2021 { 2031 {
2022 GCTracer::Scope gc_scope(heap()->tracer(), 2032 GCTracer::Scope gc_scope(heap()->tracer(),
2023 GCTracer::Scope::MC_MARK_STRING_TABLE); 2033 GCTracer::Scope::MC_CLEAR_STRING_TABLE);
2024 2034
2025 // Prune the string table removing all strings only pointed to by the 2035 // Prune the string table removing all strings only pointed to by the
2026 // string table. Cannot use string_table() here because the string 2036 // string table. Cannot use string_table() here because the string
2027 // table is marked. 2037 // table is marked.
2028 StringTable* string_table = heap()->string_table(); 2038 StringTable* string_table = heap()->string_table();
2029 InternalizedStringTableCleaner internalized_visitor(heap()); 2039 InternalizedStringTableCleaner internalized_visitor(heap());
2030 string_table->IterateElements(&internalized_visitor); 2040 string_table->IterateElements(&internalized_visitor);
2031 string_table->ElementsRemoved(internalized_visitor.PointersRemoved()); 2041 string_table->ElementsRemoved(internalized_visitor.PointersRemoved());
2032 2042
2033 ExternalStringTableCleaner external_visitor(heap()); 2043 ExternalStringTableCleaner external_visitor(heap());
2034 heap()->external_string_table_.Iterate(&external_visitor); 2044 heap()->external_string_table_.Iterate(&external_visitor);
2035 heap()->external_string_table_.CleanUp(); 2045 heap()->external_string_table_.CleanUp();
2036 } 2046 }
2037 2047
2038 { 2048 {
2039 GCTracer::Scope gc_scope(heap()->tracer(), 2049 GCTracer::Scope gc_scope(heap()->tracer(),
2040 GCTracer::Scope::MC_MARK_WEAK_REFERENCES); 2050 GCTracer::Scope::MC_CLEAR_WEAK_LISTS);
2041
2042 // Process the weak references. 2051 // Process the weak references.
2043 MarkCompactWeakObjectRetainer mark_compact_object_retainer; 2052 MarkCompactWeakObjectRetainer mark_compact_object_retainer;
2044 heap()->ProcessAllWeakReferences(&mark_compact_object_retainer); 2053 heap()->ProcessAllWeakReferences(&mark_compact_object_retainer);
2045 } 2054 }
2046 2055
2047 { 2056 {
2048 GCTracer::Scope gc_scope(heap()->tracer(), 2057 GCTracer::Scope gc_scope(heap()->tracer(),
2049 GCTracer::Scope::MC_MARK_GLOBAL_HANDLES); 2058 GCTracer::Scope::MC_CLEAR_GLOBAL_HANDLES);
2050 2059
2051 // Remove object groups after marking phase. 2060 // Remove object groups after marking phase.
2052 heap()->isolate()->global_handles()->RemoveObjectGroups(); 2061 heap()->isolate()->global_handles()->RemoveObjectGroups();
2053 heap()->isolate()->global_handles()->RemoveImplicitRefGroups(); 2062 heap()->isolate()->global_handles()->RemoveImplicitRefGroups();
2054 } 2063 }
2055 2064
2056 // Flush code from collected candidates. 2065 // Flush code from collected candidates.
2057 if (is_code_flushing_enabled()) { 2066 if (is_code_flushing_enabled()) {
2058 GCTracer::Scope gc_scope(heap()->tracer(), 2067 GCTracer::Scope gc_scope(heap()->tracer(),
2059 GCTracer::Scope::MC_MARK_CODE_FLUSH); 2068 GCTracer::Scope::MC_CLEAR_CODE_FLUSH);
2060 code_flusher_->ProcessCandidates(); 2069 code_flusher_->ProcessCandidates();
2061 } 2070 }
2062 2071
2063 if (FLAG_track_gc_object_stats) {
2064 if (FLAG_trace_gc_object_stats) {
2065 heap()->object_stats_->TraceObjectStats();
2066 }
2067 heap()->object_stats_->CheckpointObjectStats();
2068 }
2069 }
2070
2071
2072 void MarkCompactCollector::ClearNonLiveReferences() {
2073 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR);
2074 2072
2075 DependentCode* dependent_code_list; 2073 DependentCode* dependent_code_list;
2076 Object* non_live_map_list; 2074 Object* non_live_map_list;
2077 ClearWeakCells(&non_live_map_list, &dependent_code_list); 2075 ClearWeakCells(&non_live_map_list, &dependent_code_list);
2078 2076
2079 { 2077 {
2080 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAP); 2078 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAPS);
2081 ClearSimpleMapTransitions(non_live_map_list); 2079 ClearSimpleMapTransitions(non_live_map_list);
2082 ClearFullMapTransitions(); 2080 ClearFullMapTransitions();
2083 } 2081 }
2084 2082
2085 MarkDependentCodeForDeoptimization(dependent_code_list); 2083 MarkDependentCodeForDeoptimization(dependent_code_list);
2084
2085 ClearWeakCollections();
2086
2087 ClearInvalidStoreAndSlotsBufferEntries();
2086 } 2088 }
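
Taken together, the hunks above fold the old AfterMarking into ClearNonLiveReferences, so the entire clear phase now nests under a single MC_CLEAR scope. A simplified assembly of the code shown (sub-phase bodies elided):

    void MarkCompactCollector::ClearNonLiveReferences() {
      GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR);
      // String table, weak lists, global handles, and code flushing are
      // handled first, each under its own MC_CLEAR_* sub-scope (see above).
      DependentCode* dependent_code_list;
      Object* non_live_map_list;
      ClearWeakCells(&non_live_map_list, &dependent_code_list);
      ClearSimpleMapTransitions(non_live_map_list);   // MC_CLEAR_MAPS
      ClearFullMapTransitions();                      // MC_CLEAR_MAPS
      MarkDependentCodeForDeoptimization(dependent_code_list);
      ClearWeakCollections();
      ClearInvalidStoreAndSlotsBufferEntries();
    }
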
2087 2089
2088 2090
2089 void MarkCompactCollector::MarkDependentCodeForDeoptimization( 2091 void MarkCompactCollector::MarkDependentCodeForDeoptimization(
2090 DependentCode* list_head) { 2092 DependentCode* list_head) {
2091 GCTracer::Scope gc_scope(heap()->tracer(), 2093 GCTracer::Scope gc_scope(heap()->tracer(),
2092 GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE); 2094 GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE);
2093
2094 Isolate* isolate = this->isolate(); 2095 Isolate* isolate = this->isolate();
2095 DependentCode* current = list_head; 2096 DependentCode* current = list_head;
2096 while (current->length() > 0) { 2097 while (current->length() > 0) {
2097 have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization( 2098 have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization(
2098 isolate, DependentCode::kWeakCodeGroup); 2099 isolate, DependentCode::kWeakCodeGroup);
2099 current = current->next_link(); 2100 current = current->next_link();
2100 } 2101 }
2101 2102
2102 WeakHashTable* table = heap_->weak_object_to_code_table(); 2103 WeakHashTable* table = heap_->weak_object_to_code_table();
2103 uint32_t capacity = table->Capacity(); 2104 uint32_t capacity = table->Capacity();
(...skipping 175 matching lines...)
2279 descriptors->GetEnumCache(), to_trim); 2280 descriptors->GetEnumCache(), to_trim);
2280 2281
2281 if (!descriptors->HasEnumIndicesCache()) return; 2282 if (!descriptors->HasEnumIndicesCache()) return;
2282 FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache(); 2283 FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache();
2283 heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(enum_indices_cache, 2284 heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(enum_indices_cache,
2284 to_trim); 2285 to_trim);
2285 } 2286 }
2286 2287
2287 2288
2288 void MarkCompactCollector::ProcessWeakCollections() { 2289 void MarkCompactCollector::ProcessWeakCollections() {
2289 GCTracer::Scope gc_scope(heap()->tracer(),
2290 GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS);
2291 Object* weak_collection_obj = heap()->encountered_weak_collections(); 2290 Object* weak_collection_obj = heap()->encountered_weak_collections();
2292 while (weak_collection_obj != Smi::FromInt(0)) { 2291 while (weak_collection_obj != Smi::FromInt(0)) {
2293 JSWeakCollection* weak_collection = 2292 JSWeakCollection* weak_collection =
2294 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); 2293 reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
2295 DCHECK(MarkCompactCollector::IsMarked(weak_collection)); 2294 DCHECK(MarkCompactCollector::IsMarked(weak_collection));
2296 if (weak_collection->table()->IsHashTable()) { 2295 if (weak_collection->table()->IsHashTable()) {
2297 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); 2296 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
2298 for (int i = 0; i < table->Capacity(); i++) { 2297 for (int i = 0; i < table->Capacity(); i++) {
2299 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { 2298 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) {
2300 Object** key_slot = 2299 Object** key_slot =
2301 table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i)); 2300 table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i));
2302 RecordSlot(table, key_slot, *key_slot); 2301 RecordSlot(table, key_slot, *key_slot);
2303 Object** value_slot = 2302 Object** value_slot =
2304 table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i)); 2303 table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i));
2305 MarkCompactMarkingVisitor::MarkObjectByPointer(this, table, 2304 MarkCompactMarkingVisitor::MarkObjectByPointer(this, table,
2306 value_slot); 2305 value_slot);
2307 } 2306 }
2308 } 2307 }
2309 } 2308 }
2310 weak_collection_obj = weak_collection->next(); 2309 weak_collection_obj = weak_collection->next();
2311 } 2310 }
2312 } 2311 }
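
ProcessWeakCollections implements ephemeron semantics: an entry's value is kept alive only while its key is alive, and marking a value can in turn make further keys reachable, so the caller interleaves it with ProcessMarkingDeque until a fixpoint is reached. A conceptual sketch of that loop (hypothetical helper names, not the V8 code):

    bool found_new_work = true;
    while (found_new_work) {
      found_new_work = false;
      for (ObjectHashTable* table : encountered_weak_tables) {
        for (int i = 0; i < table->Capacity(); i++) {
          HeapObject* key = HeapObject::cast(table->KeyAt(i));
          if (IsMarked(key) && !IsMarked(ValueAt(table, i))) {
            Mark(ValueAt(table, i));   // may push new objects onto the deque
            found_new_work = true;
          }
        }
      }
      DrainMarkingDeque();  // transitively mark what was just discovered
    }
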
2313 2312
2314 2313
2315 void MarkCompactCollector::ClearWeakCollections() { 2314 void MarkCompactCollector::ClearWeakCollections() {
2316 GCTracer::Scope gc_scope(heap()->tracer(), 2315 GCTracer::Scope gc_scope(heap()->tracer(),
2317 GCTracer::Scope::MC_WEAKCOLLECTION_CLEAR); 2316 GCTracer::Scope::MC_CLEAR_WEAK_COLLECTIONS);
2318 Object* weak_collection_obj = heap()->encountered_weak_collections(); 2317 Object* weak_collection_obj = heap()->encountered_weak_collections();
2319 while (weak_collection_obj != Smi::FromInt(0)) { 2318 while (weak_collection_obj != Smi::FromInt(0)) {
2320 JSWeakCollection* weak_collection = 2319 JSWeakCollection* weak_collection =
2321 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); 2320 reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
2322 DCHECK(MarkCompactCollector::IsMarked(weak_collection)); 2321 DCHECK(MarkCompactCollector::IsMarked(weak_collection));
2323 if (weak_collection->table()->IsHashTable()) { 2322 if (weak_collection->table()->IsHashTable()) {
2324 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); 2323 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
2325 for (int i = 0; i < table->Capacity(); i++) { 2324 for (int i = 0; i < table->Capacity(); i++) {
2326 HeapObject* key = HeapObject::cast(table->KeyAt(i)); 2325 HeapObject* key = HeapObject::cast(table->KeyAt(i));
2327 if (!MarkCompactCollector::IsMarked(key)) { 2326 if (!MarkCompactCollector::IsMarked(key)) {
2328 table->RemoveEntry(i); 2327 table->RemoveEntry(i);
2329 } 2328 }
2330 } 2329 }
2331 } 2330 }
2332 weak_collection_obj = weak_collection->next(); 2331 weak_collection_obj = weak_collection->next();
2333 weak_collection->set_next(heap()->undefined_value()); 2332 weak_collection->set_next(heap()->undefined_value());
2334 } 2333 }
2335 heap()->set_encountered_weak_collections(Smi::FromInt(0)); 2334 heap()->set_encountered_weak_collections(Smi::FromInt(0));
2336 } 2335 }
2337 2336
2338 2337
2339 void MarkCompactCollector::AbortWeakCollections() { 2338 void MarkCompactCollector::AbortWeakCollections() {
2340 GCTracer::Scope gc_scope(heap()->tracer(),
2341 GCTracer::Scope::MC_WEAKCOLLECTION_ABORT);
2342 Object* weak_collection_obj = heap()->encountered_weak_collections(); 2339 Object* weak_collection_obj = heap()->encountered_weak_collections();
2343 while (weak_collection_obj != Smi::FromInt(0)) { 2340 while (weak_collection_obj != Smi::FromInt(0)) {
2344 JSWeakCollection* weak_collection = 2341 JSWeakCollection* weak_collection =
2345 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); 2342 reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
2346 weak_collection_obj = weak_collection->next(); 2343 weak_collection_obj = weak_collection->next();
2347 weak_collection->set_next(heap()->undefined_value()); 2344 weak_collection->set_next(heap()->undefined_value());
2348 } 2345 }
2349 heap()->set_encountered_weak_collections(Smi::FromInt(0)); 2346 heap()->set_encountered_weak_collections(Smi::FromInt(0));
2350 } 2347 }
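
All three weak-collection routines walk the same structure: encountered_weak_collections is an intrusive singly linked list threaded through the collections themselves and terminated by the Smi zero sentinel. The traverse-and-unlink pattern, distilled from AbortWeakCollections above:

    Object* cur = heap()->encountered_weak_collections();
    while (cur != Smi::FromInt(0)) {
      JSWeakCollection* wc = reinterpret_cast<JSWeakCollection*>(cur);
      cur = wc->next();                         // read the link first...
      wc->set_next(heap()->undefined_value());  // ...then unlink the node
    }
    heap()->set_encountered_weak_collections(Smi::FromInt(0));
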
2351 2348
2352 2349
2353 void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list, 2350 void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list,
2354 DependentCode** dependent_code_list) { 2351 DependentCode** dependent_code_list) {
2355 Heap* heap = this->heap(); 2352 Heap* heap = this->heap();
2356 GCTracer::Scope gc_scope(heap->tracer(), GCTracer::Scope::MC_CLEAR_WEAKCELL); 2353 GCTracer::Scope gc_scope(heap->tracer(),
2354 GCTracer::Scope::MC_CLEAR_WEAK_CELLS);
2357 Object* weak_cell_obj = heap->encountered_weak_cells(); 2355 Object* weak_cell_obj = heap->encountered_weak_cells();
2358 Object* the_hole_value = heap->the_hole_value(); 2356 Object* the_hole_value = heap->the_hole_value();
2359 DependentCode* dependent_code_head = 2357 DependentCode* dependent_code_head =
2360 DependentCode::cast(heap->empty_fixed_array()); 2358 DependentCode::cast(heap->empty_fixed_array());
2361 Object* non_live_map_head = Smi::FromInt(0); 2359 Object* non_live_map_head = Smi::FromInt(0);
2362 while (weak_cell_obj != Smi::FromInt(0)) { 2360 while (weak_cell_obj != Smi::FromInt(0)) {
2363 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); 2361 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
2364 Object* next_weak_cell = weak_cell->next(); 2362 Object* next_weak_cell = weak_cell->next();
2365 bool clear_value = true; 2363 bool clear_value = true;
2366 bool clear_next = true; 2364 bool clear_next = true;
(...skipping 1075 matching lines...)
3442 default: 3440 default:
3443 UNREACHABLE(); 3441 UNREACHABLE();
3444 break; 3442 break;
3445 } 3443 }
3446 } 3444 }
3447 } 3445 }
3448 } 3446 }
3449 3447
3450 3448
3451 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { 3449 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
3450 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_EVACUATE);
3452 Heap::RelocationLock relocation_lock(heap()); 3451 Heap::RelocationLock relocation_lock(heap());
3453 3452
3454 { 3453 {
3455 GCTracer::Scope gc_scope(heap()->tracer(), 3454 GCTracer::Scope gc_scope(heap()->tracer(),
3456 GCTracer::Scope::MC_SWEEP_NEWSPACE); 3455 GCTracer::Scope::MC_EVACUATE_NEW_SPACE);
3457 EvacuationScope evacuation_scope(this); 3456 EvacuationScope evacuation_scope(this);
3458 EvacuateNewSpace(); 3457 EvacuateNewSpace();
3459 } 3458 }
3460 3459
3461 { 3460 {
3462 GCTracer::Scope gc_scope(heap()->tracer(), 3461 GCTracer::Scope gc_scope(heap()->tracer(),
3463 GCTracer::Scope::MC_EVACUATE_PAGES); 3462 GCTracer::Scope::MC_EVACUATE_CANDIDATES);
3464 EvacuationScope evacuation_scope(this); 3463 EvacuationScope evacuation_scope(this);
3465 EvacuatePagesInParallel(); 3464 EvacuatePagesInParallel();
3466 } 3465 }
3467 3466
3467 UpdatePointersAfterEvacuation();
3468
3468 { 3469 {
3469 GCTracer::Scope gc_scope(heap()->tracer(), 3470 GCTracer::Scope gc_scope(heap()->tracer(),
3470 GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED); 3471 GCTracer::Scope::MC_EVACUATE_CLEAN_UP);
3472 // After updating all pointers, we can finally sweep the aborted pages,
3473 // effectively overriding any forward pointers.
3474 SweepAbortedPages();
3475
3476 // EvacuateNewSpaceAndCandidates iterates over new space objects and for
3477 // ArrayBuffers either re-registers them as live or promotes them. This is
3478 // needed to properly free them.
3479 heap()->array_buffer_tracker()->FreeDead(false);
3480
3481 // Deallocate evacuated candidate pages.
3482 ReleaseEvacuationCandidates();
3483 }
3484
3485 #ifdef VERIFY_HEAP
3486 if (FLAG_verify_heap && !sweeping_in_progress_) {
3487 VerifyEvacuation(heap());
3488 }
3489 #endif
3490 }
3491
3492
3493 void MarkCompactCollector::UpdatePointersAfterEvacuation() {
3494 GCTracer::Scope gc_scope(heap()->tracer(),
3495 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS);
3496 {
3497 GCTracer::Scope gc_scope(
3498 heap()->tracer(),
3499 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED);
3471 UpdateSlotsRecordedIn(migration_slots_buffer_); 3500 UpdateSlotsRecordedIn(migration_slots_buffer_);
3472 if (FLAG_trace_fragmentation_verbose) { 3501 if (FLAG_trace_fragmentation_verbose) {
3473 PrintF(" migration slots buffer: %d\n", 3502 PrintF(" migration slots buffer: %d\n",
3474 SlotsBuffer::SizeOfChain(migration_slots_buffer_)); 3503 SlotsBuffer::SizeOfChain(migration_slots_buffer_));
3475 } 3504 }
3476 slots_buffer_allocator_->DeallocateChain(&migration_slots_buffer_); 3505 slots_buffer_allocator_->DeallocateChain(&migration_slots_buffer_);
3477 DCHECK(migration_slots_buffer_ == NULL); 3506 DCHECK(migration_slots_buffer_ == NULL);
3478 3507
3479 // TODO(hpayer): Process the slots buffers in parallel. This has to be done 3508 // TODO(hpayer): Process the slots buffers in parallel. This has to be done
3480 // after evacuation of all pages finishes. 3509 // after evacuation of all pages finishes.
3481 int buffers = evacuation_slots_buffers_.length(); 3510 int buffers = evacuation_slots_buffers_.length();
3482 for (int i = 0; i < buffers; i++) { 3511 for (int i = 0; i < buffers; i++) {
3483 SlotsBuffer* buffer = evacuation_slots_buffers_[i]; 3512 SlotsBuffer* buffer = evacuation_slots_buffers_[i];
3484 UpdateSlotsRecordedIn(buffer); 3513 UpdateSlotsRecordedIn(buffer);
3485 slots_buffer_allocator_->DeallocateChain(&buffer); 3514 slots_buffer_allocator_->DeallocateChain(&buffer);
3486 } 3515 }
3487 evacuation_slots_buffers_.Rewind(0); 3516 evacuation_slots_buffers_.Rewind(0);
3488 } 3517 }
3489 3518
3490 // Second pass: find pointers to new space and update them. 3519 // Second pass: find pointers to new space and update them.
3491 PointersUpdatingVisitor updating_visitor(heap()); 3520 PointersUpdatingVisitor updating_visitor(heap());
3492 3521
3493 { 3522 {
3494 GCTracer::Scope gc_scope(heap()->tracer(), 3523 GCTracer::Scope gc_scope(
3495 GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS); 3524 heap()->tracer(), GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW);
3496 // Update pointers in to space. 3525 // Update pointers in to space.
3497 SemiSpaceIterator to_it(heap()->new_space()); 3526 SemiSpaceIterator to_it(heap()->new_space());
3498 for (HeapObject* object = to_it.Next(); object != NULL; 3527 for (HeapObject* object = to_it.Next(); object != NULL;
3499 object = to_it.Next()) { 3528 object = to_it.Next()) {
3500 Map* map = object->map(); 3529 Map* map = object->map();
3501 object->IterateBody(map->instance_type(), object->SizeFromMap(map), 3530 object->IterateBody(map->instance_type(), object->SizeFromMap(map),
3502 &updating_visitor); 3531 &updating_visitor);
3503 } 3532 }
3504 }
3505
3506 {
3507 GCTracer::Scope gc_scope(heap()->tracer(),
3508 GCTracer::Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS);
3509 // Update roots. 3533 // Update roots.
3510 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE); 3534 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE);
3511 }
3512 3535
3513 {
3514 GCTracer::Scope gc_scope(heap()->tracer(),
3515 GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS);
3516 StoreBufferRebuildScope scope(heap_, heap_->store_buffer(), 3536 StoreBufferRebuildScope scope(heap_, heap_->store_buffer(),
3517 &Heap::ScavengeStoreBufferCallback); 3537 &Heap::ScavengeStoreBufferCallback);
3518 heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer); 3538 heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer);
3519 } 3539 }
3520 3540
3521 int npages = evacuation_candidates_.length(); 3541 int npages = evacuation_candidates_.length();
3522 { 3542 {
3523 GCTracer::Scope gc_scope( 3543 GCTracer::Scope gc_scope(
3524 heap()->tracer(), 3544 heap()->tracer(),
3525 GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED); 3545 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_BETWEEN_EVACUATED);
3526 for (int i = 0; i < npages; i++) { 3546 for (int i = 0; i < npages; i++) {
3527 Page* p = evacuation_candidates_[i]; 3547 Page* p = evacuation_candidates_[i];
3528 DCHECK(p->IsEvacuationCandidate() || 3548 DCHECK(p->IsEvacuationCandidate() ||
3529 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); 3549 p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
3530 3550
3531 if (p->IsEvacuationCandidate()) { 3551 if (p->IsEvacuationCandidate()) {
3532 UpdateSlotsRecordedIn(p->slots_buffer()); 3552 UpdateSlotsRecordedIn(p->slots_buffer());
3533 if (FLAG_trace_fragmentation_verbose) { 3553 if (FLAG_trace_fragmentation_verbose) {
3534 PrintF(" page %p slots buffer: %d\n", reinterpret_cast<void*>(p), 3554 PrintF(" page %p slots buffer: %d\n", reinterpret_cast<void*>(p),
3535 SlotsBuffer::SizeOfChain(p->slots_buffer())); 3555 SlotsBuffer::SizeOfChain(p->slots_buffer()));
(...skipping 41 matching lines...)
3577 default: 3597 default:
3578 UNREACHABLE(); 3598 UNREACHABLE();
3579 break; 3599 break;
3580 } 3600 }
3581 } 3601 }
3582 } 3602 }
3583 } 3603 }
3584 3604
3585 { 3605 {
3586 GCTracer::Scope gc_scope(heap()->tracer(), 3606 GCTracer::Scope gc_scope(heap()->tracer(),
3587 GCTracer::Scope::MC_UPDATE_MISC_POINTERS); 3607 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK);
3588 heap_->string_table()->Iterate(&updating_visitor); 3608 heap_->string_table()->Iterate(&updating_visitor);
3589 3609
3590 // Update pointers from external string table. 3610 // Update pointers from external string table.
3591 heap_->UpdateReferencesInExternalStringTable( 3611 heap_->UpdateReferencesInExternalStringTable(
3592 &UpdateReferenceInExternalStringTableEntry); 3612 &UpdateReferenceInExternalStringTableEntry);
3593 3613
3594 EvacuationWeakObjectRetainer evacuation_object_retainer; 3614 EvacuationWeakObjectRetainer evacuation_object_retainer;
3595 heap()->ProcessAllWeakReferences(&evacuation_object_retainer); 3615 heap()->ProcessAllWeakReferences(&evacuation_object_retainer);
3596 } 3616 }
3597
3598 {
3599 GCTracer::Scope gc_scope(heap()->tracer(),
3600 GCTracer::Scope::MC_SWEEP_ABORTED);
3601 // After updating all pointers, we can finally sweep the aborted pages,
3602 // effectively overriding any forward pointers.
3603 SweepAbortedPages();
3604 }
3605
3606 heap_->isolate()->inner_pointer_to_code_cache()->Flush();
3607
3608 // The hashing of weak_object_to_code_table is no longer valid.
3609 heap()->weak_object_to_code_table()->Rehash(
3610 heap()->isolate()->factory()->undefined_value());
3611 } 3617 }
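
The slot updates above all reduce to one operation: if a recorded slot still points at a pre-evacuation copy, follow the forwarding address that evacuation left in the object's map word. A minimal model of a single slot update (simplified from how V8's MapWord encodes forwarding in place):

    static void UpdateSlot(Object** slot) {
      HeapObject* obj = HeapObject::cast(*slot);
      MapWord map_word = obj->map_word();
      if (map_word.IsForwardingAddress()) {
        // The object was evacuated; redirect the slot to the new copy.
        *slot = map_word.ToForwardingAddress();
      }
    }
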
3612 3618
3613 3619
3614 void MarkCompactCollector::MoveEvacuationCandidatesToEndOfPagesList() { 3620 void MarkCompactCollector::MoveEvacuationCandidatesToEndOfPagesList() {
3615 int npages = evacuation_candidates_.length(); 3621 int npages = evacuation_candidates_.length();
3616 for (int i = 0; i < npages; i++) { 3622 for (int i = 0; i < npages; i++) {
3617 Page* p = evacuation_candidates_[i]; 3623 Page* p = evacuation_candidates_[i];
3618 if (!p->IsEvacuationCandidate()) continue; 3624 if (!p->IsEvacuationCandidate()) continue;
3619 p->Unlink(); 3625 p->Unlink();
3620 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); 3626 PagedSpace* space = static_cast<PagedSpace*>(p->owner());
(...skipping 206 matching lines...) Expand 10 before | Expand all | Expand 10 after
3827 3833
3828 #ifdef DEBUG 3834 #ifdef DEBUG
3829 state_ = SWEEP_SPACES; 3835 state_ = SWEEP_SPACES;
3830 #endif 3836 #endif
3831 3837
3832 MoveEvacuationCandidatesToEndOfPagesList(); 3838 MoveEvacuationCandidatesToEndOfPagesList();
3833 3839
3834 { 3840 {
3835 { 3841 {
3836 GCTracer::Scope sweep_scope(heap()->tracer(), 3842 GCTracer::Scope sweep_scope(heap()->tracer(),
3837 GCTracer::Scope::MC_SWEEP_OLDSPACE); 3843 GCTracer::Scope::MC_SWEEP_OLD);
3838 SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING); 3844 SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING);
3839 } 3845 }
3840 { 3846 {
3841 GCTracer::Scope sweep_scope(heap()->tracer(), 3847 GCTracer::Scope sweep_scope(heap()->tracer(),
3842 GCTracer::Scope::MC_SWEEP_CODE); 3848 GCTracer::Scope::MC_SWEEP_CODE);
3843 SweepSpace(heap()->code_space(), CONCURRENT_SWEEPING); 3849 SweepSpace(heap()->code_space(), CONCURRENT_SWEEPING);
3844 } 3850 }
3845 { 3851 {
3846 GCTracer::Scope sweep_scope(heap()->tracer(), 3852 GCTracer::Scope sweep_scope(heap()->tracer(),
3847 GCTracer::Scope::MC_SWEEP_MAP); 3853 GCTracer::Scope::MC_SWEEP_MAP);
3848 SweepSpace(heap()->map_space(), CONCURRENT_SWEEPING); 3854 SweepSpace(heap()->map_space(), CONCURRENT_SWEEPING);
3849 } 3855 }
3850 sweeping_in_progress_ = true; 3856 sweeping_in_progress_ = true;
3851 if (heap()->concurrent_sweeping_enabled()) { 3857 if (heap()->concurrent_sweeping_enabled()) {
3852 StartSweeperThreads(); 3858 StartSweeperThreads();
3853 } 3859 }
3854 } 3860 }
3855 3861
3856 // Deallocate unmarked large objects. 3862 // Deallocate unmarked large objects.
3857 heap_->lo_space()->FreeUnmarkedObjects(); 3863 heap_->lo_space()->FreeUnmarkedObjects();
3858 3864
3859 // Give pages that are queued to be freed back to the OS. Invalid store 3865 // Give pages that are queued to be freed back to the OS. Invalid store
 3860 // buffer entries are already filtered out. We can just release the memory. 3866 // buffer entries are already filtered out. We can just release the memory.
3861 heap()->FreeQueuedChunks(); 3867 heap()->FreeQueuedChunks();
3862 3868
3863 EvacuateNewSpaceAndCandidates();
3864
3865 // EvacuateNewSpaceAndCandidates iterates over new space objects and for
3866 // ArrayBuffers either re-registers them as live or promotes them. This is
3867 // needed to properly free them.
3868 heap()->array_buffer_tracker()->FreeDead(false);
3869
3870 // Clear the marking state of live large objects.
3871 heap_->lo_space()->ClearMarkingStateOfLiveObjects();
3872
3873 // Deallocate evacuated candidate pages.
3874 ReleaseEvacuationCandidates();
3875
3876 if (FLAG_print_cumulative_gc_stat) { 3869 if (FLAG_print_cumulative_gc_stat) {
3877 heap_->tracer()->AddSweepingTime(heap_->MonotonicallyIncreasingTimeInMs() - 3870 heap_->tracer()->AddSweepingTime(heap_->MonotonicallyIncreasingTimeInMs() -
3878 start_time); 3871 start_time);
3879 } 3872 }
3880
3881 #ifdef VERIFY_HEAP
3882 if (FLAG_verify_heap && !sweeping_in_progress_) {
3883 VerifyEvacuation(heap());
3884 }
3885 #endif
3886 } 3873 }
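
SweepSpace itself is outside this diff; the MC_SWEEP_* scopes above each time one pass that rebuilds a paged space's free list from the gaps between live objects. A conceptual sketch of sweeping one page (simplified; the iterator helper is hypothetical):

    Address free_start = page->area_start();
    for (HeapObject* live : LiveObjectsInAddressOrder(page)) {
      if (live->address() > free_start) {
        // The gap before this live object becomes free-list memory.
        space->Free(free_start, static_cast<int>(live->address() - free_start));
      }
      free_start = live->address() + live->Size();
    }
    if (free_start < page->area_end()) {
      space->Free(free_start, static_cast<int>(page->area_end() - free_start));
    }
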
3887 3874
3888 3875
3889 void MarkCompactCollector::ParallelSweepSpaceComplete(PagedSpace* space) { 3876 void MarkCompactCollector::ParallelSweepSpaceComplete(PagedSpace* space) {
3890 PageIterator it(space); 3877 PageIterator it(space);
3891 while (it.has_next()) { 3878 while (it.has_next()) {
3892 Page* p = it.next(); 3879 Page* p = it.next();
3893 if (p->parallel_sweeping_state().Value() == 3880 if (p->parallel_sweeping_state().Value() ==
3894 MemoryChunk::kSweepingFinalize) { 3881 MemoryChunk::kSweepingFinalize) {
3895 p->parallel_sweeping_state().SetValue(MemoryChunk::kSweepingDone); 3882 p->parallel_sweeping_state().SetValue(MemoryChunk::kSweepingDone);
(...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after
3974 MarkBit mark_bit = Marking::MarkBitFrom(host); 3961 MarkBit mark_bit = Marking::MarkBitFrom(host);
3975 if (Marking::IsBlack(mark_bit)) { 3962 if (Marking::IsBlack(mark_bit)) {
3976 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); 3963 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host);
3977 RecordRelocSlot(&rinfo, target); 3964 RecordRelocSlot(&rinfo, target);
3978 } 3965 }
3979 } 3966 }
3980 } 3967 }
3981 3968
3982 } // namespace internal 3969 } // namespace internal
3983 } // namespace v8 3970 } // namespace v8
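
Net effect of the renames across this file: every mark-compact counter now carries its phase as a prefix, mirroring the collector's phase structure (names as they appear in the hunks above):

    MC_MARK_*      roots, prepare code flush, weak closure
    MC_CLEAR_*     string table, weak lists, global handles, code flush, maps,
                   weak cells, weak collections, store buffer, slots buffer,
                   dependent code
    MC_SWEEP_*     old, code, map
    MC_EVACUATE_*  new space, candidates, clean up, update pointers
                   (to evacuated, to new, between evacuated, weak)
    MC_FINISH      end-of-cycle bookkeeping

This consistent naming is presumably what the companion tools/eval_gc_time.sh change in this CL is updated to parse.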
