OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
(...skipping 283 matching lines...) |
294 | 294 |
295 | 295 |
296 void MarkCompactCollector::CollectGarbage() { | 296 void MarkCompactCollector::CollectGarbage() { |
297 // Make sure that Prepare() has been called. The individual steps below will | 297 // Make sure that Prepare() has been called. The individual steps below will |
298 // update the state as they proceed. | 298 // update the state as they proceed. |
299 DCHECK(state_ == PREPARE_GC); | 299 DCHECK(state_ == PREPARE_GC); |
300 | 300 |
301 MarkLiveObjects(); | 301 MarkLiveObjects(); |
302 DCHECK(heap_->incremental_marking()->IsStopped()); | 302 DCHECK(heap_->incremental_marking()->IsStopped()); |
303 | 303 |
| 304 // ClearNonLiveReferences can deoptimize code in dependent code arrays. |
| 305 // Process weak cells first, so that weak cells in dependent code |
| 306 // arrays are cleared or contain only live code objects. |
| 307 ProcessAndClearWeakCells(); |
| 308 |
304 if (FLAG_collect_maps) ClearNonLiveReferences(); | 309 if (FLAG_collect_maps) ClearNonLiveReferences(); |
305 | 310 |
306 ProcessAndClearWeakCells(); | |
307 | 311 |
308 ClearWeakCollections(); | 312 ClearWeakCollections(); |
309 | 313 |
310 heap_->set_encountered_weak_cells(Smi::FromInt(0)); | 314 heap_->set_encountered_weak_cells(Smi::FromInt(0)); |
311 | 315 |
312 isolate()->global_handles()->CollectPhantomCallbackData(); | 316 isolate()->global_handles()->CollectPhantomCallbackData(); |
313 | 317 |
314 #ifdef VERIFY_HEAP | 318 #ifdef VERIFY_HEAP |
315 if (FLAG_verify_heap) { | 319 if (FLAG_verify_heap) { |
316 VerifyMarking(heap_); | 320 VerifyMarking(heap_); |
(...skipping 1635 matching lines...) |
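
A note on the reordering in CollectGarbage above: ProcessAndClearWeakCells() now runs before ClearNonLiveReferences(), so that by the time dependent code arrays are scanned for deoptimization, every weak cell in them is either cleared or points at live code. A minimal standalone model of why the ordering matters; the types below are invented stand-ins, not V8's real WeakCell and DependentCode:

#include <cassert>
#include <vector>

struct Code { bool live; bool deoptimized = false; };

struct WeakCell {
  Code* value;  // Weakly held target.
  void ClearIfDead() { if (value && !value->live) value = nullptr; }
};

int main() {
  Code live_code{true};
  Code dead_code{false};
  std::vector<WeakCell> dependent{{&live_code}, {&dead_code}};

  // Phase 1 (ProcessAndClearWeakCells): drop references to dead code.
  for (WeakCell& cell : dependent) cell.ClearIfDead();

  // Phase 2 (ClearNonLiveReferences): deoptimize what remains; every
  // surviving entry is guaranteed to be live at this point.
  for (WeakCell& cell : dependent)
    if (cell.value) cell.value->deoptimized = true;

  assert(live_code.deoptimized && !dead_code.deoptimized);
}

In the old ordering, ProcessAndClearWeakCells() ran after ClearNonLiveReferences(), so the deoptimization pass could still see weak cells pointing at dead code objects.
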
1952 | 1956 |
1953 | 1957 |
1954 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { | 1958 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { |
1955 // Mark the heap roots including global variables, stack variables, | 1959 // Mark the heap roots including global variables, stack variables, |
1956 // etc., and all objects reachable from them. | 1960 // etc., and all objects reachable from them. |
1957 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); | 1961 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); |
1958 | 1962 |
1959 // Handle the string table specially. | 1963 // Handle the string table specially. |
1960 MarkStringTable(visitor); | 1964 MarkStringTable(visitor); |
1961 | 1965 |
1962 MarkWeakObjectToCodeTable(); | 1966 MarkWeakObjectToCodeTable(visitor); |
1963 | 1967 |
1964 // There may be overflowed objects in the heap. Visit them now. | 1968 // There may be overflowed objects in the heap. Visit them now. |
1965 while (marking_deque_.overflowed()) { | 1969 while (marking_deque_.overflowed()) { |
1966 RefillMarkingDeque(); | 1970 RefillMarkingDeque(); |
1967 EmptyMarkingDeque(); | 1971 EmptyMarkingDeque(); |
1968 } | 1972 } |
1969 } | 1973 } |
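
MarkRoots ends with a fixed-point loop: while the deque reports overflow, RefillMarkingDeque() and EmptyMarkingDeque() run again. A simplified standalone model of that protocol, with invented names and a linear heap rescan standing in for V8's actual refill:

#include <cassert>
#include <cstddef>
#include <vector>

struct Object {
  bool marked = false;
  std::vector<Object*> children;
};

class Collector {
 public:
  Collector(std::vector<Object*> heap, std::size_t capacity)
      : heap_(heap), capacity_(capacity) {}

  void MarkReachable(Object* root) {
    Mark(root);
    EmptyMarkingDeque();
    // Same shape as the loop that closes MarkRoots above.
    while (overflowed_) {
      RefillMarkingDeque();
      EmptyMarkingDeque();
    }
  }

 private:
  void Mark(Object* obj) {
    if (obj->marked) return;
    obj->marked = true;
    if (deque_.size() == capacity_) {
      overflowed_ = true;  // The mark bit survives; only the work item is lost.
      return;
    }
    deque_.push_back(obj);
  }

  void EmptyMarkingDeque() {
    while (!deque_.empty()) {
      Object* obj = deque_.back();
      deque_.pop_back();
      for (Object* child : obj->children) Mark(child);
    }
  }

  void RefillMarkingDeque() {
    overflowed_ = false;
    for (Object* obj : heap_) {  // Linear rescan of the whole heap.
      if (obj->marked) {
        for (Object* child : obj->children) Mark(child);
      }
    }
  }

  std::vector<Object*> heap_;
  std::size_t capacity_;
  bool overflowed_ = false;
  std::vector<Object*> deque_;
};

int main() {
  Object a, b, c, d;
  a.children = {&b, &c};
  c.children = {&d};
  Collector collector({&a, &b, &c, &d}, /*capacity=*/1);
  collector.MarkReachable(&a);
  assert(b.marked && c.marked && d.marked);  // Reached despite the overflow.
}

The invariant that makes the loop terminate is that an object dropped on overflow keeps its mark bit, so each rescan only needs to push children that are still unmarked.
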
1970 | 1974 |
1971 | 1975 |
1972 void MarkCompactCollector::MarkImplicitRefGroups() { | 1976 void MarkCompactCollector::MarkImplicitRefGroups() { |
(...skipping 21 matching lines...) |
1994 } | 1998 } |
1995 | 1999 |
1996 // Once the entire group has been marked, dispose it because it's | 2000 // Once the entire group has been marked, dispose it because it's |
1997 // not needed anymore. | 2001 // not needed anymore. |
1998 delete entry; | 2002 delete entry; |
1999 } | 2003 } |
2000 ref_groups->Rewind(last); | 2004 ref_groups->Rewind(last); |
2001 } | 2005 } |
2002 | 2006 |
2003 | 2007 |
2004 void MarkCompactCollector::MarkWeakObjectToCodeTable() { | 2008 void MarkCompactCollector::MarkWeakObjectToCodeTable(ObjectVisitor* visitor) { |
2005 HeapObject* weak_object_to_code_table = | 2009 HeapObject* weak_object_to_code_table = |
2006 HeapObject::cast(heap()->weak_object_to_code_table()); | 2010 HeapObject::cast(heap()->weak_object_to_code_table()); |
2007 if (!IsMarked(weak_object_to_code_table)) { | 2011 if (!IsMarked(weak_object_to_code_table)) { |
2008 MarkBit mark = Marking::MarkBitFrom(weak_object_to_code_table); | 2012 MarkBit mark = Marking::MarkBitFrom(weak_object_to_code_table); |
2009 SetMark(weak_object_to_code_table, mark); | 2013 SetMark(weak_object_to_code_table, mark); |
| 2014 if (weak_object_to_code_table->IsHashTable()) { |
| 2015 // Values of the table are strong references to dependent code. Mark them. |
| 2016 WeakHashTable* table = WeakHashTable::cast(weak_object_to_code_table); |
| 2017 uint32_t capacity = table->Capacity(); |
| 2018 for (uint32_t i = 0; i < capacity; i++) { |
| 2019 uint32_t value_index = table->EntryToValueIndex(i); |
| 2020 visitor->VisitPointer(table->RawFieldOfElementAt(value_index)); |
| 2021 } |
| 2022 } |
2010 } | 2023 } |
2011 } | 2024 } |
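
The branch added above makes marking of the weak object-to-code table asymmetric: when the table itself is newly marked, its values (the dependent code arrays) are visited through the passed-in visitor as strong references, while the keys stay weak. A standalone sketch of that asymmetry, using an invented pair-based table instead of V8's index-based WeakHashTable:

#include <cassert>
#include <utility>
#include <vector>

struct HeapObject { bool marked = false; };

// Keys are weak, values are strong: only the value slots get visited.
struct WeakTable {
  std::vector<std::pair<HeapObject*, HeapObject*>> entries;
};

// Plays the role of the ObjectVisitor* now threaded through
// MarkWeakObjectToCodeTable.
struct MarkingVisitor {
  void VisitPointer(HeapObject** slot) {
    if (*slot != nullptr) (*slot)->marked = true;
  }
};

void MarkWeakObjectToCodeTable(WeakTable* table, MarkingVisitor* visitor) {
  for (auto& entry : table->entries) {
    // Deliberately skip entry.first: keys stay weak.
    visitor->VisitPointer(&entry.second);
  }
}

int main() {
  HeapObject key, code;
  WeakTable table{{{&key, &code}}};
  MarkingVisitor visitor;
  MarkWeakObjectToCodeTable(&table, &visitor);
  assert(code.marked && !key.marked);
}

Threading the visitor in (the new ObjectVisitor* parameter) lets the table reuse the same marking logic MarkRoots already applies to strong roots.
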
2012 | 2025 |
2013 | 2026 |
2014 // Mark all objects reachable from the objects on the marking stack. | 2027 // Mark all objects reachable from the objects on the marking stack. |
2015 // Before: the marking stack contains zero or more heap object pointers. | 2028 // Before: the marking stack contains zero or more heap object pointers. |
2016 // After: the marking stack is empty, and all objects reachable from the | 2029 // After: the marking stack is empty, and all objects reachable from the |
2017 // marking stack have been marked, or are overflowed in the heap. | 2030 // marking stack have been marked, or are overflowed in the heap. |
2018 void MarkCompactCollector::EmptyMarkingDeque() { | 2031 void MarkCompactCollector::EmptyMarkingDeque() { |
2019 Map* filler_map = heap_->one_pointer_filler_map(); | 2032 Map* filler_map = heap_->one_pointer_filler_map(); |
(...skipping 286 matching lines...) |
2306 for (HeapObject* obj = map_iterator.Next(); obj != NULL; | 2319 for (HeapObject* obj = map_iterator.Next(); obj != NULL; |
2307 obj = map_iterator.Next()) { | 2320 obj = map_iterator.Next()) { |
2308 Map* map = Map::cast(obj); | 2321 Map* map = Map::cast(obj); |
2309 | 2322 |
2310 if (!map->CanTransition()) continue; | 2323 if (!map->CanTransition()) continue; |
2311 | 2324 |
2312 MarkBit map_mark = Marking::MarkBitFrom(map); | 2325 MarkBit map_mark = Marking::MarkBitFrom(map); |
2313 ClearNonLivePrototypeTransitions(map); | 2326 ClearNonLivePrototypeTransitions(map); |
2314 ClearNonLiveMapTransitions(map, map_mark); | 2327 ClearNonLiveMapTransitions(map, map_mark); |
2315 | 2328 |
2316 if (map_mark.Get()) { | 2329 if (!map_mark.Get()) { |
2317 ClearNonLiveDependentCode(map->dependent_code()); | 2330 have_code_to_deoptimize_ |= |
2318 } else { | 2331 map->dependent_code()->MarkCodeForDeoptimization( |
2319 ClearDependentCode(map->dependent_code()); | 2332 isolate(), DependentCode::kWeakCodeGroup); |
2320 map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array())); | 2333 map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array())); |
2321 } | 2334 } |
2322 } | 2335 } |
2323 | 2336 |
2324 // Iterate over property cell space, removing dependent code that is not | |
2325 // otherwise kept alive by strong references. | |
2326 HeapObjectIterator cell_iterator(heap_->property_cell_space()); | |
2327 for (HeapObject* cell = cell_iterator.Next(); cell != NULL; | |
2328 cell = cell_iterator.Next()) { | |
2329 if (IsMarked(cell)) { | |
2330 ClearNonLiveDependentCode(PropertyCell::cast(cell)->dependent_code()); | |
2331 } | |
2332 } | |
2333 | |
2334 // Iterate over allocation sites, removing dependent code that is not | |
2335 // otherwise kept alive by strong references. | |
2336 Object* undefined = heap()->undefined_value(); | |
2337 for (Object* site = heap()->allocation_sites_list(); site != undefined; | |
2338 site = AllocationSite::cast(site)->weak_next()) { | |
2339 if (IsMarked(site)) { | |
2340 ClearNonLiveDependentCode(AllocationSite::cast(site)->dependent_code()); | |
2341 } | |
2342 } | |
2343 | |
2344 if (heap_->weak_object_to_code_table()->IsHashTable()) { | 2337 if (heap_->weak_object_to_code_table()->IsHashTable()) { |
2345 WeakHashTable* table = | 2338 WeakHashTable* table = |
2346 WeakHashTable::cast(heap_->weak_object_to_code_table()); | 2339 WeakHashTable::cast(heap_->weak_object_to_code_table()); |
2347 uint32_t capacity = table->Capacity(); | 2340 uint32_t capacity = table->Capacity(); |
2348 for (uint32_t i = 0; i < capacity; i++) { | 2341 for (uint32_t i = 0; i < capacity; i++) { |
2349 uint32_t key_index = table->EntryToIndex(i); | 2342 uint32_t key_index = table->EntryToIndex(i); |
2350 Object* key = table->get(key_index); | 2343 Object* key = table->get(key_index); |
2351 if (!table->IsKey(key)) continue; | 2344 if (!table->IsKey(key)) continue; |
2352 uint32_t value_index = table->EntryToValueIndex(i); | 2345 uint32_t value_index = table->EntryToValueIndex(i); |
2353 Object* value = table->get(value_index); | 2346 Object* value = table->get(value_index); |
2354 if (key->IsCell() && !IsMarked(key)) { | 2347 if (key->IsCell() && !IsMarked(key)) { |
2355 Cell* cell = Cell::cast(key); | 2348 Cell* cell = Cell::cast(key); |
2356 Object* object = cell->value(); | 2349 Object* object = cell->value(); |
2357 if (IsMarked(object)) { | 2350 if (IsMarked(object)) { |
2358 MarkBit mark = Marking::MarkBitFrom(cell); | 2351 MarkBit mark = Marking::MarkBitFrom(cell); |
2359 SetMark(cell, mark); | 2352 SetMark(cell, mark); |
2360 Object** value_slot = HeapObject::RawField(cell, Cell::kValueOffset); | 2353 Object** value_slot = HeapObject::RawField(cell, Cell::kValueOffset); |
2361 RecordSlot(value_slot, value_slot, *value_slot); | 2354 RecordSlot(value_slot, value_slot, *value_slot); |
2362 } | 2355 } |
2363 } | 2356 } |
2364 if (IsMarked(key)) { | 2357 if (IsMarked(key)) { |
2365 if (!IsMarked(value)) { | 2358 if (!IsMarked(value)) { |
2366 HeapObject* obj = HeapObject::cast(value); | 2359 HeapObject* obj = HeapObject::cast(value); |
2367 MarkBit mark = Marking::MarkBitFrom(obj); | 2360 MarkBit mark = Marking::MarkBitFrom(obj); |
2368 SetMark(obj, mark); | 2361 SetMark(obj, mark); |
2369 } | 2362 } |
2370 ClearNonLiveDependentCode(DependentCode::cast(value)); | |
2371 } else { | 2363 } else { |
2372 ClearDependentCode(DependentCode::cast(value)); | 2364 have_code_to_deoptimize_ |= |
| 2365 DependentCode::cast(value)->MarkCodeForDeoptimization( |
| 2366 isolate(), DependentCode::kWeakCodeGroup); |
2373 table->set(key_index, heap_->the_hole_value()); | 2367 table->set(key_index, heap_->the_hole_value()); |
2374 table->set(value_index, heap_->the_hole_value()); | 2368 table->set(value_index, heap_->the_hole_value()); |
2375 table->ElementRemoved(); | 2369 table->ElementRemoved(); |
2376 } | 2370 } |
2377 } | 2371 } |
2378 } | 2372 } |
2379 } | 2373 } |
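
Both deoptimization sites in the new code, dead maps and table entries whose key died, now go through DependentCode::MarkCodeForDeoptimization and OR the result into have_code_to_deoptimize_. A hedged standalone sketch of that accumulation pattern; the types are invented, and the boolean return is inferred from how the call sites above use |=:

#include <cassert>
#include <vector>

struct Code { bool marked_for_deoptimization = false; };

struct DependentCode {
  std::vector<Code*> entries;
  // Returns true if any code object was newly marked; the call sites above
  // OR this into have_code_to_deoptimize_.
  bool MarkCodeForDeoptimization() {
    bool marked = false;
    for (Code* code : entries) {
      if (!code->marked_for_deoptimization) {
        code->marked_for_deoptimization = true;
        marked = true;
      }
    }
    return marked;
  }
};

int main() {
  Code c1, c2;
  DependentCode deps{{&c1, &c2}};
  bool have_code_to_deoptimize = false;
  have_code_to_deoptimize |= deps.MarkCodeForDeoptimization();
  assert(have_code_to_deoptimize && c1.marked_for_deoptimization &&
         c2.marked_for_deoptimization);
}
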
2380 | 2374 |
2381 | 2375 |
2382 void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) { | 2376 void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) { |
(...skipping 155 matching lines...) |
2538 if (to_trim <= 0) return; | 2532 if (to_trim <= 0) return; |
2539 heap_->RightTrimFixedArray<Heap::FROM_GC>(descriptors->GetEnumCache(), | 2533 heap_->RightTrimFixedArray<Heap::FROM_GC>(descriptors->GetEnumCache(), |
2540 to_trim); | 2534 to_trim); |
2541 | 2535 |
2542 if (!descriptors->HasEnumIndicesCache()) return; | 2536 if (!descriptors->HasEnumIndicesCache()) return; |
2543 FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache(); | 2537 FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache(); |
2544 heap_->RightTrimFixedArray<Heap::FROM_GC>(enum_indices_cache, to_trim); | 2538 heap_->RightTrimFixedArray<Heap::FROM_GC>(enum_indices_cache, to_trim); |
2545 } | 2539 } |
2546 | 2540 |
2547 | 2541 |
2548 void MarkCompactCollector::ClearDependentCode(DependentCode* entries) { | |
2549 DisallowHeapAllocation no_allocation; | |
2550 DependentCode::GroupStartIndexes starts(entries); | |
2551 int number_of_entries = starts.number_of_entries(); | |
2552 if (number_of_entries == 0) return; | |
2553 int g = DependentCode::kWeakCodeGroup; | |
2554 for (int i = starts.at(g); i < starts.at(g + 1); i++) { | |
2555 // If the entry is compilation info then the map must be alive, | |
2556 // and ClearDependentCode shouldn't be called. | |
2557 DCHECK(entries->is_code_at(i)); | |
2558 Code* code = entries->code_at(i); | |
2559 if (IsMarked(code) && !code->marked_for_deoptimization()) { | |
2560 DependentCode::SetMarkedForDeoptimization( | |
2561 code, static_cast<DependentCode::DependencyGroup>(g)); | |
2562 code->InvalidateEmbeddedObjects(); | |
2563 have_code_to_deoptimize_ = true; | |
2564 } | |
2565 } | |
2566 for (int i = 0; i < number_of_entries; i++) { | |
2567 entries->clear_at(i); | |
2568 } | |
2569 } | |
2570 | |
2571 | |
2572 int MarkCompactCollector::ClearNonLiveDependentCodeInGroup( | |
2573 DependentCode* entries, int group, int start, int end, int new_start) { | |
2574 int survived = 0; | |
2575 for (int i = start; i < end; i++) { | |
2576 Object* obj = entries->object_at(i); | |
2577 DCHECK(obj->IsCode() || IsMarked(obj)); | |
2578 if (IsMarked(obj) && | |
2579 (!obj->IsCode() || !WillBeDeoptimized(Code::cast(obj)))) { | |
2580 if (new_start + survived != i) { | |
2581 entries->set_object_at(new_start + survived, obj); | |
2582 } | |
2583 Object** slot = entries->slot_at(new_start + survived); | |
2584 RecordSlot(slot, slot, obj); | |
2585 survived++; | |
2586 } | |
2587 } | |
2588 entries->set_number_of_entries( | |
2589 static_cast<DependentCode::DependencyGroup>(group), survived); | |
2590 return survived; | |
2591 } | |
2592 | |
2593 | |
2594 void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) { | |
2595 DisallowHeapAllocation no_allocation; | |
2596 DependentCode::GroupStartIndexes starts(entries); | |
2597 int number_of_entries = starts.number_of_entries(); | |
2598 if (number_of_entries == 0) return; | |
2599 int new_number_of_entries = 0; | |
2600 // Go through all groups, remove dead codes and compact. | |
2601 for (int g = 0; g < DependentCode::kGroupCount; g++) { | |
2602 int survived = ClearNonLiveDependentCodeInGroup( | |
2603 entries, g, starts.at(g), starts.at(g + 1), new_number_of_entries); | |
2604 new_number_of_entries += survived; | |
2605 } | |
2606 for (int i = new_number_of_entries; i < number_of_entries; i++) { | |
2607 entries->clear_at(i); | |
2608 } | |
2609 } | |
2610 | |
2611 | |
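
The left column deletes ClearDependentCode, ClearNonLiveDependentCodeInGroup, and ClearNonLiveDependentCode: with weak cells already processed, their work collapses into the MarkCodeForDeoptimization call sites earlier in the patch. For reference, the core idiom of the deleted ClearNonLiveDependentCodeInGroup was in-place compaction of surviving entries; a generic standalone sketch, with liveness reduced to a flag on an invented Entry type:

#include <cassert>
#include <vector>

struct Entry { bool live; };

// Keeps live entries from [start, end), sliding them down so they start at
// new_start; returns how many survived.
int CompactGroup(std::vector<Entry*>* entries, int start, int end,
                 int new_start) {
  int survived = 0;
  for (int i = start; i < end; i++) {
    Entry* e = (*entries)[i];
    if (e->live) {
      if (new_start + survived != i) (*entries)[new_start + survived] = e;
      survived++;
    }
  }
  return survived;
}

int main() {
  Entry a{true}, b{false}, c{true};
  std::vector<Entry*> group = {&a, &b, &c};
  int survived = CompactGroup(&group, 0, 3, 0);
  assert(survived == 2 && group[0] == &a && group[1] == &c);
}

Sliding survivors toward new_start let all dependency groups share one backing array, with group boundaries recomputed from the returned survivor counts.
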
2612 void MarkCompactCollector::ProcessWeakCollections() { | 2542 void MarkCompactCollector::ProcessWeakCollections() { |
2613 GCTracer::Scope gc_scope(heap()->tracer(), | 2543 GCTracer::Scope gc_scope(heap()->tracer(), |
2614 GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS); | 2544 GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS); |
2615 Object* weak_collection_obj = heap()->encountered_weak_collections(); | 2545 Object* weak_collection_obj = heap()->encountered_weak_collections(); |
2616 while (weak_collection_obj != Smi::FromInt(0)) { | 2546 while (weak_collection_obj != Smi::FromInt(0)) { |
2617 JSWeakCollection* weak_collection = | 2547 JSWeakCollection* weak_collection = |
2618 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); | 2548 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); |
2619 DCHECK(MarkCompactCollector::IsMarked(weak_collection)); | 2549 DCHECK(MarkCompactCollector::IsMarked(weak_collection)); |
2620 if (weak_collection->table()->IsHashTable()) { | 2550 if (weak_collection->table()->IsHashTable()) { |
2621 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); | 2551 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); |
(...skipping 1781 matching lines...) |
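
ProcessWeakCollections above walks the weak collections encountered during marking; the list is threaded through the JSWeakCollection objects themselves and terminated by Smi::FromInt(0), the same sentinel pattern CollectGarbage resets encountered_weak_cells with. A minimal standalone model of the threaded-list traversal, with invented fields and nullptr standing in for the Smi sentinel:

#include <cassert>

struct WeakCollection {
  WeakCollection* next_encountered;  // Stands in for the threaded list field.
  bool processed = false;
};

void ProcessWeakCollections(WeakCollection* head) {
  // nullptr plays the role of the Smi::FromInt(0) terminator.
  for (WeakCollection* wc = head; wc != nullptr; wc = wc->next_encountered)
    wc->processed = true;
}

int main() {
  WeakCollection second{nullptr};
  WeakCollection first{&second};
  ProcessWeakCollections(&first);
  assert(first.processed && second.processed);
}
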
4403 SlotsBuffer* buffer = *buffer_address; | 4333 SlotsBuffer* buffer = *buffer_address; |
4404 while (buffer != NULL) { | 4334 while (buffer != NULL) { |
4405 SlotsBuffer* next_buffer = buffer->next(); | 4335 SlotsBuffer* next_buffer = buffer->next(); |
4406 DeallocateBuffer(buffer); | 4336 DeallocateBuffer(buffer); |
4407 buffer = next_buffer; | 4337 buffer = next_buffer; |
4408 } | 4338 } |
4409 *buffer_address = NULL; | 4339 *buffer_address = NULL; |
4410 } | 4340 } |
4411 } | 4341 } |
4412 } // namespace v8::internal | 4342 } // namespace v8::internal |