OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1406 matching lines...) |
1417 heap()->isolate()->global_handles()->IterateWeakRoots(&root_visitor); | 1417 heap()->isolate()->global_handles()->IterateWeakRoots(&root_visitor); |
1418 while (marking_stack_.overflowed()) { | 1418 while (marking_stack_.overflowed()) { |
1419 RefillMarkingStack(); | 1419 RefillMarkingStack(); |
1420 EmptyMarkingStack(); | 1420 EmptyMarkingStack(); |
1421 } | 1421 } |
1422 | 1422 |
1423 // Repeat host application specific marking to mark unmarked objects | 1423 // Repeat host application specific marking to mark unmarked objects |
1424 // reachable from the weak roots. | 1424 // reachable from the weak roots. |
1425 ProcessExternalMarking(); | 1425 ProcessExternalMarking(); |
1426 | 1426 |
| 1427 // Object literal map caches reference symbols (cache keys) and maps |
| 1428 // (cache values). At this point, maps that are still useful have already |
| 1429 // been marked. Mark the keys for the live values before we process the |
| 1430 // symbol table. |
| 1431 ProcessMapCaches(); |
| 1432 |
1427 // Prune the symbol table removing all symbols only pointed to by the | 1433 // Prune the symbol table removing all symbols only pointed to by the |
1428 // symbol table. Cannot use symbol_table() here because the symbol | 1434 // symbol table. Cannot use symbol_table() here because the symbol |
1429 // table is marked. | 1435 // table is marked. |
1430 SymbolTable* symbol_table = heap()->raw_unchecked_symbol_table(); | 1436 SymbolTable* symbol_table = heap()->raw_unchecked_symbol_table(); |
1431 SymbolTableCleaner v(heap()); | 1437 SymbolTableCleaner v(heap()); |
1432 symbol_table->IterateElements(&v); | 1438 symbol_table->IterateElements(&v); |
1433 symbol_table->ElementsRemoved(v.PointersRemoved()); | 1439 symbol_table->ElementsRemoved(v.PointersRemoved()); |
1434 heap()->external_string_table_.Iterate(&v); | 1440 heap()->external_string_table_.Iterate(&v); |
1435 heap()->external_string_table_.CleanUp(); | 1441 heap()->external_string_table_.CleanUp(); |
1436 | 1442 |
1437 // Process the weak references. | 1443 // Process the weak references. |
1438 MarkCompactWeakObjectRetainer mark_compact_object_retainer; | 1444 MarkCompactWeakObjectRetainer mark_compact_object_retainer; |
1439 heap()->ProcessWeakReferences(&mark_compact_object_retainer); | 1445 heap()->ProcessWeakReferences(&mark_compact_object_retainer); |
1440 | 1446 |
1441 // Remove object groups after marking phase. | 1447 // Remove object groups after marking phase. |
1442 heap()->isolate()->global_handles()->RemoveObjectGroups(); | 1448 heap()->isolate()->global_handles()->RemoveObjectGroups(); |
1443 heap()->isolate()->global_handles()->RemoveImplicitRefGroups(); | 1449 heap()->isolate()->global_handles()->RemoveImplicitRefGroups(); |
1444 | 1450 |
1445 // Flush code from collected candidates. | 1451 // Flush code from collected candidates. |
1446 if (is_code_flushing_enabled()) { | 1452 if (is_code_flushing_enabled()) { |
1447 code_flusher_->ProcessCandidates(); | 1453 code_flusher_->ProcessCandidates(); |
1448 } | 1454 } |
1449 | 1455 |
1450 // Clean up dead objects from the runtime profiler. | 1456 // Clean up dead objects from the runtime profiler. |
1451 heap()->isolate()->runtime_profiler()->RemoveDeadSamples(); | 1457 heap()->isolate()->runtime_profiler()->RemoveDeadSamples(); |
1452 } | 1458 } |
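
The RefillMarkingStack()/EmptyMarkingStack() loop near the top of this function handles marking-stack overflow: an object whose push was dropped stays marked but unscanned, so the heap is rescanned and the stack drained repeatedly until no further overflow occurs. Below is a minimal, self-contained sketch of that pattern with hypothetical types (Node, BoundedStack); it is a simplification, not V8's marking deque or heap iteration.

#include <cstddef>
#include <vector>

// Hypothetical object-graph node; V8's real objects and mark bits differ.
struct Node {
  bool marked = false;
  std::vector<Node*> children;
};

// Fixed-capacity worklist that records when a push had to be dropped.
class BoundedStack {
 public:
  explicit BoundedStack(size_t capacity) : capacity_(capacity) {}
  bool overflowed() const { return overflowed_; }
  void clear_overflow() { overflowed_ = false; }
  bool empty() const { return items_.empty(); }
  void Push(Node* n) {
    if (items_.size() == capacity_) overflowed_ = true;  // drop; n stays marked but unscanned
    else items_.push_back(n);
  }
  Node* Pop() { Node* n = items_.back(); items_.pop_back(); return n; }
 private:
  size_t capacity_;
  bool overflowed_ = false;
  std::vector<Node*> items_;
};

// Mark an object and queue it for scanning (the push may silently overflow).
void MarkObject(Node* n, BoundedStack* stack) {
  if (!n->marked) { n->marked = true; stack->Push(n); }
}

// Drain the stack, marking and pushing children (which may overflow again).
void EmptyMarkingStack(BoundedStack* stack) {
  while (!stack->empty()) {
    Node* n = stack->Pop();
    for (Node* c : n->children) MarkObject(c, stack);
  }
}

// Rescan the whole "heap" for marked objects that still have unmarked
// children, i.e. objects whose scan was lost to an overflowed push.
void RefillMarkingStack(const std::vector<Node*>& heap, BoundedStack* stack) {
  stack->clear_overflow();
  for (Node* n : heap) {
    if (!n->marked) continue;
    for (Node* c : n->children) {
      if (!c->marked) { stack->Push(n); break; }
    }
  }
}

// The driver has the same shape as the loop in the patch:
//   while (stack.overflowed()) { RefillMarkingStack(heap, &stack); EmptyMarkingStack(&stack); }
// Marking is monotonic, so the loop terminates once a refill finds nothing left to push.
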
1453 | 1459 |
1454 | 1460 |
| 1461 void MarkCompactCollector::ProcessMapCaches() { |
| 1462 Object* raw_context = heap()->global_contexts_list_; |
| 1463 while (raw_context != heap()->undefined_value()) { |
| 1464 Context* context = reinterpret_cast<Context*>(raw_context); |
| 1465 if (context->IsMarked()) { |
| 1466 HeapObject* raw_map_cache = |
| 1467 HeapObject::cast(context->get(Context::MAP_CACHE_INDEX)); |
| 1468 // A map cache may be reachable from the stack. In this case |
| 1469 // it's already transitively marked and it's too late to clean up |
| 1470 // its parts. |
| 1471 if (!raw_map_cache->IsMarked() && |
| 1472 raw_map_cache != heap()->undefined_value()) { |
| 1473 MapCache* map_cache = reinterpret_cast<MapCache*>(raw_map_cache); |
| 1474 int existing_elements = map_cache->NumberOfElements(); |
| 1475 int used_elements = 0; |
| 1476 for (int i = MapCache::kElementsStartIndex; |
| 1477 i < map_cache->length(); |
| 1478 i += MapCache::kEntrySize) { |
| 1479 Object* raw_key = map_cache->get(i); |
| 1480 if (raw_key == heap()->undefined_value() || |
| 1481 raw_key == heap()->null_value()) continue; |
| 1482 STATIC_ASSERT(MapCache::kEntrySize == 2); |
| 1483 Object* raw_map = map_cache->get(i + 1); |
| 1484 if (raw_map->IsHeapObject() && |
| 1485 HeapObject::cast(raw_map)->IsMarked()) { |
| 1486 ++used_elements; |
| 1487 } else { |
| 1488 // Delete useless entries with unmarked maps. |
| 1489 ASSERT(raw_map->IsMap()); |
| 1490 map_cache->set_null_unchecked(heap(), i); |
| 1491 map_cache->set_null_unchecked(heap(), i + 1); |
| 1492 } |
| 1493 } |
| 1494 if (used_elements == 0) { |
| 1495 context->set(Context::MAP_CACHE_INDEX, heap()->undefined_value()); |
| 1496 } else { |
| 1497 // Note: we don't actually shrink the cache here to avoid |
| 1498 // extra complexity during GC. We rely on subsequent cache |
| 1499 // usages (EnsureCapacity) to do this. |
| 1500 map_cache->ElementsRemoved(existing_elements - used_elements); |
| 1501 MarkObject(map_cache); |
| 1502 } |
| 1503 } |
| 1504 } |
| 1505 // Move to next element in the list. |
| 1506 raw_context = context->get(Context::NEXT_CONTEXT_LINK); |
| 1507 } |
| 1508 ProcessMarkingStack(); |
| 1509 } |
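
ProcessMapCaches() above walks each global context's map cache as a flat array of (symbol key, map value) pairs, nulls out pairs whose map did not survive marking, and only fixes up the element count rather than shrinking the backing store. A minimal sketch of that pruning idea follows, using hypothetical stand-in types (FlatMapCache, CacheEntry), not V8's MapCache/Context API.

#include <vector>

// Hypothetical stand-ins; in V8 the keys are symbols and the values are Maps.
struct MapValue { bool marked = false; };
struct CacheEntry {
  const char* key = nullptr;   // nullptr plays the role of the undefined/null hole
  MapValue* value = nullptr;
};

struct FlatMapCache {
  std::vector<CacheEntry> entries;  // mirrors kEntrySize == 2: key slot then value slot
  int number_of_elements = 0;       // bookkeeping count, like NumberOfElements()

  // Clear entries whose map is dead; return how many live entries remain.
  int Prune() {
    int used = 0;
    for (CacheEntry& e : entries) {
      if (e.key == nullptr) continue;              // already-empty slot, skip
      if (e.value != nullptr && e.value->marked) {
        ++used;                                    // keep: the map survived marking
      } else {
        e.key = nullptr;                           // analogous to set_null_unchecked(i)
        e.value = nullptr;                         // analogous to set_null_unchecked(i + 1)
      }
    }
    // As in the patch, nothing is shrunk here; only the count is fixed up,
    // leaving compaction to a later EnsureCapacity-style rehash.
    number_of_elements = used;
    return used;
  }
};

// If Prune() returns 0, the caller can drop the cache entirely, matching the
// context->set(Context::MAP_CACHE_INDEX, undefined_value()) branch in the patch.
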
| 1510 |
| 1511 |
1455 #ifdef DEBUG | 1512 #ifdef DEBUG |
1456 void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) { | 1513 void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) { |
1457 live_bytes_ += obj->Size(); | 1514 live_bytes_ += obj->Size(); |
1458 if (heap()->new_space()->Contains(obj)) { | 1515 if (heap()->new_space()->Contains(obj)) { |
1459 live_young_objects_size_ += obj->Size(); | 1516 live_young_objects_size_ += obj->Size(); |
1460 } else if (heap()->map_space()->Contains(obj)) { | 1517 } else if (heap()->map_space()->Contains(obj)) { |
1461 ASSERT(obj->IsMap()); | 1518 ASSERT(obj->IsMap()); |
1462 live_map_objects_size_ += obj->Size(); | 1519 live_map_objects_size_ += obj->Size(); |
1463 } else if (heap()->cell_space()->Contains(obj)) { | 1520 } else if (heap()->cell_space()->Contains(obj)) { |
1464 ASSERT(obj->IsJSGlobalPropertyCell()); | 1521 ASSERT(obj->IsJSGlobalPropertyCell()); |
(...skipping 1672 matching lines...) |
3137 } | 3194 } |
3138 | 3195 |
3139 | 3196 |
3140 void MarkCompactCollector::Initialize() { | 3197 void MarkCompactCollector::Initialize() { |
3141 StaticPointersToNewGenUpdatingVisitor::Initialize(); | 3198 StaticPointersToNewGenUpdatingVisitor::Initialize(); |
3142 StaticMarkingVisitor::Initialize(); | 3199 StaticMarkingVisitor::Initialize(); |
3143 } | 3200 } |
3144 | 3201 |
3145 | 3202 |
3146 } } // namespace v8::internal | 3203 } } // namespace v8::internal |