OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
(...skipping 1238 matching lines...)
1249 GCType gc_type = | 1249 GCType gc_type = |
1250 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; | 1250 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; |
1251 | 1251 |
1252 { | 1252 { |
1253 GCCallbacksScope scope(this); | 1253 GCCallbacksScope scope(this); |
1254 if (scope.CheckReenter()) { | 1254 if (scope.CheckReenter()) { |
1255 AllowHeapAllocation allow_allocation; | 1255 AllowHeapAllocation allow_allocation; |
1256 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); | 1256 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); |
1257 VMState<EXTERNAL> state(isolate_); | 1257 VMState<EXTERNAL> state(isolate_); |
1258 HandleScope handle_scope(isolate_); | 1258 HandleScope handle_scope(isolate_); |
| 1259 // if (!(FLAG_scavenge_reclaim_unmodified_objects && |
| 1260 // (gc_type == kGCTypeScavenge))) { |
1259 CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags); | 1261 CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags); |
| 1262 //} |
1260 } | 1263 } |
1261 } | 1264 } |
1262 | 1265 |
1263 EnsureFromSpaceIsCommitted(); | 1266 EnsureFromSpaceIsCommitted(); |
1264 | 1267 |
1265 int start_new_space_size = Heap::new_space()->SizeAsInt(); | 1268 int start_new_space_size = Heap::new_space()->SizeAsInt(); |
1266 | 1269 |
1267 if (IsHighSurvivalRate()) { | 1270 if (IsHighSurvivalRate()) { |
1268 // We speed up the incremental marker if it is running so that it | 1271 // We speed up the incremental marker if it is running so that it |
1269 // does not fall behind the rate of promotion, which would cause a | 1272 // does not fall behind the rate of promotion, which would cause a |
(...skipping 64 matching lines...)
1334 DampenOldGenerationAllocationLimit(old_gen_size, gc_speed, mutator_speed); | 1337 DampenOldGenerationAllocationLimit(old_gen_size, gc_speed, mutator_speed); |
1335 } | 1338 } |
1336 | 1339 |
1337 { | 1340 { |
1338 GCCallbacksScope scope(this); | 1341 GCCallbacksScope scope(this); |
1339 if (scope.CheckReenter()) { | 1342 if (scope.CheckReenter()) { |
1340 AllowHeapAllocation allow_allocation; | 1343 AllowHeapAllocation allow_allocation; |
1341 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); | 1344 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); |
1342 VMState<EXTERNAL> state(isolate_); | 1345 VMState<EXTERNAL> state(isolate_); |
1343 HandleScope handle_scope(isolate_); | 1346 HandleScope handle_scope(isolate_); |
| 1347 // if (!(FLAG_scavenge_reclaim_unmodified_objects && |
| 1348 // (gc_type == kGCTypeScavenge))) { |
1344 CallGCEpilogueCallbacks(gc_type, gc_callback_flags); | 1349 CallGCEpilogueCallbacks(gc_type, gc_callback_flags); |
| 1350 // } |
1345 } | 1351 } |
1346 } | 1352 } |
1347 | 1353 |
1348 #ifdef VERIFY_HEAP | 1354 #ifdef VERIFY_HEAP |
1349 if (FLAG_verify_heap) { | 1355 if (FLAG_verify_heap) { |
1350 VerifyStringTable(this); | 1356 VerifyStringTable(this); |
1351 } | 1357 } |
1352 #endif | 1358 #endif |
1353 | 1359 |
1354 return freed_global_handles > 0; | 1360 return freed_global_handles > 0; |
(...skipping 139 matching lines...)
1494 } | 1500 } |
1495 } | 1501 } |
1496 | 1502 |
1497 | 1503 |
1498 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) { | 1504 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) { |
1499 return heap->InNewSpace(*p) && | 1505 return heap->InNewSpace(*p) && |
1500 !HeapObject::cast(*p)->map_word().IsForwardingAddress(); | 1506 !HeapObject::cast(*p)->map_word().IsForwardingAddress(); |
1501 } | 1507 } |
1502 | 1508 |
1503 | 1509 |
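 | // An object counts as "unmodified" here if it is a JSObject whose map |
 | // is still the initial map of its JSFunction constructor, i.e. it has |
 | // not had properties added or reconfigured since allocation. |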
| 1510 static bool IsUnmodifiedHeapObject(Object** p) { |
| 1511 Object* object = *p; |
| 1512 DCHECK(object->IsHeapObject()); |
| 1513 HeapObject* heap_object = HeapObject::cast(object); |
| 1514 if (!object->IsJSObject()) return false; |
| 1515 Object* obj_constructor = (JSObject::cast(object))->map()->GetConstructor(); |
| 1516 if (!obj_constructor->IsJSFunction()) return false; |
| 1517 JSFunction* constructor = JSFunction::cast(obj_constructor); |
| 1518 if (constructor != nullptr && |
| 1519 constructor->initial_map() == heap_object->map()) { |
| 1520 return true; |
| 1521 } |
| 1522 return false; |
| 1523 } |
| 1524 |
| 1525 |
1504 void Heap::ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page, | 1526 void Heap::ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page, |
1505 StoreBufferEvent event) { | 1527 StoreBufferEvent event) { |
1506 heap->store_buffer_rebuilder_.Callback(page, event); | 1528 heap->store_buffer_rebuilder_.Callback(page, event); |
1507 } | 1529 } |
1508 | 1530 |
1509 | 1531 |
1510 void PromotionQueue::Initialize() { | 1532 void PromotionQueue::Initialize() { |
1511 // The last to-space page may be used for promotion queue. On promotion | 1533 // The last to-space page may be used for promotion queue. On promotion |
1512 // conflict, we use the emergency stack. | 1534 // conflict, we use the emergency stack. |
1513 DCHECK((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize) == | 1535 DCHECK((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize) == |
(...skipping 98 matching lines...)
1612 // updated as a side effect of promoting an object. | 1634 // updated as a side effect of promoting an object. |
1613 // | 1635 // |
1614 // There is guaranteed to be enough room at the top of the to space | 1636 // There is guaranteed to be enough room at the top of the to space |
1615 // for the addresses of promoted objects: every object promoted | 1637 // for the addresses of promoted objects: every object promoted |
1616 // frees up its size in bytes from the top of the new space, and | 1638 // frees up its size in bytes from the top of the new space, and |
1617 // objects are at least one pointer in size. | 1639 // objects are at least one pointer in size. |
1618 Address new_space_front = new_space_.ToSpaceStart(); | 1640 Address new_space_front = new_space_.ToSpaceStart(); |
1619 promotion_queue_.Initialize(); | 1641 promotion_queue_.Initialize(); |
1620 | 1642 |
1621 ScavengeVisitor scavenge_visitor(this); | 1643 ScavengeVisitor scavenge_visitor(this); |
| 1644 |
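 | // With --scavenge_reclaim_unmodified_objects, weak global handles whose |
 | // targets still look unmodified are flagged before the scavenge so they |
 | // can be reclaimed without the object-group machinery further below. |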
| 1645 if (FLAG_scavenge_reclaim_unmodified_objects) { |
| 1646 isolate()->global_handles()->IdentifyWeakUnmodifiedObjects( |
| 1647 &IsUnmodifiedHeapObject); |
| 1648 } |
| 1649 |
1622 { | 1650 { |
1623 // Copy roots. | 1651 // Copy roots. |
1624 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_ROOTS); | 1652 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_ROOTS); |
1625 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); | 1653 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); |
1626 } | 1654 } |
1627 | 1655 |
1628 { | 1656 { |
1629 // Copy objects reachable from the old generation. | 1657 // Copy objects reachable from the old generation. |
1630 GCTracer::Scope gc_scope(tracer(), | 1658 GCTracer::Scope gc_scope(tracer(), |
1631 GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS); | 1659 GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS); |
(...skipping 18 matching lines...)
1650 if (collector->is_code_flushing_enabled()) { | 1678 if (collector->is_code_flushing_enabled()) { |
1651 collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor); | 1679 collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor); |
1652 } | 1680 } |
1653 } | 1681 } |
1654 | 1682 |
1655 { | 1683 { |
1656 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE); | 1684 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE); |
1657 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1685 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
1658 } | 1686 } |
1659 | 1687 |
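 | // When the flag is enabled, weak unmodified handles whose targets were |
 | // not scavenged are marked as pending, the remaining ones are visited |
 | // as roots and drained with another DoScavenge round; otherwise the |
 | // legacy object-group / independent-handle path is taken. |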
1660 { | 1688 if (FLAG_scavenge_reclaim_unmodified_objects) { |
| 1689 isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending( |
| 1690 &IsUnscavengedHeapObject); |
| 1691 |
| 1692 isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots( |
| 1693 &scavenge_visitor); |
| 1694 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
| 1695 } else { |
1661 GCTracer::Scope gc_scope(tracer(), | 1696 GCTracer::Scope gc_scope(tracer(), |
1662 GCTracer::Scope::SCAVENGER_OBJECT_GROUPS); | 1697 GCTracer::Scope::SCAVENGER_OBJECT_GROUPS); |
1663 while (isolate()->global_handles()->IterateObjectGroups( | 1698 while (isolate()->global_handles()->IterateObjectGroups( |
1664 &scavenge_visitor, &IsUnscavengedHeapObject)) { | 1699 &scavenge_visitor, &IsUnscavengedHeapObject)) { |
1665 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1700 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
1666 } | 1701 } |
1667 isolate()->global_handles()->RemoveObjectGroups(); | 1702 isolate()->global_handles()->RemoveObjectGroups(); |
1668 isolate()->global_handles()->RemoveImplicitRefGroups(); | 1703 isolate()->global_handles()->RemoveImplicitRefGroups(); |
| 1704 |
| 1705 isolate()->global_handles()->IdentifyNewSpaceWeakIndependentHandles( |
| 1706 &IsUnscavengedHeapObject); |
| 1707 |
| 1708 isolate()->global_handles()->IterateNewSpaceWeakIndependentRoots( |
| 1709 &scavenge_visitor); |
| 1710 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
1669 } | 1711 } |
1670 | 1712 |
1671 isolate()->global_handles()->IdentifyNewSpaceWeakIndependentHandles( | |
1672 &IsUnscavengedHeapObject); | |
1673 | |
1674 isolate()->global_handles()->IterateNewSpaceWeakIndependentRoots( | |
1675 &scavenge_visitor); | |
1676 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | |
1677 | |
1678 UpdateNewSpaceReferencesInExternalStringTable( | 1713 UpdateNewSpaceReferencesInExternalStringTable( |
1679 &UpdateNewSpaceReferenceInExternalStringTableEntry); | 1714 &UpdateNewSpaceReferenceInExternalStringTableEntry); |
1680 | 1715 |
1681 promotion_queue_.Destroy(); | 1716 promotion_queue_.Destroy(); |
1682 | 1717 |
1683 incremental_marking()->UpdateMarkingDequeAfterScavenge(); | 1718 incremental_marking()->UpdateMarkingDequeAfterScavenge(); |
1684 | 1719 |
1685 ScavengeWeakObjectRetainer weak_object_retainer(this); | 1720 ScavengeWeakObjectRetainer weak_object_retainer(this); |
1686 ProcessYoungWeakReferences(&weak_object_retainer); | 1721 ProcessYoungWeakReferences(&weak_object_retainer); |
1687 | 1722 |
(...skipping 4432 matching lines...)
6120 } | 6155 } |
6121 | 6156 |
6122 | 6157 |
6123 // static | 6158 // static |
6124 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6159 int Heap::GetStaticVisitorIdForMap(Map* map) { |
6125 return StaticVisitorBase::GetVisitorId(map); | 6160 return StaticVisitorBase::GetVisitorId(map); |
6126 } | 6161 } |
6127 | 6162 |
6128 } // namespace internal | 6163 } // namespace internal |
6129 } // namespace v8 | 6164 } // namespace v8 |