OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
(...skipping 1222 matching lines...) |
1233 GCType gc_type = | 1233 GCType gc_type = |
1234 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; | 1234 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; |
1235 | 1235 |
1236 { | 1236 { |
1237 GCCallbacksScope scope(this); | 1237 GCCallbacksScope scope(this); |
1238 if (scope.CheckReenter()) { | 1238 if (scope.CheckReenter()) { |
1239 AllowHeapAllocation allow_allocation; | 1239 AllowHeapAllocation allow_allocation; |
1240 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); | 1240 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); |
1241 VMState<EXTERNAL> state(isolate_); | 1241 VMState<EXTERNAL> state(isolate_); |
1242 HandleScope handle_scope(isolate_); | 1242 HandleScope handle_scope(isolate_); |
1243 if (!(FLAG_scavenge_reclaim_unmodified_objects && | 1243 CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags); |
1244 (gc_type == kGCTypeScavenge))) { | |
1245 CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags); | |
1246 } | |
1247 } | 1244 } |
1248 } | 1245 } |
1249 | 1246 |
1250 EnsureFromSpaceIsCommitted(); | 1247 EnsureFromSpaceIsCommitted(); |
1251 | 1248 |
1252 int start_new_space_size = Heap::new_space()->SizeAsInt(); | 1249 int start_new_space_size = Heap::new_space()->SizeAsInt(); |
1253 | 1250 |
1254 if (IsHighSurvivalRate()) { | 1251 if (IsHighSurvivalRate()) { |
1255 // We speed up the incremental marker if it is running so that it | 1252 // We speed up the incremental marker if it is running so that it |
1256 // does not fall behind the rate of promotion, which would cause a | 1253 // does not fall behind the rate of promotion, which would cause a |
(...skipping 64 matching lines...) |
1321 DampenOldGenerationAllocationLimit(old_gen_size, gc_speed, mutator_speed); | 1318 DampenOldGenerationAllocationLimit(old_gen_size, gc_speed, mutator_speed); |
1322 } | 1319 } |
1323 | 1320 |
1324 { | 1321 { |
1325 GCCallbacksScope scope(this); | 1322 GCCallbacksScope scope(this); |
1326 if (scope.CheckReenter()) { | 1323 if (scope.CheckReenter()) { |
1327 AllowHeapAllocation allow_allocation; | 1324 AllowHeapAllocation allow_allocation; |
1328 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); | 1325 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); |
1329 VMState<EXTERNAL> state(isolate_); | 1326 VMState<EXTERNAL> state(isolate_); |
1330 HandleScope handle_scope(isolate_); | 1327 HandleScope handle_scope(isolate_); |
1331 if (!(FLAG_scavenge_reclaim_unmodified_objects && | 1328 CallGCEpilogueCallbacks(gc_type, gc_callback_flags); |
1332 (gc_type == kGCTypeScavenge))) { | |
1333 CallGCEpilogueCallbacks(gc_type, gc_callback_flags); | |
1334 } | |
1335 } | 1329 } |
1336 } | 1330 } |
1337 | 1331 |
1338 #ifdef VERIFY_HEAP | 1332 #ifdef VERIFY_HEAP |
1339 if (FLAG_verify_heap) { | 1333 if (FLAG_verify_heap) { |
1340 VerifyStringTable(this); | 1334 VerifyStringTable(this); |
1341 } | 1335 } |
1342 #endif | 1336 #endif |
1343 | 1337 |
1344 return freed_global_handles > 0; | 1338 return freed_global_handles > 0; |
(...skipping 139 matching lines...) |
1484 } | 1478 } |
1485 } | 1479 } |
1486 | 1480 |
1487 | 1481 |
1488 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) { | 1482 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) { |
1489 return heap->InNewSpace(*p) && | 1483 return heap->InNewSpace(*p) && |
1490 !HeapObject::cast(*p)->map_word().IsForwardingAddress(); | 1484 !HeapObject::cast(*p)->map_word().IsForwardingAddress(); |
1491 } | 1485 } |
1492 | 1486 |
1493 | 1487 |
1494 static bool IsUnmodifiedHeapObject(Object** p) { | |
1495 Object* object = *p; | |
1496 DCHECK(object->IsHeapObject()); | |
1497 HeapObject* heap_object = HeapObject::cast(object); | |
1498 if (!object->IsJSObject()) return false; | |
1499 Object* obj_constructor = (JSObject::cast(object))->map()->GetConstructor(); | |
1500 if (!obj_constructor->IsJSFunction()) return false; | |
1501 JSFunction* constructor = JSFunction::cast(obj_constructor); | |
1502 if (constructor != nullptr && | |
1503 constructor->initial_map() == heap_object->map()) { | |
1504 return true; | |
1505 } | |
1506 return false; | |
1507 } | |
1508 | |
1509 | |
1510 void Heap::ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page, | 1488 void Heap::ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page, |
1511 StoreBufferEvent event) { | 1489 StoreBufferEvent event) { |
1512 heap->store_buffer_rebuilder_.Callback(page, event); | 1490 heap->store_buffer_rebuilder_.Callback(page, event); |
1513 } | 1491 } |
1514 | 1492 |
1515 | 1493 |
1516 void PromotionQueue::Initialize() { | 1494 void PromotionQueue::Initialize() { |
1517 // The last to-space page may be used for promotion queue. On promotion | 1495 // The last to-space page may be used for promotion queue. On promotion |
1518 // conflict, we use the emergency stack. | 1496 // conflict, we use the emergency stack. |
1519 DCHECK((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize) == | 1497 DCHECK((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize) == |
(...skipping 98 matching lines...) |
1618 // updated as a side effect of promoting an object. | 1596 // updated as a side effect of promoting an object. |
1619 // | 1597 // |
1620 // There is guaranteed to be enough room at the top of the to space | 1598 // There is guaranteed to be enough room at the top of the to space |
1621 // for the addresses of promoted objects: every object promoted | 1599 // for the addresses of promoted objects: every object promoted |
1622 // frees up its size in bytes from the top of the new space, and | 1600 // frees up its size in bytes from the top of the new space, and |
1623 // objects are at least one pointer in size. | 1601 // objects are at least one pointer in size. |
1624 Address new_space_front = new_space_.ToSpaceStart(); | 1602 Address new_space_front = new_space_.ToSpaceStart(); |
1625 promotion_queue_.Initialize(); | 1603 promotion_queue_.Initialize(); |
1626 | 1604 |
1627 ScavengeVisitor scavenge_visitor(this); | 1605 ScavengeVisitor scavenge_visitor(this); |
1628 | |
1629 if (FLAG_scavenge_reclaim_unmodified_objects) { | |
1630 isolate()->global_handles()->IdentifyWeakUnmodifiedObjects( | |
1631 &IsUnmodifiedHeapObject); | |
1632 } | |
1633 | |
1634 { | 1606 { |
1635 // Copy roots. | 1607 // Copy roots. |
1636 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_ROOTS); | 1608 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_ROOTS); |
1637 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); | 1609 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); |
1638 } | 1610 } |
1639 | 1611 |
1640 { | 1612 { |
1641 // Copy objects reachable from the old generation. | 1613 // Copy objects reachable from the old generation. |
1642 GCTracer::Scope gc_scope(tracer(), | 1614 GCTracer::Scope gc_scope(tracer(), |
1643 GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS); | 1615 GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS); |
(...skipping 18 matching lines...) |
1662 if (collector->is_code_flushing_enabled()) { | 1634 if (collector->is_code_flushing_enabled()) { |
1663 collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor); | 1635 collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor); |
1664 } | 1636 } |
1665 } | 1637 } |
1666 | 1638 |
1667 { | 1639 { |
1668 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE); | 1640 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE); |
1669 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1641 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
1670 } | 1642 } |
1671 | 1643 |
1672 if (FLAG_scavenge_reclaim_unmodified_objects) { | 1644 { |
1673 isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending( | |
1674 &IsUnscavengedHeapObject); | |
1675 | |
1676 isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots( | |
1677 &scavenge_visitor); | |
1678 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | |
1679 } else { | |
1680 GCTracer::Scope gc_scope(tracer(), | 1645 GCTracer::Scope gc_scope(tracer(), |
1681 GCTracer::Scope::SCAVENGER_OBJECT_GROUPS); | 1646 GCTracer::Scope::SCAVENGER_OBJECT_GROUPS); |
1682 while (isolate()->global_handles()->IterateObjectGroups( | 1647 while (isolate()->global_handles()->IterateObjectGroups( |
1683 &scavenge_visitor, &IsUnscavengedHeapObject)) { | 1648 &scavenge_visitor, &IsUnscavengedHeapObject)) { |
1684 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1649 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
1685 } | 1650 } |
1686 isolate()->global_handles()->RemoveObjectGroups(); | 1651 isolate()->global_handles()->RemoveObjectGroups(); |
1687 isolate()->global_handles()->RemoveImplicitRefGroups(); | 1652 isolate()->global_handles()->RemoveImplicitRefGroups(); |
| 1653 } |
1688 | 1654 |
1689 isolate()->global_handles()->IdentifyNewSpaceWeakIndependentHandles( | 1655 isolate()->global_handles()->IdentifyNewSpaceWeakIndependentHandles( |
1690 &IsUnscavengedHeapObject); | 1656 &IsUnscavengedHeapObject); |
1691 | 1657 |
1692 isolate()->global_handles()->IterateNewSpaceWeakIndependentRoots( | 1658 isolate()->global_handles()->IterateNewSpaceWeakIndependentRoots( |
1693 &scavenge_visitor); | 1659 &scavenge_visitor); |
1694 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1660 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
1695 } | |
1696 | 1661 |
1697 UpdateNewSpaceReferencesInExternalStringTable( | 1662 UpdateNewSpaceReferencesInExternalStringTable( |
1698 &UpdateNewSpaceReferenceInExternalStringTableEntry); | 1663 &UpdateNewSpaceReferenceInExternalStringTableEntry); |
1699 | 1664 |
1700 promotion_queue_.Destroy(); | 1665 promotion_queue_.Destroy(); |
1701 | 1666 |
1702 incremental_marking()->UpdateMarkingDequeAfterScavenge(); | 1667 incremental_marking()->UpdateMarkingDequeAfterScavenge(); |
1703 | 1668 |
1704 ScavengeWeakObjectRetainer weak_object_retainer(this); | 1669 ScavengeWeakObjectRetainer weak_object_retainer(this); |
1705 ProcessYoungWeakReferences(&weak_object_retainer); | 1670 ProcessYoungWeakReferences(&weak_object_retainer); |
(...skipping 4425 matching lines...) |
6131 } | 6096 } |
6132 | 6097 |
6133 | 6098 |
6134 // static | 6099 // static |
6135 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6100 int Heap::GetStaticVisitorIdForMap(Map* map) { |
6136 return StaticVisitorBase::GetVisitorId(map); | 6101 return StaticVisitorBase::GetVisitorId(map); |
6137 } | 6102 } |
6138 | 6103 |
6139 } // namespace internal | 6104 } // namespace internal |
6140 } // namespace v8 | 6105 } // namespace v8 |
OLD | NEW |