| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
| (...skipping 1237 matching lines...) |
| 1248 GCType gc_type = | 1248 GCType gc_type = |
| 1249 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; | 1249 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; |
| 1250 | 1250 |
| 1251 { | 1251 { |
| 1252 GCCallbacksScope scope(this); | 1252 GCCallbacksScope scope(this); |
| 1253 if (scope.CheckReenter()) { | 1253 if (scope.CheckReenter()) { |
| 1254 AllowHeapAllocation allow_allocation; | 1254 AllowHeapAllocation allow_allocation; |
| 1255 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); | 1255 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); |
| 1256 VMState<EXTERNAL> state(isolate_); | 1256 VMState<EXTERNAL> state(isolate_); |
| 1257 HandleScope handle_scope(isolate_); | 1257 HandleScope handle_scope(isolate_); |
| 1258 if (!(FLAG_scavenge_reclaim_unmodified_objects && | 1258 CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags); |
| 1259 (gc_type == kGCTypeScavenge))) { | |
| 1260 CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags); | |
| 1261 } | |
| 1262 } | 1259 } |
| 1263 } | 1260 } |
| 1264 | 1261 |
| 1265 EnsureFromSpaceIsCommitted(); | 1262 EnsureFromSpaceIsCommitted(); |
| 1266 | 1263 |
| 1267 int start_new_space_size = Heap::new_space()->SizeAsInt(); | 1264 int start_new_space_size = Heap::new_space()->SizeAsInt(); |
| 1268 | 1265 |
| 1269 if (IsHighSurvivalRate()) { | 1266 if (IsHighSurvivalRate()) { |
| 1270 // We speed up the incremental marker if it is running so that it | 1267 // We speed up the incremental marker if it is running so that it |
| 1271 // does not fall behind the rate of promotion, which would cause a | 1268 // does not fall behind the rate of promotion, which would cause a |
| (...skipping 64 matching lines...) |
| 1336 DampenOldGenerationAllocationLimit(old_gen_size, gc_speed, mutator_speed); | 1333 DampenOldGenerationAllocationLimit(old_gen_size, gc_speed, mutator_speed); |
| 1337 } | 1334 } |
| 1338 | 1335 |
| 1339 { | 1336 { |
| 1340 GCCallbacksScope scope(this); | 1337 GCCallbacksScope scope(this); |
| 1341 if (scope.CheckReenter()) { | 1338 if (scope.CheckReenter()) { |
| 1342 AllowHeapAllocation allow_allocation; | 1339 AllowHeapAllocation allow_allocation; |
| 1343 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); | 1340 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); |
| 1344 VMState<EXTERNAL> state(isolate_); | 1341 VMState<EXTERNAL> state(isolate_); |
| 1345 HandleScope handle_scope(isolate_); | 1342 HandleScope handle_scope(isolate_); |
| 1346 if (!(FLAG_scavenge_reclaim_unmodified_objects && | 1343 CallGCEpilogueCallbacks(gc_type, gc_callback_flags); |
| 1347 (gc_type == kGCTypeScavenge))) { | |
| 1348 CallGCEpilogueCallbacks(gc_type, gc_callback_flags); | |
| 1349 } | |
| 1350 } | 1344 } |
| 1351 } | 1345 } |
| 1352 | 1346 |
| 1353 #ifdef VERIFY_HEAP | 1347 #ifdef VERIFY_HEAP |
| 1354 if (FLAG_verify_heap) { | 1348 if (FLAG_verify_heap) { |
| 1355 VerifyStringTable(this); | 1349 VerifyStringTable(this); |
| 1356 } | 1350 } |
| 1357 #endif | 1351 #endif |
| 1358 | 1352 |
| 1359 return freed_global_handles > 0; | 1353 return freed_global_handles > 0; |
| (...skipping 139 matching lines...) |
| 1499 } | 1493 } |
| 1500 } | 1494 } |
| 1501 | 1495 |
| 1502 | 1496 |
| 1503 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) { | 1497 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) { |
| 1504 return heap->InNewSpace(*p) && | 1498 return heap->InNewSpace(*p) && |
| 1505 !HeapObject::cast(*p)->map_word().IsForwardingAddress(); | 1499 !HeapObject::cast(*p)->map_word().IsForwardingAddress(); |
| 1506 } | 1500 } |
| 1507 | 1501 |
| 1508 | 1502 |
| 1509 static bool IsUnmodifiedHeapObject(Object** p) { | |
| 1510 Object* object = *p; | |
| 1511 DCHECK(object->IsHeapObject()); | |
| 1512 HeapObject* heap_object = HeapObject::cast(object); | |
| 1513 if (!object->IsJSObject()) return false; | |
| 1514 Object* obj_constructor = (JSObject::cast(object))->map()->GetConstructor(); | |
| 1515 if (!obj_constructor->IsJSFunction()) return false; | |
| 1516 JSFunction* constructor = JSFunction::cast(obj_constructor); | |
| 1517 if (constructor != nullptr && | |
| 1518 constructor->initial_map() == heap_object->map()) { | |
| 1519 return true; | |
| 1520 } | |
| 1521 return false; | |
| 1522 } | |
| 1523 | |
| 1524 | |
| 1525 void Heap::ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page, | 1503 void Heap::ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page, |
| 1526 StoreBufferEvent event) { | 1504 StoreBufferEvent event) { |
| 1527 heap->store_buffer_rebuilder_.Callback(page, event); | 1505 heap->store_buffer_rebuilder_.Callback(page, event); |
| 1528 } | 1506 } |
| 1529 | 1507 |
| 1530 | 1508 |
| 1531 void PromotionQueue::Initialize() { | 1509 void PromotionQueue::Initialize() { |
| 1532 // The last to-space page may be used for promotion queue. On promotion | 1510 // The last to-space page may be used for promotion queue. On promotion |
| 1533 // conflict, we use the emergency stack. | 1511 // conflict, we use the emergency stack. |
| 1534 DCHECK((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize) == | 1512 DCHECK((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize) == |
| (...skipping 98 matching lines...) |
| 1633 // updated as a side effect of promoting an object. | 1611 // updated as a side effect of promoting an object. |
| 1634 // | 1612 // |
| 1635 // There is guaranteed to be enough room at the top of the to space | 1613 // There is guaranteed to be enough room at the top of the to space |
| 1636 // for the addresses of promoted objects: every object promoted | 1614 // for the addresses of promoted objects: every object promoted |
| 1637 // frees up its size in bytes from the top of the new space, and | 1615 // frees up its size in bytes from the top of the new space, and |
| 1638 // objects are at least one pointer in size. | 1616 // objects are at least one pointer in size. |
| 1639 Address new_space_front = new_space_.ToSpaceStart(); | 1617 Address new_space_front = new_space_.ToSpaceStart(); |
| 1640 promotion_queue_.Initialize(); | 1618 promotion_queue_.Initialize(); |
| 1641 | 1619 |
| 1642 ScavengeVisitor scavenge_visitor(this); | 1620 ScavengeVisitor scavenge_visitor(this); |
| 1643 | |
| 1644 if (FLAG_scavenge_reclaim_unmodified_objects) { | |
| 1645 isolate()->global_handles()->IdentifyWeakUnmodifiedObjects( | |
| 1646 &IsUnmodifiedHeapObject); | |
| 1647 } | |
| 1648 | |
| 1649 { | 1621 { |
| 1650 // Copy roots. | 1622 // Copy roots. |
| 1651 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_ROOTS); | 1623 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_ROOTS); |
| 1652 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); | 1624 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); |
| 1653 } | 1625 } |
| 1654 | 1626 |
| 1655 { | 1627 { |
| 1656 // Copy objects reachable from the old generation. | 1628 // Copy objects reachable from the old generation. |
| 1657 GCTracer::Scope gc_scope(tracer(), | 1629 GCTracer::Scope gc_scope(tracer(), |
| 1658 GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS); | 1630 GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS); |
| (...skipping 18 matching lines...) |
| 1677 if (collector->is_code_flushing_enabled()) { | 1649 if (collector->is_code_flushing_enabled()) { |
| 1678 collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor); | 1650 collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor); |
| 1679 } | 1651 } |
| 1680 } | 1652 } |
| 1681 | 1653 |
| 1682 { | 1654 { |
| 1683 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE); | 1655 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE); |
| 1684 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1656 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
| 1685 } | 1657 } |
| 1686 | 1658 |
| 1687 if (FLAG_scavenge_reclaim_unmodified_objects) { | 1659 { |
| 1688 isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending( | |
| 1689 &IsUnscavengedHeapObject); | |
| 1690 | |
| 1691 isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots( | |
| 1692 &scavenge_visitor); | |
| 1693 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | |
| 1694 } else { | |
| 1695 GCTracer::Scope gc_scope(tracer(), | 1660 GCTracer::Scope gc_scope(tracer(), |
| 1696 GCTracer::Scope::SCAVENGER_OBJECT_GROUPS); | 1661 GCTracer::Scope::SCAVENGER_OBJECT_GROUPS); |
| 1697 while (isolate()->global_handles()->IterateObjectGroups( | 1662 while (isolate()->global_handles()->IterateObjectGroups( |
| 1698 &scavenge_visitor, &IsUnscavengedHeapObject)) { | 1663 &scavenge_visitor, &IsUnscavengedHeapObject)) { |
| 1699 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1664 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
| 1700 } | 1665 } |
| 1701 isolate()->global_handles()->RemoveObjectGroups(); | 1666 isolate()->global_handles()->RemoveObjectGroups(); |
| 1702 isolate()->global_handles()->RemoveImplicitRefGroups(); | 1667 isolate()->global_handles()->RemoveImplicitRefGroups(); |
| 1668 } |
| 1703 | 1669 |
| 1704 isolate()->global_handles()->IdentifyNewSpaceWeakIndependentHandles( | 1670 isolate()->global_handles()->IdentifyNewSpaceWeakIndependentHandles( |
| 1705 &IsUnscavengedHeapObject); | 1671 &IsUnscavengedHeapObject); |
| 1706 | 1672 |
| 1707 isolate()->global_handles()->IterateNewSpaceWeakIndependentRoots( | 1673 isolate()->global_handles()->IterateNewSpaceWeakIndependentRoots( |
| 1708 &scavenge_visitor); | 1674 &scavenge_visitor); |
| 1709 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1675 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
| 1710 } | |
| 1711 | 1676 |
| 1712 UpdateNewSpaceReferencesInExternalStringTable( | 1677 UpdateNewSpaceReferencesInExternalStringTable( |
| 1713 &UpdateNewSpaceReferenceInExternalStringTableEntry); | 1678 &UpdateNewSpaceReferenceInExternalStringTableEntry); |
| 1714 | 1679 |
| 1715 promotion_queue_.Destroy(); | 1680 promotion_queue_.Destroy(); |
| 1716 | 1681 |
| 1717 incremental_marking()->UpdateMarkingDequeAfterScavenge(); | 1682 incremental_marking()->UpdateMarkingDequeAfterScavenge(); |
| 1718 | 1683 |
| 1719 ScavengeWeakObjectRetainer weak_object_retainer(this); | 1684 ScavengeWeakObjectRetainer weak_object_retainer(this); |
| 1720 ProcessYoungWeakReferences(&weak_object_retainer); | 1685 ProcessYoungWeakReferences(&weak_object_retainer); |
| (...skipping 3016 matching lines...) |
| 4737 initial_old_generation_size_ = FLAG_initial_old_space_size * MB; | 4702 initial_old_generation_size_ = FLAG_initial_old_space_size * MB; |
| 4738 } else { | 4703 } else { |
| 4739 initial_old_generation_size_ = | 4704 initial_old_generation_size_ = |
| 4740 max_old_generation_size_ / kInitalOldGenerationLimitFactor; | 4705 max_old_generation_size_ / kInitalOldGenerationLimitFactor; |
| 4741 } | 4706 } |
| 4742 old_generation_allocation_limit_ = initial_old_generation_size_; | 4707 old_generation_allocation_limit_ = initial_old_generation_size_; |
| 4743 | 4708 |
| 4744 // We rely on being able to allocate new arrays in paged spaces. | 4709 // We rely on being able to allocate new arrays in paged spaces. |
| 4745 DCHECK(Page::kMaxRegularHeapObjectSize >= | 4710 DCHECK(Page::kMaxRegularHeapObjectSize >= |
| 4746 (JSArray::kSize + | 4711 (JSArray::kSize + |
| 4747 FixedArray::SizeFor(JSArray::kInitialMaxFastElementArray) + | 4712 FixedArray::SizeFor(JSObject::kInitialMaxFastElementArray) + |
| 4748 AllocationMemento::kSize)); | 4713 AllocationMemento::kSize)); |
| 4749 | 4714 |
| 4750 code_range_size_ = code_range_size * MB; | 4715 code_range_size_ = code_range_size * MB; |
| 4751 | 4716 |
| 4752 configured_ = true; | 4717 configured_ = true; |
| 4753 return true; | 4718 return true; |
| 4754 } | 4719 } |
| 4755 | 4720 |
| 4756 | 4721 |
| 4757 void Heap::AddToRingBuffer(const char* string) { | 4722 void Heap::AddToRingBuffer(const char* string) { |
| (...skipping 1382 matching lines...) |
| 6140 } | 6105 } |
| 6141 | 6106 |
| 6142 | 6107 |
| 6143 // static | 6108 // static |
| 6144 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6109 int Heap::GetStaticVisitorIdForMap(Map* map) { |
| 6145 return StaticVisitorBase::GetVisitorId(map); | 6110 return StaticVisitorBase::GetVisitorId(map); |
| 6146 } | 6111 } |
| 6147 | 6112 |
| 6148 } // namespace internal | 6113 } // namespace internal |
| 6149 } // namespace v8 | 6114 } // namespace v8 |
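
Context for the revert above: the OLD column's IsUnmodifiedHeapObject (old lines 1509-1522) reduces to a single check — a JSObject counts as unmodified only while its current map is still its constructor's initial map, i.e. no properties have been added or removed since construction. The sketch below restates that predicate with simplified stand-in types; Map, Function and Object here are hypothetical illustrations, not V8's actual classes.

```cpp
#include <cassert>
#include <cstddef>

// Hypothetical, simplified stand-ins for V8's Map / JSFunction / JSObject.
struct Map {};

struct Function {
  const Map* initial_map;  // map a freshly constructed instance starts with
};

struct Object {
  const Map* map;               // current hidden class of the object
  const Function* constructor;  // constructor that created it, if any
};

// "Unmodified" means the object still uses its constructor's initial map:
// no property additions, deletions, or other map transitions have happened.
static bool IsUnmodified(const Object& obj) {
  if (obj.constructor == nullptr) return false;
  return obj.map == obj.constructor->initial_map;
}

int main() {
  Map initial, transitioned;
  Function ctor{&initial};
  Object fresh{&initial, &ctor};         // still on the initial map
  Object mutated{&transitioned, &ctor};  // map changed by a property add
  assert(IsUnmodified(fresh));
  assert(!IsUnmodified(mutated));
  return 0;
}
```

With the flag removed, the scavenger no longer uses this predicate to classify weak global handles; it unconditionally takes the object-group and independent-handle paths (new lines 1659-1675).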
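
Both callback sites that the revert simplifies (old lines 1251-1263 and 1340-1351) rely on GCCallbacksScope as a reentrance guard: prologue and epilogue callbacks fire only in the outermost scope, so a collection triggered from inside a callback does not re-invoke them. Below is a minimal sketch of that guard pattern under hypothetical names, not V8's actual implementation.

```cpp
#include <cassert>

// Hypothetical RAII nesting guard in the spirit of GCCallbacksScope:
// CheckReenter() is true only for the outermost scope, so embedder
// callbacks are not re-run when GC work is entered recursively.
class CallbacksScope {
 public:
  explicit CallbacksScope(int* depth) : depth_(depth) { ++*depth_; }
  ~CallbacksScope() { --*depth_; }
  bool CheckReenter() const { return *depth_ == 1; }

 private:
  int* depth_;
};

static int gc_callback_depth = 0;
static int prologue_calls = 0;

static void CollectGarbageSketch(bool trigger_nested_gc) {
  CallbacksScope scope(&gc_callback_depth);
  if (scope.CheckReenter()) {
    ++prologue_calls;  // stands in for CallGCPrologueCallbacks(...)
  }
  // A nested collection (e.g. one started from a callback) sees depth > 1.
  if (trigger_nested_gc) CollectGarbageSketch(false);
}

int main() {
  CollectGarbageSketch(true);
  assert(prologue_calls == 1);  // callbacks ran once, not twice
  return 0;
}
```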