OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
(...skipping 1222 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1233 GCType gc_type = | 1233 GCType gc_type = |
1234 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; | 1234 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; |
1235 | 1235 |
1236 { | 1236 { |
1237 GCCallbacksScope scope(this); | 1237 GCCallbacksScope scope(this); |
1238 if (scope.CheckReenter()) { | 1238 if (scope.CheckReenter()) { |
1239 AllowHeapAllocation allow_allocation; | 1239 AllowHeapAllocation allow_allocation; |
1240 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); | 1240 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); |
1241 VMState<EXTERNAL> state(isolate_); | 1241 VMState<EXTERNAL> state(isolate_); |
1242 HandleScope handle_scope(isolate_); | 1242 HandleScope handle_scope(isolate_); |
1243 CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags); | 1243 if (!(gc_type == kGCTypeScavenge && |
1244 FLAG_scavenge_reclaim_unmodified_objects)) | |
rmcilroy
2015/10/02 18:09:46
Nit - swap flag and GC_type check order.
mythria
2015/10/05 10:55:34
Done.
| |
1245 CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags); | |
1244 } | 1246 } |
1245 } | 1247 } |
1246 | 1248 |
1247 EnsureFromSpaceIsCommitted(); | 1249 EnsureFromSpaceIsCommitted(); |
1248 | 1250 |
1249 int start_new_space_size = Heap::new_space()->SizeAsInt(); | 1251 int start_new_space_size = Heap::new_space()->SizeAsInt(); |
1250 | 1252 |
1251 if (IsHighSurvivalRate()) { | 1253 if (IsHighSurvivalRate()) { |
1252 // We speed up the incremental marker if it is running so that it | 1254 // We speed up the incremental marker if it is running so that it |
1253 // does not fall behind the rate of promotion, which would cause a | 1255 // does not fall behind the rate of promotion, which would cause a |
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1318 DampenOldGenerationAllocationLimit(old_gen_size, gc_speed, mutator_speed); | 1320 DampenOldGenerationAllocationLimit(old_gen_size, gc_speed, mutator_speed); |
1319 } | 1321 } |
1320 | 1322 |
1321 { | 1323 { |
1322 GCCallbacksScope scope(this); | 1324 GCCallbacksScope scope(this); |
1323 if (scope.CheckReenter()) { | 1325 if (scope.CheckReenter()) { |
1324 AllowHeapAllocation allow_allocation; | 1326 AllowHeapAllocation allow_allocation; |
1325 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); | 1327 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); |
1326 VMState<EXTERNAL> state(isolate_); | 1328 VMState<EXTERNAL> state(isolate_); |
1327 HandleScope handle_scope(isolate_); | 1329 HandleScope handle_scope(isolate_); |
1328 CallGCEpilogueCallbacks(gc_type, gc_callback_flags); | 1330 if (!(gc_type == kGCTypeScavenge && |
1331 FLAG_scavenge_reclaim_unmodified_objects)) | |
rmcilroy
2015/10/02 18:09:46
Ditto
mythria
2015/10/05 10:55:34
Done.
| |
1332 CallGCEpilogueCallbacks(gc_type, gc_callback_flags); | |
1329 } | 1333 } |
1330 } | 1334 } |
1331 | 1335 |
1332 #ifdef VERIFY_HEAP | 1336 #ifdef VERIFY_HEAP |
1333 if (FLAG_verify_heap) { | 1337 if (FLAG_verify_heap) { |
1334 VerifyStringTable(this); | 1338 VerifyStringTable(this); |
1335 } | 1339 } |
1336 #endif | 1340 #endif |
1337 | 1341 |
1338 return freed_global_handles > 0; | 1342 return freed_global_handles > 0; |
(...skipping 139 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1478 } | 1482 } |
1479 } | 1483 } |
1480 | 1484 |
1481 | 1485 |
1482 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) { | 1486 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) { |
1483 return heap->InNewSpace(*p) && | 1487 return heap->InNewSpace(*p) && |
1484 !HeapObject::cast(*p)->map_word().IsForwardingAddress(); | 1488 !HeapObject::cast(*p)->map_word().IsForwardingAddress(); |
1485 } | 1489 } |
1486 | 1490 |
1487 | 1491 |
1492 static bool IsUnModifiedNewSpaceObject(Object** p) { | |
rmcilroy
2015/10/02 18:09:46
This function doesn't seem to check if the object is in new space.
mythria
2015/10/05 10:55:34
Done.
| |
1493 Object* object = *p; | |
1494 DCHECK(object->IsHeapObject()); | |
1495 HeapObject* heap_object = HeapObject::cast(object); | |
1496 if (!object->IsJSObject()) { | |
1497 return false; | |
1498 } | |
1499 Object* obj_constructor = (JSObject::cast(object))->map()->GetConstructor(); | |
1500 if (!obj_constructor->IsJSFunction()) return false; | |
1501 JSFunction* constructor = JSFunction::cast(obj_constructor); | |
1502 if (constructor == NULL) return false; | |
1503 if (constructor->initial_map() == heap_object->map()) return true; | |
1504 return false; | |
rmcilroy
2015/10/02 18:09:46
Nit - just:
return constructor != nullptr && constructor->initial_map() == heap_object->map();
mythria
2015/10/05 10:55:34
Done.
| |
1505 } | |
1506 | |
1507 | |
1488 void Heap::ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page, | 1508 void Heap::ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page, |
1489 StoreBufferEvent event) { | 1509 StoreBufferEvent event) { |
1490 heap->store_buffer_rebuilder_.Callback(page, event); | 1510 heap->store_buffer_rebuilder_.Callback(page, event); |
1491 } | 1511 } |
1492 | 1512 |
1493 | 1513 |
1494 void PromotionQueue::Initialize() { | 1514 void PromotionQueue::Initialize() { |
1495 // The last to-space page may be used for promotion queue. On promotion | 1515 // The last to-space page may be used for promotion queue. On promotion |
1496 // conflict, we use the emergency stack. | 1516 // conflict, we use the emergency stack. |
1497 DCHECK((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize) == | 1517 DCHECK((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize) == |
(...skipping 98 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1596 // updated as a side effect of promoting an object. | 1616 // updated as a side effect of promoting an object. |
1597 // | 1617 // |
1598 // There is guaranteed to be enough room at the top of the to space | 1618 // There is guaranteed to be enough room at the top of the to space |
1599 // for the addresses of promoted objects: every object promoted | 1619 // for the addresses of promoted objects: every object promoted |
1600 // frees up its size in bytes from the top of the new space, and | 1620 // frees up its size in bytes from the top of the new space, and |
1601 // objects are at least one pointer in size. | 1621 // objects are at least one pointer in size. |
1602 Address new_space_front = new_space_.ToSpaceStart(); | 1622 Address new_space_front = new_space_.ToSpaceStart(); |
1603 promotion_queue_.Initialize(); | 1623 promotion_queue_.Initialize(); |
1604 | 1624 |
1605 ScavengeVisitor scavenge_visitor(this); | 1625 ScavengeVisitor scavenge_visitor(this); |
1626 | |
1627 if (FLAG_scavenge_reclaim_unmodified_objects) { | |
1628 isolate()->global_handles()->IdentifyWeakUnmodifiedObjects( | |
1629 &IsUnModifiedNewSpaceObject); | |
1630 } | |
1631 | |
1606 { | 1632 { |
1607 // Copy roots. | 1633 // Copy roots. |
1608 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_ROOTS); | 1634 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_ROOTS); |
1609 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); | 1635 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); |
1610 } | 1636 } |
1611 | 1637 |
1612 { | 1638 { |
1613 // Copy objects reachable from the old generation. | 1639 // Copy objects reachable from the old generation. |
1614 GCTracer::Scope gc_scope(tracer(), | 1640 GCTracer::Scope gc_scope(tracer(), |
1615 GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS); | 1641 GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS); |
(...skipping 18 matching lines...) Expand all Loading... | |
1634 if (collector->is_code_flushing_enabled()) { | 1660 if (collector->is_code_flushing_enabled()) { |
1635 collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor); | 1661 collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor); |
1636 } | 1662 } |
1637 } | 1663 } |
1638 | 1664 |
1639 { | 1665 { |
1640 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE); | 1666 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE); |
1641 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1667 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
1642 } | 1668 } |
1643 | 1669 |
1644 { | 1670 if (FLAG_scavenge_reclaim_unmodified_objects) { |
1671 isolate()->global_handles()->MarkNewSpaceWeakUnModifiedObjectsPending( | |
1672 &IsUnscavengedHeapObject); | |
1673 | |
1674 isolate()->global_handles()->IterateNewSpaceWeakUnModifiedRoots( | |
1675 &scavenge_visitor); | |
1676 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | |
1677 } else { | |
1645 GCTracer::Scope gc_scope(tracer(), | 1678 GCTracer::Scope gc_scope(tracer(), |
1646 GCTracer::Scope::SCAVENGER_OBJECT_GROUPS); | 1679 GCTracer::Scope::SCAVENGER_OBJECT_GROUPS); |
1647 while (isolate()->global_handles()->IterateObjectGroups( | 1680 while (isolate()->global_handles()->IterateObjectGroups( |
1648 &scavenge_visitor, &IsUnscavengedHeapObject)) { | 1681 &scavenge_visitor, &IsUnscavengedHeapObject)) { |
1649 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1682 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
1650 } | 1683 } |
1651 isolate()->global_handles()->RemoveObjectGroups(); | 1684 isolate()->global_handles()->RemoveObjectGroups(); |
1652 isolate()->global_handles()->RemoveImplicitRefGroups(); | 1685 isolate()->global_handles()->RemoveImplicitRefGroups(); |
1686 | |
1687 isolate()->global_handles()->IdentifyNewSpaceWeakIndependentHandles( | |
1688 &IsUnscavengedHeapObject); | |
1689 | |
1690 isolate()->global_handles()->IterateNewSpaceWeakIndependentRoots( | |
1691 &scavenge_visitor); | |
1692 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | |
1653 } | 1693 } |
1654 | 1694 |
1655 isolate()->global_handles()->IdentifyNewSpaceWeakIndependentHandles( | |
1656 &IsUnscavengedHeapObject); | |
1657 | |
1658 isolate()->global_handles()->IterateNewSpaceWeakIndependentRoots( | |
1659 &scavenge_visitor); | |
1660 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | |
1661 | |
1662 UpdateNewSpaceReferencesInExternalStringTable( | 1695 UpdateNewSpaceReferencesInExternalStringTable( |
1663 &UpdateNewSpaceReferenceInExternalStringTableEntry); | 1696 &UpdateNewSpaceReferenceInExternalStringTableEntry); |
1664 | 1697 |
1665 promotion_queue_.Destroy(); | 1698 promotion_queue_.Destroy(); |
1666 | 1699 |
1667 incremental_marking()->UpdateMarkingDequeAfterScavenge(); | 1700 incremental_marking()->UpdateMarkingDequeAfterScavenge(); |
1668 | 1701 |
1669 ScavengeWeakObjectRetainer weak_object_retainer(this); | 1702 ScavengeWeakObjectRetainer weak_object_retainer(this); |
1670 ProcessYoungWeakReferences(&weak_object_retainer); | 1703 ProcessYoungWeakReferences(&weak_object_retainer); |
1671 | 1704 |
(...skipping 4424 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
6096 } | 6129 } |
6097 | 6130 |
6098 | 6131 |
6099 // static | 6132 // static |
6100 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6133 int Heap::GetStaticVisitorIdForMap(Map* map) { |
6101 return StaticVisitorBase::GetVisitorId(map); | 6134 return StaticVisitorBase::GetVisitorId(map); |
6102 } | 6135 } |
6103 | 6136 |
6104 } // namespace internal | 6137 } // namespace internal |
6105 } // namespace v8 | 6138 } // namespace v8 |
OLD | NEW |