OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/ast/scopeinfo.h" | 9 #include "src/ast/scopeinfo.h" |
10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
(...skipping 801 matching lines...)
812 void Heap::ScheduleIdleScavengeIfNeeded(int bytes_allocated) { | 812 void Heap::ScheduleIdleScavengeIfNeeded(int bytes_allocated) { |
813 scavenge_job_->ScheduleIdleTaskIfNeeded(this, bytes_allocated); | 813 scavenge_job_->ScheduleIdleTaskIfNeeded(this, bytes_allocated); |
814 } | 814 } |
815 | 815 |
816 | 816 |
817 void Heap::FinalizeIncrementalMarking(const char* gc_reason) { | 817 void Heap::FinalizeIncrementalMarking(const char* gc_reason) { |
818 if (FLAG_trace_incremental_marking) { | 818 if (FLAG_trace_incremental_marking) { |
819 PrintF("[IncrementalMarking] (%s).\n", gc_reason); | 819 PrintF("[IncrementalMarking] (%s).\n", gc_reason); |
820 } | 820 } |
821 | 821 |
822 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE); | 822 TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE); |
823 HistogramTimerScope incremental_marking_scope( | 823 HistogramTimerScope incremental_marking_scope( |
824 isolate()->counters()->gc_incremental_marking_finalize()); | 824 isolate()->counters()->gc_incremental_marking_finalize()); |
825 TRACE_EVENT0("v8", "V8.GCIncrementalMarkingFinalize"); | 825 TRACE_EVENT0("v8", "V8.GCIncrementalMarkingFinalize"); |
826 | 826 |
827 { | 827 { |
828 GCCallbacksScope scope(this); | 828 GCCallbacksScope scope(this); |
829 if (scope.CheckReenter()) { | 829 if (scope.CheckReenter()) { |
830 AllowHeapAllocation allow_allocation; | 830 AllowHeapAllocation allow_allocation; |
831 GCTracer::Scope scope(tracer(), | 831 TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE); |
832 GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE); | |
833 VMState<EXTERNAL> state(isolate_); | 832 VMState<EXTERNAL> state(isolate_); |
834 HandleScope handle_scope(isolate_); | 833 HandleScope handle_scope(isolate_); |
835 CallGCPrologueCallbacks(kGCTypeIncrementalMarking, kNoGCCallbackFlags); | 834 CallGCPrologueCallbacks(kGCTypeIncrementalMarking, kNoGCCallbackFlags); |
836 } | 835 } |
837 } | 836 } |
838 incremental_marking()->FinalizeIncrementally(); | 837 incremental_marking()->FinalizeIncrementally(); |
839 { | 838 { |
840 GCCallbacksScope scope(this); | 839 GCCallbacksScope scope(this); |
841 if (scope.CheckReenter()) { | 840 if (scope.CheckReenter()) { |
842 AllowHeapAllocation allow_allocation; | 841 AllowHeapAllocation allow_allocation; |
843 GCTracer::Scope scope(tracer(), | 842 TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE); |
844 GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE); | |
845 VMState<EXTERNAL> state(isolate_); | 843 VMState<EXTERNAL> state(isolate_); |
846 HandleScope handle_scope(isolate_); | 844 HandleScope handle_scope(isolate_); |
847 CallGCEpilogueCallbacks(kGCTypeIncrementalMarking, kNoGCCallbackFlags); | 845 CallGCEpilogueCallbacks(kGCTypeIncrementalMarking, kNoGCCallbackFlags); |
848 } | 846 } |
849 } | 847 } |
850 } | 848 } |
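
A note on the pattern in this and the following hunks: each hand-written GCTracer::Scope on the OLD side is replaced one-for-one by a TRACE_GC invocation on the NEW side. The macro definition itself is not part of this file, so the following is only a minimal sketch of what it could expand to; the enum type name ScopeId, the "v8.gc" trace category, and the GCTracer::Scope::Name() helper are assumptions made for illustration and are not visible in this diff.

    // Sketch only, not the actual V8 definition. Deliberately not wrapped in
    // do { ... } while (0): the Scope object must survive to the end of the
    // enclosing block so its destructor can record the phase's elapsed time.
    #define TRACE_GC(tracer, scope_id)                                 \
      GCTracer::Scope::ScopeId gc_tracer_scope_id = (scope_id);        \
      GCTracer::Scope gc_tracer_scope((tracer), gc_tracer_scope_id);   \
      TRACE_EVENT0("v8.gc", GCTracer::Scope::Name(gc_tracer_scope_id))

Evaluating scope_id once into a local also keeps the conditional scope ids used later in this CL (prologue vs. epilogue selection by collector) from being evaluated twice.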
851 | 849 |
852 | 850 |
853 HistogramTimer* Heap::GCTypeTimer(GarbageCollector collector) { | 851 HistogramTimer* Heap::GCTypeTimer(GarbageCollector collector) { |
854 if (collector == SCAVENGER) { | 852 if (collector == SCAVENGER) { |
(...skipping 429 matching lines...)
1284 } | 1282 } |
1285 #endif | 1283 #endif |
1286 | 1284 |
1287 GCType gc_type = | 1285 GCType gc_type = |
1288 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; | 1286 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; |
1289 | 1287 |
1290 { | 1288 { |
1291 GCCallbacksScope scope(this); | 1289 GCCallbacksScope scope(this); |
1292 if (scope.CheckReenter()) { | 1290 if (scope.CheckReenter()) { |
1293 AllowHeapAllocation allow_allocation; | 1291 AllowHeapAllocation allow_allocation; |
1294 GCTracer::Scope scope(tracer(), | 1292 TRACE_GC(tracer(), collector == MARK_COMPACTOR |
1295 collector == MARK_COMPACTOR | 1293 ? GCTracer::Scope::MC_EXTERNAL_PROLOGUE |
1296 ? GCTracer::Scope::MC_EXTERNAL_PROLOGUE | 1294 : GCTracer::Scope::SCAVENGER_EXTERNAL_PROLOGUE); |
1297 : GCTracer::Scope::SCAVENGER_EXTERNAL_PROLOGUE); | |
1298 VMState<EXTERNAL> state(isolate_); | 1295 VMState<EXTERNAL> state(isolate_); |
1299 HandleScope handle_scope(isolate_); | 1296 HandleScope handle_scope(isolate_); |
1300 CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags); | 1297 CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags); |
1301 } | 1298 } |
1302 } | 1299 } |
1303 | 1300 |
1304 EnsureFromSpaceIsCommitted(); | 1301 EnsureFromSpaceIsCommitted(); |
1305 | 1302 |
1306 int start_new_space_size = Heap::new_space()->SizeAsInt(); | 1303 int start_new_space_size = Heap::new_space()->SizeAsInt(); |
1307 | 1304 |
(...skipping 26 matching lines...)
1334 } | 1331 } |
1335 | 1332 |
1336 UpdateSurvivalStatistics(start_new_space_size); | 1333 UpdateSurvivalStatistics(start_new_space_size); |
1337 ConfigureInitialOldGenerationSize(); | 1334 ConfigureInitialOldGenerationSize(); |
1338 | 1335 |
1339 isolate_->counters()->objs_since_last_young()->Set(0); | 1336 isolate_->counters()->objs_since_last_young()->Set(0); |
1340 | 1337 |
1341 gc_post_processing_depth_++; | 1338 gc_post_processing_depth_++; |
1342 { | 1339 { |
1343 AllowHeapAllocation allow_allocation; | 1340 AllowHeapAllocation allow_allocation; |
1344 GCTracer::Scope scope(tracer(), | 1341 TRACE_GC(tracer(), GCTracer::Scope::EXTERNAL_WEAK_GLOBAL_HANDLES); |
1345 GCTracer::Scope::EXTERNAL_WEAK_GLOBAL_HANDLES); | |
1346 freed_global_handles = | 1342 freed_global_handles = |
1347 isolate_->global_handles()->PostGarbageCollectionProcessing( | 1343 isolate_->global_handles()->PostGarbageCollectionProcessing( |
1348 collector, gc_callback_flags); | 1344 collector, gc_callback_flags); |
1349 } | 1345 } |
1350 gc_post_processing_depth_--; | 1346 gc_post_processing_depth_--; |
1351 | 1347 |
1352 isolate_->eternal_handles()->PostGarbageCollectionProcessing(this); | 1348 isolate_->eternal_handles()->PostGarbageCollectionProcessing(this); |
1353 | 1349 |
1354 // Update relocatables. | 1350 // Update relocatables. |
1355 Relocatable::PostGarbageCollectionProcessing(isolate_); | 1351 Relocatable::PostGarbageCollectionProcessing(isolate_); |
1356 | 1352 |
1357 double gc_speed = tracer()->CombinedMarkCompactSpeedInBytesPerMillisecond(); | 1353 double gc_speed = tracer()->CombinedMarkCompactSpeedInBytesPerMillisecond(); |
1358 double mutator_speed = | 1354 double mutator_speed = |
1359 tracer()->CurrentOldGenerationAllocationThroughputInBytesPerMillisecond(); | 1355 tracer()->CurrentOldGenerationAllocationThroughputInBytesPerMillisecond(); |
1360 intptr_t old_gen_size = PromotedSpaceSizeOfObjects(); | 1356 intptr_t old_gen_size = PromotedSpaceSizeOfObjects(); |
1361 if (collector == MARK_COMPACTOR) { | 1357 if (collector == MARK_COMPACTOR) { |
1362 // Register the amount of external allocated memory. | 1358 // Register the amount of external allocated memory. |
1363 amount_of_external_allocated_memory_at_last_global_gc_ = | 1359 amount_of_external_allocated_memory_at_last_global_gc_ = |
1364 amount_of_external_allocated_memory_; | 1360 amount_of_external_allocated_memory_; |
1365 SetOldGenerationAllocationLimit(old_gen_size, gc_speed, mutator_speed); | 1361 SetOldGenerationAllocationLimit(old_gen_size, gc_speed, mutator_speed); |
1366 } else if (HasLowYoungGenerationAllocationRate() && | 1362 } else if (HasLowYoungGenerationAllocationRate() && |
1367 old_generation_size_configured_) { | 1363 old_generation_size_configured_) { |
1368 DampenOldGenerationAllocationLimit(old_gen_size, gc_speed, mutator_speed); | 1364 DampenOldGenerationAllocationLimit(old_gen_size, gc_speed, mutator_speed); |
1369 } | 1365 } |
1370 | 1366 |
1371 { | 1367 { |
1372 GCCallbacksScope scope(this); | 1368 GCCallbacksScope scope(this); |
1373 if (scope.CheckReenter()) { | 1369 if (scope.CheckReenter()) { |
1374 AllowHeapAllocation allow_allocation; | 1370 AllowHeapAllocation allow_allocation; |
1375 GCTracer::Scope scope(tracer(), | 1371 TRACE_GC(tracer(), collector == MARK_COMPACTOR |
1376 collector == MARK_COMPACTOR | 1372 ? GCTracer::Scope::MC_EXTERNAL_EPILOGUE |
1377 ? GCTracer::Scope::MC_EXTERNAL_EPILOGUE | 1373 : GCTracer::Scope::SCAVENGER_EXTERNAL_EPILOGUE); |
1378 : GCTracer::Scope::SCAVENGER_EXTERNAL_EPILOGUE); | |
1379 VMState<EXTERNAL> state(isolate_); | 1374 VMState<EXTERNAL> state(isolate_); |
1380 HandleScope handle_scope(isolate_); | 1375 HandleScope handle_scope(isolate_); |
1381 CallGCEpilogueCallbacks(gc_type, gc_callback_flags); | 1376 CallGCEpilogueCallbacks(gc_type, gc_callback_flags); |
1382 } | 1377 } |
1383 } | 1378 } |
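
Both the OLD explicit scopes and the NEW TRACE_GC call sites in the prologue/epilogue hunks above depend on the same RAII idea: construction starts a timer, and destruction at the end of the braced block charges the elapsed time to the named GC phase. The class body is not part of this diff, so the following is only a sketch of that behaviour; the member names (scope_time_ms_, MonotonicMs) and the use of std::chrono are assumptions for illustration, not V8's actual implementation.

    #include <chrono>

    // A minimal sketch of the scoped phase timer both columns rely on.
    class GCTracer {
     public:
      class Scope {
       public:
        enum ScopeId {
          MC_INCREMENTAL_FINALIZE,
          MC_EXTERNAL_PROLOGUE,
          SCAVENGER_SCAVENGE,
          // ... one enumerator per traced GC phase ...
          NUMBER_OF_SCOPES
        };
        Scope(GCTracer* tracer, ScopeId scope)
            : tracer_(tracer), scope_(scope), start_ms_(MonotonicMs()) {}
        ~Scope() {
          // Credit the elapsed time to this phase when the block ends.
          tracer_->scope_time_ms_[scope_] += MonotonicMs() - start_ms_;
        }
       private:
        static double MonotonicMs() {
          using namespace std::chrono;
          return duration<double, std::milli>(
                     steady_clock::now().time_since_epoch())
              .count();
        }
        GCTracer* tracer_;
        ScopeId scope_;
        double start_ms_;
      };

     private:
      // Accumulated milliseconds per phase, reported by the tracer elsewhere.
      double scope_time_ms_[Scope::NUMBER_OF_SCOPES] = {};
    };

With this shape, the conditional scope ids above (MC_EXTERNAL_PROLOGUE vs. SCAVENGER_EXTERNAL_PROLOGUE) only decide which bucket the measured interval is charged to; the timing itself is identical for both collectors.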
1384 | 1379 |
1385 #ifdef VERIFY_HEAP | 1380 #ifdef VERIFY_HEAP |
1386 if (FLAG_verify_heap) { | 1381 if (FLAG_verify_heap) { |
1387 VerifyStringTable(this); | 1382 VerifyStringTable(this); |
1388 } | 1383 } |
(...skipping 232 matching lines...)
1621 } | 1616 } |
1622 return NULL; | 1617 return NULL; |
1623 } | 1618 } |
1624 | 1619 |
1625 private: | 1620 private: |
1626 Heap* heap_; | 1621 Heap* heap_; |
1627 }; | 1622 }; |
1628 | 1623 |
1629 | 1624 |
1630 void Heap::Scavenge() { | 1625 void Heap::Scavenge() { |
1631 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE); | 1626 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE); |
1632 RelocationLock relocation_lock(this); | 1627 RelocationLock relocation_lock(this); |
1633 // There are soft limits in the allocation code, designed to trigger a mark | 1628 // There are soft limits in the allocation code, designed to trigger a mark |
1634 // sweep collection by failing allocations. There is no sense in trying to | 1629 // sweep collection by failing allocations. There is no sense in trying to |
1635 // trigger one during scavenge: scavenges allocation should always succeed. | 1630 // trigger one during scavenge: scavenges allocation should always succeed. |
1636 AlwaysAllocateScope scope(isolate()); | 1631 AlwaysAllocateScope scope(isolate()); |
1637 | 1632 |
1638 // Bump-pointer allocations done during scavenge are not real allocations. | 1633 // Bump-pointer allocations done during scavenge are not real allocations. |
1639 // Pause the inline allocation steps. | 1634 // Pause the inline allocation steps. |
1640 PauseAllocationObserversScope pause_observers(this); | 1635 PauseAllocationObserversScope pause_observers(this); |
1641 | 1636 |
(...skipping 40 matching lines...)
1682 | 1677 |
1683 ScavengeVisitor scavenge_visitor(this); | 1678 ScavengeVisitor scavenge_visitor(this); |
1684 | 1679 |
1685 if (FLAG_scavenge_reclaim_unmodified_objects) { | 1680 if (FLAG_scavenge_reclaim_unmodified_objects) { |
1686 isolate()->global_handles()->IdentifyWeakUnmodifiedObjects( | 1681 isolate()->global_handles()->IdentifyWeakUnmodifiedObjects( |
1687 &IsUnmodifiedHeapObject); | 1682 &IsUnmodifiedHeapObject); |
1688 } | 1683 } |
1689 | 1684 |
1690 { | 1685 { |
1691 // Copy roots. | 1686 // Copy roots. |
1692 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_ROOTS); | 1687 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_ROOTS); |
1693 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); | 1688 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); |
1694 } | 1689 } |
1695 | 1690 |
1696 { | 1691 { |
1697 // Copy objects reachable from the old generation. | 1692 // Copy objects reachable from the old generation. |
1698 GCTracer::Scope gc_scope(tracer(), | 1693 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS); |
1699 GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS); | |
1700 RememberedSet<OLD_TO_NEW>::IterateWithWrapper(this, | 1694 RememberedSet<OLD_TO_NEW>::IterateWithWrapper(this, |
1701 Scavenger::ScavengeObject); | 1695 Scavenger::ScavengeObject); |
1702 } | 1696 } |
1703 | 1697 |
1704 { | 1698 { |
1705 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_WEAK); | 1699 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_WEAK); |
1706 // Copy objects reachable from the encountered weak collections list. | 1700 // Copy objects reachable from the encountered weak collections list. |
1707 scavenge_visitor.VisitPointer(&encountered_weak_collections_); | 1701 scavenge_visitor.VisitPointer(&encountered_weak_collections_); |
1708 // Copy objects reachable from the encountered weak cells. | 1702 // Copy objects reachable from the encountered weak cells. |
1709 scavenge_visitor.VisitPointer(&encountered_weak_cells_); | 1703 scavenge_visitor.VisitPointer(&encountered_weak_cells_); |
1710 } | 1704 } |
1711 | 1705 |
1712 { | 1706 { |
1713 // Copy objects reachable from the code flushing candidates list. | 1707 // Copy objects reachable from the code flushing candidates list. |
1714 GCTracer::Scope gc_scope(tracer(), | 1708 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_CODE_FLUSH_CANDIDATES); |
1715 GCTracer::Scope::SCAVENGER_CODE_FLUSH_CANDIDATES); | |
1716 MarkCompactCollector* collector = mark_compact_collector(); | 1709 MarkCompactCollector* collector = mark_compact_collector(); |
1717 if (collector->is_code_flushing_enabled()) { | 1710 if (collector->is_code_flushing_enabled()) { |
1718 collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor); | 1711 collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor); |
1719 } | 1712 } |
1720 } | 1713 } |
1721 | 1714 |
1722 { | 1715 { |
1723 GCTracer::Scope gc_scope(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE); | 1716 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE); |
1724 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1717 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
1725 } | 1718 } |
1726 | 1719 |
1727 if (FLAG_scavenge_reclaim_unmodified_objects) { | 1720 if (FLAG_scavenge_reclaim_unmodified_objects) { |
1728 isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending( | 1721 isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending( |
1729 &IsUnscavengedHeapObject); | 1722 &IsUnscavengedHeapObject); |
1730 | 1723 |
1731 isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots( | 1724 isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots( |
1732 &scavenge_visitor); | 1725 &scavenge_visitor); |
1733 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1726 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
1734 } else { | 1727 } else { |
1735 GCTracer::Scope gc_scope(tracer(), | 1728 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_OBJECT_GROUPS); |
1736 GCTracer::Scope::SCAVENGER_OBJECT_GROUPS); | |
1737 while (isolate()->global_handles()->IterateObjectGroups( | 1729 while (isolate()->global_handles()->IterateObjectGroups( |
1738 &scavenge_visitor, &IsUnscavengedHeapObject)) { | 1730 &scavenge_visitor, &IsUnscavengedHeapObject)) { |
1739 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1731 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
1740 } | 1732 } |
1741 isolate()->global_handles()->RemoveObjectGroups(); | 1733 isolate()->global_handles()->RemoveObjectGroups(); |
1742 isolate()->global_handles()->RemoveImplicitRefGroups(); | 1734 isolate()->global_handles()->RemoveImplicitRefGroups(); |
1743 | 1735 |
1744 isolate()->global_handles()->IdentifyNewSpaceWeakIndependentHandles( | 1736 isolate()->global_handles()->IdentifyNewSpaceWeakIndependentHandles( |
1745 &IsUnscavengedHeapObject); | 1737 &IsUnscavengedHeapObject); |
1746 | 1738 |
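
One consequence of a macro that expands to fixed local variable names (as in the sketch after the FinalizeIncrementalMarking hunk) is that two TRACE_GC invocations in the same block would redeclare those locals and fail to compile. The Scavenge call sites above avoid that, and bound each measurement precisely, by giving every phase its own braces nested under the function-wide SCAVENGER_SCAVENGE scope, roughly:

    // Shape of the call sites in Heap::Scavenge after this change (excerpted
    // and abbreviated from the NEW column above).
    void Heap::Scavenge() {
      TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE);  // whole scavenge
      // ...
      {
        TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_ROOTS);   // sub-phase
        IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
      }
      {
        TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE);
        new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
      }
      // ...
    }

With a simple additive accumulator like the sketch above, time spent in the inner scopes is also counted under the outer SCAVENGER_SCAVENGE scope, so the sub-phase numbers read as a breakdown of the total scavenge time.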
(...skipping 4737 matching lines...)
6484 } | 6476 } |
6485 | 6477 |
6486 | 6478 |
6487 // static | 6479 // static |
6488 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6480 int Heap::GetStaticVisitorIdForMap(Map* map) { |
6489 return StaticVisitorBase::GetVisitorId(map); | 6481 return StaticVisitorBase::GetVisitorId(map); |
6490 } | 6482 } |
6491 | 6483 |
6492 } // namespace internal | 6484 } // namespace internal |
6493 } // namespace v8 | 6485 } // namespace v8 |