| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_HEAP_H_ | 5 #ifndef V8_HEAP_HEAP_H_ |
| 6 #define V8_HEAP_HEAP_H_ | 6 #define V8_HEAP_HEAP_H_ |
| 7 | 7 |
| 8 #include <cmath> | 8 #include <cmath> |
| 9 #include <map> | 9 #include <map> |
| 10 | 10 |
| (...skipping 306 matching lines...) |
| 317 class AllocationObserver; | 317 class AllocationObserver; |
| 318 class ArrayBufferTracker; | 318 class ArrayBufferTracker; |
| 319 class GCIdleTimeAction; | 319 class GCIdleTimeAction; |
| 320 class GCIdleTimeHandler; | 320 class GCIdleTimeHandler; |
| 321 class GCIdleTimeHeapState; | 321 class GCIdleTimeHeapState; |
| 322 class GCTracer; | 322 class GCTracer; |
| 323 class HeapObjectsFilter; | 323 class HeapObjectsFilter; |
| 324 class HeapStats; | 324 class HeapStats; |
| 325 class HistogramTimer; | 325 class HistogramTimer; |
| 326 class Isolate; | 326 class Isolate; |
| 327 class MemoryAllocator; |
| 327 class MemoryReducer; | 328 class MemoryReducer; |
| 328 class ObjectStats; | 329 class ObjectStats; |
| 330 class PagedSpace; |
| 329 class Scavenger; | 331 class Scavenger; |
| 330 class ScavengeJob; | 332 class ScavengeJob; |
| 333 class Space; |
| 331 class StoreBuffer; | 334 class StoreBuffer; |
| 332 class WeakObjectRetainer; | 335 class WeakObjectRetainer; |
| 333 | 336 |
| 334 typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to); | 337 typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to); |
| 335 | 338 |
| 336 enum PromotionMode { PROMOTE_MARKED, DEFAULT_PROMOTION }; | 339 enum PromotionMode { PROMOTE_MARKED, DEFAULT_PROMOTION }; |
| 337 | 340 |
| 338 enum ArrayStorageAllocationMode { | 341 enum ArrayStorageAllocationMode { |
| 339 DONT_INITIALIZE_ARRAY_ELEMENTS, | 342 DONT_INITIALIZE_ARRAY_ELEMENTS, |
| 340 INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE | 343 INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE |
| (...skipping 241 matching lines...) |
| 582 // Notifies the heap that it is ok to start marking or other activities that | 585 // Notifies the heap that it is ok to start marking or other activities that |
| 583 // should not happen during deserialization. | 586 // should not happen during deserialization. |
| 584 void NotifyDeserializationComplete(); | 587 void NotifyDeserializationComplete(); |
| 585 | 588 |
| 586 intptr_t old_generation_allocation_limit() const { | 589 intptr_t old_generation_allocation_limit() const { |
| 587 return old_generation_allocation_limit_; | 590 return old_generation_allocation_limit_; |
| 588 } | 591 } |
| 589 | 592 |
| 590 bool always_allocate() { return always_allocate_scope_count_.Value() != 0; } | 593 bool always_allocate() { return always_allocate_scope_count_.Value() != 0; } |
| 591 | 594 |
| 592 Address* NewSpaceAllocationTopAddress() { | 595 inline Address* NewSpaceAllocationTopAddress(); |
| 593 return new_space_.allocation_top_address(); | 596 inline Address* NewSpaceAllocationLimitAddress(); |
| 594 } | 597 inline Address* OldSpaceAllocationTopAddress(); |
| 595 Address* NewSpaceAllocationLimitAddress() { | 598 inline Address* OldSpaceAllocationLimitAddress(); |
| 596 return new_space_.allocation_limit_address(); | |
| 597 } | |
| 598 | |
| 599 Address* OldSpaceAllocationTopAddress() { | |
| 600 return old_space_->allocation_top_address(); | |
| 601 } | |
| 602 Address* OldSpaceAllocationLimitAddress() { | |
| 603 return old_space_->allocation_limit_address(); | |
| 604 } | |
| 605 | 599 |
| 606 bool CanExpandOldGeneration(int size) { | 600 bool CanExpandOldGeneration(int size) { |
| 607 if (force_oom_) return false; | 601 if (force_oom_) return false; |
| 608 return (OldGenerationCapacity() + size) < MaxOldGenerationSize(); | 602 return (OldGenerationCapacity() + size) < MaxOldGenerationSize(); |
| 609 } | 603 } |
| 610 | 604 |
| 611 // Clear the Instanceof cache (used when a prototype changes). | 605 // Clear the Instanceof cache (used when a prototype changes). |
| 612 inline void ClearInstanceofCache(); | 606 inline void ClearInstanceofCache(); |
| 613 | 607 |
| 614 // FreeSpace objects have a null map after deserialization. Update the map. | 608 // FreeSpace objects have a null map after deserialization. Update the map. |
| (...skipping 108 matching lines...) |
| 723 bool is_isolate_locked); | 717 bool is_isolate_locked); |
| 724 void CheckMemoryPressure(); | 718 void CheckMemoryPressure(); |
| 725 | 719 |
| 726 double MonotonicallyIncreasingTimeInMs(); | 720 double MonotonicallyIncreasingTimeInMs(); |
| 727 | 721 |
| 728 void RecordStats(HeapStats* stats, bool take_snapshot = false); | 722 void RecordStats(HeapStats* stats, bool take_snapshot = false); |
| 729 | 723 |
| 730 // Check new space expansion criteria and expand semispaces if they were hit. | 724 // Check new space expansion criteria and expand semispaces if they were hit. |
| 731 void CheckNewSpaceExpansionCriteria(); | 725 void CheckNewSpaceExpansionCriteria(); |
| 732 | 726 |
| 733 inline bool HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit) { | 727 inline bool HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit); |
| 734 if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true; | |
| 735 | |
| 736 intptr_t adjusted_allocation_limit = limit - new_space_.Capacity(); | |
| 737 | |
| 738 if (PromotedTotalSize() >= adjusted_allocation_limit) return true; | |
| 739 | |
| 740 if (HighMemoryPressure()) return true; | |
| 741 | |
| 742 return false; | |
| 743 } | |
| 744 | 728 |
| 745 void VisitExternalResources(v8::ExternalResourceVisitor* visitor); | 729 void VisitExternalResources(v8::ExternalResourceVisitor* visitor); |
| 746 | 730 |
| 747 // An object should be promoted if the object has survived a | 731 // An object should be promoted if the object has survived a |
| 748 // scavenge operation. | 732 // scavenge operation. |
| 749 template <PromotionMode promotion_mode> | 733 template <PromotionMode promotion_mode> |
| 750 inline bool ShouldBePromoted(Address old_address, int object_size); | 734 inline bool ShouldBePromoted(Address old_address, int object_size); |
| 751 | 735 |
| 752 inline PromotionMode CurrentPromotionMode(); | 736 inline PromotionMode CurrentPromotionMode(); |
| 753 | 737 |
| (...skipping 124 matching lines...) |
| 878 // =========================================================================== | 862 // =========================================================================== |
| 879 | 863 |
| 880 Address NewSpaceTop() { return new_space_.top(); } | 864 Address NewSpaceTop() { return new_space_.top(); } |
| 881 | 865 |
| 882 NewSpace* new_space() { return &new_space_; } | 866 NewSpace* new_space() { return &new_space_; } |
| 883 OldSpace* old_space() { return old_space_; } | 867 OldSpace* old_space() { return old_space_; } |
| 884 OldSpace* code_space() { return code_space_; } | 868 OldSpace* code_space() { return code_space_; } |
| 885 MapSpace* map_space() { return map_space_; } | 869 MapSpace* map_space() { return map_space_; } |
| 886 LargeObjectSpace* lo_space() { return lo_space_; } | 870 LargeObjectSpace* lo_space() { return lo_space_; } |
| 887 | 871 |
| 888 PagedSpace* paged_space(int idx) { | 872 inline PagedSpace* paged_space(int idx); |
| 889 switch (idx) { | 873 inline Space* space(int idx); |
| 890 case OLD_SPACE: | |
| 891 return old_space(); | |
| 892 case MAP_SPACE: | |
| 893 return map_space(); | |
| 894 case CODE_SPACE: | |
| 895 return code_space(); | |
| 896 case NEW_SPACE: | |
| 897 case LO_SPACE: | |
| 898 UNREACHABLE(); | |
| 899 } | |
| 900 return NULL; | |
| 901 } | |
| 902 | |
| 903 Space* space(int idx) { | |
| 904 switch (idx) { | |
| 905 case NEW_SPACE: | |
| 906 return new_space(); | |
| 907 case LO_SPACE: | |
| 908 return lo_space(); | |
| 909 default: | |
| 910 return paged_space(idx); | |
| 911 } | |
| 912 } | |
| 913 | 874 |
| 914 // Returns name of the space. | 875 // Returns name of the space. |
| 915 const char* GetSpaceName(int idx); | 876 const char* GetSpaceName(int idx); |
| 916 | 877 |
| 917 // =========================================================================== | 878 // =========================================================================== |
| 918 // Getters to other components. ============================================== | 879 // Getters to other components. ============================================== |
| 919 // =========================================================================== | 880 // =========================================================================== |
| 920 | 881 |
| 921 GCTracer* tracer() { return tracer_; } | 882 GCTracer* tracer() { return tracer_; } |
| 922 | 883 |
| (...skipping 348 matching lines...) |
| 1271 inline intptr_t PromotedTotalSize() { | 1232 inline intptr_t PromotedTotalSize() { |
| 1272 int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize(); | 1233 int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize(); |
| 1273 if (total > std::numeric_limits<intptr_t>::max()) { | 1234 if (total > std::numeric_limits<intptr_t>::max()) { |
| 1274 // TODO(erikcorry): Use uintptr_t everywhere we do heap size calculations. | 1235 // TODO(erikcorry): Use uintptr_t everywhere we do heap size calculations. |
| 1275 return std::numeric_limits<intptr_t>::max(); | 1236 return std::numeric_limits<intptr_t>::max(); |
| 1276 } | 1237 } |
| 1277 if (total < 0) return 0; | 1238 if (total < 0) return 0; |
| 1278 return static_cast<intptr_t>(total); | 1239 return static_cast<intptr_t>(total); |
| 1279 } | 1240 } |
| 1280 | 1241 |
| 1281 void UpdateNewSpaceAllocationCounter() { | 1242 inline void UpdateNewSpaceAllocationCounter(); |
| 1282 new_space_allocation_counter_ = NewSpaceAllocationCounter(); | |
| 1283 } | |
| 1284 | 1243 |
| 1285 size_t NewSpaceAllocationCounter() { | 1244 inline size_t NewSpaceAllocationCounter(); |
| 1286 return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC(); | |
| 1287 } | |
| 1288 | 1245 |
| 1289 // This should be used only for testing. | 1246 // This should be used only for testing. |
| 1290 void set_new_space_allocation_counter(size_t new_value) { | 1247 void set_new_space_allocation_counter(size_t new_value) { |
| 1291 new_space_allocation_counter_ = new_value; | 1248 new_space_allocation_counter_ = new_value; |
| 1292 } | 1249 } |
| 1293 | 1250 |
| 1294 void UpdateOldGenerationAllocationCounter() { | 1251 void UpdateOldGenerationAllocationCounter() { |
| 1295 old_generation_allocation_counter_ = OldGenerationAllocationCounter(); | 1252 old_generation_allocation_counter_ = OldGenerationAllocationCounter(); |
| 1296 } | 1253 } |
| 1297 | 1254 |
| (...skipping 297 matching lines...) |
| 1595 NO_INLINE(void CreateJSConstructEntryStub()); | 1552 NO_INLINE(void CreateJSConstructEntryStub()); |
| 1596 | 1553 |
| 1597 void CreateFixedStubs(); | 1554 void CreateFixedStubs(); |
| 1598 | 1555 |
| 1599 HeapObject* DoubleAlignForDeserialization(HeapObject* object, int size); | 1556 HeapObject* DoubleAlignForDeserialization(HeapObject* object, int size); |
| 1600 | 1557 |
| 1601 // Commits from space if it is uncommitted. | 1558 // Commits from space if it is uncommitted. |
| 1602 void EnsureFromSpaceIsCommitted(); | 1559 void EnsureFromSpaceIsCommitted(); |
| 1603 | 1560 |
| 1604 // Uncommit unused semi space. | 1561 // Uncommit unused semi space. |
| 1605 bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); } | 1562 bool UncommitFromSpace(); |
| 1606 | 1563 |
| 1607 // Fill in bogus values in from space | 1564 // Fill in bogus values in from space |
| 1608 void ZapFromSpace(); | 1565 void ZapFromSpace(); |
| 1609 | 1566 |
| 1610 // Deopts all code that contains allocation instructions which are tenured or | 1567 // Deopts all code that contains allocation instructions which are tenured or |
| 1611 // not tenured. Moreover it clears the pretenuring allocation site statistics. | 1568 // not tenured. Moreover it clears the pretenuring allocation site statistics. |
| 1612 void ResetAllAllocationSitesDependentCode(PretenureFlag flag); | 1569 void ResetAllAllocationSitesDependentCode(PretenureFlag flag); |
| 1613 | 1570 |
| 1614 // Evaluates local pretenuring for the old space and calls | 1571 // Evaluates local pretenuring for the old space and calls |
| 1615 // ResetAllTenuredAllocationSitesDependentCode if too many objects died in | 1572 // ResetAllTenuredAllocationSitesDependentCode if too many objects died in |
| (...skipping 140 matching lines...) |
| 1756 | 1713 |
| 1757 // Calculates the allocation limit based on a given growing factor and a | 1714 // Calculates the allocation limit based on a given growing factor and a |
| 1758 // given old generation size. | 1715 // given old generation size. |
| 1759 intptr_t CalculateOldGenerationAllocationLimit(double factor, | 1716 intptr_t CalculateOldGenerationAllocationLimit(double factor, |
| 1760 intptr_t old_gen_size); | 1717 intptr_t old_gen_size); |
| 1761 | 1718 |
| 1762 // Sets the allocation limit to trigger the next full garbage collection. | 1719 // Sets the allocation limit to trigger the next full garbage collection. |
| 1763 void SetOldGenerationAllocationLimit(intptr_t old_gen_size, double gc_speed, | 1720 void SetOldGenerationAllocationLimit(intptr_t old_gen_size, double gc_speed, |
| 1764 double mutator_speed); | 1721 double mutator_speed); |
| 1765 | 1722 |
| 1766 intptr_t MinimumAllocationLimitGrowingStep() { | 1723 intptr_t MinimumAllocationLimitGrowingStep(); |
| 1767 const double kRegularAllocationLimitGrowingStep = 8; | |
| 1768 const double kLowMemoryAllocationLimitGrowingStep = 2; | |
| 1769 intptr_t limit = (Page::kPageSize > MB ? Page::kPageSize : MB); | |
| 1770 return limit * (ShouldOptimizeForMemoryUsage() | |
| 1771 ? kLowMemoryAllocationLimitGrowingStep | |
| 1772 : kRegularAllocationLimitGrowingStep); | |
| 1773 } | |
| 1774 | 1724 |
| 1775 // =========================================================================== | 1725 // =========================================================================== |
| 1776 // Idle notification. ======================================================== | 1726 // Idle notification. ======================================================== |
| 1777 // =========================================================================== | 1727 // =========================================================================== |
| 1778 | 1728 |
| 1779 bool RecentIdleNotificationHappened(); | 1729 bool RecentIdleNotificationHappened(); |
| 1780 void ScheduleIdleScavengeIfNeeded(int bytes_allocated); | 1730 void ScheduleIdleScavengeIfNeeded(int bytes_allocated); |
| 1781 | 1731 |
| 1782 // =========================================================================== | 1732 // =========================================================================== |
| 1783 // HeapIterator helpers. ===================================================== | 1733 // HeapIterator helpers. ===================================================== |
| (...skipping 889 matching lines...) |
| 2673 friend class LargeObjectSpace; | 2623 friend class LargeObjectSpace; |
| 2674 friend class NewSpace; | 2624 friend class NewSpace; |
| 2675 friend class PagedSpace; | 2625 friend class PagedSpace; |
| 2676 DISALLOW_COPY_AND_ASSIGN(AllocationObserver); | 2626 DISALLOW_COPY_AND_ASSIGN(AllocationObserver); |
| 2677 }; | 2627 }; |
| 2678 | 2628 |
| 2679 } // namespace internal | 2629 } // namespace internal |
| 2680 } // namespace v8 | 2630 } // namespace v8 |
| 2681 | 2631 |
| 2682 #endif // V8_HEAP_HEAP_H_ | 2632 #endif // V8_HEAP_HEAP_H_ |
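
For context, the method bodies dropped from heap.h above are turned into plain inline declarations; their definitions presumably move to an -inl.h header (heap-inl.h in V8's convention, not shown in this section), which reduces what heap.h itself has to pull in -- note the new forward declarations of MemoryAllocator, PagedSpace, and Space near the top of the diff. A minimal, self-contained sketch of that declare-in-.h / define-in--inl.h pattern, using hypothetical names (Widget, Backing, widget-inl.h) rather than the real V8 files:

    // widget.h -- only forward-declares Backing, so including widget.h stays cheap.
    #ifndef WIDGET_H_
    #define WIDGET_H_

    class Backing;  // forward declaration; no heavy include required here

    class Widget {
     public:
      // Declared inline here, defined in widget-inl.h.
      inline int* TopAddress();

     private:
      Backing* backing_;
    };

    #endif  // WIDGET_H_

    // widget-inl.h -- included only by translation units that call TopAddress().
    #ifndef WIDGET_INL_H_
    #define WIDGET_INL_H_

    #include "backing.h"  // full definition of Backing, needed for the body
    #include "widget.h"

    inline int* Widget::TopAddress() { return backing_->top_address(); }

    #endif  // WIDGET_INL_H_

Only code that actually calls the accessor needs the -inl.h header (and, transitively, backing.h); everything else can include the lightweight widget.h and rely on the forward declaration alone.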