| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_HEAP_H_ | 5 #ifndef V8_HEAP_HEAP_H_ |
| 6 #define V8_HEAP_HEAP_H_ | 6 #define V8_HEAP_HEAP_H_ |
| 7 | 7 |
| 8 #include <cmath> | 8 #include <cmath> |
| 9 #include <map> | 9 #include <map> |
| 10 | 10 |
| (...skipping 623 matching lines...) |
| 634 }; | 634 }; |
| 635 | 635 |
| 636 // Indicates whether live bytes adjustment is triggered | 636 // Indicates whether live bytes adjustment is triggered |
| 637 // - from within the GC code before sweeping started (SEQUENTIAL_TO_SWEEPER), | 637 // - from within the GC code before sweeping started (SEQUENTIAL_TO_SWEEPER), |
| 638 // - or from within GC (CONCURRENT_TO_SWEEPER), | 638 // - or from within GC (CONCURRENT_TO_SWEEPER), |
| 639 // - or mutator code (CONCURRENT_TO_SWEEPER). | 639 // - or mutator code (CONCURRENT_TO_SWEEPER). |
| 640 enum InvocationMode { SEQUENTIAL_TO_SWEEPER, CONCURRENT_TO_SWEEPER }; | 640 enum InvocationMode { SEQUENTIAL_TO_SWEEPER, CONCURRENT_TO_SWEEPER }; |
| 641 | 641 |
| 642 enum PretenuringFeedbackInsertionMode { kCached, kGlobal }; | 642 enum PretenuringFeedbackInsertionMode { kCached, kGlobal }; |
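Reviewer note: the InvocationMode comment above is easiest to read next to a call site. A minimal sketch of how the mode is typically passed follows; the helper name and argument types are assumptions based on the surrounding code, not part of this hunk.
    // Sketch only; AdjustLiveBytes and its parameters are assumed, not shown in this CL.
    // Adjustment made by the GC itself, before sweeping has started:
    heap->AdjustLiveBytes(object, -size_delta, Heap::SEQUENTIAL_TO_SWEEPER);
    // The same adjustment issued while the concurrent sweeper may be running
    // (from later GC phases or from mutator code):
    heap->AdjustLiveBytes(object, -size_delta, Heap::CONCURRENT_TO_SWEEPER);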
| 643 | 643 |
| 644 enum FindMementoMode { kForRuntime, kForGC }; | |
| 645 | |
| 646 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT }; | 644 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT }; |
| 647 | 645 |
| 648 // Taking this lock prevents the GC from entering a phase that relocates | 646 // Taking this lock prevents the GC from entering a phase that relocates |
| 649 // object references. | 647 // object references. |
| 650 class RelocationLock { | 648 class RelocationLock { |
| 651 public: | 649 public: |
| 652 explicit RelocationLock(Heap* heap) : heap_(heap) { | 650 explicit RelocationLock(Heap* heap) : heap_(heap) { |
| 653 heap_->relocation_mutex_.Lock(); | 651 heap_->relocation_mutex_.Lock(); |
| 654 } | 652 } |
| 655 | 653 |
| (...skipping 269 matching lines...) |
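Reviewer note: RelocationLock reads as a scoped (RAII) guard over relocation_mutex_; the matching unlock is presumably in the destructor, which falls in the elided lines. A minimal usage sketch under that assumption:
    // Sketch only; assumes ~RelocationLock() releases relocation_mutex_
    // (the destructor is outside this hunk).
    {
      Heap::RelocationLock relocation_lock(heap);
      // Object addresses are stable here: the GC cannot enter a phase that
      // relocates object references while the lock is held.
    }  // lock released at end of scope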
| 925 | 923 |
| 926 // Print short heap statistics. | 924 // Print short heap statistics. |
| 927 void PrintShortHeapStatistics(); | 925 void PrintShortHeapStatistics(); |
| 928 | 926 |
| 929 inline HeapState gc_state() { return gc_state_; } | 927 inline HeapState gc_state() { return gc_state_; } |
| 930 | 928 |
| 931 inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; } | 929 inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; } |
| 932 | 930 |
| 933 // If an object has an AllocationMemento trailing it, return it, otherwise | 931 // If an object has an AllocationMemento trailing it, return it, otherwise |
| 934 // return NULL; | 932 // return NULL; |
| 935 template <FindMementoMode mode> | |
| 936 inline AllocationMemento* FindAllocationMemento(HeapObject* object); | 933 inline AllocationMemento* FindAllocationMemento(HeapObject* object); |
| 937 | 934 |
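Reviewer note: per the comment, FindAllocationMemento may return NULL, so call sites need a null check. An illustrative pattern, not part of this CL:
    // Illustrative only; matches the non-template declaration in the new column.
    AllocationMemento* memento = heap->FindAllocationMemento(heap_object);
    if (memento != NULL) {
      // e.g. use the memento's AllocationSite for pretenuring feedback.
    }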
| 938 // Returns false if not able to reserve. | 935 // Returns false if not able to reserve. |
| 939 bool ReserveSpace(Reservation* reservations); | 936 bool ReserveSpace(Reservation* reservations); |
| 940 | 937 |
| 941 // | 938 // |
| 942 // Support for the API. | 939 // Support for the API. |
| 943 // | 940 // |
| 944 | 941 |
| 945 void CreateApiObjects(); | 942 void CreateApiObjects(); |
| (...skipping 461 matching lines...) |
| 1407 // Returns the available bytes in space w/o growing. | 1404 // Returns the available bytes in space w/o growing. |
| 1408 // Heap doesn't guarantee that it can allocate an object that requires | 1405 // Heap doesn't guarantee that it can allocate an object that requires |
| 1409 // all available bytes. Check MaxHeapObjectSize() instead. | 1406 // all available bytes. Check MaxHeapObjectSize() instead. |
| 1410 intptr_t Available(); | 1407 intptr_t Available(); |
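Reviewer note: the comment warns that Available() is a heap-wide figure, not a per-allocation bound. Illustrative contrast only:
    // Available() may report many megabytes spread across spaces and pages,
    // but a single object must still fit under MaxHeapObjectSize(), so sizing
    // one allocation from Available() alone can still fail.
    intptr_t heap_wide = heap->Available();  // not a single-object limit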
| 1411 | 1408 |
| 1412 // Returns the size of all objects residing in the heap. | 1409 // Returns the size of all objects residing in the heap. |
| 1413 intptr_t SizeOfObjects(); | 1410 intptr_t SizeOfObjects(); |
| 1414 | 1411 |
| 1415 void UpdateSurvivalStatistics(int start_new_space_size); | 1412 void UpdateSurvivalStatistics(int start_new_space_size); |
| 1416 | 1413 |
| 1417 inline void IncrementPromotedObjectsSize(intptr_t object_size) { | 1414 inline void IncrementPromotedObjectsSize(int object_size) { |
| 1418 DCHECK_GE(object_size, 0); | 1415 DCHECK_GE(object_size, 0); |
| 1419 promoted_objects_size_ += object_size; | 1416 promoted_objects_size_ += object_size; |
| 1420 } | 1417 } |
| 1421 inline intptr_t promoted_objects_size() { return promoted_objects_size_; } | 1418 inline intptr_t promoted_objects_size() { return promoted_objects_size_; } |
| 1422 | 1419 |
| 1423 inline void IncrementSemiSpaceCopiedObjectSize(intptr_t object_size) { | 1420 inline void IncrementSemiSpaceCopiedObjectSize(int object_size) { |
| 1424 DCHECK_GE(object_size, 0); | 1421 DCHECK_GE(object_size, 0); |
| 1425 semi_space_copied_object_size_ += object_size; | 1422 semi_space_copied_object_size_ += object_size; |
| 1426 } | 1423 } |
| 1427 inline intptr_t semi_space_copied_object_size() { | 1424 inline intptr_t semi_space_copied_object_size() { |
| 1428 return semi_space_copied_object_size_; | 1425 return semi_space_copied_object_size_; |
| 1429 } | 1426 } |
| 1430 | 1427 |
| 1431 inline intptr_t SurvivedNewSpaceObjectSize() { | 1428 inline intptr_t SurvivedNewSpaceObjectSize() { |
| 1432 return promoted_objects_size_ + semi_space_copied_object_size_; | 1429 return promoted_objects_size_ + semi_space_copied_object_size_; |
| 1433 } | 1430 } |
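Reviewer note: a worked example of the accounting above (numbers are made up for illustration):
    // After a scavenge that promoted 300 KB to the old generation and copied
    // 200 KB within the semispaces:
    //   IncrementPromotedObjectsSize(300 * KB);
    //   IncrementSemiSpaceCopiedObjectSize(200 * KB);
    //   SurvivedNewSpaceObjectSize()  // == 500 * KB
    // (KB is simply 1024 here.)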
| 1434 | 1431 |
| 1435 inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; } | 1432 inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; } |
| 1436 | 1433 |
| 1437 inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; } | 1434 inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; } |
| 1438 | 1435 |
| 1439 inline void IncrementNodesPromoted() { nodes_promoted_++; } | 1436 inline void IncrementNodesPromoted() { nodes_promoted_++; } |
| 1440 | 1437 |
| 1441 inline void IncrementYoungSurvivorsCounter(intptr_t survived) { | 1438 inline void IncrementYoungSurvivorsCounter(int survived) { |
| 1442 DCHECK_GE(survived, 0); | 1439 DCHECK(survived >= 0); |
| 1443 survived_last_scavenge_ = survived; | 1440 survived_last_scavenge_ = survived; |
| 1444 survived_since_last_expansion_ += survived; | 1441 survived_since_last_expansion_ += survived; |
| 1445 } | 1442 } |
| 1446 | 1443 |
| 1447 inline intptr_t PromotedTotalSize() { | 1444 inline intptr_t PromotedTotalSize() { |
| 1448 int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize(); | 1445 int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize(); |
| 1449 if (total > std::numeric_limits<intptr_t>::max()) { | 1446 if (total > std::numeric_limits<intptr_t>::max()) { |
| 1450 // TODO(erikcorry): Use uintptr_t everywhere we do heap size calculations. | 1447 // TODO(erikcorry): Use uintptr_t everywhere we do heap size calculations. |
| 1451 return std::numeric_limits<intptr_t>::max(); | 1448 return std::numeric_limits<intptr_t>::max(); |
| 1452 } | 1449 } |
| (...skipping 728 matching lines...) |
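Reviewer note: the clamp in PromotedTotalSize() matters mostly on 32-bit targets, where intptr_t is 32 bits. A worked example with hypothetical numbers:
    // 32-bit target: INTPTR_MAX == 2^31 - 1 (~2.147 GB).
    //   PromotedSpaceSizeOfObjects() == 1.5 GB
    //   PromotedExternalMemorySize() == 0.8 GB
    //   int64_t total                == 2.3 GB   > INTPTR_MAX
    // PromotedTotalSize() therefore returns INTPTR_MAX instead of letting the
    // value wrap negative when narrowed back to intptr_t.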
| 2181 int initial_semispace_size_; | 2178 int initial_semispace_size_; |
| 2182 int target_semispace_size_; | 2179 int target_semispace_size_; |
| 2183 intptr_t max_old_generation_size_; | 2180 intptr_t max_old_generation_size_; |
| 2184 intptr_t initial_old_generation_size_; | 2181 intptr_t initial_old_generation_size_; |
| 2185 bool old_generation_size_configured_; | 2182 bool old_generation_size_configured_; |
| 2186 intptr_t max_executable_size_; | 2183 intptr_t max_executable_size_; |
| 2187 intptr_t maximum_committed_; | 2184 intptr_t maximum_committed_; |
| 2188 | 2185 |
| 2189 // For keeping track of how much data has survived | 2186 // For keeping track of how much data has survived |
| 2190 // scavenge since last new space expansion. | 2187 // scavenge since last new space expansion. |
| 2191 intptr_t survived_since_last_expansion_; | 2188 int survived_since_last_expansion_; |
| 2192 | 2189 |
| 2193 // ... and since the last scavenge. | 2190 // ... and since the last scavenge. |
| 2194 intptr_t survived_last_scavenge_; | 2191 int survived_last_scavenge_; |
| 2195 | 2192 |
| 2196 // This is not the depth of nested AlwaysAllocateScope's but rather a single | 2193 // This is not the depth of nested AlwaysAllocateScope's but rather a single |
| 2197 // count, as scopes can be acquired from multiple tasks (read: threads). | 2194 // count, as scopes can be acquired from multiple tasks (read: threads). |
| 2198 AtomicNumber<size_t> always_allocate_scope_count_; | 2195 AtomicNumber<size_t> always_allocate_scope_count_; |
| 2199 | 2196 |
| 2200 // For keeping track of context disposals. | 2197 // For keeping track of context disposals. |
| 2201 int contexts_disposed_; | 2198 int contexts_disposed_; |
| 2202 | 2199 |
| 2203 // The length of the retained_maps array at the time of context disposal. | 2200 // The length of the retained_maps array at the time of context disposal. |
| 2204 // This separates maps in the retained_maps array that were created before | 2201 // This separates maps in the retained_maps array that were created before |
| (...skipping 642 matching lines...) |
| 2847 | 2844 |
| 2848 private: | 2845 private: |
| 2849 friend class NewSpace; | 2846 friend class NewSpace; |
| 2850 DISALLOW_COPY_AND_ASSIGN(InlineAllocationObserver); | 2847 DISALLOW_COPY_AND_ASSIGN(InlineAllocationObserver); |
| 2851 }; | 2848 }; |
| 2852 | 2849 |
| 2853 } // namespace internal | 2850 } // namespace internal |
| 2854 } // namespace v8 | 2851 } // namespace v8 |
| 2855 | 2852 |
| 2856 #endif // V8_HEAP_HEAP_H_ | 2853 #endif // V8_HEAP_HEAP_H_ |