OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_HEAP_H_ | 5 #ifndef V8_HEAP_HEAP_H_ |
6 #define V8_HEAP_HEAP_H_ | 6 #define V8_HEAP_HEAP_H_ |
7 | 7 |
8 #include <cmath> | 8 #include <cmath> |
9 #include <map> | 9 #include <map> |
10 | 10 |
(...skipping 620 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
631 }; | 631 }; |
632 | 632 |
633 // Indicates whether live bytes adjustment is triggered | 633 // Indicates whether live bytes adjustment is triggered |
634 // - from within the GC code before sweeping started (SEQUENTIAL_TO_SWEEPER), | 634 // - from within the GC code before sweeping started (SEQUENTIAL_TO_SWEEPER), |
635 // - or from within GC (CONCURRENT_TO_SWEEPER), | 635 // - or from within GC (CONCURRENT_TO_SWEEPER), |
636 // - or from mutator code (CONCURRENT_TO_SWEEPER). | 636 // - or from mutator code (CONCURRENT_TO_SWEEPER). |
637 enum InvocationMode { SEQUENTIAL_TO_SWEEPER, CONCURRENT_TO_SWEEPER }; | 637 enum InvocationMode { SEQUENTIAL_TO_SWEEPER, CONCURRENT_TO_SWEEPER }; |
638 | 638 |
639 enum PretenuringFeedbackInsertionMode { kCached, kGlobal }; | 639 enum PretenuringFeedbackInsertionMode { kCached, kGlobal }; |
640 | 640 |
641 enum FindMementoMode { kForRuntime, kForParallelEvacuation }; | |
Hannes Payer (out of office)
2016/01/20 13:19:39
kForGC would be enough detail.
Michael Lippautz
2016/01/21 10:00:08
Done.
| |
642 | |
641 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT }; | 643 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT }; |
642 | 644 |
643 // Taking this lock prevents the GC from entering a phase that relocates | 645 // Taking this lock prevents the GC from entering a phase that relocates |
644 // object references. | 646 // object references. |
645 class RelocationLock { | 647 class RelocationLock { |
646 public: | 648 public: |
647 explicit RelocationLock(Heap* heap) : heap_(heap) { | 649 explicit RelocationLock(Heap* heap) : heap_(heap) { |
648 heap_->relocation_mutex_.Lock(); | 650 heap_->relocation_mutex_.Lock(); |
649 } | 651 } |
650 | 652 |
(...skipping 269 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
920 | 922 |
921 // Print short heap statistics. | 923 // Print short heap statistics. |
922 void PrintShortHeapStatistics(); | 924 void PrintShortHeapStatistics(); |
923 | 925 |
924 inline HeapState gc_state() { return gc_state_; } | 926 inline HeapState gc_state() { return gc_state_; } |
925 | 927 |
926 inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; } | 928 inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; } |
927 | 929 |
928 // If an object has an AllocationMemento trailing it, return it, otherwise | 930 // If an object has an AllocationMemento trailing it, return it, otherwise |
929 // return NULL. | 931 // return NULL. |
932 template <int find_memento_mode> | |
Hannes Payer (out of office)
2016/01/20 13:19:39
s/int find_memento_mode/Heap::FindMementoMode mode/
Michael Lippautz
2016/01/21 10:00:08
Done.
| |
930 inline AllocationMemento* FindAllocationMemento(HeapObject* object); | 933 inline AllocationMemento* FindAllocationMemento(HeapObject* object); |
931 | 934 |
932 // Returns false if not able to reserve. | 935 // Returns false if not able to reserve. |
933 bool ReserveSpace(Reservation* reservations); | 936 bool ReserveSpace(Reservation* reservations); |
934 | 937 |
935 // | 938 // |
936 // Support for the API. | 939 // Support for the API. |
937 // | 940 // |
938 | 941 |
939 void CreateApiObjects(); | 942 void CreateApiObjects(); |
(...skipping 461 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1401 // Returns the available bytes in space w/o growing. | 1404 // Returns the available bytes in space w/o growing. |
1402 // Heap doesn't guarantee that it can allocate an object that requires | 1405 // Heap doesn't guarantee that it can allocate an object that requires |
1403 // all available bytes. Check MaxHeapObjectSize() instead. | 1406 // all available bytes. Check MaxHeapObjectSize() instead. |
1404 intptr_t Available(); | 1407 intptr_t Available(); |
1405 | 1408 |
1406 // Returns the size of all objects residing in the heap. | 1409 // Returns the size of all objects residing in the heap. |
1407 intptr_t SizeOfObjects(); | 1410 intptr_t SizeOfObjects(); |
1408 | 1411 |
1409 void UpdateSurvivalStatistics(int start_new_space_size); | 1412 void UpdateSurvivalStatistics(int start_new_space_size); |
1410 | 1413 |
1411 inline void IncrementPromotedObjectsSize(int object_size) { | 1414 inline void IncrementPromotedObjectsSize(intptr_t object_size) { |
1412 DCHECK_GE(object_size, 0); | 1415 DCHECK_GE(object_size, 0); |
1413 promoted_objects_size_ += object_size; | 1416 promoted_objects_size_ += object_size; |
1414 } | 1417 } |
1415 inline intptr_t promoted_objects_size() { return promoted_objects_size_; } | 1418 inline intptr_t promoted_objects_size() { return promoted_objects_size_; } |
1416 | 1419 |
1417 inline void IncrementSemiSpaceCopiedObjectSize(int object_size) { | 1420 inline void IncrementSemiSpaceCopiedObjectSize(intptr_t object_size) { |
1418 DCHECK_GE(object_size, 0); | 1421 DCHECK_GE(object_size, 0); |
1419 semi_space_copied_object_size_ += object_size; | 1422 semi_space_copied_object_size_ += object_size; |
1420 } | 1423 } |
1421 inline intptr_t semi_space_copied_object_size() { | 1424 inline intptr_t semi_space_copied_object_size() { |
1422 return semi_space_copied_object_size_; | 1425 return semi_space_copied_object_size_; |
1423 } | 1426 } |
1424 | 1427 |
1425 inline intptr_t SurvivedNewSpaceObjectSize() { | 1428 inline intptr_t SurvivedNewSpaceObjectSize() { |
1426 return promoted_objects_size_ + semi_space_copied_object_size_; | 1429 return promoted_objects_size_ + semi_space_copied_object_size_; |
1427 } | 1430 } |
1428 | 1431 |
1429 inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; } | 1432 inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; } |
1430 | 1433 |
1431 inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; } | 1434 inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; } |
1432 | 1435 |
1433 inline void IncrementNodesPromoted() { nodes_promoted_++; } | 1436 inline void IncrementNodesPromoted() { nodes_promoted_++; } |
1434 | 1437 |
1435 inline void IncrementYoungSurvivorsCounter(int survived) { | 1438 inline void IncrementYoungSurvivorsCounter(intptr_t survived) { |
1436 DCHECK(survived >= 0); | 1439 DCHECK_GE(survived, 0); |
1437 survived_last_scavenge_ = survived; | 1440 survived_last_scavenge_ = survived; |
1438 survived_since_last_expansion_ += survived; | 1441 survived_since_last_expansion_ += survived; |
1439 } | 1442 } |
1440 | 1443 |
1441 inline intptr_t PromotedTotalSize() { | 1444 inline intptr_t PromotedTotalSize() { |
1442 int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize(); | 1445 int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize(); |
1443 if (total > std::numeric_limits<intptr_t>::max()) { | 1446 if (total > std::numeric_limits<intptr_t>::max()) { |
1444 // TODO(erikcorry): Use uintptr_t everywhere we do heap size calculations. | 1447 // TODO(erikcorry): Use uintptr_t everywhere we do heap size calculations. |
1445 return std::numeric_limits<intptr_t>::max(); | 1448 return std::numeric_limits<intptr_t>::max(); |
1446 } | 1449 } |
(...skipping 728 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2175 int initial_semispace_size_; | 2178 int initial_semispace_size_; |
2176 int target_semispace_size_; | 2179 int target_semispace_size_; |
2177 intptr_t max_old_generation_size_; | 2180 intptr_t max_old_generation_size_; |
2178 intptr_t initial_old_generation_size_; | 2181 intptr_t initial_old_generation_size_; |
2179 bool old_generation_size_configured_; | 2182 bool old_generation_size_configured_; |
2180 intptr_t max_executable_size_; | 2183 intptr_t max_executable_size_; |
2181 intptr_t maximum_committed_; | 2184 intptr_t maximum_committed_; |
2182 | 2185 |
2183 // For keeping track of how much data has survived | 2186 // For keeping track of how much data has survived |
2184 // scavenge since last new space expansion. | 2187 // scavenge since last new space expansion. |
2185 int survived_since_last_expansion_; | 2188 intptr_t survived_since_last_expansion_; |
2186 | 2189 |
2187 // ... and since the last scavenge. | 2190 // ... and since the last scavenge. |
2188 int survived_last_scavenge_; | 2191 intptr_t survived_last_scavenge_; |
2189 | 2192 |
2190 // This is not the depth of nested AlwaysAllocateScope's but rather a single | 2193 // This is not the depth of nested AlwaysAllocateScope's but rather a single |
2191 // count, as scopes can be acquired from multiple tasks (read: threads). | 2194 // count, as scopes can be acquired from multiple tasks (read: threads). |
2192 AtomicNumber<size_t> always_allocate_scope_count_; | 2195 AtomicNumber<size_t> always_allocate_scope_count_; |
2193 | 2196 |
2194 // For keeping track of context disposals. | 2197 // For keeping track of context disposals. |
2195 int contexts_disposed_; | 2198 int contexts_disposed_; |
2196 | 2199 |
2197 // The length of the retained_maps array at the time of context disposal. | 2200 // The length of the retained_maps array at the time of context disposal. |
2198 // This separates maps in the retained_maps array that were created before | 2201 // This separates maps in the retained_maps array that were created before |
(...skipping 588 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2787 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. | 2790 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. |
2788 | 2791 |
2789 private: | 2792 private: |
2790 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); | 2793 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); |
2791 }; | 2794 }; |
2792 #endif // DEBUG | 2795 #endif // DEBUG |
2793 } // namespace internal | 2796 } // namespace internal |
2794 } // namespace v8 | 2797 } // namespace v8 |
2795 | 2798 |
2796 #endif // V8_HEAP_HEAP_H_ | 2799 #endif // V8_HEAP_HEAP_H_ |
OLD | NEW |