OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_HEAP_H_ | 5 #ifndef V8_HEAP_HEAP_H_ |
6 #define V8_HEAP_HEAP_H_ | 6 #define V8_HEAP_HEAP_H_ |
7 | 7 |
8 #include <cmath> | 8 #include <cmath> |
9 #include <map> | 9 #include <map> |
10 | 10 |
(...skipping 402 matching lines...) | |
413 V(BooleanMap) \ | 413 V(BooleanMap) \ |
414 V(UninitializedMap) \ | 414 V(UninitializedMap) \ |
415 V(ArgumentsMarkerMap) \ | 415 V(ArgumentsMarkerMap) \ |
416 V(JSMessageObjectMap) \ | 416 V(JSMessageObjectMap) \ |
417 V(ForeignMap) \ | 417 V(ForeignMap) \ |
418 V(NeanderMap) \ | 418 V(NeanderMap) \ |
419 V(empty_string) \ | 419 V(empty_string) \ |
420 PRIVATE_SYMBOL_LIST(V) | 420 PRIVATE_SYMBOL_LIST(V) |
421 | 421 |
422 // Forward declarations. | 422 // Forward declarations. |
423 class ArrayBufferTracker; | |
423 class HeapObjectsFilter; | 424 class HeapObjectsFilter; |
424 class HeapStats; | 425 class HeapStats; |
425 class Isolate; | 426 class Isolate; |
426 class MemoryReducer; | 427 class MemoryReducer; |
427 class ObjectStats; | 428 class ObjectStats; |
428 class WeakObjectRetainer; | 429 class WeakObjectRetainer; |
429 | 430 |
430 | 431 |
431 // A queue of objects promoted during scavenge. Each object is accompanied | 432 // A queue of objects promoted during scavenge. Each object is accompanied |
432 // by its size to avoid dereferencing a map pointer for scanning. | 433 // by its size to avoid dereferencing a map pointer for scanning. |
(...skipping 544 matching lines...) | |
977 int global_ic_age() { return global_ic_age_; } | 978 int global_ic_age() { return global_ic_age_; } |
978 | 979 |
979 void AgeInlineCaches() { | 980 void AgeInlineCaches() { |
980 global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax; | 981 global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax; |
981 } | 982 } |
982 | 983 |
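The masking in AgeInlineCaches() makes the age counter wrap instead of growing without bound. A minimal standalone sketch of that wrap-around; the 8-bit field width is an assumption, the real mask is SharedFunctionInfo::ICAgeBits::kMax:

    #include <cstdio>

    int main() {
      const int kMax = (1 << 8) - 1;  // assumed width of the IC age field
      int age = kMax;                 // counter at its maximum value
      age = (age + 1) & kMax;         // wraps to 0 instead of overflowing
      std::printf("%d\n", age);       // prints 0
      return 0;
    }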
983 int64_t amount_of_external_allocated_memory() { | 984 int64_t amount_of_external_allocated_memory() { |
984 return amount_of_external_allocated_memory_; | 985 return amount_of_external_allocated_memory_; |
985 } | 986 } |
986 | 987 |
988 void update_amount_of_external_allocated_memory(int64_t delta) { | |
989 amount_of_external_allocated_memory_ += delta; | |
990 } | |
991 | |
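The added setter gives code outside Heap, such as the ArrayBufferTracker this CL introduces, a way to adjust the external-memory counter by a signed delta instead of writing the field directly. A minimal, self-contained sketch of that accessor pair; HeapCounter and the byte counts are illustrative only, not V8 code:

    #include <cstdint>
    #include <cstdio>

    // Stand-in for the Heap accessor pair above.
    class HeapCounter {
     public:
      int64_t amount_of_external_allocated_memory() const { return amount_; }
      void update_amount_of_external_allocated_memory(int64_t delta) {
        amount_ += delta;  // negative delta when a backing store is freed
      }

     private:
      int64_t amount_ = 0;
    };

    int main() {
      HeapCounter heap;
      heap.update_amount_of_external_allocated_memory(4096);   // buffer created
      heap.update_amount_of_external_allocated_memory(-4096);  // buffer freed
      std::printf("%lld\n", static_cast<long long>(
                      heap.amount_of_external_allocated_memory()));  // prints 0
      return 0;
    }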
987 void DeoptMarkedAllocationSites(); | 992 void DeoptMarkedAllocationSites(); |
988 | 993 |
989 bool DeoptMaybeTenuredAllocationSites() { | 994 bool DeoptMaybeTenuredAllocationSites() { |
990 return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0; | 995 return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0; |
991 } | 996 } |
992 | 997 |
993 void AddWeakObjectToCodeDependency(Handle<HeapObject> obj, | 998 void AddWeakObjectToCodeDependency(Handle<HeapObject> obj, |
994 Handle<DependentCode> dep); | 999 Handle<DependentCode> dep); |
995 | 1000 |
996 DependentCode* LookupWeakObjectToCodeDependency(Handle<HeapObject> obj); | 1001 DependentCode* LookupWeakObjectToCodeDependency(Handle<HeapObject> obj); |
997 | 1002 |
998 void AddRetainedMap(Handle<Map> map); | 1003 void AddRetainedMap(Handle<Map> map); |
999 | 1004 |
1000 // This event is triggered after successful allocation of a new object made | 1005 // This event is triggered after successful allocation of a new object made |
1001 // by the runtime. Allocations of target space for object evacuation do not | 1006 // by the runtime. Allocations of target space for object evacuation do not |
1002 // trigger the event. In order to track ALL allocations one must turn off | 1007 // trigger the event. In order to track ALL allocations one must turn off |
1003 // FLAG_inline_new and FLAG_use_allocation_folding. | 1008 // FLAG_inline_new and FLAG_use_allocation_folding. |
1004 inline void OnAllocationEvent(HeapObject* object, int size_in_bytes); | 1009 inline void OnAllocationEvent(HeapObject* object, int size_in_bytes); |
1005 | 1010 |
1006 // This event is triggered after an object is moved to a new place. | 1011 // This event is triggered after an object is moved to a new place. |
1007 inline void OnMoveEvent(HeapObject* target, HeapObject* source, | 1012 inline void OnMoveEvent(HeapObject* target, HeapObject* source, |
1008 int size_in_bytes); | 1013 int size_in_bytes); |
1009 | 1014 |
1010 bool deserialization_complete() const { return deserialization_complete_; } | 1015 bool deserialization_complete() const { return deserialization_complete_; } |
1011 | 1016 |
1012 // The following methods are used to track raw C++ pointers to externally | |
1013 // allocated memory used as backing store in live array buffers. | |
1014 | |
1015 // A new ArrayBuffer was created with |data| as backing store. | |
1016 void RegisterNewArrayBuffer(bool in_new_space, void* data, size_t length); | |
Michael Lippautz 2015/09/04 08:24:15
Create public wrapper calls only for RegisterNewArrayBuffer
fedor.indutny 2015/09/04 08:58:05
Acknowledged.
1017 | |
1018 // The backing store |data| is no longer owned by V8. | |
1019 void UnregisterArrayBuffer(bool in_new_space, void* data); | |
1020 | |
1021 // A live ArrayBuffer was discovered during marking/scavenge. | |
1022 void RegisterLiveArrayBuffer(bool in_new_space, void* data); | |
1023 | |
1024 // Frees all backing store pointers that weren't discovered in the previous | |
1025 // marking or scavenge phase. | |
1026 void FreeDeadArrayBuffers(bool from_scavenge); | |
1027 | |
1028 // Prepare for a new scavenge phase. A new marking phase is implicitly | |
1029 // prepared by finishing the previous one. | |
1030 void PrepareArrayBufferDiscoveryInNewSpace(); | |
1031 | |
1032 // An ArrayBuffer moved from new space to old space. | |
1033 void PromoteArrayBuffer(Object* buffer); | |
1034 | |
1035 bool HasLowAllocationRate(); | 1017 bool HasLowAllocationRate(); |
1036 bool HasHighFragmentation(); | 1018 bool HasHighFragmentation(); |
1037 bool HasHighFragmentation(intptr_t used, intptr_t committed); | 1019 bool HasHighFragmentation(intptr_t used, intptr_t committed); |
1038 | 1020 |
1039 // =========================================================================== | 1021 // =========================================================================== |
1040 // Initialization. =========================================================== | 1022 // Initialization. =========================================================== |
1041 // =========================================================================== | 1023 // =========================================================================== |
1042 | 1024 |
1043 // Configure heap size in MB before setup. Return false if the heap has been | 1025 // Configure heap size in MB before setup. Return false if the heap has been |
1044 // set up already. | 1026 // set up already. |
(...skipping 68 matching lines...) | |
1113 GCTracer* tracer() { return tracer_; } | 1095 GCTracer* tracer() { return tracer_; } |
1114 | 1096 |
1115 PromotionQueue* promotion_queue() { return &promotion_queue_; } | 1097 PromotionQueue* promotion_queue() { return &promotion_queue_; } |
1116 | 1098 |
1117 inline Isolate* isolate(); | 1099 inline Isolate* isolate(); |
1118 | 1100 |
1119 MarkCompactCollector* mark_compact_collector() { | 1101 MarkCompactCollector* mark_compact_collector() { |
1120 return &mark_compact_collector_; | 1102 return &mark_compact_collector_; |
1121 } | 1103 } |
1122 | 1104 |
1105 inline ArrayBufferTracker* array_buffer_tracker() { | |
1106 return array_buffer_tracker_; | |
1107 } | |
1108 | |
1123 // =========================================================================== | 1109 // =========================================================================== |
1124 // Root set access. ========================================================== | 1110 // Root set access. ========================================================== |
1125 // =========================================================================== | 1111 // =========================================================================== |
1126 | 1112 |
1127 // Heap root getters. | 1113 // Heap root getters. |
1128 #define ROOT_ACCESSOR(type, name, camel_name) inline type* name(); | 1114 #define ROOT_ACCESSOR(type, name, camel_name) inline type* name(); |
1129 ROOT_LIST(ROOT_ACCESSOR) | 1115 ROOT_LIST(ROOT_ACCESSOR) |
1130 #undef ROOT_ACCESSOR | 1116 #undef ROOT_ACCESSOR |
1131 | 1117 |
1132 // Utility type maps. | 1118 // Utility type maps. |
(...skipping 615 matching lines...) | |
1748 | 1734 |
1749 // Deopts all code that contains allocation instructions which are tenured or | 1735 // Deopts all code that contains allocation instructions which are tenured or |
1750 // not tenured. Moreover, it clears the pretenuring allocation site statistics. | 1736 // not tenured. Moreover, it clears the pretenuring allocation site statistics. |
1751 void ResetAllAllocationSitesDependentCode(PretenureFlag flag); | 1737 void ResetAllAllocationSitesDependentCode(PretenureFlag flag); |
1752 | 1738 |
1753 // Evaluates local pretenuring for the old space and calls | 1739 // Evaluates local pretenuring for the old space and calls |
1754 // ResetAllTenuredAllocationSitesDependentCode if too many objects died in | 1740 // ResetAllTenuredAllocationSitesDependentCode if too many objects died in |
1755 // the old space. | 1741 // the old space. |
1756 void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc); | 1742 void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc); |
1757 | 1743 |
1758 // Called on heap tear-down. Frees all remaining ArrayBuffer backing stores. | |
1759 void TearDownArrayBuffers(); | |
1760 | |
1761 // Record statistics before and after garbage collection. | 1744 // Record statistics before and after garbage collection. |
1762 void ReportStatisticsBeforeGC(); | 1745 void ReportStatisticsBeforeGC(); |
1763 void ReportStatisticsAfterGC(); | 1746 void ReportStatisticsAfterGC(); |
1764 | 1747 |
1765 // Creates and installs the full-sized number string cache. | 1748 // Creates and installs the full-sized number string cache. |
1766 int FullSizeNumberStringCacheLength(); | 1749 int FullSizeNumberStringCacheLength(); |
1767 // Flush the number to string cache. | 1750 // Flush the number to string cache. |
1768 void FlushNumberStringCache(); | 1751 void FlushNumberStringCache(); |
1769 | 1752 |
1770 // Sets used allocation site entries to undefined. | 1753 // Sets used allocation site entries to undefined. |
(...skipping 561 matching lines...) | |
2332 base::Semaphore pending_unmapping_tasks_semaphore_; | 2315 base::Semaphore pending_unmapping_tasks_semaphore_; |
2333 | 2316 |
2334 base::Mutex relocation_mutex_; | 2317 base::Mutex relocation_mutex_; |
2335 | 2318 |
2336 int gc_callbacks_depth_; | 2319 int gc_callbacks_depth_; |
2337 | 2320 |
2338 bool deserialization_complete_; | 2321 bool deserialization_complete_; |
2339 | 2322 |
2340 bool concurrent_sweeping_enabled_; | 2323 bool concurrent_sweeping_enabled_; |
2341 | 2324 |
2342 // |live_array_buffers_| maps externally allocated memory used as backing | 2325 StrongRootsList* strong_roots_list_; |
2343 // store for ArrayBuffers to the length of the respective memory blocks. | |
2344 // | |
2345 // At the beginning of mark/compact, |not_yet_discovered_array_buffers_| is | |
2346 // a copy of |live_array_buffers_| and we remove pointers as we discover live | |
2347 // ArrayBuffer objects during marking. At the end of mark/compact, the | |
2348 // remaining memory blocks can be freed. | |
2349 std::map<void*, size_t> live_array_buffers_; | |
2350 std::map<void*, size_t> not_yet_discovered_array_buffers_; | |
2351 | 2326 |
2352 // To be able to free memory held by ArrayBuffers during scavenge as well, we | 2327 ArrayBufferTracker* array_buffer_tracker_; |
2353 // have a separate list of allocated memory held by ArrayBuffers in new space. | |
2354 // | |
2355 // Since mark/compact also evacuates the new space, all pointers in the | |
2356 // |live_array_buffers_for_scavenge_| list are also in the | |
2357 // |live_array_buffers_| list. | |
2358 std::map<void*, size_t> live_array_buffers_for_scavenge_; | |
2359 std::map<void*, size_t> not_yet_discovered_array_buffers_for_scavenge_; | |
2360 | |
2361 StrongRootsList* strong_roots_list_; | |
2362 | 2328 |
2363 // Classes in "heap" can be friends. | 2329 // Classes in "heap" can be friends. |
2364 friend class AlwaysAllocateScope; | 2330 friend class AlwaysAllocateScope; |
2365 friend class GCCallbacksScope; | 2331 friend class GCCallbacksScope; |
2366 friend class GCTracer; | 2332 friend class GCTracer; |
2367 friend class HeapIterator; | 2333 friend class HeapIterator; |
2368 friend class IncrementalMarking; | 2334 friend class IncrementalMarking; |
2369 friend class MarkCompactCollector; | 2335 friend class MarkCompactCollector; |
2370 friend class MarkCompactMarkingVisitor; | 2336 friend class MarkCompactMarkingVisitor; |
2371 friend class Page; | 2337 friend class Page; |
(...skipping 369 matching lines...) | |
2741 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. | 2707 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. |
2742 | 2708 |
2743 private: | 2709 private: |
2744 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); | 2710 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); |
2745 }; | 2711 }; |
2746 #endif // DEBUG | 2712 #endif // DEBUG |
2747 } | 2713 } |
2748 } // namespace v8::internal | 2714 } // namespace v8::internal |
2749 | 2715 |
2750 #endif // V8_HEAP_HEAP_H_ | 2716 #endif // V8_HEAP_HEAP_H_ |