| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_HEAP_H_ | 5 #ifndef V8_HEAP_HEAP_H_ |
| 6 #define V8_HEAP_HEAP_H_ | 6 #define V8_HEAP_HEAP_H_ |
| 7 | 7 |
| 8 #include <cmath> | 8 #include <cmath> |
| 9 #include <map> | 9 #include <map> |
| 10 | 10 |
| (...skipping 976 matching lines...) |
| 987 | 987 |
| 988 // Sets the non_monomorphic_cache_ (only used when expanding the dictionary). | 988 // Sets the non_monomorphic_cache_ (only used when expanding the dictionary). |
| 989 void public_set_non_monomorphic_cache(UnseededNumberDictionary* value) { | 989 void public_set_non_monomorphic_cache(UnseededNumberDictionary* value) { |
| 990 roots_[kNonMonomorphicCacheRootIndex] = value; | 990 roots_[kNonMonomorphicCacheRootIndex] = value; |
| 991 } | 991 } |
| 992 | 992 |
| 993 void public_set_empty_script(Script* script) { | 993 void public_set_empty_script(Script* script) { |
| 994 roots_[kEmptyScriptRootIndex] = script; | 994 roots_[kEmptyScriptRootIndex] = script; |
| 995 } | 995 } |
| 996 | 996 |
| 997 void public_set_store_buffer_top(Address* top) { | |
| 998 roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top); | |
| 999 } | |
| 1000 | |
| 1001 void public_set_materialized_objects(FixedArray* objects) { | 997 void public_set_materialized_objects(FixedArray* objects) { |
| 1002 roots_[kMaterializedObjectsRootIndex] = objects; | 998 roots_[kMaterializedObjectsRootIndex] = objects; |
| 1003 } | 999 } |
| 1004 | 1000 |
| 1005 // Generated code can embed this address to get access to the roots. | 1001 // Generated code can embed this address to get access to the roots. |
| 1006 Object** roots_array_start() { return roots_; } | 1002 Object** roots_array_start() { return roots_; } |
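Not part of the patch: a minimal sketch of how the roots array exposed by roots_array_start() can be indexed. The kEmptyScriptRootIndex constant mirrors the setter above; the helper name and the availability of a Heap* are illustrative assumptions.

    // Sketch only: roots_array_start() returns the base of roots_, so a root
    // can be read by indexing with its RootListIndex, just as the setters
    // above write it.
    static Object* ReadEmptyScriptRoot(Heap* heap) {
      Object** roots = heap->roots_array_start();
      return roots[Heap::kEmptyScriptRootIndex];
    }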
| 1007 | 1003 |
| 1008 Address* store_buffer_top_address() { | |
| 1009 return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]); | |
| 1010 } | |
| 1011 | |
| 1012 void CheckHandleCount(); | 1004 void CheckHandleCount(); |
| 1013 | 1005 |
| 1014 // Number of "runtime allocations" done so far. | 1006 // Number of "runtime allocations" done so far. |
| 1015 uint32_t allocations_count() { return allocations_count_; } | 1007 uint32_t allocations_count() { return allocations_count_; } |
| 1016 | 1008 |
| 1017 // Returns deterministic "time" value in ms. Works only with | 1009 // Returns deterministic "time" value in ms. Works only with |
| 1018 // FLAG_verify_predictable. | 1010 // FLAG_verify_predictable. |
| 1019 double synthetic_time() { return allocations_count() / 2.0; } | 1011 double synthetic_time() { return allocations_count() / 2.0; } |
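A worked example of the arithmetic above (not from the patch): the synthetic clock simply halves the allocation counter, so it only advances when runtime allocations happen.

    // Illustrative only: with --verify-predictable, 100 recorded runtime
    // allocations correspond to a synthetic "time" of 100 / 2.0 == 50.0 ms.
    double synthetic_ms = heap->allocations_count() / 2.0;  // == heap->synthetic_time()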
| 1020 | 1012 |
| 1021 // Print short heap statistics. | 1013 // Print short heap statistics. |
| 1022 void PrintShortHeapStatistics(); | 1014 void PrintShortHeapStatistics(); |
| 1023 | 1015 |
| 1024 size_t object_count_last_gc(size_t index) { | 1016 size_t object_count_last_gc(size_t index) { |
| 1025 return index < OBJECT_STATS_COUNT ? object_counts_last_time_[index] : 0; | 1017 return index < OBJECT_STATS_COUNT ? object_counts_last_time_[index] : 0; |
| 1026 } | 1018 } |
| 1027 | 1019 |
| 1028 size_t object_size_last_gc(size_t index) { | 1020 size_t object_size_last_gc(size_t index) { |
| 1029 return index < OBJECT_STATS_COUNT ? object_sizes_last_time_[index] : 0; | 1021 return index < OBJECT_STATS_COUNT ? object_sizes_last_time_[index] : 0; |
| 1030 } | 1022 } |
| 1031 | 1023 |
| 1032 // Write barrier support for address[offset] = o. | |
| 1033 INLINE(void RecordWrite(Address address, int offset)); | |
| 1034 | |
| 1035 // Write barrier support for address[start : start + len[ = o. | |
| 1036 INLINE(void RecordWrites(Address address, int start, int len)); | |
| 1037 | |
| 1038 inline HeapState gc_state() { return gc_state_; } | 1024 inline HeapState gc_state() { return gc_state_; } |
| 1039 | 1025 |
| 1040 inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; } | 1026 inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; } |
| 1041 | 1027 |
| 1042 // If an object has an AllocationMemento trailing it, return it; otherwise | 1028 // If an object has an AllocationMemento trailing it, return it; otherwise |
| 1043 // return NULL. | 1029 // return NULL. |
| 1044 inline AllocationMemento* FindAllocationMemento(HeapObject* object); | 1030 inline AllocationMemento* FindAllocationMemento(HeapObject* object); |
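A hedged usage sketch of the NULL-returning contract documented above; the surrounding helper name is hypothetical and not part of this change.

    // Sketch only: callers must handle the NULL case described above.
    static void ProcessAllocationMemento(Heap* heap, HeapObject* object) {
      AllocationMemento* memento = heap->FindAllocationMemento(object);
      if (memento == NULL) return;  // no memento trailing |object|
      // ... otherwise the memento links the object back to its AllocationSite
      // (illustrative; the follow-up processing is outside this header) ...
    }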
| 1045 | 1031 |
| 1046 // Returns false if not able to reserve. | 1032 // Returns false if not able to reserve. |
| 1047 bool ReserveSpace(Reservation* reservations); | 1033 bool ReserveSpace(Reservation* reservations); |
| (...skipping 39 matching lines...) |
| 1087 void VisitExternalResources(v8::ExternalResourceVisitor* visitor); | 1073 void VisitExternalResources(v8::ExternalResourceVisitor* visitor); |
| 1088 | 1074 |
| 1089 // An object should be promoted if the object has survived a | 1075 // An object should be promoted if the object has survived a |
| 1090 // scavenge operation. | 1076 // scavenge operation. |
| 1091 inline bool ShouldBePromoted(Address old_address, int object_size); | 1077 inline bool ShouldBePromoted(Address old_address, int object_size); |
| 1092 | 1078 |
| 1093 void ClearNormalizedMapCaches(); | 1079 void ClearNormalizedMapCaches(); |
| 1094 | 1080 |
| 1095 void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature); | 1081 void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature); |
| 1096 | 1082 |
| 1097 ExternalStringTable* external_string_table() { | |
| 1098 return &external_string_table_; | |
| 1099 } | |
| 1100 | |
| 1101 bool concurrent_sweeping_enabled() { return concurrent_sweeping_enabled_; } | 1083 bool concurrent_sweeping_enabled() { return concurrent_sweeping_enabled_; } |
| 1102 | 1084 |
| 1103 inline bool OldGenerationAllocationLimitReached(); | 1085 inline bool OldGenerationAllocationLimitReached(); |
| 1104 | 1086 |
| 1105 void QueueMemoryChunkForFree(MemoryChunk* chunk); | 1087 void QueueMemoryChunkForFree(MemoryChunk* chunk); |
| 1106 void FilterStoreBufferEntriesOnAboutToBeFreedPages(); | 1088 void FilterStoreBufferEntriesOnAboutToBeFreedPages(); |
| 1107 void FreeQueuedChunks(MemoryChunk* list_head); | 1089 void FreeQueuedChunks(MemoryChunk* list_head); |
| 1108 void FreeQueuedChunks(); | 1090 void FreeQueuedChunks(); |
| 1109 void WaitUntilUnmappingOfFreeChunksCompleted(); | 1091 void WaitUntilUnmappingOfFreeChunksCompleted(); |
| 1110 | 1092 |
| (...skipping 189 matching lines...) |
| 1300 GCTracer* tracer() { return tracer_; } | 1282 GCTracer* tracer() { return tracer_; } |
| 1301 | 1283 |
| 1302 PromotionQueue* promotion_queue() { return &promotion_queue_; } | 1284 PromotionQueue* promotion_queue() { return &promotion_queue_; } |
| 1303 | 1285 |
| 1304 inline Isolate* isolate(); | 1286 inline Isolate* isolate(); |
| 1305 | 1287 |
| 1306 MarkCompactCollector* mark_compact_collector() { | 1288 MarkCompactCollector* mark_compact_collector() { |
| 1307 return &mark_compact_collector_; | 1289 return &mark_compact_collector_; |
| 1308 } | 1290 } |
| 1309 | 1291 |
| 1310 StoreBuffer* store_buffer() { return &store_buffer_; } | 1292 ExternalStringTable* external_string_table() { |
| 1293 return &external_string_table_; |
| 1294 } |
| 1311 | 1295 |
| 1312 // =========================================================================== | 1296 // =========================================================================== |
| 1313 // Inline allocation. ======================================================== | 1297 // Inline allocation. ======================================================== |
| 1314 // =========================================================================== | 1298 // =========================================================================== |
| 1315 | 1299 |
| 1316 // Indicates whether inline bump-pointer allocation has been disabled. | 1300 // Indicates whether inline bump-pointer allocation has been disabled. |
| 1317 bool inline_allocation_disabled() { return inline_allocation_disabled_; } | 1301 bool inline_allocation_disabled() { return inline_allocation_disabled_; } |
| 1318 | 1302 |
| 1319 // Switch whether inline bump-pointer allocation should be used. | 1303 // Switch whether inline bump-pointer allocation should be used. |
| 1320 void EnableInlineAllocation(); | 1304 void EnableInlineAllocation(); |
| (...skipping 37 matching lines...) |
| 1358 // Iterates over all the other roots in the heap. | 1342 // Iterates over all the other roots in the heap. |
| 1359 void IterateWeakRoots(ObjectVisitor* v, VisitMode mode); | 1343 void IterateWeakRoots(ObjectVisitor* v, VisitMode mode); |
| 1360 | 1344 |
| 1361 // Iterates over pointers to the from-semispace of new space found in the | 1345 // Iterates over pointers to the from-semispace of new space found in the |
| 1362 // memory interval from start to end within |object|. | 1346 // memory interval from start to end within |object|. |
| 1363 void IterateAndMarkPointersToFromSpace(HeapObject* object, Address start, | 1347 void IterateAndMarkPointersToFromSpace(HeapObject* object, Address start, |
| 1364 Address end, bool record_slots, | 1348 Address end, bool record_slots, |
| 1365 ObjectSlotCallback callback); | 1349 ObjectSlotCallback callback); |
| 1366 | 1350 |
| 1367 // =========================================================================== | 1351 // =========================================================================== |
| 1352 // Store buffer API. ========================================================= |
| 1353 // =========================================================================== |
| 1354 |
| 1355 // Write barrier support for address[offset] = o. |
| 1356 INLINE(void RecordWrite(Address address, int offset)); |
| 1357 |
| 1358 // Write barrier support for address[start : start + len[ = o. |
| 1359 INLINE(void RecordWrites(Address address, int start, int len)); |
| 1360 |
| 1361 Address* store_buffer_top_address() { |
| 1362 return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]); |
| 1363 } |
| 1364 |
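A minimal sketch, not part of the patch, of how a raw field store pairs with the write barrier declared in this section; the helper and the use of Memory::Object_at are assumptions for illustration only.

    // Sketch only: the "address[offset] = o" contract documented above.
    static void StorePointerField(Heap* heap, HeapObject* host, int offset,
                                  Object* value) {
      // Illustrative raw store into the host object at the given byte offset.
      Memory::Object_at(host->address() + offset) = value;
      // Record the slot so an old->new pointer is found by the store buffer.
      heap->RecordWrite(host->address(), offset);
    }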
| 1365 // =========================================================================== |
| 1368 // Incremental marking API. ================================================== | 1366 // Incremental marking API. ================================================== |
| 1369 // =========================================================================== | 1367 // =========================================================================== |
| 1370 | 1368 |
| 1371 // Start incremental marking and ensure that idle time handler can perform | 1369 // Start incremental marking and ensure that idle time handler can perform |
| 1372 // incremental steps. | 1370 // incremental steps. |
| 1373 void StartIdleIncrementalMarking(); | 1371 void StartIdleIncrementalMarking(); |
| 1374 | 1372 |
| 1375 // Starts incremental marking assuming incremental marking is currently | 1373 // Starts incremental marking assuming incremental marking is currently |
| 1376 // stopped. | 1374 // stopped. |
| 1377 void StartIncrementalMarking(int gc_flags = kNoGCFlags, | 1375 void StartIncrementalMarking(int gc_flags = kNoGCFlags, |
| (...skipping 298 matching lines...) |
| 1676 static AllocationSpace SelectSpace(int object_size, PretenureFlag pretenure) { | 1674 static AllocationSpace SelectSpace(int object_size, PretenureFlag pretenure) { |
| 1677 if (object_size > Page::kMaxRegularHeapObjectSize) return LO_SPACE; | 1675 if (object_size > Page::kMaxRegularHeapObjectSize) return LO_SPACE; |
| 1678 return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE; | 1676 return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE; |
| 1679 } | 1677 } |
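Worked examples (not in the patch) of the selection logic above; the sizes are arbitrary, NOT_TENURED is the assumed counterpart of TENURED, and access control is ignored for illustration.

    // Anything above the regular-page limit goes to large-object space.
    AllocationSpace a =
        Heap::SelectSpace(Page::kMaxRegularHeapObjectSize + 1, TENURED);  // LO_SPACE
    // Smaller objects follow the pretenuring decision.
    AllocationSpace b = Heap::SelectSpace(64, TENURED);      // OLD_SPACE
    AllocationSpace c = Heap::SelectSpace(64, NOT_TENURED);  // NEW_SPACE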
| 1680 | 1678 |
| 1681 #define ROOT_ACCESSOR(type, name, camel_name) \ | 1679 #define ROOT_ACCESSOR(type, name, camel_name) \ |
| 1682 inline void set_##name(type* value); | 1680 inline void set_##name(type* value); |
| 1683 ROOT_LIST(ROOT_ACCESSOR) | 1681 ROOT_LIST(ROOT_ACCESSOR) |
| 1684 #undef ROOT_ACCESSOR | 1682 #undef ROOT_ACCESSOR |
| 1685 | 1683 |
| 1684 StoreBuffer* store_buffer() { return &store_buffer_; } |
| 1685 |
| 1686 void set_current_gc_flags(int flags) { | 1686 void set_current_gc_flags(int flags) { |
| 1687 current_gc_flags_ = flags; | 1687 current_gc_flags_ = flags; |
| 1688 DCHECK(!ShouldFinalizeIncrementalMarking() || | 1688 DCHECK(!ShouldFinalizeIncrementalMarking() || |
| 1689 !ShouldAbortIncrementalMarking()); | 1689 !ShouldAbortIncrementalMarking()); |
| 1690 } | 1690 } |
| 1691 | 1691 |
| 1692 inline bool ShouldReduceMemory() const { | 1692 inline bool ShouldReduceMemory() const { |
| 1693 return current_gc_flags_ & kReduceMemoryFootprintMask; | 1693 return current_gc_flags_ & kReduceMemoryFootprintMask; |
| 1694 } | 1694 } |
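A small sketch (not from the patch) of the flag plumbing above: current_gc_flags_ is a bitmask, so ShouldReduceMemory() is a single mask test; access control is ignored for illustration.

    // Sketch only: the reduce-memory bit stays observable until flags reset.
    heap->set_current_gc_flags(Heap::kReduceMemoryFootprintMask);
    DCHECK(heap->ShouldReduceMemory());   // mask bit is set
    heap->set_current_gc_flags(Heap::kNoGCFlags);
    DCHECK(!heap->ShouldReduceMemory());  // cleared again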
| 1695 | 1695 |
| (...skipping 708 matching lines...) |
| 2404 friend class Factory; | 2404 friend class Factory; |
| 2405 friend class GCCallbacksScope; | 2405 friend class GCCallbacksScope; |
| 2406 friend class GCTracer; | 2406 friend class GCTracer; |
| 2407 friend class HeapIterator; | 2407 friend class HeapIterator; |
| 2408 friend class IncrementalMarking; | 2408 friend class IncrementalMarking; |
| 2409 friend class Isolate; | 2409 friend class Isolate; |
| 2410 friend class MarkCompactCollector; | 2410 friend class MarkCompactCollector; |
| 2411 friend class MarkCompactMarkingVisitor; | 2411 friend class MarkCompactMarkingVisitor; |
| 2412 friend class MapCompact; | 2412 friend class MapCompact; |
| 2413 friend class Page; | 2413 friend class Page; |
| 2414 friend class StoreBuffer; |
| 2414 | 2415 |
| 2415 // Used in cctest. | 2416 // Used in cctest. |
| 2416 friend class HeapTester; | 2417 friend class HeapTester; |
| 2417 | 2418 |
| 2418 DISALLOW_COPY_AND_ASSIGN(Heap); | 2419 DISALLOW_COPY_AND_ASSIGN(Heap); |
| 2419 }; | 2420 }; |
| 2420 | 2421 |
| 2421 | 2422 |
| 2422 class HeapStats { | 2423 class HeapStats { |
| 2423 public: | 2424 public: |
| (...skipping 354 matching lines...) |
| 2778 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. | 2779 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. |
| 2779 | 2780 |
| 2780 private: | 2781 private: |
| 2781 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); | 2782 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); |
| 2782 }; | 2783 }; |
| 2783 #endif // DEBUG | 2784 #endif // DEBUG |
| 2784 } | 2785 } |
| 2785 } // namespace v8::internal | 2786 } // namespace v8::internal |
| 2786 | 2787 |
| 2787 #endif // V8_HEAP_HEAP_H_ | 2788 #endif // V8_HEAP_HEAP_H_ |