Chromium Code Reviews

Side by Side Diff: src/heap/heap.h

Issue 1313513003: [heap] Enforce coding style decl order in {Heap} round #2. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: rebase (created 5 years, 3 months ago)
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #ifndef V8_HEAP_HEAP_H_ 5 #ifndef V8_HEAP_HEAP_H_
6 #define V8_HEAP_HEAP_H_ 6 #define V8_HEAP_HEAP_H_
7 7
8 #include <cmath> 8 #include <cmath>
9 #include <map> 9 #include <map>
10 10
(...skipping 803 matching lines...)
814 814
815 // Set the stack limit in the roots_ array. Some architectures generate 815 // Set the stack limit in the roots_ array. Some architectures generate
816 // code that looks here, because it is faster than loading from the static 816 // code that looks here, because it is faster than loading from the static
817 // jslimit_/real_jslimit_ variable in the StackGuard. 817 // jslimit_/real_jslimit_ variable in the StackGuard.
818 void SetStackLimits(); 818 void SetStackLimits();
819 819
820 // Notifies the heap that it is ok to start marking or other activities that 820 // Notifies the heap that it is ok to start marking or other activities that
821 // should not happen during deserialization. 821 // should not happen during deserialization.
822 void NotifyDeserializationComplete(); 822 void NotifyDeserializationComplete();
823 823
824 // Returns whether SetUp has been called.
825 bool HasBeenSetUp();
826
827 intptr_t old_generation_allocation_limit() const { 824 intptr_t old_generation_allocation_limit() const {
828 return old_generation_allocation_limit_; 825 return old_generation_allocation_limit_;
829 } 826 }
830 827
831 bool always_allocate() { return always_allocate_scope_depth_ != 0; } 828 bool always_allocate() { return always_allocate_scope_depth_ != 0; }
832 Address always_allocate_scope_depth_address() { 829 Address always_allocate_scope_depth_address() {
833 return reinterpret_cast<Address>(&always_allocate_scope_depth_); 830 return reinterpret_cast<Address>(&always_allocate_scope_depth_);
834 } 831 }
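The scope-depth counter above hints at an RAII pattern: some scope object increments the depth on entry and decrements it on exit, and always_allocate() simply reports whether any such scope is live. A minimal self-contained model of that pattern (class and member names are illustrative stand-ins, not V8's actual scope type):

#include <cassert>

struct MiniHeap {
  int always_allocate_scope_depth_ = 0;
  bool always_allocate() { return always_allocate_scope_depth_ != 0; }
};

// Hypothetical RAII guard; the real scope type differs in detail.
class AlwaysAllocateScopeModel {
 public:
  explicit AlwaysAllocateScopeModel(MiniHeap* heap) : heap_(heap) {
    heap_->always_allocate_scope_depth_++;
  }
  ~AlwaysAllocateScopeModel() { heap_->always_allocate_scope_depth_--; }

 private:
  MiniHeap* heap_;
};

int main() {
  MiniHeap heap;
  assert(!heap.always_allocate());
  {
    AlwaysAllocateScopeModel scope(&heap);
    assert(heap.always_allocate());  // allocations must not fail in here
  }
  assert(!heap.always_allocate());  // back to normal on scope exit
}

Nesting works for free: the depth only returns to zero once the outermost scope unwinds.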
835 832
836 Address* NewSpaceAllocationTopAddress() { 833 Address* NewSpaceAllocationTopAddress() {
(...skipping 46 matching lines...)
883 // start and hence is only valid if there is only a single reference to it. 880 // start and hence is only valid if there is only a single reference to it.
884 FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim); 881 FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
885 882
886 // Trim the given array from the right. 883 // Trim the given array from the right.
887 template<Heap::InvocationMode mode> 884 template<Heap::InvocationMode mode>
888 void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim); 885 void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
889 886
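To see why left-trimming returns a new object start (and why that start is only valid with a single reference), it helps to model the trick: the array header is rewritten elements_to_trim slots forward and the vacated slots become a filler, so a heap walk stays iterable. A self-contained toy with a one-word length header and an illustrative filler tag, not V8's real object layout:

#include <cassert>

constexpr int kFillerTag = -1;  // stand-in for a filler map word

// backing[start] holds the length; elements follow. Returns the new start.
int LeftTrimModel(int* backing, int start, int elements_to_trim) {
  int old_length = backing[start];
  int new_start = start + elements_to_trim;
  // Tag the vacated slots as filler so the region stays walkable.
  for (int i = start; i < new_start; i++) backing[i] = kFillerTag;
  backing[new_start] = old_length - elements_to_trim;  // rewritten header
  return new_start;  // stale references would still point at the filler
}

int main() {
  int backing[6] = {5, 10, 20, 30, 40, 50};  // length 5, then elements
  int s = LeftTrimModel(backing, 0, 2);
  assert(backing[s] == 3);       // new length
  assert(backing[s + 1] == 30);  // first surviving element
  assert(backing[0] == kFillerTag);
}

Right-trimming is the simpler direction: the header stays put, the length shrinks, and the tail becomes a filler.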
890 // Converts the given boolean condition to JavaScript boolean value. 887 // Converts the given boolean condition to JavaScript boolean value.
891 inline Object* ToBoolean(bool condition); 888 inline Object* ToBoolean(bool condition);
892 889
893 // Attempt to over-approximate the weak closure by marking object groups and
894 // implicit references from global handles, but don't atomically complete
895 // marking. If we continue to mark incrementally, we might have marked
896 // objects that die later.
897 void OverApproximateWeakClosure(const char* gc_reason);
898
899 // Check whether the heap is currently iterable. 890 // Check whether the heap is currently iterable.
900 bool IsHeapIterable(); 891 bool IsHeapIterable();
901 892
902 // Notify the heap that a context has been disposed. 893 // Notify the heap that a context has been disposed.
903 int NotifyContextDisposed(bool dependant_context); 894 int NotifyContextDisposed(bool dependant_context);
904 895
905 inline void increment_scan_on_scavenge_pages() { 896 inline void increment_scan_on_scavenge_pages() {
906 scan_on_scavenge_pages_++; 897 scan_on_scavenge_pages_++;
907 if (FLAG_gc_verbose) { 898 if (FLAG_gc_verbose) {
908 PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_); 899 PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
(...skipping 97 matching lines...)
1006 return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]); 997 return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
1007 } 998 }
1008 999
1009 void CheckHandleCount(); 1000 void CheckHandleCount();
1010 1001
1011 // Number of "runtime allocations" done so far. 1002 // Number of "runtime allocations" done so far.
1012 uint32_t allocations_count() { return allocations_count_; } 1003 uint32_t allocations_count() { return allocations_count_; }
1013 1004
1014 // Returns deterministic "time" value in ms. Works only with 1005 // Returns deterministic "time" value in ms. Works only with
1015 // FLAG_verify_predictable. 1006 // FLAG_verify_predictable.
1016 double synthetic_time() { return allocations_count_ / 2.0; } 1007 double synthetic_time() { return allocations_count() / 2.0; }
1017 1008
1018 // Print short heap statistics. 1009 // Print short heap statistics.
1019 void PrintShortHeapStatistics(); 1010 void PrintShortHeapStatistics();
1020 1011
1021 size_t object_count_last_gc(size_t index) { 1012 size_t object_count_last_gc(size_t index) {
1022 return index < OBJECT_STATS_COUNT ? object_counts_last_time_[index] : 0; 1013 return index < OBJECT_STATS_COUNT ? object_counts_last_time_[index] : 0;
1023 } 1014 }
1024 1015
1025 size_t object_size_last_gc(size_t index) { 1016 size_t object_size_last_gc(size_t index) {
1026 return index < OBJECT_STATS_COUNT ? object_sizes_last_time_[index] : 0; 1017 return index < OBJECT_STATS_COUNT ? object_sizes_last_time_[index] : 0;
(...skipping 15 matching lines...)
1042 1033
1043 // Returns false if not able to reserve. 1034 // Returns false if not able to reserve.
1044 bool ReserveSpace(Reservation* reservations); 1035 bool ReserveSpace(Reservation* reservations);
1045 1036
1046 // 1037 //
1047 // Support for the API. 1038 // Support for the API.
1048 // 1039 //
1049 1040
1050 void CreateApiObjects(); 1041 void CreateApiObjects();
1051 1042
1052 // Calculates the allocation limit based on a given growing factor and a
1053 // given old generation size.
1054 intptr_t CalculateOldGenerationAllocationLimit(double factor,
1055 intptr_t old_gen_size);
1056
1057 // Sets the allocation limit to trigger the next full garbage collection.
1058 void SetOldGenerationAllocationLimit(intptr_t old_gen_size, double gc_speed,
1059 double mutator_speed);
1060
1061 // Decrease the allocation limit if the new limit based on the given
1062 // parameters is lower than the current limit.
1063 void DampenOldGenerationAllocationLimit(intptr_t old_gen_size,
1064 double gc_speed,
1065 double mutator_speed);
1066
1067 // Implements the corresponding V8 API function. 1043 // Implements the corresponding V8 API function.
1068 bool IdleNotification(double deadline_in_seconds); 1044 bool IdleNotification(double deadline_in_seconds);
1069 bool IdleNotification(int idle_time_in_ms); 1045 bool IdleNotification(int idle_time_in_ms);
1070 1046
1071 double MonotonicallyIncreasingTimeInMs(); 1047 double MonotonicallyIncreasingTimeInMs();
1072 1048
1073 Object* root(RootListIndex index) { return roots_[index]; } 1049 Object* root(RootListIndex index) { return roots_[index]; }
1074 1050
1075 // Generated code can treat direct references to this root as constant. 1051 // Generated code can treat direct references to this root as constant.
1076 bool RootCanBeTreatedAsConstant(RootListIndex root_index); 1052 bool RootCanBeTreatedAsConstant(RootListIndex root_index);
(...skipping 12 matching lines...)
1089 inline bool HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit) { 1065 inline bool HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit) {
1090 if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true; 1066 if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
1091 1067
1092 intptr_t adjusted_allocation_limit = limit - new_space_.Capacity(); 1068 intptr_t adjusted_allocation_limit = limit - new_space_.Capacity();
1093 1069
1094 if (PromotedTotalSize() >= adjusted_allocation_limit) return true; 1070 if (PromotedTotalSize() >= adjusted_allocation_limit) return true;
1095 1071
1096 return false; 1072 return false;
1097 } 1073 }
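As a worked example of the check above: with a limit of 100 MB and a new-space capacity of 16 MB, the adjusted limit is 84 MB, so the heap counts as full enough once the promoted total size reaches 84 MB. Subtracting the new-space capacity leaves headroom for objects that may still be promoted out of new space before the unadjusted limit would be hit.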
1098 1074
1099 void UpdateNewSpaceReferencesInExternalStringTable(
1100 ExternalStringTableUpdaterCallback updater_func);
1101
1102 void UpdateReferencesInExternalStringTable(
1103 ExternalStringTableUpdaterCallback updater_func);
1104
1105 void ProcessAllWeakReferences(WeakObjectRetainer* retainer);
1106 void ProcessYoungWeakReferences(WeakObjectRetainer* retainer);
1107
1108 void VisitExternalResources(v8::ExternalResourceVisitor* visitor); 1075 void VisitExternalResources(v8::ExternalResourceVisitor* visitor);
1109 1076
1110 // An object should be promoted if the object has survived a 1077 // An object should be promoted if the object has survived a
1111 // scavenge operation. 1078 // scavenge operation.
1112 inline bool ShouldBePromoted(Address old_address, int object_size); 1079 inline bool ShouldBePromoted(Address old_address, int object_size);
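Hedged sketch of one common way such a predicate is implemented: new space keeps an age mark, and anything allocated below the mark has, by construction, survived at least one scavenge. The toy below assumes exactly that scheme (the real predicate also takes the object size into account, as the signature shows):

#include <cassert>
#include <cstdint>

using Address = uintptr_t;

struct NewSpaceModel {
  Address age_mark;  // boundary recorded at the previous scavenge
  bool ShouldBePromoted(Address old_address) const {
    return old_address < age_mark;  // survived at least one scavenge
  }
};

int main() {
  NewSpaceModel ns{0x1000};
  assert(ns.ShouldBePromoted(0x0800));   // predates the last scavenge
  assert(!ns.ShouldBePromoted(0x2000));  // freshly allocated
}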
1113 1080
1114 void ClearNormalizedMapCaches(); 1081 void ClearNormalizedMapCaches();
1115 1082
1116 void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature); 1083 void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);
1117 1084
1118 ExternalStringTable* external_string_table() { 1085 ExternalStringTable* external_string_table() {
1119 return &external_string_table_; 1086 return &external_string_table_;
1120 } 1087 }
1121 1088
1122 bool concurrent_sweeping_enabled() { return concurrent_sweeping_enabled_; } 1089 bool concurrent_sweeping_enabled() { return concurrent_sweeping_enabled_; }
1123 1090
1124 inline bool OldGenerationAllocationLimitReached(); 1091 inline bool OldGenerationAllocationLimitReached();
1125 1092
1126 void QueueMemoryChunkForFree(MemoryChunk* chunk); 1093 void QueueMemoryChunkForFree(MemoryChunk* chunk);
1127 void FilterStoreBufferEntriesOnAboutToBeFreedPages(); 1094 void FilterStoreBufferEntriesOnAboutToBeFreedPages();
1128 void FreeQueuedChunks(MemoryChunk* list_head); 1095 void FreeQueuedChunks(MemoryChunk* list_head);
1129 void FreeQueuedChunks(); 1096 void FreeQueuedChunks();
1130 void WaitUntilUnmappingOfFreeChunksCompleted(); 1097 void WaitUntilUnmappingOfFreeChunksCompleted();
1131 1098
1132 bool RecentIdleNotificationHappened();
1133
1134 // Completely clear the Instanceof cache (to stop it keeping objects alive 1099 // Completely clear the Instanceof cache (to stop it keeping objects alive
1135 // around a GC). 1100 // around a GC).
1136 inline void CompletelyClearInstanceofCache(); 1101 inline void CompletelyClearInstanceofCache();
1137 1102
1138 inline uint32_t HashSeed(); 1103 inline uint32_t HashSeed();
1139 1104
1140 inline Smi* NextScriptId(); 1105 inline Smi* NextScriptId();
1141 1106
1142 inline void SetArgumentsAdaptorDeoptPCOffset(int pc_offset); 1107 inline void SetArgumentsAdaptorDeoptPCOffset(int pc_offset);
1143 inline void SetConstructStubDeoptPCOffset(int pc_offset); 1108 inline void SetConstructStubDeoptPCOffset(int pc_offset);
(...skipping 10 matching lines...)
1154 void AgeInlineCaches() { 1119 void AgeInlineCaches() {
1155 global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax; 1120 global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
1156 } 1121 }
1157 1122
1158 int64_t amount_of_external_allocated_memory() { 1123 int64_t amount_of_external_allocated_memory() {
1159 return amount_of_external_allocated_memory_; 1124 return amount_of_external_allocated_memory_;
1160 } 1125 }
1161 1126
1162 void DeoptMarkedAllocationSites(); 1127 void DeoptMarkedAllocationSites();
1163 1128
1164 bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }
1165
1166 bool DeoptMaybeTenuredAllocationSites() { 1129 bool DeoptMaybeTenuredAllocationSites() {
1167 return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0; 1130 return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
1168 } 1131 }
1169 1132
1170 void RecordObjectStats(InstanceType type, size_t size) { 1133 void RecordObjectStats(InstanceType type, size_t size) {
1171 DCHECK(type <= LAST_TYPE); 1134 DCHECK(type <= LAST_TYPE);
1172 object_counts_[type]++; 1135 object_counts_[type]++;
1173 object_sizes_[type] += size; 1136 object_sizes_[type] += size;
1174 } 1137 }
1175 1138
(...skipping 86 matching lines...)
1262 // without actually creating any objects. 1225 // without actually creating any objects.
1263 bool SetUp(); 1226 bool SetUp();
1264 1227
1265 // Bootstraps the object heap with the core set of objects required to run. 1228 // Bootstraps the object heap with the core set of objects required to run.
1266 // Returns whether it succeeded. 1229 // Returns whether it succeeded.
1267 bool CreateHeapObjects(); 1230 bool CreateHeapObjects();
1268 1231
1269 // Destroys all memory allocated by the heap. 1232 // Destroys all memory allocated by the heap.
1270 void TearDown(); 1233 void TearDown();
1271 1234
1235 // Returns whether SetUp has been called.
1236 bool HasBeenSetUp();
1237
1272 // =========================================================================== 1238 // ===========================================================================
1273 // Getters for spaces. ======================================================= 1239 // Getters for spaces. =======================================================
1274 // =========================================================================== 1240 // ===========================================================================
1275 1241
1276 // Return the starting address and a mask for the new space. And-masking an 1242 // Return the starting address and a mask for the new space. And-masking an
1277 // address with the mask will result in the start address of the new space 1243 // address with the mask will result in the start address of the new space
1278 // for all addresses in either semispace. 1244 // for all addresses in either semispace.
1279 Address NewSpaceStart() { return new_space_.start(); } 1245 Address NewSpaceStart() { return new_space_.start(); }
1280 uintptr_t NewSpaceMask() { return new_space_.mask(); } 1246 uintptr_t NewSpaceMask() { return new_space_.mask(); }
1281 Address NewSpaceTop() { return new_space_.top(); } 1247 Address NewSpaceTop() { return new_space_.top(); }
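A worked example of the and-masking trick described above, with made-up numbers: if new space is a power-of-two-sized region aligned to its size, masking any in-space address recovers the start.

#include <cassert>
#include <cstdint>

int main() {
  const uintptr_t kNewSpaceStart = 0x40000000;  // hypothetical, size-aligned
  const uintptr_t kNewSpaceSize = 0x00800000;   // 8 MB, a power of two
  const uintptr_t kNewSpaceMask = ~(kNewSpaceSize - 1);

  uintptr_t inside = kNewSpaceStart + 0x1234;
  uintptr_t outside = kNewSpaceStart + kNewSpaceSize + 0x10;

  assert((inside & kNewSpaceMask) == kNewSpaceStart);   // in new space
  assert((outside & kNewSpaceMask) != kNewSpaceStart);  // not in new space
}

The property holds for both semispaces, presumably because they live inside one aligned reservation.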
(...skipping 213 matching lines...)
1495 inline intptr_t promoted_objects_size() { return promoted_objects_size_; } 1461 inline intptr_t promoted_objects_size() { return promoted_objects_size_; }
1496 1462
1497 inline void IncrementSemiSpaceCopiedObjectSize(int object_size) { 1463 inline void IncrementSemiSpaceCopiedObjectSize(int object_size) {
1498 DCHECK(object_size > 0); 1464 DCHECK(object_size > 0);
1499 semi_space_copied_object_size_ += object_size; 1465 semi_space_copied_object_size_ += object_size;
1500 } 1466 }
1501 inline intptr_t semi_space_copied_object_size() { 1467 inline intptr_t semi_space_copied_object_size() {
1502 return semi_space_copied_object_size_; 1468 return semi_space_copied_object_size_;
1503 } 1469 }
1504 1470
1505
1506 inline intptr_t SurvivedNewSpaceObjectSize() { 1471 inline intptr_t SurvivedNewSpaceObjectSize() {
1507 return promoted_objects_size_ + semi_space_copied_object_size_; 1472 return promoted_objects_size_ + semi_space_copied_object_size_;
1508 } 1473 }
1509 1474
1510 inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; } 1475 inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; }
1511 1476
1512 inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; } 1477 inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; }
1513 1478
1514 inline void IncrementNodesPromoted() { nodes_promoted_++; } 1479 inline void IncrementNodesPromoted() { nodes_promoted_++; }
1515 1480
1516 inline void IncrementYoungSurvivorsCounter(int survived) { 1481 inline void IncrementYoungSurvivorsCounter(int survived) {
1517 DCHECK(survived >= 0); 1482 DCHECK(survived >= 0);
1518 survived_last_scavenge_ = survived; 1483 survived_last_scavenge_ = survived;
1519 survived_since_last_expansion_ += survived; 1484 survived_since_last_expansion_ += survived;
1520 } 1485 }
1521 1486
1522 inline intptr_t PromotedTotalSize() { 1487 inline intptr_t PromotedTotalSize() {
1523 int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize(); 1488 int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
1524 if (total > std::numeric_limits<intptr_t>::max()) { 1489 if (total > std::numeric_limits<intptr_t>::max()) {
1525 // TODO(erikcorry): Use uintptr_t everywhere we do heap size calculations. 1490 // TODO(erikcorry): Use uintptr_t everywhere we do heap size calculations.
1526 return std::numeric_limits<intptr_t>::max(); 1491 return std::numeric_limits<intptr_t>::max();
1527 } 1492 }
1528 if (total < 0) return 0; 1493 if (total < 0) return 0;
1529 return static_cast<intptr_t>(total); 1494 return static_cast<intptr_t>(total);
1530 } 1495 }
1531 1496
1532 inline intptr_t OldGenerationSpaceAvailable() {
1533 return old_generation_allocation_limit_ - PromotedTotalSize();
1534 }
1535
1536 inline intptr_t OldGenerationCapacityAvailable() {
1537 return max_old_generation_size_ - PromotedTotalSize();
1538 }
1539
1540
1541 void UpdateNewSpaceAllocationCounter() { 1497 void UpdateNewSpaceAllocationCounter() {
1542 new_space_allocation_counter_ = NewSpaceAllocationCounter(); 1498 new_space_allocation_counter_ = NewSpaceAllocationCounter();
1543 } 1499 }
1544 1500
1545 size_t NewSpaceAllocationCounter() { 1501 size_t NewSpaceAllocationCounter() {
1546 return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC(); 1502 return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();
1547 } 1503 }
1548 1504
1549 // This should be used only for testing. 1505 // This should be used only for testing.
1550 void set_new_space_allocation_counter(size_t new_value) { 1506 void set_new_space_allocation_counter(size_t new_value) {
(...skipping 10 matching lines...)
1561 1517
1562 // This should be used only for testing. 1518 // This should be used only for testing.
1563 void set_old_generation_allocation_counter(size_t new_value) { 1519 void set_old_generation_allocation_counter(size_t new_value) {
1564 old_generation_allocation_counter_ = new_value; 1520 old_generation_allocation_counter_ = new_value;
1565 } 1521 }
1566 1522
1567 size_t PromotedSinceLastGC() { 1523 size_t PromotedSinceLastGC() {
1568 return PromotedSpaceSizeOfObjects() - old_generation_size_at_last_gc_; 1524 return PromotedSpaceSizeOfObjects() - old_generation_size_at_last_gc_;
1569 } 1525 }
1570 1526
1571 // Update GC statistics that are tracked on the Heap.
1572 void UpdateCumulativeGCStatistics(double duration, double spent_in_mutator,
1573 double marking_time);
1574
1575 // Returns maximum GC pause.
1576 double get_max_gc_pause() { return max_gc_pause_; }
1577
1578 // Returns maximum size of objects alive after GC.
1579 intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
1580
1581 // Returns minimal interval between two subsequent collections.
1582 double get_min_in_mutator() { return min_in_mutator_; }
1583
1584 int gc_count() const { return gc_count_; } 1527 int gc_count() const { return gc_count_; }
1585 1528
1586 // Returns the size of objects residing in non-new spaces. 1529 // Returns the size of objects residing in non-new spaces.
1587 intptr_t PromotedSpaceSizeOfObjects(); 1530 intptr_t PromotedSpaceSizeOfObjects();
1588 1531
1589 double total_regexp_code_generated() { return total_regexp_code_generated_; } 1532 double total_regexp_code_generated() { return total_regexp_code_generated_; }
1590 void IncreaseTotalRegexpCodeGenerated(int size) { 1533 void IncreaseTotalRegexpCodeGenerated(int size) {
1591 total_regexp_code_generated_ += size; 1534 total_regexp_code_generated_ += size;
1592 } 1535 }
1593 1536
(...skipping 17 matching lines...)
1611 GCType gc_type_filter, bool pass_isolate = true); 1554 GCType gc_type_filter, bool pass_isolate = true);
1612 void RemoveGCEpilogueCallback(v8::Isolate::GCCallback callback); 1555 void RemoveGCEpilogueCallback(v8::Isolate::GCCallback callback);
1613 1556
1614 void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags); 1557 void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
1615 void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags); 1558 void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);
1616 1559
1617 // =========================================================================== 1560 // ===========================================================================
1618 // Allocation methods. ======================================================= 1561 // Allocation methods. =======================================================
1619 // =========================================================================== 1562 // ===========================================================================
1620 1563
1621 // Returns a deep copy of the JavaScript object.
1622 // Properties and elements are copied too.
1623 // Optionally takes an AllocationSite to be appended in an AllocationMemento.
1624 MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source,
1625 AllocationSite* site = NULL);
1626
1627 // Creates a filler object and returns a heap object immediately after it. 1564 // Creates a filler object and returns a heap object immediately after it.
1628 MUST_USE_RESULT HeapObject* PrecedeWithFiller(HeapObject* object, 1565 MUST_USE_RESULT HeapObject* PrecedeWithFiller(HeapObject* object,
1629 int filler_size); 1566 int filler_size);
1567
1630 // Creates a filler object if needed for alignment and returns a heap object 1568 // Creates a filler object if needed for alignment and returns a heap object
1631 // immediately after it. If any space is left after the returned object, 1569 // immediately after it. If any space is left after the returned object,
1632 // another filler object is created so the over allocated memory is iterable. 1570 // another filler object is created so the over allocated memory is iterable.
1633 MUST_USE_RESULT HeapObject* AlignWithFiller(HeapObject* object, 1571 MUST_USE_RESULT HeapObject* AlignWithFiller(HeapObject* object,
1634 int object_size, 1572 int object_size,
1635 int allocation_size, 1573 int allocation_size,
1636 AllocationAlignment alignment); 1574 AllocationAlignment alignment);
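Hedged sketch of the filler arithmetic these two methods imply: to double-align an object on a heap with 4-byte pointers, one extra word is requested up front, and the spare word becomes a one-word filler either before the object (if the raw start was misaligned) or after it (if it was not), keeping the over-allocated memory iterable. Constants below are illustrative:

#include <cassert>
#include <cstdint>

constexpr uintptr_t kPointerSize = 4;
constexpr uintptr_t kDoubleAlignment = 8;

// Returns the aligned object start within [start, start + allocation_size).
uintptr_t AlignWithFillerModel(uintptr_t start, uintptr_t object_size,
                               uintptr_t allocation_size) {
  uintptr_t object = start;
  // Misaligned start: a one-word filler precedes the object. Aligned
  // start: the spare word after the object becomes the filler instead.
  if (object % kDoubleAlignment != 0) object += kPointerSize;
  assert(object % kDoubleAlignment == 0);
  assert(object + object_size <= start + allocation_size);
  return object;
}

int main() {
  uintptr_t object_size = 16;
  uintptr_t allocation_size = object_size + kPointerSize;  // one spare word
  assert(AlignWithFillerModel(8, object_size, allocation_size) == 8);
  assert(AlignWithFillerModel(12, object_size, allocation_size) == 16);
}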
1637 1575
1638 // ============================================================================= 1576 // =============================================================================
1639 1577
(...skipping 80 matching lines...)
1720 static void ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page, 1658 static void ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page,
1721 StoreBufferEvent event); 1659 StoreBufferEvent event);
1722 1660
1723 // Selects the proper allocation space depending on the given object 1661 // Selects the proper allocation space depending on the given object
1724 // size and pretenuring decision. 1662 // size and pretenuring decision.
1725 static AllocationSpace SelectSpace(int object_size, PretenureFlag pretenure) { 1663 static AllocationSpace SelectSpace(int object_size, PretenureFlag pretenure) {
1726 if (object_size > Page::kMaxRegularHeapObjectSize) return LO_SPACE; 1664 if (object_size > Page::kMaxRegularHeapObjectSize) return LO_SPACE;
1727 return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE; 1665 return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
1728 } 1666 }
1729 1667
1668 #define ROOT_ACCESSOR(type, name, camel_name) \
1669 inline void set_##name(type* value);
1670 ROOT_LIST(ROOT_ACCESSOR)
1671 #undef ROOT_ACCESSOR
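For readers unfamiliar with the idiom: ROOT_LIST/ROOT_ACCESSOR is an X-macro, where a single list macro is expanded against different per-entry macros to stamp out repetitive declarations. A self-contained demonstration with a stand-in list (not V8's actual ROOT_LIST):

#include <cstdio>

#define DEMO_ROOT_LIST(V)  \
  V(int, counter, Counter) \
  V(double, ratio, Ratio)

struct DemoRoots {
// One expansion generates the fields...
#define ROOT_FIELD(type, name, camel_name) type name##_ = {};
  DEMO_ROOT_LIST(ROOT_FIELD)
#undef ROOT_FIELD

// ...and a second expansion generates the setters, mirroring set_##name.
#define ROOT_ACCESSOR(type, name, camel_name) \
  void set_##name(type value) { name##_ = value; }
  DEMO_ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
};

int main() {
  DemoRoots roots;
  roots.set_counter(3);  // both members below were stamped out by macros
  roots.set_ratio(0.5);
  std::printf("%d %.1f\n", roots.counter_, roots.ratio_);
}

The #undef directly after each expansion keeps the helper macro from leaking, which is the discipline the header above follows.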
1672
1730 int current_gc_flags() { return current_gc_flags_; } 1673 int current_gc_flags() { return current_gc_flags_; }
1731 1674
1732 void set_current_gc_flags(int flags) { 1675 void set_current_gc_flags(int flags) {
1733 current_gc_flags_ = flags; 1676 current_gc_flags_ = flags;
1734 DCHECK(!ShouldFinalizeIncrementalMarking() || 1677 DCHECK(!ShouldFinalizeIncrementalMarking() ||
1735 !ShouldAbortIncrementalMarking()); 1678 !ShouldAbortIncrementalMarking());
1736 } 1679 }
1737 1680
1738 inline bool ShouldReduceMemory() const { 1681 inline bool ShouldReduceMemory() const {
1739 return current_gc_flags_ & kReduceMemoryFootprintMask; 1682 return current_gc_flags_ & kReduceMemoryFootprintMask;
1740 } 1683 }
1741 1684
1742 inline bool ShouldAbortIncrementalMarking() const { 1685 inline bool ShouldAbortIncrementalMarking() const {
1743 return current_gc_flags_ & kAbortIncrementalMarkingMask; 1686 return current_gc_flags_ & kAbortIncrementalMarkingMask;
1744 } 1687 }
1745 1688
1746 inline bool ShouldFinalizeIncrementalMarking() const { 1689 inline bool ShouldFinalizeIncrementalMarking() const {
1747 return current_gc_flags_ & kFinalizeIncrementalMarkingMask; 1690 return current_gc_flags_ & kFinalizeIncrementalMarkingMask;
1748 } 1691 }
1749 1692
1750 #define ROOT_ACCESSOR(type, name, camel_name) \
1751 inline void set_##name(type* value);
1752 ROOT_LIST(ROOT_ACCESSOR)
1753 #undef ROOT_ACCESSOR
1754
1755 // Code that should be run before and after each GC. Includes some
1756 // reporting/verification activities when compiled with DEBUG set.
1757 void GarbageCollectionPrologue();
1758 void GarbageCollectionEpilogue();
1759
1760 void PreprocessStackTraces(); 1693 void PreprocessStackTraces();
1761 1694
1762 // Pretenuring decisions are made based on feedback collected during new 1695 // Pretenuring decisions are made based on feedback collected during new
1763 // space evacuation. Note that between feedback collection and calling this 1696 // space evacuation. Note that between feedback collection and calling this
1764 // method, objects in old space must not move. 1697 // method, objects in old space must not move.
1765 // Right now we only process pretenuring feedback in high promotion mode. 1698 // Right now we only process pretenuring feedback in high promotion mode.
1766 bool ProcessPretenuringFeedback(); 1699 bool ProcessPretenuringFeedback();
1767 1700
1768 // Checks whether a global GC is necessary. 1701 // Checks whether a global GC is necessary.
1769 GarbageCollector SelectGarbageCollector(AllocationSpace space, 1702 GarbageCollector SelectGarbageCollector(AllocationSpace space,
(...skipping 36 matching lines...)
1806 1739
1807 // These Create*EntryStub functions are here and forced to not be inlined 1740 // These Create*EntryStub functions are here and forced to not be inlined
1808 // because of a gcc-4.4 bug that assigns wrong vtable entries. 1741 // because of a gcc-4.4 bug that assigns wrong vtable entries.
1809 NO_INLINE(void CreateJSEntryStub()); 1742 NO_INLINE(void CreateJSEntryStub());
1810 NO_INLINE(void CreateJSConstructEntryStub()); 1743 NO_INLINE(void CreateJSConstructEntryStub());
1811 1744
1812 void CreateFixedStubs(); 1745 void CreateFixedStubs();
1813 1746
1814 HeapObject* DoubleAlignForDeserialization(HeapObject* object, int size); 1747 HeapObject* DoubleAlignForDeserialization(HeapObject* object, int size);
1815 1748
1816 // Performs a minor collection in new generation.
1817 void Scavenge();
1818
1819 // Commits from space if it is uncommitted. 1749 // Commits from space if it is uncommitted.
1820 void EnsureFromSpaceIsCommitted(); 1750 void EnsureFromSpaceIsCommitted();
1821 1751
1822 // Uncommit unused semi space. 1752 // Uncommit unused semi space.
1823 bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); } 1753 bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
1824 1754
1825 // Fill in bogus values in from space. 1755 // Fill in bogus values in from space.
1826 void ZapFromSpace(); 1756 void ZapFromSpace();
1827 1757
1828 Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
1829
1830 // Performs a major collection in the whole heap.
1831 void MarkCompact();
1832
1833 // Code to be run before and after mark-compact.
1834 void MarkCompactPrologue();
1835 void MarkCompactEpilogue();
1836
1837 void ProcessNativeContexts(WeakObjectRetainer* retainer);
1838 void ProcessAllocationSites(WeakObjectRetainer* retainer);
1839
1840 // Deopts all code that contains allocation instruction which are tenured or 1758 // Deopts all code that contains allocation instruction which are tenured or
1841 // not tenured. Moreover it clears the pretenuring allocation site statistics. 1759 // not tenured. Moreover it clears the pretenuring allocation site statistics.
1842 void ResetAllAllocationSitesDependentCode(PretenureFlag flag); 1760 void ResetAllAllocationSitesDependentCode(PretenureFlag flag);
1843 1761
1844 // Evaluates local pretenuring for the old space and calls 1762 // Evaluates local pretenuring for the old space and calls
1845 // ResetAllTenuredAllocationSitesDependentCode if too many objects died in 1763 // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
1846 // the old space. 1764 // the old space.
1847 void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc); 1765 void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
1848 1766
1849 // Called on heap tear-down. Frees all remaining ArrayBuffer backing stores. 1767 // Called on heap tear-down. Frees all remaining ArrayBuffer backing stores.
(...skipping 61 matching lines...)
1911 void IdleNotificationEpilogue(GCIdleTimeAction action, 1829 void IdleNotificationEpilogue(GCIdleTimeAction action,
1912 GCIdleTimeHandler::HeapState heap_state, 1830 GCIdleTimeHandler::HeapState heap_state,
1913 double start_ms, double deadline_in_ms); 1831 double start_ms, double deadline_in_ms);
1914 void CheckAndNotifyBackgroundIdleNotification(double idle_time_in_ms, 1832 void CheckAndNotifyBackgroundIdleNotification(double idle_time_in_ms,
1915 double now_ms); 1833 double now_ms);
1916 1834
1917 void ClearObjectStats(bool clear_last_time_stats = false); 1835 void ClearObjectStats(bool clear_last_time_stats = false);
1918 1836
1919 inline void UpdateAllocationsHash(HeapObject* object); 1837 inline void UpdateAllocationsHash(HeapObject* object);
1920 inline void UpdateAllocationsHash(uint32_t value); 1838 inline void UpdateAllocationsHash(uint32_t value);
1921 inline void PrintAlloctionsHash(); 1839 void PrintAlloctionsHash();
1922 1840
1923 void AddToRingBuffer(const char* string); 1841 void AddToRingBuffer(const char* string);
1924 void GetFromRingBuffer(char* buffer); 1842 void GetFromRingBuffer(char* buffer);
1925 1843
1844 // Attempt to over-approximate the weak closure by marking object groups and
1845 // implicit references from global handles, but don't atomically complete
1846 // marking. If we continue to mark incrementally, we might have marked
1847 // objects that die later.
1848 void OverApproximateWeakClosure(const char* gc_reason);
1849
1850 // ===========================================================================
1851 // Actual GC. ================================================================
1852 // ===========================================================================
1853
1854 // Code that should be run before and after each GC. Includes some
1855 // reporting/verification activities when compiled with DEBUG set.
1856 void GarbageCollectionPrologue();
1857 void GarbageCollectionEpilogue();
1858
1859 // Performs a major collection in the whole heap.
1860 void MarkCompact();
1861
1862 // Code to be run before and after mark-compact.
1863 void MarkCompactPrologue();
1864 void MarkCompactEpilogue();
1865
1866 // Performs a minor collection in new generation.
1867 void Scavenge();
1868
1869 Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
1870
1871 void UpdateNewSpaceReferencesInExternalStringTable(
1872 ExternalStringTableUpdaterCallback updater_func);
1873
1874 void UpdateReferencesInExternalStringTable(
1875 ExternalStringTableUpdaterCallback updater_func);
1876
1877 void ProcessAllWeakReferences(WeakObjectRetainer* retainer);
1878 void ProcessYoungWeakReferences(WeakObjectRetainer* retainer);
1879 void ProcessNativeContexts(WeakObjectRetainer* retainer);
1880 void ProcessAllocationSites(WeakObjectRetainer* retainer);
1881
1882 // ===========================================================================
1883 // GC statistics. ============================================================
1884 // ===========================================================================
1885
1886 inline intptr_t OldGenerationSpaceAvailable() {
1887 return old_generation_allocation_limit_ - PromotedTotalSize();
1888 }
1889
1890 // Returns maximum GC pause.
1891 double get_max_gc_pause() { return max_gc_pause_; }
1892
1893 // Returns maximum size of objects alive after GC.
1894 intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
1895
1896 // Returns minimal interval between two subsequent collections.
1897 double get_min_in_mutator() { return min_in_mutator_; }
1898
1899 // Update GC statistics that are tracked on the Heap.
1900 void UpdateCumulativeGCStatistics(double duration, double spent_in_mutator,
1901 double marking_time);
1902
1903 bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }
1904
1905 // ===========================================================================
1906 // Growing strategy. =========================================================
1907 // ===========================================================================
1908
1909 // Decrease the allocation limit if the new limit based on the given
1910 // parameters is lower than the current limit.
1911 void DampenOldGenerationAllocationLimit(intptr_t old_gen_size,
1912 double gc_speed,
1913 double mutator_speed);
1914
1915
1916 // Calculates the allocation limit based on a given growing factor and a
1917 // given old generation size.
1918 intptr_t CalculateOldGenerationAllocationLimit(double factor,
1919 intptr_t old_gen_size);
1920
1921 // Sets the allocation limit to trigger the next full garbage collection.
1922 void SetOldGenerationAllocationLimit(intptr_t old_gen_size, double gc_speed,
1923 double mutator_speed);
1924
1925 // ===========================================================================
1926 // Idle notification. ========================================================
1927 // ===========================================================================
1928
1929 bool RecentIdleNotificationHappened();
1930
1926 // =========================================================================== 1931 // ===========================================================================
1927 // Allocation methods. ======================================================= 1932 // Allocation methods. =======================================================
1928 // =========================================================================== 1933 // ===========================================================================
1929 1934
1935 // Returns a deep copy of the JavaScript object.
1936 // Properties and elements are copied too.
1937 // Optionally takes an AllocationSite to be appended in an AllocationMemento.
1938 MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source,
1939 AllocationSite* site = NULL);
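Hedged usage sketch of the MUST_USE_RESULT convention around declarations like CopyJSObject: callers unpack the AllocationResult and take a collect-and-retry path when allocation fails. Everything below is a stand-in model of the calling convention, not V8's actual types or API:

#include <cstdio>

struct JSObject {};

// Minimal stand-in: either holds an object or signals a retry.
struct AllocationResultModel {
  JSObject* object;  // nullptr means the allocation failed
  bool To(JSObject** out) const {
    if (object == nullptr) return false;
    *out = object;
    return true;
  }
};

// Hypothetical allocator that fails when the heap has no room.
AllocationResultModel TryCopyJSObjectModel(bool heap_has_room) {
  static JSObject copy;
  return {heap_has_room ? &copy : nullptr};
}

int main() {
  JSObject* obj = nullptr;
  if (!TryCopyJSObjectModel(/*heap_has_room=*/false).To(&obj)) {
    std::puts("allocation failed: collect garbage, then retry");
    TryCopyJSObjectModel(/*heap_has_room=*/true).To(&obj);
  }
  std::printf("copied object at %p\n", static_cast<void*>(obj));
}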
1940
1930 // Allocates a JS Map in the heap. 1941 // Allocates a JS Map in the heap.
1931 MUST_USE_RESULT AllocationResult 1942 MUST_USE_RESULT AllocationResult
1932 AllocateMap(InstanceType instance_type, int instance_size, 1943 AllocateMap(InstanceType instance_type, int instance_size,
1933 ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND); 1944 ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
1934 1945
1935 // Allocates and initializes a new JavaScript object based on a 1946 // Allocates and initializes a new JavaScript object based on a
1936 // constructor. 1947 // constructor.
1937 // If allocation_site is non-null, then a memento is emitted after the object 1948 // If allocation_site is non-null, then a memento is emitted after the object
1938 // that points to the site. 1949 // that points to the site.
1939 MUST_USE_RESULT AllocationResult AllocateJSObject( 1950 MUST_USE_RESULT AllocationResult AllocateJSObject(
(...skipping 812 matching lines...)
2752 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. 2763 DisallowHeapAllocation no_allocation; // i.e. no gc allowed.
2753 2764
2754 private: 2765 private:
2755 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); 2766 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
2756 }; 2767 };
2757 #endif // DEBUG 2768 #endif // DEBUG
2758 } 2769 }
2759 } // namespace v8::internal 2770 } // namespace v8::internal
2760 2771
2761 #endif // V8_HEAP_HEAP_H_ 2772 #endif // V8_HEAP_HEAP_H_