Chromium Code Reviews

Side by Side Diff: src/heap/heap.h

Issue 1313513003: [heap] Enforce coding style decl order in {Heap} round #2. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 3 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #ifndef V8_HEAP_HEAP_H_ 5 #ifndef V8_HEAP_HEAP_H_
6 #define V8_HEAP_HEAP_H_ 6 #define V8_HEAP_HEAP_H_
7 7
8 #include <cmath> 8 #include <cmath>
9 #include <map> 9 #include <map>
10 10
(...skipping 803 matching lines...) Expand 10 before | Expand all | Expand 10 after
814 814
815 // Set the stack limit in the roots_ array. Some architectures generate 815 // Set the stack limit in the roots_ array. Some architectures generate
816 // code that looks here, because it is faster than loading from the static 816 // code that looks here, because it is faster than loading from the static
817 // jslimit_/real_jslimit_ variable in the StackGuard. 817 // jslimit_/real_jslimit_ variable in the StackGuard.
818 void SetStackLimits(); 818 void SetStackLimits();
819 819
820 // Notifies the heap that it is ok to start marking or other activities that 820 // Notifies the heap that it is ok to start marking or other activities that
821 // should not happen during deserialization. 821 // should not happen during deserialization.
822 void NotifyDeserializationComplete(); 822 void NotifyDeserializationComplete();
823 823
824 // Returns whether SetUp has been called.
825 bool HasBeenSetUp();
826
827 intptr_t old_generation_allocation_limit() const { 824 intptr_t old_generation_allocation_limit() const {
828 return old_generation_allocation_limit_; 825 return old_generation_allocation_limit_;
829 } 826 }
830 827
831 bool always_allocate() { return always_allocate_scope_depth_ != 0; } 828 bool always_allocate() { return always_allocate_scope_depth_ != 0; }
832 Address always_allocate_scope_depth_address() { 829 Address always_allocate_scope_depth_address() {
833 return reinterpret_cast<Address>(&always_allocate_scope_depth_); 830 return reinterpret_cast<Address>(&always_allocate_scope_depth_);
834 } 831 }
835 832
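The depth counter behind always_allocate() is the usual RAII scope pattern: opening a scope bumps the depth, closing it restores it, and allocation is forced while the depth is non-zero. A minimal standalone sketch of that pattern (MiniHeap and AlwaysAllocateScopeSketch are invented stand-ins, not V8 types):

  #include <cassert>

  // Stand-in for the heap; only models the scope-depth counter.
  struct MiniHeap {
    int always_allocate_scope_depth_ = 0;
    bool always_allocate() { return always_allocate_scope_depth_ != 0; }
  };

  // RAII guard: allocation is forced while at least one scope is open.
  class AlwaysAllocateScopeSketch {
   public:
    explicit AlwaysAllocateScopeSketch(MiniHeap* heap) : heap_(heap) {
      heap_->always_allocate_scope_depth_++;  // entering the scope
    }
    ~AlwaysAllocateScopeSketch() {
      heap_->always_allocate_scope_depth_--;  // leaving the scope
    }

   private:
    MiniHeap* heap_;
  };

  int main() {
    MiniHeap heap;
    assert(!heap.always_allocate());
    {
      AlwaysAllocateScopeSketch scope(&heap);
      assert(heap.always_allocate());  // nested scopes would just bump the depth
    }
    assert(!heap.always_allocate());
  }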
836 Address* NewSpaceAllocationTopAddress() { 833 Address* NewSpaceAllocationTopAddress() {
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after
883 // start and hence is only valid if there is only a single reference to it. 880 // start and hence is only valid if there is only a single reference to it.
884 FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim); 881 FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
885 882
886 // Trim the given array from the right. 883 // Trim the given array from the right.
887 template<Heap::InvocationMode mode> 884 template<Heap::InvocationMode mode>
888 void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim); 885 void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
889 886
890 // Converts the given boolean condition to JavaScript boolean value. 887 // Converts the given boolean condition to JavaScript boolean value.
891 inline Object* ToBoolean(bool condition); 888 inline Object* ToBoolean(bool condition);
892 889
893 // Attempt to over-approximate the weak closure by marking object groups and
894 // implicit references from global handles, but don't atomically complete
895 // marking. If we continue to mark incrementally, we might have marked
896 // objects that die later.
897 void OverApproximateWeakClosure(const char* gc_reason);
898
899 // Check whether the heap is currently iterable. 890 // Check whether the heap is currently iterable.
900 bool IsHeapIterable(); 891 bool IsHeapIterable();
901 892
902 // Notify the heap that a context has been disposed. 893 // Notify the heap that a context has been disposed.
903 int NotifyContextDisposed(bool dependant_context); 894 int NotifyContextDisposed(bool dependant_context);
904 895
905 void FinalizeIncrementalMarkingIfComplete(const char* comment); 896 void FinalizeIncrementalMarkingIfComplete(const char* comment);
906 897
907 inline void increment_scan_on_scavenge_pages() { 898 inline void increment_scan_on_scavenge_pages() {
908 scan_on_scavenge_pages_++; 899 scan_on_scavenge_pages_++;
(...skipping 99 matching lines...) Expand 10 before | Expand all | Expand 10 after
1008 return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]); 999 return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
1009 } 1000 }
1010 1001
1011 void CheckHandleCount(); 1002 void CheckHandleCount();
1012 1003
1013 // Number of "runtime allocations" done so far. 1004 // Number of "runtime allocations" done so far.
1014 uint32_t allocations_count() { return allocations_count_; } 1005 uint32_t allocations_count() { return allocations_count_; }
1015 1006
1016 // Returns deterministic "time" value in ms. Works only with 1007 // Returns deterministic "time" value in ms. Works only with
1017 // FLAG_verify_predictable. 1008 // FLAG_verify_predictable.
1018 double synthetic_time() { return allocations_count_ / 2.0; } 1009 double synthetic_time() { return allocations_count() / 2.0; }
1019 1010
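synthetic_time() turns the allocation counter into a deterministic clock, so runs under FLAG_verify_predictable observe identical "timestamps" regardless of wall-clock time. A toy model of the idea (the counter type and the /2.0 scale follow the declaration above; everything else is invented for the demo):

  #include <cstdint>
  #include <cstdio>

  // Toy model: a deterministic "clock" that advances only with allocations.
  struct PredictableClock {
    uint32_t allocations_count = 0;
    void OnAllocation() { allocations_count++; }
    double synthetic_time_ms() const { return allocations_count / 2.0; }
  };

  int main() {
    PredictableClock clock;
    for (int i = 0; i < 5; i++) clock.OnAllocation();
    std::printf("%.1f ms\n", clock.synthetic_time_ms());  // always "2.5 ms"
  }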
1020 // Print short heap statistics. 1011 // Print short heap statistics.
1021 void PrintShortHeapStatistics(); 1012 void PrintShortHeapStatistics();
1022 1013
1023 size_t object_count_last_gc(size_t index) { 1014 size_t object_count_last_gc(size_t index) {
1024 return index < OBJECT_STATS_COUNT ? object_counts_last_time_[index] : 0; 1015 return index < OBJECT_STATS_COUNT ? object_counts_last_time_[index] : 0;
1025 } 1016 }
1026 1017
1027 size_t object_size_last_gc(size_t index) { 1018 size_t object_size_last_gc(size_t index) {
1028 return index < OBJECT_STATS_COUNT ? object_sizes_last_time_[index] : 0; 1019 return index < OBJECT_STATS_COUNT ? object_sizes_last_time_[index] : 0;
(...skipping 17 matching lines...) Expand all
1046 bool ReserveSpace(Reservation* reservations); 1037 bool ReserveSpace(Reservation* reservations);
1047 1038
1048 // 1039 //
1049 // Support for the API. 1040 // Support for the API.
1050 // 1041 //
1051 1042
1052 void CreateApiObjects(); 1043 void CreateApiObjects();
1053 1044
1054 // Calculates the allocation limit based on a given growing factor and a 1045 // Calculates the allocation limit based on a given growing factor and a
1055 // given old generation size. 1046 // given old generation size.
1056 intptr_t CalculateOldGenerationAllocationLimit(double factor, 1047 intptr_t CalculateOldGenerationAllocationLimit(double factor,
Hannes Payer (out of office) 2015/08/24 16:43:53 CalculateOldGenerationAllocationLimit and SetOldGe
1057 intptr_t old_gen_size); 1048 intptr_t old_gen_size);
1058 1049
1059 // Sets the allocation limit to trigger the next full garbage collection. 1050 // Sets the allocation limit to trigger the next full garbage collection.
1060 void SetOldGenerationAllocationLimit(intptr_t old_gen_size, double gc_speed, 1051 void SetOldGenerationAllocationLimit(intptr_t old_gen_size, double gc_speed,
1061 double mutator_speed); 1052 double mutator_speed);
1062 1053
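Both growing-limit functions follow the same shape: scale the live old-generation size by a growing factor and clamp against a configured maximum. A standalone sketch of that shape, under assumed values (the constant and the clamping policy here are illustrative, not V8's actual ones):

  #include <algorithm>
  #include <cstdint>
  #include <cstdio>

  // Invented cap for the demo; the real maximum is configured elsewhere.
  const intptr_t kMaxOldGenerationSizeDemo = intptr_t{1} << 30;  // 1 GB

  // New limit = live old-generation size * growing factor, clamped to the cap.
  intptr_t CalculateLimitSketch(double factor, intptr_t old_gen_size) {
    double limit = static_cast<double>(old_gen_size) * factor;
    double max = static_cast<double>(kMaxOldGenerationSizeDemo);
    return static_cast<intptr_t>(std::min(limit, max));
  }

  int main() {
    // A 1.5x growing factor on 64 MB of live objects puts the next
    // full-GC trigger at roughly 96 MB.
    std::printf("%jd\n",
                static_cast<intmax_t>(CalculateLimitSketch(1.5, 64 << 20)));
  }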
1063 // Decrease the allocation limit if the new limit based on the given
1064 // parameters is lower than the current limit.
1065 void DampenOldGenerationAllocationLimit(intptr_t old_gen_size,
1066 double gc_speed,
1067 double mutator_speed);
1068
1069 // Implements the corresponding V8 API function. 1054 // Implements the corresponding V8 API function.
1070 bool IdleNotification(double deadline_in_seconds); 1055 bool IdleNotification(double deadline_in_seconds);
1071 bool IdleNotification(int idle_time_in_ms); 1056 bool IdleNotification(int idle_time_in_ms);
1072 1057
1073 double MonotonicallyIncreasingTimeInMs(); 1058 double MonotonicallyIncreasingTimeInMs();
1074 1059
1075 Object* root(RootListIndex index) { return roots_[index]; } 1060 Object* root(RootListIndex index) { return roots_[index]; }
1076 1061
1077 // Generated code can treat direct references to this root as constant. 1062 // Generated code can treat direct references to this root as constant.
1078 bool RootCanBeTreatedAsConstant(RootListIndex root_index); 1063 bool RootCanBeTreatedAsConstant(RootListIndex root_index);
(...skipping 12 matching lines...) Expand all
1091 inline bool HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit) { 1076 inline bool HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit) {
1092 if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true; 1077 if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
1093 1078
1094 intptr_t adjusted_allocation_limit = limit - new_space_.Capacity(); 1079 intptr_t adjusted_allocation_limit = limit - new_space_.Capacity();
1095 1080
1096 if (PromotedTotalSize() >= adjusted_allocation_limit) return true; 1081 if (PromotedTotalSize() >= adjusted_allocation_limit) return true;
1097 1082
1098 return false; 1083 return false;
1099 } 1084 }
1100 1085
1101 void UpdateNewSpaceReferencesInExternalStringTable(
1102 ExternalStringTableUpdaterCallback updater_func);
1103
1104 void UpdateReferencesInExternalStringTable(
1105 ExternalStringTableUpdaterCallback updater_func);
1106
1107 void ProcessAllWeakReferences(WeakObjectRetainer* retainer);
1108 void ProcessYoungWeakReferences(WeakObjectRetainer* retainer);
1109
1110 void VisitExternalResources(v8::ExternalResourceVisitor* visitor); 1086 void VisitExternalResources(v8::ExternalResourceVisitor* visitor);
1111 1087
1112 // An object should be promoted if the object has survived a 1088 // An object should be promoted if the object has survived a
1113 // scavenge operation. 1089 // scavenge operation.
1114 inline bool ShouldBePromoted(Address old_address, int object_size); 1090 inline bool ShouldBePromoted(Address old_address, int object_size);
Hannes Payer (out of office) 2015/08/24 16:43:53 Also a private.
Michael Lippautz 2015/08/25 06:54:42 Can't as {ScavengingVisitor} (which is not a frien
1115 1091
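The promotion rule above is: surviving one scavenge is enough. V8 actually decides survival from the object's address relative to the new-space age mark; the explicit survival counter in this sketch is a simplification of that check, with all names invented:

  #include <cassert>

  struct ToyObject {
    int scavenges_survived = 0;  // simplification of V8's age-mark check
  };

  bool ShouldBePromotedDemo(const ToyObject& o) {
    return o.scavenges_survived >= 1;  // survived at least one scavenge
  }

  int main() {
    ToyObject obj;
    assert(!ShouldBePromotedDemo(obj));  // freshly allocated: stays in new space
    obj.scavenges_survived++;            // survived a scavenge
    assert(ShouldBePromotedDemo(obj));   // next scavenge promotes it
  }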
1116 void ClearNormalizedMapCaches(); 1092 void ClearNormalizedMapCaches();
1117 1093
1118 void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature); 1094 void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);
1119 1095
1120 ExternalStringTable* external_string_table() { 1096 ExternalStringTable* external_string_table() {
1121 return &external_string_table_; 1097 return &external_string_table_;
1122 } 1098 }
1123 1099
1124 bool concurrent_sweeping_enabled() { return concurrent_sweeping_enabled_; } 1100 bool concurrent_sweeping_enabled() { return concurrent_sweeping_enabled_; }
1125 1101
1126 inline bool OldGenerationAllocationLimitReached(); 1102 inline bool OldGenerationAllocationLimitReached();
Hannes Payer (out of office) 2015/08/24 16:43:53 This one should also be next to the other growing
Michael Lippautz 2015/08/25 06:54:42 Done.
1127 1103
1128 void QueueMemoryChunkForFree(MemoryChunk* chunk); 1104 void QueueMemoryChunkForFree(MemoryChunk* chunk);
1129 void FilterStoreBufferEntriesOnAboutToBeFreedPages(); 1105 void FilterStoreBufferEntriesOnAboutToBeFreedPages();
1130 void FreeQueuedChunks(); 1106 void FreeQueuedChunks();
1131 1107
1132 bool RecentIdleNotificationHappened(); 1108 bool RecentIdleNotificationHappened();
Hannes Payer (out of office) 2015/08/24 16:43:53 Private. There should be a section for them.
Michael Lippautz 2015/08/25 06:54:42 Done.
1133 1109
1134 // Completely clear the Instanceof cache (to stop it keeping objects alive 1110 // Completely clear the Instanceof cache (to stop it keeping objects alive
1135 // around a GC). 1111 // around a GC).
1136 inline void CompletelyClearInstanceofCache(); 1112 inline void CompletelyClearInstanceofCache();
1137 1113
1138 inline uint32_t HashSeed(); 1114 inline uint32_t HashSeed();
1139 1115
1140 inline Smi* NextScriptId(); 1116 inline Smi* NextScriptId();
1141 1117
1142 inline void SetArgumentsAdaptorDeoptPCOffset(int pc_offset); 1118 inline void SetArgumentsAdaptorDeoptPCOffset(int pc_offset);
(...skipping 11 matching lines...) Expand all
1154 void AgeInlineCaches() { 1130 void AgeInlineCaches() {
1155 global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax; 1131 global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
1156 } 1132 }
1157 1133
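The AND with SharedFunctionInfo::ICAgeBits::kMax makes the age counter wrap instead of overflowing, which works because a bitfield maximum has the form 2^n - 1. A tiny demonstration with an invented 3-bit width:

  #include <cstdio>

  const int kICAgeMaxDemo = (1 << 3) - 1;  // invented width for the demo: 7

  int main() {
    int age = 0;
    for (int i = 0; i < 10; i++) age = (age + 1) & kICAgeMaxDemo;
    std::printf("%d\n", age);  // 10 mod 8 == 2: the counter wrapped, no overflow
  }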
1158 int64_t amount_of_external_allocated_memory() { 1134 int64_t amount_of_external_allocated_memory() {
1159 return amount_of_external_allocated_memory_; 1135 return amount_of_external_allocated_memory_;
1160 } 1136 }
1161 1137
1162 void DeoptMarkedAllocationSites(); 1138 void DeoptMarkedAllocationSites();
1163 1139
1164 bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }
1165
1166 bool DeoptMaybeTenuredAllocationSites() { 1140 bool DeoptMaybeTenuredAllocationSites() {
1167 return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0; 1141 return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
1168 } 1142 }
1169 1143
1170 void RecordObjectStats(InstanceType type, size_t size) { 1144 void RecordObjectStats(InstanceType type, size_t size) {
1171 DCHECK(type <= LAST_TYPE); 1145 DCHECK(type <= LAST_TYPE);
1172 object_counts_[type]++; 1146 object_counts_[type]++;
1173 object_sizes_[type] += size; 1147 object_sizes_[type] += size;
1174 } 1148 }
1175 1149
(...skipping 88 matching lines...) Expand 10 before | Expand all | Expand 10 after
1264 // without actually creating any objects. 1238 // without actually creating any objects.
1265 bool SetUp(); 1239 bool SetUp();
1266 1240
1267 // Bootstraps the object heap with the core set of objects required to run. 1241 // Bootstraps the object heap with the core set of objects required to run.
1268 // Returns whether it succeeded. 1242 // Returns whether it succeeded.
1269 bool CreateHeapObjects(); 1243 bool CreateHeapObjects();
1270 1244
1271 // Destroys all memory allocated by the heap. 1245 // Destroys all memory allocated by the heap.
1272 void TearDown(); 1246 void TearDown();
1273 1247
1248 // Returns whether SetUp has been called.
1249 bool HasBeenSetUp();
1250
1274 // =========================================================================== 1251 // ===========================================================================
1275 // Getters for spaces. ======================================================= 1252 // Getters for spaces. =======================================================
1276 // =========================================================================== 1253 // ===========================================================================
1277 1254
1278 // Return the starting address and a mask for the new space. And-masking an 1255 // Return the starting address and a mask for the new space. And-masking an
1279 // address with the mask will result in the start address of the new space 1256 // address with the mask will result in the start address of the new space
1280 // for all addresses in either semispace. 1257 // for all addresses in either semispace.
1281 Address NewSpaceStart() { return new_space_.start(); } 1258 Address NewSpaceStart() { return new_space_.start(); }
1282 uintptr_t NewSpaceMask() { return new_space_.mask(); } 1259 uintptr_t NewSpaceMask() { return new_space_.mask(); }
1283 Address NewSpaceTop() { return new_space_.top(); } 1260 Address NewSpaceTop() { return new_space_.top(); }
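The mask trick relies on the new-space reservation being aligned to its own power-of-two size: clearing the low bits of any address in either semispace recovers the start address. A standalone check, assuming a hypothetical 4 MB reservation at a 4 MB-aligned base:

  #include <cassert>
  #include <cstdint>

  int main() {
    const uintptr_t kNewSpaceSize = 4u * 1024 * 1024;  // assumed 4 MB
    const uintptr_t start = 0x40000000u;               // assumed aligned base
    const uintptr_t mask = ~(kNewSpaceSize - 1);       // cf. NewSpaceMask()

    uintptr_t inside = start + 123456;                 // somewhere in a semispace
    assert((inside & mask) == start);                  // masks back to the start
  }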
(...skipping 213 matching lines...) Expand 10 before | Expand all | Expand 10 after
1497 inline intptr_t promoted_objects_size() { return promoted_objects_size_; } 1474 inline intptr_t promoted_objects_size() { return promoted_objects_size_; }
1498 1475
1499 inline void IncrementSemiSpaceCopiedObjectSize(int object_size) { 1476 inline void IncrementSemiSpaceCopiedObjectSize(int object_size) {
1500 DCHECK(object_size > 0); 1477 DCHECK(object_size > 0);
1501 semi_space_copied_object_size_ += object_size; 1478 semi_space_copied_object_size_ += object_size;
1502 } 1479 }
1503 inline intptr_t semi_space_copied_object_size() { 1480 inline intptr_t semi_space_copied_object_size() {
1504 return semi_space_copied_object_size_; 1481 return semi_space_copied_object_size_;
1505 } 1482 }
1506 1483
1507
1508 inline intptr_t SurvivedNewSpaceObjectSize() { 1484 inline intptr_t SurvivedNewSpaceObjectSize() {
1509 return promoted_objects_size_ + semi_space_copied_object_size_; 1485 return promoted_objects_size_ + semi_space_copied_object_size_;
1510 } 1486 }
1511 1487
1512 inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; } 1488 inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; }
1513 1489
1514 inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; } 1490 inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; }
1515 1491
1516 inline void IncrementNodesPromoted() { nodes_promoted_++; } 1492 inline void IncrementNodesPromoted() { nodes_promoted_++; }
1517 1493
1518 inline void IncrementYoungSurvivorsCounter(int survived) { 1494 inline void IncrementYoungSurvivorsCounter(int survived) {
1519 DCHECK(survived >= 0); 1495 DCHECK(survived >= 0);
1520 survived_last_scavenge_ = survived; 1496 survived_last_scavenge_ = survived;
1521 survived_since_last_expansion_ += survived; 1497 survived_since_last_expansion_ += survived;
1522 } 1498 }
1523 1499
1524 inline intptr_t PromotedTotalSize() { 1500 inline intptr_t PromotedTotalSize() {
1525 int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize(); 1501 int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
1526 if (total > std::numeric_limits<intptr_t>::max()) { 1502 if (total > std::numeric_limits<intptr_t>::max()) {
1527 // TODO(erikcorry): Use uintptr_t everywhere we do heap size calculations. 1503 // TODO(erikcorry): Use uintptr_t everywhere we do heap size calculations.
1528 return std::numeric_limits<intptr_t>::max(); 1504 return std::numeric_limits<intptr_t>::max();
1529 } 1505 }
1530 if (total < 0) return 0; 1506 if (total < 0) return 0;
1531 return static_cast<intptr_t>(total); 1507 return static_cast<intptr_t>(total);
1532 } 1508 }
1533 1509
1534 inline intptr_t OldGenerationSpaceAvailable() {
1535 return old_generation_allocation_limit_ - PromotedTotalSize();
1536 }
1537
1538 inline intptr_t OldGenerationCapacityAvailable() {
1539 return max_old_generation_size_ - PromotedTotalSize();
1540 }
1541
1542
1543 void UpdateNewSpaceAllocationCounter() { 1510 void UpdateNewSpaceAllocationCounter() {
1544 new_space_allocation_counter_ = NewSpaceAllocationCounter(); 1511 new_space_allocation_counter_ = NewSpaceAllocationCounter();
1545 } 1512 }
1546 1513
1547 size_t NewSpaceAllocationCounter() { 1514 size_t NewSpaceAllocationCounter() {
1548 return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC(); 1515 return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();
1549 } 1516 }
1550 1517
1551 // This should be used only for testing. 1518 // This should be used only for testing.
1552 void set_new_space_allocation_counter(size_t new_value) { 1519 void set_new_space_allocation_counter(size_t new_value) {
(...skipping 10 matching lines...) Expand all
1563 1530
1564 // This should be used only for testing. 1531 // This should be used only for testing.
1565 void set_old_generation_allocation_counter(size_t new_value) { 1532 void set_old_generation_allocation_counter(size_t new_value) {
1566 old_generation_allocation_counter_ = new_value; 1533 old_generation_allocation_counter_ = new_value;
1567 } 1534 }
1568 1535
1569 size_t PromotedSinceLastGC() { 1536 size_t PromotedSinceLastGC() {
1570 return PromotedSpaceSizeOfObjects() - old_generation_size_at_last_gc_; 1537 return PromotedSpaceSizeOfObjects() - old_generation_size_at_last_gc_;
1571 } 1538 }
1572 1539
1573 // Update GC statistics that are tracked on the Heap.
1574 void UpdateCumulativeGCStatistics(double duration, double spent_in_mutator,
1575 double marking_time);
1576
1577 // Returns maximum GC pause.
1578 double get_max_gc_pause() { return max_gc_pause_; }
1579
1580 // Returns maximum size of objects alive after GC.
1581 intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
1582
1583 // Returns minimal interval between two subsequent collections.
1584 double get_min_in_mutator() { return min_in_mutator_; }
1585
1586 int gc_count() const { return gc_count_; } 1540 int gc_count() const { return gc_count_; }
1587 1541
1588 // Returns the size of objects residing in non-new spaces. 1542 // Returns the size of objects residing in non-new spaces.
1589 intptr_t PromotedSpaceSizeOfObjects(); 1543 intptr_t PromotedSpaceSizeOfObjects();
1590 1544
1591 double total_regexp_code_generated() { return total_regexp_code_generated_; } 1545 double total_regexp_code_generated() { return total_regexp_code_generated_; }
1592 void IncreaseTotalRegexpCodeGenerated(int size) { 1546 void IncreaseTotalRegexpCodeGenerated(int size) {
1593 total_regexp_code_generated_ += size; 1547 total_regexp_code_generated_ += size;
1594 } 1548 }
1595 1549
(...skipping 17 matching lines...) Expand all
1613 GCType gc_type_filter, bool pass_isolate = true); 1567 GCType gc_type_filter, bool pass_isolate = true);
1614 void RemoveGCEpilogueCallback(v8::Isolate::GCCallback callback); 1568 void RemoveGCEpilogueCallback(v8::Isolate::GCCallback callback);
1615 1569
1616 void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags); 1570 void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
1617 void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags); 1571 void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);
1618 1572
1619 // =========================================================================== 1573 // ===========================================================================
1620 // Allocation methods. ======================================================= 1574 // Allocation methods. =======================================================
1621 // =========================================================================== 1575 // ===========================================================================
1622 1576
1623 // Returns a deep copy of the JavaScript object.
1624 // Properties and elements are copied too.
1625 // Optionally takes an AllocationSite to be appended in an AllocationMemento.
1626 MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source,
1627 AllocationSite* site = NULL);
1628
1629 // Creates a filler object and returns a heap object immediately after it. 1577 // Creates a filler object and returns a heap object immediately after it.
1630 MUST_USE_RESULT HeapObject* PrecedeWithFiller(HeapObject* object, 1578 MUST_USE_RESULT HeapObject* PrecedeWithFiller(HeapObject* object,
1631 int filler_size); 1579 int filler_size);
1580
1632 // Creates a filler object if needed for alignment and returns a heap object 1581 // Creates a filler object if needed for alignment and returns a heap object
1633 // immediately after it. If any space is left after the returned object, 1582 // immediately after it. If any space is left after the returned object,
1634 // another filler object is created so the over-allocated memory is iterable. 1583 // another filler object is created so the over-allocated memory is iterable.
1635 MUST_USE_RESULT HeapObject* AlignWithFiller(HeapObject* object, 1584 MUST_USE_RESULT HeapObject* AlignWithFiller(HeapObject* object,
1636 int object_size, 1585 int object_size,
1637 int allocation_size, 1586 int allocation_size,
1638 AllocationAlignment alignment); 1587 AllocationAlignment alignment);
1639 1588
1640 // ============================================================================= 1589 // =============================================================================
1641 1590
(...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after
1720 static void ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page, 1669 static void ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page,
1721 StoreBufferEvent event); 1670 StoreBufferEvent event);
1722 1671
1723 // Selects the proper allocation space depending on the given object 1672 // Selects the proper allocation space depending on the given object
1724 // size and pretenuring decision. 1673 // size and pretenuring decision.
1725 static AllocationSpace SelectSpace(int object_size, PretenureFlag pretenure) { 1674 static AllocationSpace SelectSpace(int object_size, PretenureFlag pretenure) {
1726 if (object_size > Page::kMaxRegularHeapObjectSize) return LO_SPACE; 1675 if (object_size > Page::kMaxRegularHeapObjectSize) return LO_SPACE;
1727 return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE; 1676 return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
1728 } 1677 }
1729 1678
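For intuition, here is the same three-way decision as a self-contained program; the enum values and the size threshold are local stand-ins for V8's AllocationSpace, PretenureFlag, and Page::kMaxRegularHeapObjectSize:

  #include <cstdio>

  enum AllocationSpaceDemo { NEW_SPACE_DEMO, OLD_SPACE_DEMO, LO_SPACE_DEMO };
  enum PretenureFlagDemo { NOT_TENURED_DEMO, TENURED_DEMO };

  const int kMaxRegularHeapObjectSizeDemo = 512 * 1024;  // invented threshold

  AllocationSpaceDemo SelectSpaceDemo(int object_size, PretenureFlagDemo p) {
    if (object_size > kMaxRegularHeapObjectSizeDemo) return LO_SPACE_DEMO;
    return (p == TENURED_DEMO) ? OLD_SPACE_DEMO : NEW_SPACE_DEMO;
  }

  int main() {
    std::printf("%d\n", SelectSpaceDemo(64, NOT_TENURED_DEMO));       // 0: new
    std::printf("%d\n", SelectSpaceDemo(64, TENURED_DEMO));           // 1: old
    std::printf("%d\n", SelectSpaceDemo(1 << 20, NOT_TENURED_DEMO));  // 2: large
  }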
1679 #define ROOT_ACCESSOR(type, name, camel_name) \
1680 inline void set_##name(type* value);
1681 ROOT_LIST(ROOT_ACCESSOR)
1682 #undef ROOT_ACCESSOR
1683
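ROOT_LIST is an X-macro: it invokes the macro passed as its argument once per (type, name, camel_name) root entry, so the three lines above stamp out one setter per root. A minimal sketch of the idiom with an invented two-entry list:

  #include <cstdio>

  // Invented stand-in for ROOT_LIST; V8's real list is much larger.
  #define DEMO_ROOT_LIST(V) \
    V(int, answer, Answer)  \
    V(double, ratio, Ratio)

  struct DemoRoots {
  // One setter per list entry is stamped out by the macro expansion.
  #define ROOT_ACCESSOR(type, name, camel_name) \
    void set_##name(type value) { name##_ = value; }
    DEMO_ROOT_LIST(ROOT_ACCESSOR)
  #undef ROOT_ACCESSOR

    int answer_ = 0;
    double ratio_ = 0.0;
  };

  int main() {
    DemoRoots roots;
    roots.set_answer(42);  // generated by the X-macro
    roots.set_ratio(0.5);
    std::printf("%d %.1f\n", roots.answer_, roots.ratio_);
  }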
1730 int current_gc_flags() { return current_gc_flags_; } 1684 int current_gc_flags() { return current_gc_flags_; }
1731 1685
1732 void set_current_gc_flags(int flags) { 1686 void set_current_gc_flags(int flags) {
1733 current_gc_flags_ = flags; 1687 current_gc_flags_ = flags;
1734 DCHECK(!ShouldFinalizeIncrementalMarking() || 1688 DCHECK(!ShouldFinalizeIncrementalMarking() ||
1735 !ShouldAbortIncrementalMarking()); 1689 !ShouldAbortIncrementalMarking());
1736 } 1690 }
1737 1691
1738 inline bool ShouldReduceMemory() const { 1692 inline bool ShouldReduceMemory() const {
1739 return current_gc_flags_ & kReduceMemoryFootprintMask; 1693 return current_gc_flags_ & kReduceMemoryFootprintMask;
1740 } 1694 }
1741 1695
1742 inline bool ShouldAbortIncrementalMarking() const { 1696 inline bool ShouldAbortIncrementalMarking() const {
1743 return current_gc_flags_ & kAbortIncrementalMarkingMask; 1697 return current_gc_flags_ & kAbortIncrementalMarkingMask;
1744 } 1698 }
1745 1699
1746 inline bool ShouldFinalizeIncrementalMarking() const { 1700 inline bool ShouldFinalizeIncrementalMarking() const {
1747 return current_gc_flags_ & kFinalizeIncrementalMarkingMask; 1701 return current_gc_flags_ & kFinalizeIncrementalMarkingMask;
1748 } 1702 }
1749 1703
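The predicates above are plain bitmask tests against current_gc_flags_, and the DCHECK in set_current_gc_flags() enforces that finalize and abort are never requested together. A sketch with invented bit assignments (the real mask values live elsewhere in this header):

  #include <cassert>

  // Invented bit assignments for the demo.
  const int kReduceMemoryFootprintMaskDemo = 1 << 0;
  const int kAbortIncrementalMarkingMaskDemo = 1 << 1;
  const int kFinalizeIncrementalMarkingMaskDemo = 1 << 2;

  int main() {
    int flags =
        kReduceMemoryFootprintMaskDemo | kAbortIncrementalMarkingMaskDemo;
    assert(flags & kReduceMemoryFootprintMaskDemo);    // ShouldReduceMemory()
    assert(flags & kAbortIncrementalMarkingMaskDemo);  // ShouldAbort...()
    // Finalize and abort are mutually exclusive, mirroring the DCHECK above.
    assert(!(flags & kFinalizeIncrementalMarkingMaskDemo));
  }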
1750 #define ROOT_ACCESSOR(type, name, camel_name) \
1751 inline void set_##name(type* value);
1752 ROOT_LIST(ROOT_ACCESSOR)
1753 #undef ROOT_ACCESSOR
1754
1755 // Code that should be run before and after each GC. Includes some 1704 // Code that should be run before and after each GC. Includes some
1756 // reporting/verification activities when compiled with DEBUG set. 1705 // reporting/verification activities when compiled with DEBUG set.
1757 void GarbageCollectionPrologue(); 1706 void GarbageCollectionPrologue();
1758 void GarbageCollectionEpilogue(); 1707 void GarbageCollectionEpilogue();
Michael Starzinger 2015/08/24 16:55:47 Can both go into the "Actual GC" section as well,
Michael Lippautz 2015/08/25 06:54:42 Done.
1759 1708
1760 void PreprocessStackTraces(); 1709 void PreprocessStackTraces();
1761 1710
1762 // Pretenuring decisions are made based on feedback collected during new 1711 // Pretenuring decisions are made based on feedback collected during new
1763 // space evacuation. Note that between feedback collection and calling this 1712 // space evacuation. Note that between feedback collection and calling this
1764 // method, objects in old space must not move. 1713 // method, objects in old space must not move.
1765 // Right now we only process pretenuring feedback in high promotion mode. 1714 // Right now we only process pretenuring feedback in high promotion mode.
1766 bool ProcessPretenuringFeedback(); 1715 bool ProcessPretenuringFeedback();
1767 1716
1768 // Checks whether a global GC is necessary 1717 // Checks whether a global GC is necessary
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after
1807 // These five Create*EntryStub functions are here and forced to not be inlined 1756 // These five Create*EntryStub functions are here and forced to not be inlined
1808 // because of a gcc-4.4 bug that assigns wrong vtable entries. 1757 // because of a gcc-4.4 bug that assigns wrong vtable entries.
1809 NO_INLINE(void CreateJSEntryStub()); 1758 NO_INLINE(void CreateJSEntryStub());
1810 NO_INLINE(void CreateJSConstructEntryStub()); 1759 NO_INLINE(void CreateJSConstructEntryStub());
1811 1760
1812 void CreateFixedStubs(); 1761 void CreateFixedStubs();
1813 1762
1814 HeapObject* DoubleAlignForDeserialization(HeapObject* object, int size); 1763 HeapObject* DoubleAlignForDeserialization(HeapObject* object, int size);
1815 1764
1816 // Performs a minor collection in new generation. 1765 // Performs a minor collection in new generation.
1817 void Scavenge(); 1766 void Scavenge();
Michael Starzinger 2015/08/24 16:55:47 Can go into the "Actual GC" section, right before
Michael Lippautz 2015/08/25 06:54:42 Done.
1818 1767
1819 // Commits from space if it is uncommitted. 1768 // Commits from space if it is uncommitted.
1820 void EnsureFromSpaceIsCommitted(); 1769 void EnsureFromSpaceIsCommitted();
1821 1770
1822 // Uncommit unused semi space. 1771 // Uncommit unused semi space.
1823 bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); } 1772 bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
1824 1773
1825 // Fill in bogus values in from space 1774 // Fill in bogus values in from space
1826 void ZapFromSpace(); 1775 void ZapFromSpace();
1827 1776
1828 Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
1829
1830 // Performs a major collection in the whole heap.
1831 void MarkCompact();
1832
1833 // Code to be run before and after mark-compact.
1834 void MarkCompactPrologue();
1835 void MarkCompactEpilogue();
1836
1837 void ProcessNativeContexts(WeakObjectRetainer* retainer); 1777 void ProcessNativeContexts(WeakObjectRetainer* retainer);
1838 void ProcessAllocationSites(WeakObjectRetainer* retainer); 1778 void ProcessAllocationSites(WeakObjectRetainer* retainer);
Michael Starzinger 2015/08/24 16:55:47 Can both go into the "Actual GC" section after Pro
Michael Lippautz 2015/08/25 06:54:42 Done.
1839 1779
1840 // Deopts all code that contains allocation instructions which are tenured or 1780 // Deopts all code that contains allocation instructions which are tenured or
1841 // not tenured. Moreover, it clears the pretenuring allocation site statistics. 1781 // not tenured. Moreover, it clears the pretenuring allocation site statistics.
1842 void ResetAllAllocationSitesDependentCode(PretenureFlag flag); 1782 void ResetAllAllocationSitesDependentCode(PretenureFlag flag);
1843 1783
1844 // Evaluates local pretenuring for the old space and calls 1784 // Evaluates local pretenuring for the old space and calls
1845 // ResetAllTenuredAllocationSitesDependentCode if too many objects died in 1785 // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
1846 // the old space. 1786 // the old space.
1847 void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc); 1787 void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
1848 1788
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after
1911 void IdleNotificationEpilogue(GCIdleTimeAction action, 1851 void IdleNotificationEpilogue(GCIdleTimeAction action,
1912 GCIdleTimeHandler::HeapState heap_state, 1852 GCIdleTimeHandler::HeapState heap_state,
1913 double start_ms, double deadline_in_ms); 1853 double start_ms, double deadline_in_ms);
1914 void CheckAndNotifyBackgroundIdleNotification(double idle_time_in_ms, 1854 void CheckAndNotifyBackgroundIdleNotification(double idle_time_in_ms,
1915 double now_ms); 1855 double now_ms);
1916 1856
1917 void ClearObjectStats(bool clear_last_time_stats = false); 1857 void ClearObjectStats(bool clear_last_time_stats = false);
1918 1858
1919 inline void UpdateAllocationsHash(HeapObject* object); 1859 inline void UpdateAllocationsHash(HeapObject* object);
1920 inline void UpdateAllocationsHash(uint32_t value); 1860 inline void UpdateAllocationsHash(uint32_t value);
1921 inline void PrintAlloctionsHash(); 1861 void PrintAlloctionsHash();
1922 1862
1923 void AddToRingBuffer(const char* string); 1863 void AddToRingBuffer(const char* string);
1924 void GetFromRingBuffer(char* buffer); 1864 void GetFromRingBuffer(char* buffer);
1925 1865
1866 // Decrease the allocation limit if the new limit based on the given
1867 // parameters is lower than the current limit.
1868 void DampenOldGenerationAllocationLimit(intptr_t old_gen_size,
1869 double gc_speed,
1870 double mutator_speed);
1871
1872 // Attempt to over-approximate the weak closure by marking object groups and
1873 // implicit references from global handles, but don't atomically complete
1874 // marking. If we continue to mark incrementally, we might have marked
1875 // objects that die later.
1876 void OverApproximateWeakClosure(const char* gc_reason);
1877
1878 // ===========================================================================
1879 // Actual GC. ================================================================
1880 // ===========================================================================
1881
1882 // Performs a major collection in the whole heap.
1883 void MarkCompact();
1884
1885 // Code to be run before and after mark-compact.
1886 void MarkCompactPrologue();
1887 void MarkCompactEpilogue();
1888
1889 Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
1890
1891 void UpdateNewSpaceReferencesInExternalStringTable(
1892 ExternalStringTableUpdaterCallback updater_func);
1893
1894 void UpdateReferencesInExternalStringTable(
1895 ExternalStringTableUpdaterCallback updater_func);
1896
1897 void ProcessAllWeakReferences(WeakObjectRetainer* retainer);
1898 void ProcessYoungWeakReferences(WeakObjectRetainer* retainer);
1899
1900 // ===========================================================================
1901 // GC statistics. ============================================================
1902 // ===========================================================================
1903
1904 inline intptr_t OldGenerationSpaceAvailable() {
1905 return old_generation_allocation_limit_ - PromotedTotalSize();
1906 }
1907
1908 // Returns maximum GC pause.
1909 double get_max_gc_pause() { return max_gc_pause_; }
1910
1911 // Returns maximum size of objects alive after GC.
1912 intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
1913
1914 // Returns minimal interval between two subsequent collections.
1915 double get_min_in_mutator() { return min_in_mutator_; }
1916
1917 // Update GC statistics that are tracked on the Heap.
1918 void UpdateCumulativeGCStatistics(double duration, double spent_in_mutator,
1919 double marking_time);
1920
1921 bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }
1922
1926 // =========================================================================== 1923 // ===========================================================================
1927 // Allocation methods. ======================================================= 1924 // Allocation methods. =======================================================
1928 // =========================================================================== 1925 // ===========================================================================
1929 1926
1927 // Returns a deep copy of the JavaScript object.
1928 // Properties and elements are copied too.
1929 // Optionally takes an AllocationSite to be appended in an AllocationMemento.
1930 MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source,
1931 AllocationSite* site = NULL);
1932
1930 // Allocates a JS Map in the heap. 1933 // Allocates a JS Map in the heap.
1931 MUST_USE_RESULT AllocationResult 1934 MUST_USE_RESULT AllocationResult
1932 AllocateMap(InstanceType instance_type, int instance_size, 1935 AllocateMap(InstanceType instance_type, int instance_size,
1933 ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND); 1936 ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
1934 1937
1935 // Allocates and initializes a new JavaScript object based on a 1938 // Allocates and initializes a new JavaScript object based on a
1936 // constructor. 1939 // constructor.
1937 // If allocation_site is non-null, then a memento is emitted after the object 1940 // If allocation_site is non-null, then a memento is emitted after the object
1938 // that points to the site. 1941 // that points to the site.
1939 MUST_USE_RESULT AllocationResult AllocateJSObject( 1942 MUST_USE_RESULT AllocationResult AllocateJSObject(
(...skipping 810 matching lines...) Expand 10 before | Expand all | Expand 10 after
2750 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. 2753 DisallowHeapAllocation no_allocation; // i.e. no gc allowed.
2751 2754
2752 private: 2755 private:
2753 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); 2756 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
2754 }; 2757 };
2755 #endif // DEBUG 2758 #endif // DEBUG
2756 } 2759 }
2757 } // namespace v8::internal 2760 } // namespace v8::internal
2758 2761
2759 #endif // V8_HEAP_HEAP_H_ 2762 #endif // V8_HEAP_HEAP_H_