Chromium Code Reviews

Index: src/heap/heap.h
diff --git a/src/heap/heap.h b/src/heap/heap.h
index f7eca4011a2b71d6dfe9c926a261f4693977d526..37d189c6c9c563b6cd02ef2da50beb986618eb20 100644
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -821,9 +821,6 @@ class Heap {
// should not happen during deserialization.
void NotifyDeserializationComplete();
- // Returns whether SetUp has been called.
- bool HasBeenSetUp();
-
intptr_t old_generation_allocation_limit() const {
return old_generation_allocation_limit_;
}
@@ -890,12 +887,6 @@ class Heap {
// Converts the given boolean condition to JavaScript boolean value.
inline Object* ToBoolean(bool condition);
- // Attempt to over-approximate the weak closure by marking object groups and
- // implicit references from global handles, but don't atomically complete
- // marking. If we continue to mark incrementally, we might have marked
- // objects that die later.
- void OverApproximateWeakClosure(const char* gc_reason);
-
// Check whether the heap is currently iterable.
bool IsHeapIterable();
@@ -1015,7 +1006,7 @@ class Heap {
// Returns deterministic "time" value in ms. Works only with
// FLAG_verify_predictable.
- double synthetic_time() { return allocations_count_ / 2.0; }
+ double synthetic_time() { return allocations_count() / 2.0; }
// Print short heap statistics.
void PrintShortHeapStatistics();
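Note on the synthetic_time() change above: the hunk swaps direct access to the private allocations_count_ field for an accessor call. The sketch below is illustrative only and assumes a getter of roughly this shape exists elsewhere in Heap; it is not part of this patch.

#include <cstdint>

// Minimal sketch, not V8 code: shows the accessor pattern the hunk relies on.
class HeapSketch {
 public:
  // Assumed getter; the real Heap exposes its allocation counter similarly.
  uint32_t allocations_count() const { return allocations_count_; }

  // Deterministic "time" in ms: every two allocations advance the clock by 1.
  double synthetic_time() const { return allocations_count() / 2.0; }

 private:
  uint32_t allocations_count_ = 0;
};

Going through the accessor keeps the deterministic-time helper decoupled from how the counter is stored.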
@@ -1060,12 +1051,6 @@ class Heap {
void SetOldGenerationAllocationLimit(intptr_t old_gen_size, double gc_speed,
double mutator_speed);
- // Decrease the allocation limit if the new limit based on the given
- // parameters is lower than the current limit.
- void DampenOldGenerationAllocationLimit(intptr_t old_gen_size,
- double gc_speed,
- double mutator_speed);
-
// Implements the corresponding V8 API function.
bool IdleNotification(double deadline_in_seconds);
bool IdleNotification(int idle_time_in_ms);
@@ -1098,15 +1083,6 @@ class Heap {
return false;
}
- void UpdateNewSpaceReferencesInExternalStringTable(
- ExternalStringTableUpdaterCallback updater_func);
-
- void UpdateReferencesInExternalStringTable(
- ExternalStringTableUpdaterCallback updater_func);
-
- void ProcessAllWeakReferences(WeakObjectRetainer* retainer);
- void ProcessYoungWeakReferences(WeakObjectRetainer* retainer);
-
void VisitExternalResources(v8::ExternalResourceVisitor* visitor);
// An object should be promoted if the object has survived a
@@ -1161,8 +1137,6 @@ class Heap {
void DeoptMarkedAllocationSites();
- bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }
-
bool DeoptMaybeTenuredAllocationSites() {
return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
}
@@ -1271,6 +1245,9 @@ class Heap {
// Destroys all memory allocated by the heap.
void TearDown();
+ // Returns whether SetUp has been called.
+ bool HasBeenSetUp();
+
// ===========================================================================
// Getters for spaces. =======================================================
// ===========================================================================
@@ -1504,7 +1481,6 @@ class Heap {
return semi_space_copied_object_size_;
}
-
inline intptr_t SurvivedNewSpaceObjectSize() {
return promoted_objects_size_ + semi_space_copied_object_size_;
}
@@ -1531,15 +1507,6 @@ class Heap {
return static_cast<intptr_t>(total);
}
- inline intptr_t OldGenerationSpaceAvailable() {
- return old_generation_allocation_limit_ - PromotedTotalSize();
- }
-
- inline intptr_t OldGenerationCapacityAvailable() {
- return max_old_generation_size_ - PromotedTotalSize();
- }
-
-
void UpdateNewSpaceAllocationCounter() {
new_space_allocation_counter_ = NewSpaceAllocationCounter();
}
@@ -1570,19 +1537,6 @@ class Heap {
return PromotedSpaceSizeOfObjects() - old_generation_size_at_last_gc_;
}
- // Update GC statistics that are tracked on the Heap.
- void UpdateCumulativeGCStatistics(double duration, double spent_in_mutator,
- double marking_time);
-
- // Returns maximum GC pause.
- double get_max_gc_pause() { return max_gc_pause_; }
-
- // Returns maximum size of objects alive after GC.
- intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
-
- // Returns minimal interval between two subsequent collections.
- double get_min_in_mutator() { return min_in_mutator_; }
-
int gc_count() const { return gc_count_; }
// Returns the size of objects residing in non new spaces.
@@ -1620,15 +1574,10 @@ class Heap {
// Allocation methods. =======================================================
// ===========================================================================
- // Returns a deep copy of the JavaScript object.
- // Properties and elements are copied too.
- // Optionally takes an AllocationSite to be appended in an AllocationMemento.
- MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source,
- AllocationSite* site = NULL);
-
// Creates a filler object and returns a heap object immediately after it.
MUST_USE_RESULT HeapObject* PrecedeWithFiller(HeapObject* object,
int filler_size);
+
// Creates a filler object if needed for alignment and returns a heap object
// immediately after it. If any space is left after the returned object,
// another filler object is created so the over allocated memory is iterable.
@@ -1727,6 +1676,11 @@ class Heap {
return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
}
+#define ROOT_ACCESSOR(type, name, camel_name) \
+ inline void set_##name(type* value);
+ ROOT_LIST(ROOT_ACCESSOR)
+#undef ROOT_ACCESSOR
+
int current_gc_flags() { return current_gc_flags_; }
void set_current_gc_flags(int flags) {
@@ -1747,11 +1701,6 @@ class Heap {
return current_gc_flags_ & kFinalizeIncrementalMarkingMask;
}
-#define ROOT_ACCESSOR(type, name, camel_name) \
- inline void set_##name(type* value);
- ROOT_LIST(ROOT_ACCESSOR)
-#undef ROOT_ACCESSOR
-
// Code that should be run before and after each GC. Includes some
// reporting/verification activities when compiled with DEBUG set.
void GarbageCollectionPrologue();
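Note on the ROOT_ACCESSOR hunk above: ROOT_LIST/ROOT_ACCESSOR is an X-macro that stamps out one set_<name>() declaration per heap root; the patch only moves the block, it does not change it. The sketch below is illustrative only, using a made-up two-entry list as a stand-in for the real ROOT_LIST, which is defined elsewhere in heap.h and is far longer.

// Minimal X-macro sketch; the entries below are placeholders, not V8's list.
class Map;
class String;

#define SKETCH_ROOT_LIST(V)              \
  V(Map, free_space_map, FreeSpaceMap)   \
  V(String, empty_string, empty_string)

#define ROOT_ACCESSOR(type, name, camel_name) \
  inline void set_##name(type* value);
SKETCH_ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

// The two-entry list expands to exactly these declarations:
//   inline void set_free_space_map(Map* value);
//   inline void set_empty_string(String* value);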
@@ -1825,15 +1774,6 @@ class Heap {
// Fill in bogus values in from space
void ZapFromSpace();
- Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
-
- // Performs a major collection in the whole heap.
- void MarkCompact();
-
- // Code to be run before and after mark-compact.
- void MarkCompactPrologue();
- void MarkCompactEpilogue();
-
void ProcessNativeContexts(WeakObjectRetainer* retainer);
void ProcessAllocationSites(WeakObjectRetainer* retainer);

Michael Starzinger (2015/08/24 16:55:47):
Can both go into the "Actual GC" section after Pro

Michael Lippautz (2015/08/25 06:54:42):
Done.

@@ -1918,15 +1858,78 @@ class Heap {
inline void UpdateAllocationsHash(HeapObject* object);
inline void UpdateAllocationsHash(uint32_t value);
- inline void PrintAlloctionsHash();
+ void PrintAlloctionsHash();
void AddToRingBuffer(const char* string);
void GetFromRingBuffer(char* buffer);
+ // Decrease the allocation limit if the new limit based on the given
+ // parameters is lower than the current limit.
+ void DampenOldGenerationAllocationLimit(intptr_t old_gen_size,
+ double gc_speed,
+ double mutator_speed);
+
+ // Attempt to over-approximate the weak closure by marking object groups and
+ // implicit references from global handles, but don't atomically complete
+ // marking. If we continue to mark incrementally, we might have marked
+ // objects that die later.
+ void OverApproximateWeakClosure(const char* gc_reason);
+
+ // ===========================================================================
+ // Actual GC. ================================================================
+ // ===========================================================================
+
+ // Performs a major collection in the whole heap.
+ void MarkCompact();
+
+ // Code to be run before and after mark-compact.
+ void MarkCompactPrologue();
+ void MarkCompactEpilogue();
+
+ Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
+
+ void UpdateNewSpaceReferencesInExternalStringTable(
+ ExternalStringTableUpdaterCallback updater_func);
+
+ void UpdateReferencesInExternalStringTable(
+ ExternalStringTableUpdaterCallback updater_func);
+
+ void ProcessAllWeakReferences(WeakObjectRetainer* retainer);
+ void ProcessYoungWeakReferences(WeakObjectRetainer* retainer);
+
+ // ===========================================================================
+ // GC statistics. ============================================================
+ // ===========================================================================
+
+ inline intptr_t OldGenerationSpaceAvailable() {
+ return old_generation_allocation_limit_ - PromotedTotalSize();
+ }
+
+ // Returns maximum GC pause.
+ double get_max_gc_pause() { return max_gc_pause_; }
+
+ // Returns maximum size of objects alive after GC.
+ intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
+
+ // Returns minimal interval between two subsequent collections.
+ double get_min_in_mutator() { return min_in_mutator_; }
+
+ // Update GC statistics that are tracked on the Heap.
+ void UpdateCumulativeGCStatistics(double duration, double spent_in_mutator,
+ double marking_time);
+
+ bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }
+
// ===========================================================================
// Allocation methods. =======================================================
// ===========================================================================
+ // Returns a deep copy of the JavaScript object.
+ // Properties and elements are copied too.
+ // Optionally takes an AllocationSite to be appended in an AllocationMemento.
+ MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source,
+ AllocationSite* site = NULL);
+
// Allocates a JS Map in the heap.
MUST_USE_RESULT AllocationResult
AllocateMap(InstanceType instance_type, int instance_size,
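Closing note on DampenOldGenerationAllocationLimit, relocated in the hunk above: its comment promises that the limit is only ever lowered, never raised, when the freshly computed value is smaller. The sketch below illustrates just that contract; ComputeLimitSketch and DampenSketch are hypothetical stand-ins, not V8's actual growth heuristic, which lives in heap.cc.

#include <algorithm>
#include <stdint.h>

// Hypothetical placeholder for whatever growth-factor math V8 actually uses;
// here it simply scales the old generation size by a fixed factor.
intptr_t ComputeLimitSketch(intptr_t old_gen_size, double gc_speed,
                            double mutator_speed) {
  (void)gc_speed;
  (void)mutator_speed;
  return static_cast<intptr_t>(old_gen_size * 1.5);
}

// Dampening contract: keep the current limit unless the recomputed one is lower.
void DampenSketch(intptr_t old_gen_size, double gc_speed, double mutator_speed,
                  intptr_t* current_limit) {
  intptr_t new_limit = ComputeLimitSketch(old_gen_size, gc_speed, mutator_speed);
  *current_limit = std::min(*current_limit, new_limit);
}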