OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/ast/context-slot-cache.h" | 9 #include "src/ast/context-slot-cache.h" |
10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
(...skipping 796 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
807 TRACE_EVENT0("v8", "V8.GCIncrementalMarkingFinalize"); | 807 TRACE_EVENT0("v8", "V8.GCIncrementalMarkingFinalize"); |
808 TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE); | 808 TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE); |
809 | 809 |
810 { | 810 { |
811 GCCallbacksScope scope(this); | 811 GCCallbacksScope scope(this); |
812 if (scope.CheckReenter()) { | 812 if (scope.CheckReenter()) { |
813 AllowHeapAllocation allow_allocation; | 813 AllowHeapAllocation allow_allocation; |
814 TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE); | 814 TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE); |
815 VMState<EXTERNAL> state(isolate_); | 815 VMState<EXTERNAL> state(isolate_); |
816 HandleScope handle_scope(isolate_); | 816 HandleScope handle_scope(isolate_); |
817 CallGCPrologueCallbacks(kGCTypeIncrementalMarking, kNoGCCallbackFlags); | 817 CallGCPrologueCallbacks(kGCTypeIncrementalMarking, |
818 FLAG_object_grouping_in_incremental_finalization | |
819 ? kNoGCCallbackFlags | |
820 : kGCCallbackFlagDisableObjectGrouping); | |
818 } | 821 } |
819 } | 822 } |
820 incremental_marking()->FinalizeIncrementally(); | 823 incremental_marking()->FinalizeIncrementally(); |
821 { | 824 { |
822 GCCallbacksScope scope(this); | 825 GCCallbacksScope scope(this); |
823 if (scope.CheckReenter()) { | 826 if (scope.CheckReenter()) { |
824 AllowHeapAllocation allow_allocation; | 827 AllowHeapAllocation allow_allocation; |
825 TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE); | 828 TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE); |
826 VMState<EXTERNAL> state(isolate_); | 829 VMState<EXTERNAL> state(isolate_); |
827 HandleScope handle_scope(isolate_); | 830 HandleScope handle_scope(isolate_); |
(...skipping 4576 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
5404 isolate()->LoadStartTimeMs() + kMaxLoadTimeMs; | 5407 isolate()->LoadStartTimeMs() + kMaxLoadTimeMs; |
5405 } | 5408 } |
5406 | 5409 |
5407 // This predicate is called when an old generation space cannot allocate from | 5410 // This predicate is called when an old generation space cannot allocate from |
5408 // the free list and is about to add a new page. Returning false will cause a | 5411 // the free list and is about to add a new page. Returning false will cause a |
5409 // major GC. It happens when the old generation allocation limit is reached and | 5412 // major GC. It happens when the old generation allocation limit is reached and |
5410 // - either we need to optimize for memory usage, | 5413 // - either we need to optimize for memory usage, |
5411 // - or the incremental marking is not in progress and we cannot start it. | 5414 // - or the incremental marking is not in progress and we cannot start it. |
5412 bool Heap::ShouldExpandOldGenerationOnSlowAllocation() { | 5415 bool Heap::ShouldExpandOldGenerationOnSlowAllocation() { |
5413 if (always_allocate() || OldGenerationSpaceAvailable() > 0) return true; | 5416 if (always_allocate() || OldGenerationSpaceAvailable() > 0) return true; |
5414 // We reached the old generation allocation limit. | |
ulan
2017/03/30 16:24:30
This change reduces noise in scheduling mark-compa
| |
5415 | |
5416 if (ShouldOptimizeForMemoryUsage()) return false; | |
5417 | |
5418 if (ShouldOptimizeForLoadTime()) return true; | |
5419 | |
5420 if (incremental_marking()->NeedsFinalization()) { | |
5421 return !AllocationLimitOvershotByLargeMargin(); | |
5422 } | |
5423 | |
5424 if (incremental_marking()->IsStopped() && | 5417 if (incremental_marking()->IsStopped() && |
5425 IncrementalMarkingLimitReached() == IncrementalMarkingLimit::kNoLimit) { | 5418 IncrementalMarkingLimitReached() == IncrementalMarkingLimit::kNoLimit) { |
5426 // We cannot start incremental marking. | 5419 // We cannot start incremental marking. |
5427 return false; | 5420 return false; |
5428 } | 5421 } |
5429 return true; | 5422 return true; |
5430 } | 5423 } |
5431 | 5424 |
5432 // This function returns either kNoLimit, kSoftLimit, or kHardLimit. | 5425 // This function returns either kNoLimit, kSoftLimit, or kHardLimit. |
5433 // The kNoLimit means that either incremental marking is disabled or it is too | 5426 // The kNoLimit means that either incremental marking is disabled or it is too |
(...skipping 10 matching lines...) Expand all Loading... | |
5444 if ((FLAG_stress_compaction && (gc_count_ & 1) != 0) || | 5437 if ((FLAG_stress_compaction && (gc_count_ & 1) != 0) || |
5445 HighMemoryPressure()) { | 5438 HighMemoryPressure()) { |
5446 // If there is high memory pressure or stress testing is enabled, then | 5439 // If there is high memory pressure or stress testing is enabled, then |
5447 // start marking immediately. | 5440 // start marking immediately. |
5448 return IncrementalMarkingLimit::kHardLimit; | 5441 return IncrementalMarkingLimit::kHardLimit; |
5449 } | 5442 } |
5450 size_t old_generation_space_available = OldGenerationSpaceAvailable(); | 5443 size_t old_generation_space_available = OldGenerationSpaceAvailable(); |
5451 if (old_generation_space_available > new_space_->Capacity()) { | 5444 if (old_generation_space_available > new_space_->Capacity()) { |
5452 return IncrementalMarkingLimit::kNoLimit; | 5445 return IncrementalMarkingLimit::kNoLimit; |
5453 } | 5446 } |
5454 if (ShouldOptimizeForMemoryUsage()) { | |
ulan
2017/03/30 16:24:30
This change reduces noise in scheduling mark-compa
| |
5455 return IncrementalMarkingLimit::kHardLimit; | |
5456 } | |
5457 if (ShouldOptimizeForLoadTime()) { | |
5458 return IncrementalMarkingLimit::kNoLimit; | |
5459 } | |
5460 if (old_generation_space_available == 0) { | 5447 if (old_generation_space_available == 0) { |
5461 return IncrementalMarkingLimit::kHardLimit; | 5448 return IncrementalMarkingLimit::kHardLimit; |
5462 } | 5449 } |
5463 return IncrementalMarkingLimit::kSoftLimit; | 5450 return IncrementalMarkingLimit::kSoftLimit; |
5464 } | 5451 } |
5465 | 5452 |
5466 void Heap::EnableInlineAllocation() { | 5453 void Heap::EnableInlineAllocation() { |
5467 if (!inline_allocation_disabled_) return; | 5454 if (!inline_allocation_disabled_) return; |
5468 inline_allocation_disabled_ = false; | 5455 inline_allocation_disabled_ = false; |
5469 | 5456 |
(...skipping 1116 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
6586 } | 6573 } |
6587 | 6574 |
6588 | 6575 |
6589 // static | 6576 // static |
6590 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6577 int Heap::GetStaticVisitorIdForMap(Map* map) { |
6591 return StaticVisitorBase::GetVisitorId(map); | 6578 return StaticVisitorBase::GetVisitorId(map); |
6592 } | 6579 } |
6593 | 6580 |
6594 } // namespace internal | 6581 } // namespace internal |
6595 } // namespace v8 | 6582 } // namespace v8 |
OLD | NEW |