OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
(...skipping 452 matching lines...)
463 isolate()->optimizing_compile_dispatcher()->AgeBufferedOsrJobs(); | 463 isolate()->optimizing_compile_dispatcher()->AgeBufferedOsrJobs(); |
464 } | 464 } |
465 | 465 |
466 if (new_space_.IsAtMaximumCapacity()) { | 466 if (new_space_.IsAtMaximumCapacity()) { |
467 maximum_size_scavenges_++; | 467 maximum_size_scavenges_++; |
468 } else { | 468 } else { |
469 maximum_size_scavenges_ = 0; | 469 maximum_size_scavenges_ = 0; |
470 } | 470 } |
471 CheckNewSpaceExpansionCriteria(); | 471 CheckNewSpaceExpansionCriteria(); |
472 UpdateNewSpaceAllocationCounter(); | 472 UpdateNewSpaceAllocationCounter(); |
473 UpdateOldGenerationAllocationCounter(); | |
474 } | 473 } |
475 | 474 |
476 | 475 |
477 intptr_t Heap::SizeOfObjects() { | 476 intptr_t Heap::SizeOfObjects() { |
478 intptr_t total = 0; | 477 intptr_t total = 0; |
479 AllSpaces spaces(this); | 478 AllSpaces spaces(this); |
480 for (Space* space = spaces.next(); space != NULL; space = spaces.next()) { | 479 for (Space* space = spaces.next(); space != NULL; space = spaces.next()) { |
481 total += space->SizeOfObjects(); | 480 total += space->SizeOfObjects(); |
482 } | 481 } |
483 return total; | 482 return total; |
(...skipping 247 matching lines...)
731 | 730 |
732 #ifdef DEBUG | 731 #ifdef DEBUG |
733 ReportStatisticsAfterGC(); | 732 ReportStatisticsAfterGC(); |
734 #endif // DEBUG | 733 #endif // DEBUG |
735 | 734 |
736 // Remember the last top pointer so that we can later find out | 735 // Remember the last top pointer so that we can later find out |
737 // whether we allocated in new space since the last GC. | 736 // whether we allocated in new space since the last GC. |
738 new_space_top_after_last_gc_ = new_space()->top(); | 737 new_space_top_after_last_gc_ = new_space()->top(); |
739 last_gc_time_ = MonotonicallyIncreasingTimeInMs(); | 738 last_gc_time_ = MonotonicallyIncreasingTimeInMs(); |
740 | 739 |
741 ReduceNewSpaceSize( | 740 ReduceNewSpaceSize(); |
742 tracer()->CurrentAllocationThroughputInBytesPerMillisecond()); | |
743 } | 741 } |
744 | 742 |
745 | 743 |
746 void Heap::PreprocessStackTraces() { | 744 void Heap::PreprocessStackTraces() { |
747 if (!weak_stack_trace_list()->IsWeakFixedArray()) return; | 745 if (!weak_stack_trace_list()->IsWeakFixedArray()) return; |
748 WeakFixedArray* array = WeakFixedArray::cast(weak_stack_trace_list()); | 746 WeakFixedArray* array = WeakFixedArray::cast(weak_stack_trace_list()); |
749 int length = array->Length(); | 747 int length = array->Length(); |
750 for (int i = 0; i < length; i++) { | 748 for (int i = 0; i < length; i++) { |
751 if (array->IsEmptySlot(i)) continue; | 749 if (array->IsEmptySlot(i)) continue; |
752 FixedArray* elements = FixedArray::cast(array->Get(i)); | 750 FixedArray* elements = FixedArray::cast(array->Get(i)); |
(...skipping 454 matching lines...)
1207 int start_new_space_size = Heap::new_space()->SizeAsInt(); | 1205 int start_new_space_size = Heap::new_space()->SizeAsInt(); |
1208 | 1206 |
1209 if (IsHighSurvivalRate()) { | 1207 if (IsHighSurvivalRate()) { |
1210 // We speed up the incremental marker if it is running so that it | 1208 // We speed up the incremental marker if it is running so that it |
1211 // does not fall behind the rate of promotion, which would cause a | 1209 // does not fall behind the rate of promotion, which would cause a |
1212 // constantly growing old space. | 1210 // constantly growing old space. |
1213 incremental_marking()->NotifyOfHighPromotionRate(); | 1211 incremental_marking()->NotifyOfHighPromotionRate(); |
1214 } | 1212 } |
1215 | 1213 |
1216 if (collector == MARK_COMPACTOR) { | 1214 if (collector == MARK_COMPACTOR) { |
| 1215 UpdateOldGenerationAllocationCounter(); |
1217 // Perform mark-sweep with optional compaction. | 1216 // Perform mark-sweep with optional compaction. |
1218 MarkCompact(); | 1217 MarkCompact(); |
1219 sweep_generation_++; | 1218 sweep_generation_++; |
1220 old_gen_exhausted_ = false; | 1219 old_gen_exhausted_ = false; |
1221 old_generation_size_configured_ = true; | 1220 old_generation_size_configured_ = true; |
| 1221 // This should be updated before PostGarbageCollectionProcessing, which can |
| 1222 // cause another GC. |
| 1223 old_generation_size_at_last_gc_ = PromotedSpaceSizeOfObjects(); |
1222 } else { | 1224 } else { |
1223 Scavenge(); | 1225 Scavenge(); |
1224 } | 1226 } |
1225 | 1227 |
1226 // This should be updated before PostGarbageCollectionProcessing, which can | |
1227 // cause another GC. | |
1228 old_generation_size_at_last_gc_ = PromotedSpaceSizeOfObjects(); | |
1229 | 1228 |
1230 UpdateSurvivalStatistics(start_new_space_size); | 1229 UpdateSurvivalStatistics(start_new_space_size); |
1231 ConfigureInitialOldGenerationSize(); | 1230 ConfigureInitialOldGenerationSize(); |
1232 | 1231 |
1233 isolate_->counters()->objs_since_last_young()->Set(0); | 1232 isolate_->counters()->objs_since_last_young()->Set(0); |
1234 | 1233 |
1235 if (collector != SCAVENGER) { | 1234 if (collector != SCAVENGER) { |
1236 // Callbacks that fire after this point might trigger nested GCs and | 1235 // Callbacks that fire after this point might trigger nested GCs and |
1237 // restart incremental marking; the assertion can't be moved down. | 1236 // restart incremental marking; the assertion can't be moved down. |
1238 DCHECK(incremental_marking()->IsStopped()); | 1237 DCHECK(incremental_marking()->IsStopped()); |
(...skipping 3263 matching lines...)
4502 if (!IsHeapIterable()) { | 4501 if (!IsHeapIterable()) { |
4503 CollectAllGarbage(kMakeHeapIterableMask, "Heap::MakeHeapIterable"); | 4502 CollectAllGarbage(kMakeHeapIterableMask, "Heap::MakeHeapIterable"); |
4504 } | 4503 } |
4505 if (mark_compact_collector()->sweeping_in_progress()) { | 4504 if (mark_compact_collector()->sweeping_in_progress()) { |
4506 mark_compact_collector()->EnsureSweepingCompleted(); | 4505 mark_compact_collector()->EnsureSweepingCompleted(); |
4507 } | 4506 } |
4508 DCHECK(IsHeapIterable()); | 4507 DCHECK(IsHeapIterable()); |
4509 } | 4508 } |
4510 | 4509 |
4511 | 4510 |
4512 bool Heap::HasLowAllocationRate(size_t allocation_rate) { | 4511 bool Heap::HasLowYoungGenerationAllocationRate() { |
4513 static const size_t kLowAllocationRate = 1000; | 4512 const double high_mutator_utilization = 0.995; |
4514 if (allocation_rate == 0) return false; | 4513 double mutator_speed = static_cast<double>( |
4515 return allocation_rate < kLowAllocationRate; | 4514 tracer()->NewSpaceAllocationThroughputInBytesPerMillisecond()); |
| 4515 double gc_speed = |
| 4516 static_cast<double>(tracer()->ScavengeSpeedInBytesPerMillisecond()); |
| 4517 if (mutator_speed == 0 || gc_speed == 0) return false; |
| 4518 double mutator_utilization = gc_speed / (mutator_speed + gc_speed); |
| 4519 return mutator_utilization > high_mutator_utilization; |
4516 } | 4520 } |
4517 | 4521 |
4518 | 4522 |
4519 void Heap::ReduceNewSpaceSize(size_t allocation_rate) { | 4523 bool Heap::HasLowOldGenerationAllocationRate() { |
4520 if (!FLAG_predictable && HasLowAllocationRate(allocation_rate)) { | 4524 const double high_mutator_utilization = 0.995; |
| 4525 double mutator_speed = static_cast<double>( |
| 4526 tracer()->OldGenerationAllocationThroughputInBytesPerMillisecond()); |
| 4527 double gc_speed = static_cast<double>( |
| 4528 tracer()->CombinedMarkCompactSpeedInBytesPerMillisecond()); |
| 4529 if (mutator_speed == 0 || gc_speed == 0) return false; |
| 4530 double mutator_utilization = gc_speed / (mutator_speed + gc_speed); |
| 4531 return mutator_utilization > high_mutator_utilization; |
| 4532 } |
| 4533 |
| 4534 |
| 4535 bool Heap::HasLowAllocationRate() { |
| 4536 return HasLowYoungGenerationAllocationRate() && |
| 4537 HasLowOldGenerationAllocationRate(); |
| 4538 } |
| 4539 |
| 4540 |
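Aside: the three new predicates above share one pattern — compare mutator (allocation) throughput against GC throughput, and treat the allocation rate as low when GC speed dominates. Below is a minimal standalone sketch of that heuristic, runnable outside V8; HasLowRate and the throughput figures are hypothetical stand-ins for the tracer-backed values, not V8 API.

  // Sketch of the mutator-utilization heuristic; the speeds are
  // illustrative numbers, not GCTracer output.
  #include <cstdio>

  static bool HasLowRate(double mutator_speed, double gc_speed) {
    const double kHighMutatorUtilization = 0.995;
    // No samples yet: report "not low" so nothing is shrunk before the
    // tracer has data (mirrors the zero checks in the functions above).
    if (mutator_speed == 0 || gc_speed == 0) return false;
    double mutator_utilization = gc_speed / (mutator_speed + gc_speed);
    return mutator_utilization > kHighMutatorUtilization;
  }

  int main() {
    // Nearly idle mutator: 1 KB/ms allocated vs. 1000 KB/ms collected,
    // utilization ~0.999 > 0.995 -> prints 1 (rate counts as low).
    std::printf("%d\n", HasLowRate(1.0, 1000.0));
    // Busy mutator: 100 KB/ms allocated vs. 1000 KB/ms collected,
    // utilization ~0.909 -> prints 0.
    std::printf("%d\n", HasLowRate(100.0, 1000.0));
    return 0;
  }

Note that both generation-specific predicates must agree before HasLowAllocationRate() returns true, so ReduceNewSpaceSize() below only shrinks and uncommits new space when the mutator as a whole is close to idle.
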
| 4541 void Heap::ReduceNewSpaceSize() { |
| 4542 if (!FLAG_predictable && HasLowAllocationRate()) { |
4521 new_space_.Shrink(); | 4543 new_space_.Shrink(); |
4522 UncommitFromSpace(); | 4544 UncommitFromSpace(); |
4523 } | 4545 } |
4524 } | 4546 } |
4525 | 4547 |
4526 | 4548 |
4527 bool Heap::TryFinalizeIdleIncrementalMarking( | 4549 bool Heap::TryFinalizeIdleIncrementalMarking( |
4528 double idle_time_in_ms, size_t size_of_objects, | 4550 double idle_time_in_ms, size_t size_of_objects, |
4529 size_t final_incremental_mark_compact_speed_in_bytes_per_ms) { | 4551 size_t final_incremental_mark_compact_speed_in_bytes_per_ms) { |
4530 if (FLAG_overapproximate_weak_closure && | 4552 if (FLAG_overapproximate_weak_closure && |
(...skipping 35 matching lines...)
4566 tracer()->IncrementalMarkingSpeedInBytesPerMillisecond()); | 4588 tracer()->IncrementalMarkingSpeedInBytesPerMillisecond()); |
4567 heap_state.final_incremental_mark_compact_speed_in_bytes_per_ms = | 4589 heap_state.final_incremental_mark_compact_speed_in_bytes_per_ms = |
4568 static_cast<size_t>( | 4590 static_cast<size_t>( |
4569 tracer()->FinalIncrementalMarkCompactSpeedInBytesPerMillisecond()); | 4591 tracer()->FinalIncrementalMarkCompactSpeedInBytesPerMillisecond()); |
4570 heap_state.scavenge_speed_in_bytes_per_ms = | 4592 heap_state.scavenge_speed_in_bytes_per_ms = |
4571 static_cast<size_t>(tracer()->ScavengeSpeedInBytesPerMillisecond()); | 4593 static_cast<size_t>(tracer()->ScavengeSpeedInBytesPerMillisecond()); |
4572 heap_state.used_new_space_size = new_space_.Size(); | 4594 heap_state.used_new_space_size = new_space_.Size(); |
4573 heap_state.new_space_capacity = new_space_.Capacity(); | 4595 heap_state.new_space_capacity = new_space_.Capacity(); |
4574 heap_state.new_space_allocation_throughput_in_bytes_per_ms = | 4596 heap_state.new_space_allocation_throughput_in_bytes_per_ms = |
4575 tracer()->NewSpaceAllocationThroughputInBytesPerMillisecond(); | 4597 tracer()->NewSpaceAllocationThroughputInBytesPerMillisecond(); |
4576 heap_state.current_allocation_throughput_in_bytes_per_ms = | 4598 heap_state.has_low_allocation_rate = HasLowAllocationRate(); |
4577 tracer()->CurrentAllocationThroughputInBytesPerMillisecond(); | |
4578 intptr_t limit = old_generation_allocation_limit_; | 4599 intptr_t limit = old_generation_allocation_limit_; |
4579 if (HasLowAllocationRate( | 4600 if (heap_state.has_low_allocation_rate) { |
4580 heap_state.current_allocation_throughput_in_bytes_per_ms)) { | |
4581 limit = idle_old_generation_allocation_limit_; | 4601 limit = idle_old_generation_allocation_limit_; |
4582 } | 4602 } |
4583 heap_state.can_start_incremental_marking = | 4603 heap_state.can_start_incremental_marking = |
4584 incremental_marking()->CanBeActivated() && | 4604 incremental_marking()->CanBeActivated() && |
4585 HeapIsFullEnoughToStartIncrementalMarking(limit) && | 4605 HeapIsFullEnoughToStartIncrementalMarking(limit) && |
4586 !mark_compact_collector()->sweeping_in_progress(); | 4606 !mark_compact_collector()->sweeping_in_progress(); |
4587 return heap_state; | 4607 return heap_state; |
4588 } | 4608 } |
4589 | 4609 |
4590 | 4610 |
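For context on how has_low_allocation_rate is consumed just above: when the rate is low, the idle old-generation limit replaces the regular one before the HeapIsFullEnoughToStartIncrementalMarking check. Assuming the idle limit is the smaller of the two, the check passes earlier, so idle time can be used to start marking sooner. A sketch with hypothetical limit values; FullEnough is a simplified stand-in for the real predicate:

  #include <cstdint>
  #include <cstdio>

  // Hypothetical stand-in for HeapIsFullEnoughToStartIncrementalMarking.
  static bool FullEnough(intptr_t promoted_size, intptr_t limit) {
    return promoted_size > limit;
  }

  int main() {
    const intptr_t kNormalLimit = 64 * 1024 * 1024;  // illustrative values,
    const intptr_t kIdleLimit = 32 * 1024 * 1024;    // not real V8 defaults
    intptr_t promoted_size = 40 * 1024 * 1024;
    bool has_low_allocation_rate = true;  // as HasLowAllocationRate() reports
    intptr_t limit = has_low_allocation_rate ? kIdleLimit : kNormalLimit;
    // 40 MB exceeds the idle limit but not the normal one, so an
    // almost-idle mutator starts incremental marking sooner (prints 1).
    std::printf("%d\n", FullEnough(promoted_size, limit));
    return 0;
  }
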
(...skipping 1962 matching lines...)
6553 *object_type = "CODE_TYPE"; \ | 6573 *object_type = "CODE_TYPE"; \ |
6554 *object_sub_type = "CODE_AGE/" #name; \ | 6574 *object_sub_type = "CODE_AGE/" #name; \ |
6555 return true; | 6575 return true; |
6556 CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME) | 6576 CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME) |
6557 #undef COMPARE_AND_RETURN_NAME | 6577 #undef COMPARE_AND_RETURN_NAME |
6558 } | 6578 } |
6559 return false; | 6579 return false; |
6560 } | 6580 } |
6561 } // namespace internal | 6581 } // namespace internal |
6562 } // namespace v8 | 6582 } // namespace v8 |