Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/ast/scopeinfo.h" | 9 #include "src/ast/scopeinfo.h" |
| 10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
| (...skipping 50 matching lines...) | |
| 61 : AllocationObserver(step_size), heap_(heap) {} | 61 : AllocationObserver(step_size), heap_(heap) {} |
| 62 | 62 |
| 63 void Step(int bytes_allocated, Address, size_t) override { | 63 void Step(int bytes_allocated, Address, size_t) override { |
| 64 heap_.ScheduleIdleScavengeIfNeeded(bytes_allocated); | 64 heap_.ScheduleIdleScavengeIfNeeded(bytes_allocated); |
| 65 } | 65 } |
| 66 | 66 |
| 67 private: | 67 private: |
| 68 Heap& heap_; | 68 Heap& heap_; |
| 69 }; | 69 }; |
| 70 | 70 |
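
For context, the IdleScavengeObserver above plugs into V8's AllocationObserver hook: the observed space invokes Step() roughly once per step_size allocated bytes, and the observer forwards that signal to Heap::ScheduleIdleScavengeIfNeeded (shown further down in this diff). A minimal self-contained sketch of the same pattern, using hypothetical toy classes rather than the real V8 types:

```cpp
#include <cstddef>
#include <cstdio>

// Toy stand-ins for V8's Heap / AllocationObserver, for illustration only.
class ToyHeap {
 public:
  void ScheduleIdleScavengeIfNeeded(int bytes_allocated) {
    std::printf("consider scheduling an idle scavenge (%d bytes since last step)\n",
                bytes_allocated);
  }
};

class ToyAllocationObserver {
 public:
  explicit ToyAllocationObserver(size_t step_size) : step_size_(step_size) {}
  virtual ~ToyAllocationObserver() = default;

  // Called by the allocating space; triggers Step() about once per step_size bytes.
  void AllocationEvent(size_t bytes) {
    accumulated_ += bytes;
    if (accumulated_ >= step_size_) {
      Step(static_cast<int>(accumulated_));
      accumulated_ = 0;
    }
  }

 protected:
  virtual void Step(int bytes_allocated) = 0;

 private:
  size_t step_size_;
  size_t accumulated_ = 0;
};

class ToyIdleScavengeObserver final : public ToyAllocationObserver {
 public:
  ToyIdleScavengeObserver(ToyHeap& heap, size_t step_size)
      : ToyAllocationObserver(step_size), heap_(heap) {}

 private:
  void Step(int bytes_allocated) override {
    heap_.ScheduleIdleScavengeIfNeeded(bytes_allocated);
  }
  ToyHeap& heap_;
};

int main() {
  ToyHeap heap;
  ToyIdleScavengeObserver observer(heap, /*step_size=*/1024);
  for (int i = 0; i < 5; ++i) observer.AllocationEvent(512);  // Step() fires twice.
  return 0;
}
```
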
| 71 | |
| 72 Heap::Heap() | 71 Heap::Heap() |
| 73 : amount_of_external_allocated_memory_(0), | 72 : amount_of_external_allocated_memory_(0), |
| 74 amount_of_external_allocated_memory_at_last_global_gc_(0), | 73 amount_of_external_allocated_memory_at_last_global_gc_(0), |
| 75 isolate_(NULL), | 74 isolate_(NULL), |
| 76 code_range_size_(0), | 75 code_range_size_(0), |
| 77 // semispace_size_ should be a power of 2 and old_generation_size_ should | 76 // semispace_size_ should be a power of 2 and old_generation_size_ should |
| 78 // be a multiple of Page::kPageSize. | 77 // be a multiple of Page::kPageSize. |
| 79 reserved_semispace_size_(8 * (kPointerSize / 4) * MB), | 78 reserved_semispace_size_(8 * (kPointerSize / 4) * MB), |
| 80 max_semi_space_size_(8 * (kPointerSize / 4) * MB), | 79 max_semi_space_size_(8 * (kPointerSize / 4) * MB), |
| 81 initial_semispace_size_(Page::kPageSize), | 80 initial_semispace_size_(Page::kPageSize), |
| 82 max_old_generation_size_(700ul * (kPointerSize / 4) * MB), | 81 max_old_generation_size_(700ul * (kPointerSize / 4) * MB), |
| 83 initial_old_generation_size_(max_old_generation_size_ / | 82 initial_old_generation_size_(max_old_generation_size_ / |
| 84 kInitalOldGenerationLimitFactor), | 83 kInitalOldGenerationLimitFactor), |
| 85 old_generation_size_configured_(false), | 84 old_generation_size_configured_(false), |
| 86 max_executable_size_(256ul * (kPointerSize / 4) * MB), | 85 max_executable_size_(256ul * (kPointerSize / 4) * MB), |
| 87 // Variables set based on semispace_size_ and old_generation_size_ in | 86 // Variables set based on semispace_size_ and old_generation_size_ in |
| 88 // ConfigureHeap. | 87 // ConfigureHeap. |
| 89 // Will be 4 * reserved_semispace_size_ to ensure that young | 88 // Will be 4 * reserved_semispace_size_ to ensure that young |
| 90 // generation can be aligned to its size. | 89 // generation can be aligned to its size. |
| 91 maximum_committed_(0), | 90 maximum_committed_(0), |
| 92 survived_since_last_expansion_(0), | 91 survived_since_last_expansion_(0), |
| 93 survived_last_scavenge_(0), | 92 survived_last_scavenge_(0), |
| 94 always_allocate_scope_count_(0), | 93 always_allocate_scope_count_(0), |
| | 94 memory_pressure_level_(MemoryPressureLevel::kNone), |
| 95 contexts_disposed_(0), | 95 contexts_disposed_(0), |
| 96 number_of_disposed_maps_(0), | 96 number_of_disposed_maps_(0), |
| 97 global_ic_age_(0), | 97 global_ic_age_(0), |
| 98 new_space_(this), | 98 new_space_(this), |
| 99 old_space_(NULL), | 99 old_space_(NULL), |
| 100 code_space_(NULL), | 100 code_space_(NULL), |
| 101 map_space_(NULL), | 101 map_space_(NULL), |
| 102 lo_space_(NULL), | 102 lo_space_(NULL), |
| 103 gc_state_(NOT_IN_GC), | 103 gc_state_(NOT_IN_GC), |
| 104 gc_post_processing_depth_(0), | 104 gc_post_processing_depth_(0), |
| (...skipping 678 matching lines...) | |
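
A quick sanity check on the size constants in the Heap constructor above (a sketch, assuming the usual V8 definitions: MB = 1024 * 1024 and kPointerSize = 8 on a 64-bit build, 4 on a 32-bit build, so the 32-bit figures are half of those printed here):

```cpp
#include <cstddef>
#include <cstdio>

int main() {
  // Assumed constants for a 64-bit build.
  const size_t kPointerSize = 8;
  const size_t MB = 1024 * 1024;

  // Mirrors the initializer list: 8 * (kPointerSize / 4) * MB, etc.
  const size_t reserved_semispace = 8 * (kPointerSize / 4) * MB;      // 16 MB
  const size_t max_old_generation = 700ul * (kPointerSize / 4) * MB;  // 1400 MB
  const size_t max_executable = 256ul * (kPointerSize / 4) * MB;      // 512 MB

  std::printf("semispace %zu MB, old generation %zu MB, executable %zu MB\n",
              reserved_semispace / MB, max_old_generation / MB,
              max_executable / MB);
  return 0;
}
```
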
| 783 ~GCCallbacksScope() { heap_->gc_callbacks_depth_--; } | 783 ~GCCallbacksScope() { heap_->gc_callbacks_depth_--; } |
| 784 | 784 |
| 785 bool CheckReenter() { return heap_->gc_callbacks_depth_ == 1; } | 785 bool CheckReenter() { return heap_->gc_callbacks_depth_ == 1; } |
| 786 | 786 |
| 787 private: | 787 private: |
| 788 Heap* heap_; | 788 Heap* heap_; |
| 789 }; | 789 }; |
| 790 | 790 |
| 791 | 791 |
| 792 void Heap::HandleGCRequest() { | 792 void Heap::HandleGCRequest() { |
| 793 if (incremental_marking()->request_type() == | 793 if (memory_pressure_level_.Value() == MemoryPressureLevel::kCritical) { |
| 794 IncrementalMarking::COMPLETE_MARKING) { | 794 CollectAllGarbage(kReduceMemoryFootprintMask \| kAbortIncrementalMarkingMask, |
| | 795 "critical memory pressure notification (interrupt)"); |
| | 796 } else if (incremental_marking()->request_type() == |
| | 797 IncrementalMarking::COMPLETE_MARKING) { |
| | 798 incremental_marking()->reset_request_type(); |
| 795 CollectAllGarbage(current_gc_flags_, "GC interrupt", | 799 CollectAllGarbage(current_gc_flags_, "GC interrupt", |
| 796 current_gc_callback_flags_); | 800 current_gc_callback_flags_); |
| 797 } else if (incremental_marking()->IsMarking() && | 801 } else if (incremental_marking()->request_type() == |
| | 802 IncrementalMarking::FINALIZATION && |
| | 803 incremental_marking()->IsMarking() && |
| 798 !incremental_marking()->finalize_marking_completed()) { | 804 !incremental_marking()->finalize_marking_completed()) { |
| | 805 incremental_marking()->reset_request_type(); |
| 799 FinalizeIncrementalMarking("GC interrupt: finalize incremental marking"); | 806 FinalizeIncrementalMarking("GC interrupt: finalize incremental marking"); |
| 800 } | 807 } |
| 801 } | 808 } |
| 802 | 809 |
| 803 | 810 |
| 804 void Heap::ScheduleIdleScavengeIfNeeded(int bytes_allocated) { | 811 void Heap::ScheduleIdleScavengeIfNeeded(int bytes_allocated) { |
| 805 scavenge_job_->ScheduleIdleTaskIfNeeded(this, bytes_allocated); | 812 scavenge_job_->ScheduleIdleTaskIfNeeded(this, bytes_allocated); |
| 806 } | 813 } |
| 807 | 814 |
| 808 | 815 |
| (...skipping 640 matching lines...) | |
| 1449 | 1456 |
| 1450 | 1457 |
| 1451 void Heap::MarkCompactEpilogue() { | 1458 void Heap::MarkCompactEpilogue() { |
| 1452 gc_state_ = NOT_IN_GC; | 1459 gc_state_ = NOT_IN_GC; |
| 1453 | 1460 |
| 1454 isolate_->counters()->objs_since_last_full()->Set(0); | 1461 isolate_->counters()->objs_since_last_full()->Set(0); |
| 1455 | 1462 |
| 1456 incremental_marking()->Epilogue(); | 1463 incremental_marking()->Epilogue(); |
| 1457 | 1464 |
| 1458 PreprocessStackTraces(); | 1465 PreprocessStackTraces(); |
| | 1466 |
| | 1467 memory_pressure_level_.SetValue(MemoryPressureLevel::kNone); |
| 1459 } | 1468 } |
| 1460 | 1469 |
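
The reset above closes the loop on memory_pressure_level_: the embedder-facing notification (further down in this file) stores a level, possibly from a thread that does not hold the isolate, Heap::HandleGCRequest reads it on the main thread, and a finished mark-compact drops it back to kNone. The field is therefore presumably an atomic cell (e.g. V8's AtomicValue helper, not shown in this file); a rough stand-alone stand-in using std::atomic could look like this:

```cpp
#include <atomic>

// Hypothetical stand-in for an AtomicValue<MemoryPressureLevel>; illustrative only.
enum class MemoryPressureLevel { kNone, kModerate, kCritical };

class PressureCell {
 public:
  // Written by whichever thread receives the embedder notification.
  void SetValue(MemoryPressureLevel level) {
    level_.store(level, std::memory_order_relaxed);
  }

  // Read on the main thread when handling the GC interrupt, and reset to
  // kNone once a full mark-compact has run.
  MemoryPressureLevel Value() const {
    return level_.load(std::memory_order_relaxed);
  }

 private:
  std::atomic<MemoryPressureLevel> level_{MemoryPressureLevel::kNone};
};
```
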
| 1461 | 1470 |
| 1462 void Heap::MarkCompactPrologue() { | 1471 void Heap::MarkCompactPrologue() { |
| 1463 // At any old GC clear the keyed lookup cache to enable collection of unused | 1472 // At any old GC clear the keyed lookup cache to enable collection of unused |
| 1464 // maps. | 1473 // maps. |
| 1465 isolate_->keyed_lookup_cache()->Clear(); | 1474 isolate_->keyed_lookup_cache()->Clear(); |
| 1466 isolate_->context_slot_cache()->Clear(); | 1475 isolate_->context_slot_cache()->Clear(); |
| 1467 isolate_->descriptor_lookup_cache()->Clear(); | 1476 isolate_->descriptor_lookup_cache()->Clear(); |
| 1468 RegExpResultsCache::Clear(string_split_cache()); | 1477 RegExpResultsCache::Clear(string_split_cache()); |
| (...skipping 2940 matching lines...) | |
| 4409 return result; | 4418 return result; |
| 4410 } | 4419 } |
| 4411 | 4420 |
| 4412 | 4421 |
| 4413 bool Heap::RecentIdleNotificationHappened() { | 4422 bool Heap::RecentIdleNotificationHappened() { |
| 4414 return (last_idle_notification_time_ + | 4423 return (last_idle_notification_time_ + |
| 4415 GCIdleTimeHandler::kMaxScheduledIdleTime) > | 4424 GCIdleTimeHandler::kMaxScheduledIdleTime) > |
| 4416 MonotonicallyIncreasingTimeInMs(); | 4425 MonotonicallyIncreasingTimeInMs(); |
| 4417 } | 4426 } |
| 4418 | 4427 |
| | 4428 void Heap::MemoryPressureNotification(MemoryPressureLevel level, |
| Inline review comments: | |
| hong.zheng (2016/03/18 02:27:16): On Android, LMK(low memory killer) and memory pres… | |
| ulan (2016/03/18 12:49:40): I didn't get it. Will blink send only kModerate me… | |
| Hannes Payer (out of office) (2016/03/18 13:27:26): I am with Ulan. kModerate should still take latenc… | |
| | 4429 bool is_isolate_locked) { |
| | 4430 MemoryPressureLevel previous = memory_pressure_level_.Value(); |
| | 4431 memory_pressure_level_.SetValue(level); |
| | 4432 if (previous != MemoryPressureLevel::kCritical && |
| | 4433 level == MemoryPressureLevel::kCritical) { |
| | 4434 if (is_isolate_locked) { |
| | 4435 CollectAllGarbage( |
| | 4436 kReduceMemoryFootprintMask \| kAbortIncrementalMarkingMask, |
| | 4437 "critical memory pressure notification"); |
| | 4438 } else { |
| | 4439 ExecutionAccess access(isolate()); |
| | 4440 isolate()->stack_guard()->RequestGC(); |
| | 4441 } |
| | 4442 } |
| | 4443 if (previous == MemoryPressureLevel::kNone && |
| | 4444 level != MemoryPressureLevel::kNone) { |
| | 4445 MemoryReducer::Event event; |
| | 4446 event.type = MemoryReducer::kPossibleGarbage; |
| | 4447 event.time_ms = MonotonicallyIncreasingTimeInMs(); |
| | 4448 memory_reducer_->NotifyPossibleGarbage(event); |
| | 4449 } |
| | 4450 } |
| 4419 | 4451 |
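
For reference, a hedged sketch of how an embedder would reach the new Heap::MemoryPressureNotification. The public entry point is assumed here to be v8::Isolate::MemoryPressureNotification taking a v8::MemoryPressureLevel; that plumbing is not part of this hunk:

```cpp
// Hypothetical embedder-side usage; the public API names are assumptions,
// only Heap::MemoryPressureNotification itself appears in this hunk.
#include "include/v8.h"

void OnModeratePressure(v8::Isolate* isolate) {
  // Records the level without forcing a GC. If the previous level was kNone,
  // the MemoryReducer receives a kPossibleGarbage event.
  isolate->MemoryPressureNotification(v8::MemoryPressureLevel::kModerate);
}

void OnCriticalPressure(v8::Isolate* isolate) {
  // If the calling thread holds the isolate, Heap::MemoryPressureNotification
  // runs CollectAllGarbage(kReduceMemoryFootprintMask |
  // kAbortIncrementalMarkingMask) immediately; otherwise it requests a
  // stack-guard GC interrupt that Heap::HandleGCRequest serves on the
  // main thread.
  isolate->MemoryPressureNotification(v8::MemoryPressureLevel::kCritical);
}
```
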
| 4420 #ifdef DEBUG | 4452 #ifdef DEBUG |
| 4421 | 4453 |
| 4422 void Heap::Print() { | 4454 void Heap::Print() { |
| 4423 if (!HasBeenSetUp()) return; | 4455 if (!HasBeenSetUp()) return; |
| 4424 isolate()->PrintStack(stdout); | 4456 isolate()->PrintStack(stdout); |
| 4425 AllSpaces spaces(this); | 4457 AllSpaces spaces(this); |
| 4426 for (Space* space = spaces.next(); space != NULL; space = spaces.next()) { | 4458 for (Space* space = spaces.next(); space != NULL; space = spaces.next()) { |
| 4427 space->Print(); | 4459 space->Print(); |
| 4428 } | 4460 } |
| (...skipping 1974 matching lines...) | |
| 6403 } | 6435 } |
| 6404 | 6436 |
| 6405 | 6437 |
| 6406 // static | 6438 // static |
| 6407 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6439 int Heap::GetStaticVisitorIdForMap(Map* map) { |
| 6408 return StaticVisitorBase::GetVisitorId(map); | 6440 return StaticVisitorBase::GetVisitorId(map); |
| 6409 } | 6441 } |
| 6410 | 6442 |
| 6411 } // namespace internal | 6443 } // namespace internal |
| 6412 } // namespace v8 | 6444 } // namespace v8 |