Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 87 matching lines...) | |
| 98 old_pointer_space_(NULL), | 98 old_pointer_space_(NULL), |
| 99 old_data_space_(NULL), | 99 old_data_space_(NULL), |
| 100 code_space_(NULL), | 100 code_space_(NULL), |
| 101 map_space_(NULL), | 101 map_space_(NULL), |
| 102 cell_space_(NULL), | 102 cell_space_(NULL), |
| 103 lo_space_(NULL), | 103 lo_space_(NULL), |
| 104 gc_state_(NOT_IN_GC), | 104 gc_state_(NOT_IN_GC), |
| 105 gc_post_processing_depth_(0), | 105 gc_post_processing_depth_(0), |
| 106 ms_count_(0), | 106 ms_count_(0), |
| 107 gc_count_(0), | 107 gc_count_(0), |
| 108 scavenges_since_last_full_gc_(0), | |
| 108 unflattened_strings_length_(0), | 109 unflattened_strings_length_(0), |
| 109 #ifdef DEBUG | 110 #ifdef DEBUG |
| 110 allocation_allowed_(true), | 111 allocation_allowed_(true), |
| 111 allocation_timeout_(0), | 112 allocation_timeout_(0), |
| 112 disallow_allocation_failure_(false), | 113 disallow_allocation_failure_(false), |
| 113 debug_utils_(NULL), | 114 debug_utils_(NULL), |
| 114 #endif // DEBUG | 115 #endif // DEBUG |
| 115 old_gen_promotion_limit_(kMinimumPromotionLimit), | 116 old_gen_promotion_limit_(kMinimumPromotionLimit), |
| 116 old_gen_allocation_limit_(kMinimumAllocationLimit), | 117 old_gen_allocation_limit_(kMinimumAllocationLimit), |
| 117 old_gen_limit_factor_(1), | 118 old_gen_limit_factor_(1), |
| (...skipping 18 matching lines...) | |
| 136 max_alive_after_gc_(0), | 137 max_alive_after_gc_(0), |
| 137 min_in_mutator_(kMaxInt), | 138 min_in_mutator_(kMaxInt), |
| 138 alive_after_last_gc_(0), | 139 alive_after_last_gc_(0), |
| 139 last_gc_end_timestamp_(0.0), | 140 last_gc_end_timestamp_(0.0), |
| 140 store_buffer_(this), | 141 store_buffer_(this), |
| 141 marking_(this), | 142 marking_(this), |
| 142 incremental_marking_(this), | 143 incremental_marking_(this), |
| 143 number_idle_notifications_(0), | 144 number_idle_notifications_(0), |
| 144 last_idle_notification_gc_count_(0), | 145 last_idle_notification_gc_count_(0), |
| 145 last_idle_notification_gc_count_init_(false), | 146 last_idle_notification_gc_count_init_(false), |
| 147 idle_notification_will_schedule_next_gc_(false), | |
| 148 mark_sweeps_since_idle_round_started_(0), | |
| 149 ms_count_at_last_idle_notification_(0), | |
| 150 gc_count_at_last_idle_gc_(0), | |
| 146 configured_(false), | 151 configured_(false), |
| 147 chunks_queued_for_free_(NULL) { | 152 chunks_queued_for_free_(NULL) { |
| 148 // Allow build-time customization of the max semispace size. Building | 153 // Allow build-time customization of the max semispace size. Building |
| 149 // V8 with snapshots and a non-default max semispace size is much | 154 // V8 with snapshots and a non-default max semispace size is much |
| 150 // easier if you can define it as part of the build environment. | 155 // easier if you can define it as part of the build environment. |
| 151 #if defined(V8_MAX_SEMISPACE_SIZE) | 156 #if defined(V8_MAX_SEMISPACE_SIZE) |
| 152 max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; | 157 max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; |
| 153 #endif | 158 #endif |
| 154 | 159 |
| 155 intptr_t max_virtual = OS::MaxVirtualMemory(); | 160 intptr_t max_virtual = OS::MaxVirtualMemory(); |
| (...skipping 659 matching lines...) | |
| 815 | 820 |
| 816 mark_compact_collector_.CollectGarbage(); | 821 mark_compact_collector_.CollectGarbage(); |
| 817 | 822 |
| 818 LOG(isolate_, ResourceEvent("markcompact", "end")); | 823 LOG(isolate_, ResourceEvent("markcompact", "end")); |
| 819 | 824 |
| 820 gc_state_ = NOT_IN_GC; | 825 gc_state_ = NOT_IN_GC; |
| 821 | 826 |
| 822 isolate_->counters()->objs_since_last_full()->Set(0); | 827 isolate_->counters()->objs_since_last_full()->Set(0); |
| 823 | 828 |
| 824 contexts_disposed_ = 0; | 829 contexts_disposed_ = 0; |
| 830 | |
| 831 scavenges_since_last_full_gc_ = 0; | |
| 825 } | 832 } |
| 826 | 833 |
| 827 | 834 |
| 828 void Heap::MarkCompactPrologue() { | 835 void Heap::MarkCompactPrologue() { |
| 829 // At any old GC clear the keyed lookup cache to enable collection of unused | 836 // At any old GC clear the keyed lookup cache to enable collection of unused |
| 830 // maps. | 837 // maps. |
| 831 isolate_->keyed_lookup_cache()->Clear(); | 838 isolate_->keyed_lookup_cache()->Clear(); |
| 832 isolate_->context_slot_cache()->Clear(); | 839 isolate_->context_slot_cache()->Clear(); |
| 833 isolate_->descriptor_lookup_cache()->Clear(); | 840 isolate_->descriptor_lookup_cache()->Clear(); |
| 834 StringSplitCache::Clear(string_split_cache()); | 841 StringSplitCache::Clear(string_split_cache()); |
| (...skipping 168 matching lines...) | |
| 1003 | 1010 |
| 1004 // Used for updating survived_since_last_expansion_ at function end. | 1011 // Used for updating survived_since_last_expansion_ at function end. |
| 1005 intptr_t survived_watermark = PromotedSpaceSize(); | 1012 intptr_t survived_watermark = PromotedSpaceSize(); |
| 1006 | 1013 |
| 1007 CheckNewSpaceExpansionCriteria(); | 1014 CheckNewSpaceExpansionCriteria(); |
| 1008 | 1015 |
| 1009 SelectScavengingVisitorsTable(); | 1016 SelectScavengingVisitorsTable(); |
| 1010 | 1017 |
| 1011 incremental_marking()->PrepareForScavenge(); | 1018 incremental_marking()->PrepareForScavenge(); |
| 1012 | 1019 |
| 1013 old_pointer_space()->AdvanceSweeper(new_space_.Size()); | 1020 AdvanceSweepers(new_space_.Size()); |
| 1014 old_data_space()->AdvanceSweeper(new_space_.Size()); | |
| 1015 | 1021 |
| 1016 // Flip the semispaces. After flipping, to space is empty, from space has | 1022 // Flip the semispaces. After flipping, to space is empty, from space has |
| 1017 // live objects. | 1023 // live objects. |
| 1018 new_space_.Flip(); | 1024 new_space_.Flip(); |
| 1019 new_space_.ResetAllocationInfo(); | 1025 new_space_.ResetAllocationInfo(); |
| 1020 | 1026 |
| 1021 // We need to sweep newly copied objects which can be either in the | 1027 // We need to sweep newly copied objects which can be either in the |
| 1022 // to space or promoted to the old generation. For to-space | 1028 // to space or promoted to the old generation. For to-space |
| 1023 // objects, we treat the bottom of the to space as a queue. Newly | 1029 // objects, we treat the bottom of the to space as a queue. Newly |
| 1024 // copied and unswept objects lie between a 'front' mark and the | 1030 // copied and unswept objects lie between a 'front' mark and the |
| (...skipping 67 matching lines...) | |
| 1092 new_space_.LowerInlineAllocationLimit( | 1098 new_space_.LowerInlineAllocationLimit( |
| 1093 new_space_.inline_allocation_limit_step()); | 1099 new_space_.inline_allocation_limit_step()); |
| 1094 | 1100 |
| 1095 // Update how much has survived scavenge. | 1101 // Update how much has survived scavenge. |
| 1096 IncrementYoungSurvivorsCounter(static_cast<int>( | 1102 IncrementYoungSurvivorsCounter(static_cast<int>( |
| 1097 (PromotedSpaceSize() - survived_watermark) + new_space_.Size())); | 1103 (PromotedSpaceSize() - survived_watermark) + new_space_.Size())); |
| 1098 | 1104 |
| 1099 LOG(isolate_, ResourceEvent("scavenge", "end")); | 1105 LOG(isolate_, ResourceEvent("scavenge", "end")); |
| 1100 | 1106 |
| 1101 gc_state_ = NOT_IN_GC; | 1107 gc_state_ = NOT_IN_GC; |
| 1108 | |
| 1109 scavenges_since_last_full_gc_++; | |
| 1102 } | 1110 } |
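
The scavenges_since_last_full_gc_ counter introduced by this change is incremented here at the end of every scavenge and reset to zero after a full mark-sweep (see the markcompact hunk earlier in this diff). The hunks shown do not include where the counter is read; a plausible sketch, assuming it backs the EnoughGarbageSinceLastIdleRound() predicate used by IdleNotification further down, with a hypothetical threshold constant:

```cpp
// Sketch only: kIdleScavengeThreshold and this body are assumptions about
// how the counter could gate a new idle round; they are not part of this CL.
bool Heap::EnoughGarbageSinceLastIdleRound() {
  // Start another round of idle-time GCs only once the mutator has produced
  // enough young-generation garbage to make the work worthwhile.
  static const int kIdleScavengeThreshold = 5;
  return scavenges_since_last_full_gc_ >= kIdleScavengeThreshold;
}
```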
| 1103 | 1111 |
| 1104 | 1112 |
| 1105 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap, | 1113 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap, |
| 1106 Object** p) { | 1114 Object** p) { |
| 1107 MapWord first_word = HeapObject::cast(*p)->map_word(); | 1115 MapWord first_word = HeapObject::cast(*p)->map_word(); |
| 1108 | 1116 |
| 1109 if (!first_word.IsForwardingAddress()) { | 1117 if (!first_word.IsForwardingAddress()) { |
| 1110 // Unreachable external string can be finalized. | 1118 // Unreachable external string can be finalized. |
| 1111 heap->FinalizeExternalString(String::cast(*p)); | 1119 heap->FinalizeExternalString(String::cast(*p)); |
| (...skipping 3341 matching lines...) | |
| 4453 | 4461 |
| 4454 void Heap::EnsureHeapIsIterable() { | 4462 void Heap::EnsureHeapIsIterable() { |
| 4455 ASSERT(IsAllocationAllowed()); | 4463 ASSERT(IsAllocationAllowed()); |
| 4456 if (!IsHeapIterable()) { | 4464 if (!IsHeapIterable()) { |
| 4457 CollectAllGarbage(kMakeHeapIterableMask); | 4465 CollectAllGarbage(kMakeHeapIterableMask); |
| 4458 } | 4466 } |
| 4459 ASSERT(IsHeapIterable()); | 4467 ASSERT(IsHeapIterable()); |
| 4460 } | 4468 } |
| 4461 | 4469 |
| 4462 | 4470 |
| 4463 bool Heap::IdleNotification() { | 4471 bool Heap::IdleNotification(int hint) { |
| 4472 if (!FLAG_incremental_marking) { | |
| 4473 return hint < 1000 ? true : IdleGlobalGC(); | |
| 4474 } | |
| 4475 | |
| 4476 // By doing small chunks of GC work in each IdleNotification, | |
| 4477 // perform a round of incremental GCs and after that wait until | |
| 4478 // the mutator creates enough garbage to justify a new round. | |
| 4479 // An incremental GC progresses as follows: | |
| 4480 // 1. many incremental marking steps, | |
| 4481 // 2. one old space mark-sweep-compact, | |
| 4482 // 3. many lazy sweep steps. | |
| 4483 // Use mark-sweep-compact events to count incremental GCs in a round. | |
| 4484 | |
| 4485 if (mark_sweeps_since_idle_round_started_ >= kMaxMarkSweepsInIdleRound) { | |
| 4486 if (EnoughGarbageSinceLastIdleRound()) { | |
| 4487 StartIdleRound(); | |
| 4488 } else { | |
| 4489 return true; | |
| 4490 } | |
| 4491 } | |
| 4492 | |
| 4493 int new_mark_sweeps = ms_count_ - ms_count_at_last_idle_notification_; | |
| 4494 mark_sweeps_since_idle_round_started_ += new_mark_sweeps; | |
| 4495 ms_count_at_last_idle_notification_ = ms_count_; | |
| 4496 | |
| 4497 if (mark_sweeps_since_idle_round_started_ >= kMaxMarkSweepsInIdleRound) { | |

Erik Corry (2011/11/22 12:29:26): This test is also included in WorthStartingGCWhenIdle.
ulan (2011/11/23 16:18:06): Removed the test from WorthStartingGCWhenIdle.

| 4498 FinishIdleRound(); | |
| 4499 return true; | |

Erik Corry (2011/11/22 12:29:26): If sweeping is not complete we should not return here.
ulan (2011/11/23 16:18:06): Moved the sweeping steps up.

| 4500 } | |
| 4501 | |
| 4502 intptr_t size_factor = Min(Max(hint, 30), 1000) / 10; | |
| 4503 // The size factor is in range [3..100]. | |
| 4504 intptr_t step_size = size_factor * IncrementalMarking::kAllocatedThreshold; | |
| 4505 | |
| 4506 if (incremental_marking()->IsStopped()) { | |
| 4507 if (!IsSweepingComplete() && !AdvanceSweepers(step_size)) { | |
| 4508 return false; | |
| 4509 } | |
| 4510 if (!WorthStartingGCWhenIdle()) { | |
| 4511 FinishIdleRound(); | |
| 4512 return true; | |
| 4513 } | |
| 4514 incremental_marking()->Start(); | |
| 4515 } | |
| 4516 | |
| 4517 // This flag prevents incremental marking from requesting GC via stack guard | |
| 4518 idle_notification_will_schedule_next_gc_ = true; | |
| 4519 incremental_marking()->Step(step_size); | |
| 4520 idle_notification_will_schedule_next_gc_ = false; | |
| 4521 | |
| 4522 if (incremental_marking()->IsComplete()) { | |
| 4523 bool uncommit = false; | |
| 4524 if (gc_count_at_last_idle_gc_ == gc_count_) { | |

Erik Corry (2011/11/22 12:29:26): It would be nice with a comment that explained the […]
ulan (2011/11/23 16:18:06): Done.

| 4525 isolate_->compilation_cache()->Clear(); | |
| 4526 uncommit = true; | |
| 4527 } | |
| 4528 CollectAllGarbage(kNoGCFlags); | |
| 4529 gc_count_at_last_idle_gc_ = gc_count_; | |
| 4530 if (uncommit) { | |
| 4531 new_space_.Shrink(); | |
| 4532 UncommitFromSpace(); | |
| 4533 } | |
| 4534 } | |
| 4535 return false; | |
| 4536 } | |
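
IdleNotification(hint) returns true when the heap has no more useful idle work for now (the current round is finished or not worth starting) and false when the embedder should keep sending idle notifications. The round bookkeeping depends on small helpers (StartIdleRound, FinishIdleRound, WorthStartingGCWhenIdle) whose bodies fall outside the hunks shown; a minimal sketch of how they might maintain the new fields, with every body assumed rather than taken from this CL:

```cpp
// Sketch only: these bodies are guesses that are merely consistent with how
// IdleNotification uses the new fields; the CL's actual helpers may differ.
void Heap::StartIdleRound() {
  // A fresh round begins with no mark-sweeps counted against it.
  mark_sweeps_since_idle_round_started_ = 0;
}

void Heap::FinishIdleRound() {
  // Saturate the counter so the check at the top of IdleNotification keeps
  // returning early until EnoughGarbageSinceLastIdleRound() starts a new round.
  mark_sweeps_since_idle_round_started_ = kMaxMarkSweepsInIdleRound;
}

bool Heap::WorthStartingGCWhenIdle() {
  // Hypothetical criterion: only start incremental marking from idle time if
  // the old generation has grown appreciably since the last full collection.
  return PromotedSpaceSize() > old_gen_allocation_limit_ / 2;
}
```

Under this reading, an embedder keeps calling IdleNotification with a time hint during idle periods and stops once it returns true.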
| 4537 | |
| 4538 | |
| 4539 bool Heap::IdleGlobalGC() { | |
| 4464 static const int kIdlesBeforeScavenge = 4; | 4540 static const int kIdlesBeforeScavenge = 4; |
| 4465 static const int kIdlesBeforeMarkSweep = 7; | 4541 static const int kIdlesBeforeMarkSweep = 7; |
| 4466 static const int kIdlesBeforeMarkCompact = 8; | 4542 static const int kIdlesBeforeMarkCompact = 8; |
| 4467 static const int kMaxIdleCount = kIdlesBeforeMarkCompact + 1; | 4543 static const int kMaxIdleCount = kIdlesBeforeMarkCompact + 1; |
| 4468 static const unsigned int kGCsBetweenCleanup = 4; | 4544 static const unsigned int kGCsBetweenCleanup = 4; |
| 4469 | 4545 |
| 4470 if (!last_idle_notification_gc_count_init_) { | 4546 if (!last_idle_notification_gc_count_init_) { |
| 4471 last_idle_notification_gc_count_ = gc_count_; | 4547 last_idle_notification_gc_count_ = gc_count_; |
| 4472 last_idle_notification_gc_count_init_ = true; | 4548 last_idle_notification_gc_count_init_ = true; |
| 4473 } | 4549 } |
| (...skipping 1939 matching lines...) | |
| 6413 isolate_->heap()->store_buffer()->Compact(); | 6489 isolate_->heap()->store_buffer()->Compact(); |
| 6414 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); | 6490 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); |
| 6415 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { | 6491 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { |
| 6416 next = chunk->next_chunk(); | 6492 next = chunk->next_chunk(); |
| 6417 isolate_->memory_allocator()->Free(chunk); | 6493 isolate_->memory_allocator()->Free(chunk); |
| 6418 } | 6494 } |
| 6419 chunks_queued_for_free_ = NULL; | 6495 chunks_queued_for_free_ = NULL; |
| 6420 } | 6496 } |
| 6421 | 6497 |
| 6422 } } // namespace v8::internal | 6498 } } // namespace v8::internal |