OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/ast/scopeinfo.h" | 9 #include "src/ast/scopeinfo.h" |
10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
(...skipping 419 matching lines...)
430 gc_count_++; | 430 gc_count_++; |
431 | 431 |
432 #ifdef VERIFY_HEAP | 432 #ifdef VERIFY_HEAP |
433 if (FLAG_verify_heap) { | 433 if (FLAG_verify_heap) { |
434 Verify(); | 434 Verify(); |
435 } | 435 } |
436 #endif | 436 #endif |
437 } | 437 } |
438 | 438 |
439 // Reset GC statistics. | 439 // Reset GC statistics. |
440 promoted_objects_size_ = 0; | 440 promoted_objects_size_.SetValue(0); |
441 previous_semi_space_copied_object_size_ = semi_space_copied_object_size_; | 441 previous_semi_space_copied_object_size_ = semi_space_copied_object_size(); |
442 semi_space_copied_object_size_ = 0; | 442 semi_space_copied_object_size_.SetValue(0); |
443 nodes_died_in_new_space_ = 0; | 443 nodes_died_in_new_space_ = 0; |
444 nodes_copied_in_new_space_ = 0; | 444 nodes_copied_in_new_space_ = 0; |
445 nodes_promoted_ = 0; | 445 nodes_promoted_ = 0; |
446 | 446 |
447 UpdateMaximumCommitted(); | 447 UpdateMaximumCommitted(); |
448 | 448 |
449 #ifdef DEBUG | 449 #ifdef DEBUG |
450 DCHECK(!AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC); | 450 DCHECK(!AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC); |
451 | 451 |
452 if (FLAG_gc_verbose) Print(); | 452 if (FLAG_gc_verbose) Print(); |
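Note on the hunk above: the raw integer GC counters (promoted_objects_size_, semi_space_copied_object_size_) are now reset through SetValue() and read through accessor methods, which suggests they have been wrapped in a thread-safe counter type so parallel GC workers can update them concurrently. A minimal sketch of such a wrapper, assuming a std::atomic-based design rather than V8's actual atomic-utils implementation; the class name and memory ordering are assumptions for illustration:

#include <atomic>

// Hypothetical stand-in for the counter wrapper implied by the
// SetValue()/accessor calls in the hunk above.
template <typename T>
class CounterValue {
 public:
  explicit CounterValue(T initial = 0) : value_(initial) {}

  // Mirrors the SetValue(0) resets of the GC statistics.
  void SetValue(T new_value) { value_.store(new_value, std::memory_order_relaxed); }

  // Mirrors accessor-style reads such as semi_space_copied_object_size().
  T Value() const { return value_.load(std::memory_order_relaxed); }

  // Lets parallel GC workers bump the counter without a lock.
  void Increment(T delta) { value_.fetch_add(delta, std::memory_order_relaxed); }

 private:
  std::atomic<T> value_;
};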
(...skipping 59 matching lines...)
512 const HashMap& local_pretenuring_feedback) { | 512 const HashMap& local_pretenuring_feedback) { |
513 AllocationSite* site = nullptr; | 513 AllocationSite* site = nullptr; |
514 for (HashMap::Entry* local_entry = local_pretenuring_feedback.Start(); | 514 for (HashMap::Entry* local_entry = local_pretenuring_feedback.Start(); |
515 local_entry != nullptr; | 515 local_entry != nullptr; |
516 local_entry = local_pretenuring_feedback.Next(local_entry)) { | 516 local_entry = local_pretenuring_feedback.Next(local_entry)) { |
517 site = reinterpret_cast<AllocationSite*>(local_entry->key); | 517 site = reinterpret_cast<AllocationSite*>(local_entry->key); |
518 MapWord map_word = site->map_word(); | 518 MapWord map_word = site->map_word(); |
519 if (map_word.IsForwardingAddress()) { | 519 if (map_word.IsForwardingAddress()) { |
520 site = AllocationSite::cast(map_word.ToForwardingAddress()); | 520 site = AllocationSite::cast(map_word.ToForwardingAddress()); |
521 } | 521 } |
522 DCHECK(site->IsAllocationSite()); | 522 |
| 523 // We have not validated the allocation site yet, since we have not |
| 524 // dereferenced the site while collecting the feedback. |
| 525 // This is an inlined check of AllocationMemento::IsValid. |
| 526 if (!site->IsAllocationSite() || site->IsZombie()) continue; |
| 527 |
523 int value = | 528 int value = |
524 static_cast<int>(reinterpret_cast<intptr_t>(local_entry->value)); | 529 static_cast<int>(reinterpret_cast<intptr_t>(local_entry->value)); |
525 DCHECK_GT(value, 0); | 530 DCHECK_GT(value, 0); |
526 | 531 |
527 { | 532 if (site->IncrementMementoFoundCount(value)) { |
528 // TODO(mlippautz): For parallel processing we need synchronization here. | 533 global_pretenuring_feedback_->LookupOrInsert( |
529 if (site->IncrementMementoFoundCount(value)) { | 534 site, static_cast<uint32_t>(bit_cast<uintptr_t>(site))); |
530 global_pretenuring_feedback_->LookupOrInsert( | |
531 site, static_cast<uint32_t>(bit_cast<uintptr_t>(site))); | |
532 } | |
533 } | 535 } |
534 } | 536 } |
535 } | 537 } |
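Note on MergeAllocationSitePretenuringFeedback above: the DCHECK(site->IsAllocationSite()) becomes a runtime check because entries in a thread-local feedback table may point at sites that were moved (hence the forwarding-address resolution) or died since the feedback was recorded. A hedged sketch of the overall merge pattern, using std::unordered_map instead of V8's HashMap and a plain int count instead of the memento bookkeeping:

#include <cstdint>
#include <unordered_map>

// Illustrative only: fold per-thread pretenuring counts into a global
// table keyed by the site pointer, as LookupOrInsert does above.
using FeedbackMap = std::unordered_map<uintptr_t, int>;

void MergeLocalFeedback(const FeedbackMap& local, FeedbackMap* global) {
  for (const auto& entry : local) {
    // The real code re-validates the key here (forwarding address,
    // IsAllocationSite(), IsZombie()) before applying the count.
    (*global)[entry.first] += entry.second;
  }
}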
536 | 538 |
537 | 539 |
538 class Heap::PretenuringScope { | 540 class Heap::PretenuringScope { |
539 public: | 541 public: |
540 explicit PretenuringScope(Heap* heap) : heap_(heap) { | 542 explicit PretenuringScope(Heap* heap) : heap_(heap) { |
541 heap_->global_pretenuring_feedback_ = | 543 heap_->global_pretenuring_feedback_ = |
542 new HashMap(HashMap::PointersMatch, kInitialFeedbackCapacity); | 544 new HashMap(HashMap::PointersMatch, kInitialFeedbackCapacity); |
(...skipping 684 matching lines...)
1227 NormalizedMapCache::cast(cache)->Clear(); | 1229 NormalizedMapCache::cast(cache)->Clear(); |
1228 } | 1230 } |
1229 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | 1231 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); |
1230 } | 1232 } |
1231 } | 1233 } |
1232 | 1234 |
1233 | 1235 |
1234 void Heap::UpdateSurvivalStatistics(int start_new_space_size) { | 1236 void Heap::UpdateSurvivalStatistics(int start_new_space_size) { |
1235 if (start_new_space_size == 0) return; | 1237 if (start_new_space_size == 0) return; |
1236 | 1238 |
1237 promotion_ratio_ = (static_cast<double>(promoted_objects_size_) / | 1239 promotion_ratio_ = (static_cast<double>(promoted_objects_size()) / |
1238 static_cast<double>(start_new_space_size) * 100); | 1240 static_cast<double>(start_new_space_size) * 100); |
1239 | 1241 |
1240 if (previous_semi_space_copied_object_size_ > 0) { | 1242 if (previous_semi_space_copied_object_size_ > 0) { |
1241 promotion_rate_ = | 1243 promotion_rate_ = |
1242 (static_cast<double>(promoted_objects_size_) / | 1244 (static_cast<double>(promoted_objects_size()) / |
1243 static_cast<double>(previous_semi_space_copied_object_size_) * 100); | 1245 static_cast<double>(previous_semi_space_copied_object_size_) * 100); |
1244 } else { | 1246 } else { |
1245 promotion_rate_ = 0; | 1247 promotion_rate_ = 0; |
1246 } | 1248 } |
1247 | 1249 |
1248 semi_space_copied_rate_ = | 1250 semi_space_copied_rate_ = |
1249 (static_cast<double>(semi_space_copied_object_size_) / | 1251 (static_cast<double>(semi_space_copied_object_size()) / |
1250 static_cast<double>(start_new_space_size) * 100); | 1252 static_cast<double>(start_new_space_size) * 100); |
1251 | 1253 |
1252 double survival_rate = promotion_ratio_ + semi_space_copied_rate_; | 1254 double survival_rate = promotion_ratio_ + semi_space_copied_rate_; |
1253 tracer()->AddSurvivalRatio(survival_rate); | 1255 tracer()->AddSurvivalRatio(survival_rate); |
1254 if (survival_rate > kYoungSurvivalRateHighThreshold) { | 1256 if (survival_rate > kYoungSurvivalRateHighThreshold) { |
1255 high_survival_rate_period_length_++; | 1257 high_survival_rate_period_length_++; |
1256 } else { | 1258 } else { |
1257 high_survival_rate_period_length_ = 0; | 1259 high_survival_rate_period_length_ = 0; |
1258 } | 1260 } |
1259 } | 1261 } |
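For reference, the percentage arithmetic in UpdateSurvivalStatistics with hypothetical sizes; this is a standalone sketch, not V8 code, and the byte counts are invented only to make the numbers easy to follow:

#include <cstdio>

int main() {
  const double start_new_space_size = 8.0 * 1024 * 1024;  // 8 MB of new space at GC start
  const double promoted = 1.0 * 1024 * 1024;              // 1 MB promoted to old generation
  const double copied = 2.0 * 1024 * 1024;                // 2 MB copied within new space

  const double promotion_ratio = promoted / start_new_space_size * 100;  // 12.5
  const double copied_rate = copied / start_new_space_size * 100;        // 25.0
  const double survival_rate = promotion_ratio + copied_rate;            // 37.5
  std::printf("survival rate: %.1f%%\n", survival_rate);
  return 0;
}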
(...skipping 42 matching lines...)
1302 | 1304 |
1303 if (collector == MARK_COMPACTOR) { | 1305 if (collector == MARK_COMPACTOR) { |
1304 UpdateOldGenerationAllocationCounter(); | 1306 UpdateOldGenerationAllocationCounter(); |
1305 // Perform mark-sweep with optional compaction. | 1307 // Perform mark-sweep with optional compaction. |
1306 MarkCompact(); | 1308 MarkCompact(); |
1307 old_gen_exhausted_ = false; | 1309 old_gen_exhausted_ = false; |
1308 old_generation_size_configured_ = true; | 1310 old_generation_size_configured_ = true; |
1309 // This should be updated before PostGarbageCollectionProcessing, which | 1311 // This should be updated before PostGarbageCollectionProcessing, which |
1310 // can cause another GC. Take into account the objects promoted during GC. | 1312 // can cause another GC. Take into account the objects promoted during GC. |
1311 old_generation_allocation_counter_ += | 1313 old_generation_allocation_counter_ += |
1312 static_cast<size_t>(promoted_objects_size_); | 1314 static_cast<size_t>(promoted_objects_size()); |
1313 old_generation_size_at_last_gc_ = PromotedSpaceSizeOfObjects(); | 1315 old_generation_size_at_last_gc_ = PromotedSpaceSizeOfObjects(); |
1314 } else { | 1316 } else { |
1315 Scavenge(); | 1317 Scavenge(); |
1316 } | 1318 } |
1317 | 1319 |
1318 ProcessPretenuringFeedback(); | 1320 ProcessPretenuringFeedback(); |
1319 } | 1321 } |
1320 | 1322 |
1321 UpdateSurvivalStatistics(start_new_space_size); | 1323 UpdateSurvivalStatistics(start_new_space_size); |
1322 ConfigureInitialOldGenerationSize(); | 1324 ConfigureInitialOldGenerationSize(); |
(...skipping 183 matching lines...)
1506 for (HeapObject* object = code_it.Next(); object != NULL; | 1508 for (HeapObject* object = code_it.Next(); object != NULL; |
1507 object = code_it.Next()) | 1509 object = code_it.Next()) |
1508 object->Iterate(&v); | 1510 object->Iterate(&v); |
1509 } | 1511 } |
1510 #endif // VERIFY_HEAP | 1512 #endif // VERIFY_HEAP |
1511 | 1513 |
1512 | 1514 |
1513 void Heap::CheckNewSpaceExpansionCriteria() { | 1515 void Heap::CheckNewSpaceExpansionCriteria() { |
1514 if (FLAG_experimental_new_space_growth_heuristic) { | 1516 if (FLAG_experimental_new_space_growth_heuristic) { |
1515 if (new_space_.TotalCapacity() < new_space_.MaximumCapacity() && | 1517 if (new_space_.TotalCapacity() < new_space_.MaximumCapacity() && |
1516 survived_last_scavenge_ * 100 / new_space_.TotalCapacity() >= 10) { | 1518 survived_last_scavenge() * 100 / new_space_.TotalCapacity() >= 10) { |
1517 // Grow the size of new space if there is room to grow, and more than 10% | 1519 // Grow the size of new space if there is room to grow, and more than 10% |
1518 // have survived the last scavenge. | 1520 // have survived the last scavenge. |
1519 new_space_.Grow(); | 1521 new_space_.Grow(); |
1520 survived_since_last_expansion_ = 0; | 1522 survived_since_last_expansion_.SetValue(0); |
1521 } | 1523 } |
1522 } else if (new_space_.TotalCapacity() < new_space_.MaximumCapacity() && | 1524 } else if (new_space_.TotalCapacity() < new_space_.MaximumCapacity() && |
1523 survived_since_last_expansion_ > new_space_.TotalCapacity()) { | 1525 survived_since_last_expansion() > new_space_.TotalCapacity()) { |
1524 // Grow the size of new space if there is room to grow, and enough data | 1526 // Grow the size of new space if there is room to grow, and enough data |
1525 // has survived scavenge since the last expansion. | 1527 // has survived scavenge since the last expansion. |
1526 new_space_.Grow(); | 1528 new_space_.Grow(); |
1527 survived_since_last_expansion_ = 0; | 1529 survived_since_last_expansion_.SetValue(0); |
1528 } | 1530 } |
1529 } | 1531 } |
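The experimental heuristic above grows the new space when at least 10% of its total capacity survived the last scavenge; the non-experimental branch grows it once the bytes survived since the last expansion exceed the total capacity. A small sketch of the experimental predicate with hypothetical sizes (the KB/MB constants are assumptions for the example):

#include <cstdint>

constexpr intptr_t KB = 1024;
constexpr intptr_t MB = 1024 * KB;

bool ShouldGrowNewSpace(intptr_t survived_last_scavenge,
                        intptr_t total_capacity,
                        intptr_t maximum_capacity) {
  return total_capacity < maximum_capacity &&
         survived_last_scavenge * 100 / total_capacity >= 10;
}

// e.g. ShouldGrowNewSpace(256 * KB, 2 * MB, 8 * MB)
//      -> 256 * 100 / 2048 = 12 (integer division), 12 >= 10, so grow.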
1530 | 1532 |
1531 | 1533 |
1532 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) { | 1534 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) { |
1533 return heap->InNewSpace(*p) && | 1535 return heap->InNewSpace(*p) && |
1534 !HeapObject::cast(*p)->map_word().IsForwardingAddress(); | 1536 !HeapObject::cast(*p)->map_word().IsForwardingAddress(); |
1535 } | 1537 } |
1536 | 1538 |
1537 | 1539 |
(...skipping 216 matching lines...)
1754 ProcessYoungWeakReferences(&weak_object_retainer); | 1756 ProcessYoungWeakReferences(&weak_object_retainer); |
1755 | 1757 |
1756 DCHECK(new_space_front == new_space_.top()); | 1758 DCHECK(new_space_front == new_space_.top()); |
1757 | 1759 |
1758 // Set age mark. | 1760 // Set age mark. |
1759 new_space_.set_age_mark(new_space_.top()); | 1761 new_space_.set_age_mark(new_space_.top()); |
1760 | 1762 |
1761 array_buffer_tracker()->FreeDead(true); | 1763 array_buffer_tracker()->FreeDead(true); |
1762 | 1764 |
1763 // Update how much has survived scavenge. | 1765 // Update how much has survived scavenge. |
1764 IncrementYoungSurvivorsCounter(static_cast<int>( | 1766 IncrementYoungSurvivorsCounter( |
1765 (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size())); | 1767 (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()); |
1766 | 1768 |
1767 LOG(isolate_, ResourceEvent("scavenge", "end")); | 1769 LOG(isolate_, ResourceEvent("scavenge", "end")); |
1768 | 1770 |
1769 gc_state_ = NOT_IN_GC; | 1771 gc_state_ = NOT_IN_GC; |
1770 } | 1772 } |
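In the hunk above the static_cast<int> around the survived byte count is dropped, which suggests IncrementYoungSurvivorsCounter was widened to take an intptr_t elsewhere in this patch. A hedged sketch of the bookkeeping that call is assumed to perform, feeding the two counters that now use the SetValue()/accessor style:

#include <cstdint>

// Assumed behaviour, not copied from V8: record how many bytes survived the
// scavenge that just finished and accumulate survivors since the last
// new-space expansion.
void IncrementYoungSurvivorsCounterSketch(intptr_t survived,
                                          intptr_t* survived_last_scavenge,
                                          intptr_t* survived_since_last_expansion) {
  *survived_last_scavenge = survived;
  *survived_since_last_expansion += survived;
}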
1771 | 1773 |
1772 | 1774 |
1773 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap, | 1775 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap, |
1774 Object** p) { | 1776 Object** p) { |
1775 MapWord first_word = HeapObject::cast(*p)->map_word(); | 1777 MapWord first_word = HeapObject::cast(*p)->map_word(); |
(...skipping 4461 matching lines...)
6237 } | 6239 } |
6238 | 6240 |
6239 | 6241 |
6240 // static | 6242 // static |
6241 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6243 int Heap::GetStaticVisitorIdForMap(Map* map) { |
6242 return StaticVisitorBase::GetVisitorId(map); | 6244 return StaticVisitorBase::GetVisitorId(map); |
6243 } | 6245 } |
6244 | 6246 |
6245 } // namespace internal | 6247 } // namespace internal |
6246 } // namespace v8 | 6248 } // namespace v8 |