Chromium Code Reviews

Unified Diff: src/heap.cc

Issue 3421009: Revision 2.4.4.... (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: '' Created 10 years, 3 months ago
-// Copyright 2009 the V8 project authors. All rights reserved.
+// Copyright 2010 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
 //       with the distribution.
(...skipping 36 matching lines...)
 #endif


 namespace v8 {
 namespace internal {


 String* Heap::hidden_symbol_;
 Object* Heap::roots_[Heap::kRootListLength];

-
 NewSpace Heap::new_space_;
 OldSpace* Heap::old_pointer_space_ = NULL;
 OldSpace* Heap::old_data_space_ = NULL;
 OldSpace* Heap::code_space_ = NULL;
 MapSpace* Heap::map_space_ = NULL;
 CellSpace* Heap::cell_space_ = NULL;
 LargeObjectSpace* Heap::lo_space_ = NULL;

-static const int kMinimumPromotionLimit = 2*MB;
-static const int kMinimumAllocationLimit = 8*MB;
-
 int Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
 int Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;

 int Heap::old_gen_exhausted_ = false;

 int Heap::amount_of_external_allocated_memory_ = 0;
 int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0;

 // semispace_size_ should be a power of 2 and old_generation_size_ should be
 // a multiple of Page::kPageSize.
(...skipping 318 matching lines...)
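Note that kMinimumPromotionLimit and kMinimumAllocationLimit are deleted here but are still referenced by the initializers that follow, so this change must re-declare them elsewhere, most plausibly inside the Heap class in src/heap.h (the previous file in this change). A sketch of the assumed new declaration site; the exact placement and access level are guesses:

    // Assumed relocation of the deleted file-level constants into the
    // Heap class in src/heap.h; shown here only for orientation.
    class Heap : public AllStatic {
     private:
      static const int kMinimumPromotionLimit = 2 * MB;
      static const int kMinimumAllocationLimit = 8 * MB;
      // ... existing members elided ...
    };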
   Counters::number_of_symbols.Set(symbol_table()->NumberOfElements());
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
   ReportStatisticsAfterGC();
 #endif
 #ifdef ENABLE_DEBUGGER_SUPPORT
   Debug::AfterGarbageCollection();
 #endif
 }


-void Heap::CollectAllGarbage(bool force_compaction) {
+void Heap::CollectAllGarbage(bool force_compaction,
+                             CollectionPolicy collectionPolicy) {
   // Since we are ignoring the return value, the exact choice of space does
   // not matter, so long as we do not specify NEW_SPACE, which would not
   // cause a full GC.
   MarkCompactCollector::SetForceCompaction(force_compaction);
-  CollectGarbage(0, OLD_POINTER_SPACE);
+  CollectGarbage(0, OLD_POINTER_SPACE, collectionPolicy);
   MarkCompactCollector::SetForceCompaction(false);
 }


-bool Heap::CollectGarbage(int requested_size, AllocationSpace space) {
+void Heap::CollectAllAvailableGarbage() {
+  CompilationCache::Clear();
+  CollectAllGarbage(true, AGGRESSIVE);
+}
+
+
+bool Heap::CollectGarbage(int requested_size,
+                          AllocationSpace space,
+                          CollectionPolicy collectionPolicy) {
   // The VM is in the GC state until exiting this function.
   VMState state(GC);

 #ifdef DEBUG
   // Reset the allocation timeout to the GC interval, but make sure to
   // allow at least a few allocations after a collection. The reason
   // for this is that we have a lot of allocation sequences and we
   // assume that a garbage collection will allow the subsequent
   // allocation attempts to go through.
   allocation_timeout_ = Max(6, FLAG_gc_interval);
 #endif

   { GCTracer tracer;
     GarbageCollectionPrologue();
     // The GC count was incremented in the prologue. Tell the tracer about
     // it.
     tracer.set_gc_count(gc_count_);

     GarbageCollector collector = SelectGarbageCollector(space);
     // Tell the tracer which collector we've selected.
     tracer.set_collector(collector);

     HistogramTimer* rate = (collector == SCAVENGER)
         ? &Counters::gc_scavenger
         : &Counters::gc_compactor;
     rate->Start();
-    PerformGarbageCollection(space, collector, &tracer);
+    PerformGarbageCollection(collector, &tracer, collectionPolicy);
     rate->Stop();

     GarbageCollectionEpilogue();
   }


 #ifdef ENABLE_LOGGING_AND_PROFILING
   if (FLAG_log_gc) HeapProfiler::WriteSample();
 #endif

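For reference, the new entry points and the extra parameter above rely on declarations added to src/heap.h, which is part of this change but reviewed in a separate file diff. A minimal sketch of what this file now assumes; the enum name CollectionPolicy and the values NORMAL and AGGRESSIVE come from the calls in this file, while the default arguments are an assumption made so that existing callers keep compiling:

    // Sketch only; the authoritative declarations are in src/heap.h.
    enum CollectionPolicy { NORMAL, AGGRESSIVE };

    // Performs a full garbage collection, optionally forcing compaction.
    static void CollectAllGarbage(bool force_compaction,
                                  CollectionPolicy collectionPolicy = NORMAL);

    // Last-resort GC: clears the compilation cache and collects with the
    // AGGRESSIVE policy, re-collecting while weak handle callbacks keep
    // releasing memory.
    static void CollectAllAvailableGarbage();

An embedder-facing trigger such as v8::V8::LowMemoryNotification() would be a natural caller of CollectAllAvailableGarbage(), though its call sites are outside this file.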
(...skipping 12 matching lines...)
       return cell_space_->Available() >= requested_size;
     case LO_SPACE:
       return lo_space_->Available() >= requested_size;
   }
   return false;
 }


 void Heap::PerformScavenge() {
   GCTracer tracer;
-  PerformGarbageCollection(NEW_SPACE, SCAVENGER, &tracer);
+  PerformGarbageCollection(SCAVENGER, &tracer, NORMAL);
 }


 #ifdef DEBUG
 // Helper class for verifying the symbol table.
 class SymbolTableVerifier : public ObjectVisitor {
  public:
   SymbolTableVerifier() { }
   void VisitPointers(Object** start, Object** end) {
     // Visit all HeapObject pointers in [start, end).
(...skipping 168 matching lines...)
     set_survival_rate_trend(DECREASING);
   } else if (survival_rate_diff < -kYoungSurvivalRateAllowedDeviation) {
     set_survival_rate_trend(INCREASING);
   } else {
     set_survival_rate_trend(STABLE);
   }

   survival_rate_ = survival_rate;
 }

-void Heap::PerformGarbageCollection(AllocationSpace space,
-                                    GarbageCollector collector,
-                                    GCTracer* tracer) {
+void Heap::PerformGarbageCollection(GarbageCollector collector,
+                                    GCTracer* tracer,
+                                    CollectionPolicy collectionPolicy) {
   VerifySymbolTable();
   if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
     ASSERT(!allocation_allowed_);
     GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
     global_gc_prologue_callback_();
   }

   GCType gc_type =
       collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge;

   for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
     if (gc_type & gc_prologue_callbacks_[i].gc_type) {
       gc_prologue_callbacks_[i].callback(gc_type, kNoGCCallbackFlags);
     }
   }

   EnsureFromSpaceIsCommitted();

   int start_new_space_size = Heap::new_space()->Size();

   if (collector == MARK_COMPACTOR) {
     // Perform mark-sweep with optional compaction.
     MarkCompact(tracer);

     bool high_survival_rate_during_scavenges = IsHighSurvivalRate() &&
         IsStableOrIncreasingSurvivalTrend();

     UpdateSurvivalRateTrend(start_new_space_size);

-    int old_gen_size = PromotedSpaceSize();
-    old_gen_promotion_limit_ =
-        old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
-    old_gen_allocation_limit_ =
-        old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
+    UpdateOldSpaceLimits();

-    if (high_survival_rate_during_scavenges &&
-        IsStableOrIncreasingSurvivalTrend()) {
-      // Stable high survival rates of young objects both during partial and
-      // full collection indicate that mutator is either building or modifying
-      // a structure with a long lifetime.
-      // In this case we aggressively raise old generation memory limits to
-      // postpone subsequent mark-sweep collection and thus trade memory
-      // space for the mutation speed.
-      old_gen_promotion_limit_ *= 2;
-      old_gen_allocation_limit_ *= 2;
-    }
-
-    old_gen_exhausted_ = false;
+    // A major GC invokes weak handle callbacks on weakly reachable
+    // handles, but it does not collect the weakly reachable objects until
+    // the next major GC. Therefore, if we collect aggressively and a weak
+    // handle callback has been invoked, we rerun the major GC to release
+    // the objects that have become garbage.
+    if (collectionPolicy == AGGRESSIVE) {
+      // Note: since weak callbacks can execute arbitrary code, we cannot
+      // hope that there will eventually be no weak callback invocations.
+      // Therefore, stop re-collecting after several attempts.
+      const int kMaxNumberOfAttempts = 7;
+      for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
+        { DisableAssertNoAllocation allow_allocation;
+          GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
+          if (!GlobalHandles::PostGarbageCollectionProcessing()) break;
+        }
+        MarkCompact(tracer);
+        // Weak handle callbacks can allocate data, so keep the limits
+        // correct.
+        UpdateOldSpaceLimits();
+      }
+    } else {
+      if (high_survival_rate_during_scavenges &&
+          IsStableOrIncreasingSurvivalTrend()) {
+        // Stable high survival rates of young objects both during partial
+        // and full collections indicate that the mutator is either building
+        // or modifying a structure with a long lifetime.
+        // In this case we aggressively raise the old generation memory
+        // limits to postpone the subsequent mark-sweep collection, trading
+        // memory space for mutation speed.
+        old_gen_promotion_limit_ *= 2;
+        old_gen_allocation_limit_ *= 2;
+      }
+    }
+
+    { DisableAssertNoAllocation allow_allocation;
+      GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
+      GlobalHandles::PostGarbageCollectionProcessing();
+    }
   } else {
     tracer_ = tracer;
     Scavenge();
     tracer_ = NULL;

     UpdateSurvivalRateTrend(start_new_space_size);
   }

   Counters::objs_since_last_young.Set(0);

-  if (collector == MARK_COMPACTOR) {
-    DisableAssertNoAllocation allow_allocation;
-    GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
-    GlobalHandles::PostGarbageCollectionProcessing();
-  }
-
   // Update relocatables.
   Relocatable::PostGarbageCollectionProcessing();

   if (collector == MARK_COMPACTOR) {
     // Register the amount of external allocated memory.
     amount_of_external_allocated_memory_at_last_global_gc_ =
         amount_of_external_allocated_memory_;
   }

   GCCallbackFlags callback_flags = tracer->is_compacting()
(...skipping 1083 matching lines...)
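The UpdateOldSpaceLimits() helper called above factors out the limit computation that the old code performed inline (the lines deleted in this hunk). Its definition falls outside the visible hunks; a plausible reconstruction from those deleted lines, where folding in the old_gen_exhausted_ reset is an assumption based on the deleted standalone statement:

    // Reconstructed from the deleted inline code; not visible in this diff.
    void Heap::UpdateOldSpaceLimits() {
      int old_gen_size = PromotedSpaceSize();
      old_gen_promotion_limit_ =
          old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
      old_gen_allocation_limit_ =
          old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
      old_gen_exhausted_ = false;
    }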
   obj = NumberDictionary::Allocate(64);
   if (obj->IsFailure()) return false;
   set_non_monomorphic_cache(NumberDictionary::cast(obj));

   set_instanceof_cache_function(Smi::FromInt(0));
   set_instanceof_cache_map(Smi::FromInt(0));
   set_instanceof_cache_answer(Smi::FromInt(0));

   CreateFixedStubs();

+  // Allocate the dictionary of intrinsic function names.
+  obj = StringDictionary::Allocate(Runtime::kNumFunctions);
+  if (obj->IsFailure()) return false;
+  obj = Runtime::InitializeIntrinsicFunctionNames(obj);
+  if (obj->IsFailure()) return false;
+  set_intrinsic_function_names(StringDictionary::cast(obj));
+
   if (InitializeNumberStringCache()->IsFailure()) return false;

   // Allocate cache for single character ASCII strings.
   obj = AllocateFixedArray(String::kMaxAsciiCharCode + 1, TENURED);
   if (obj->IsFailure()) return false;
   set_single_character_string_cache(FixedArray::cast(obj));

   // Allocate cache for external strings pointing to native source code.
   obj = AllocateFixedArray(Natives::GetBuiltinsCount());
   if (obj->IsFailure()) return false;
(...skipping 3056 matching lines...)
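The new intrinsic_function_names dictionary maps each runtime (intrinsic) function name to its entry in the runtime function table, replacing a linear name scan with a hash lookup. The consumer side lives in src/runtime.cc rather than this file; a hedged sketch of how such a lookup could use the dictionary (the function name LookupIntrinsic and the Smi-encoded index value are assumptions):

    // Hypothetical consumer: resolve a %FunctionName() call against the
    // dictionary populated by Runtime::InitializeIntrinsicFunctionNames.
    static Runtime::Function* LookupIntrinsic(String* name) {
      StringDictionary* dict = Heap::intrinsic_function_names();
      int entry = dict->FindEntry(name);
      if (entry == StringDictionary::kNotFound) return NULL;
      // Assumed encoding: each value is a Smi holding the index of the
      // function in the runtime function table.
      int index = Smi::cast(dict->ValueAt(entry))->value();
      return Runtime::FunctionForId(static_cast<Runtime::FunctionId>(index));
    }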
 void ExternalStringTable::TearDown() {
   new_space_strings_.Free();
   old_space_strings_.Free();
 }


 List<Object*> ExternalStringTable::new_space_strings_;
 List<Object*> ExternalStringTable::old_space_strings_;

 } }  // namespace v8::internal