Chromium Code Reviews

Unified Diff: src/heap.cc

Issue 4295004: Attempt to collect more garbage before panicking with out of memory. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressing various English usage concerns (created 10 years, 1 month ago)
 // Copyright 2010 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 411 matching lines...)
 void Heap::CollectAllGarbage(bool force_compaction) {
   // Since we are ignoring the return value, the exact choice of space does
   // not matter, so long as we do not specify NEW_SPACE, which would not
   // cause a full GC.
   MarkCompactCollector::SetForceCompaction(force_compaction);
   CollectGarbage(OLD_POINTER_SPACE);
   MarkCompactCollector::SetForceCompaction(false);
 }
 
 
-void Heap::CollectGarbage(AllocationSpace space) {
+void Heap::CollectAllAvailableGarbage() {
+  // Since we are ignoring the return value, the exact choice of space does
+  // not matter, so long as we do not specify NEW_SPACE, which would not
+  // cause a full GC.
+  MarkCompactCollector::SetForceCompaction(true);
+
+  // A major GC invokes weak handle callbacks on weakly reachable handles,
+  // but does not collect the weakly reachable objects until the next major
+  // GC. Therefore, if we collect aggressively and a weak handle callback
+  // has been invoked, we rerun the major GC to release the objects which
+  // have become garbage.
+  // Note: as weak callbacks can execute arbitrary code, we cannot hope
+  // that eventually there will be no weak callback invocations.
+  // Therefore we stop recollecting after several attempts.
+  const int kMaxNumberOfAttempts = 7;
+  for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
+    if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) {
+      break;
+    }
+  }
+  MarkCompactCollector::SetForceCompaction(false);
+}
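The retry logic in the new CollectAllAvailableGarbage() above can be illustrated with a minimal standalone sketch. Everything in it is invented for illustration (the CollectFullGarbage() stub and the pending-callback counter are not part of V8 or of this patch); it only mirrors the shape of the loop: keep running full collections while the previous one reports that more memory may become reclaimable, and give up after a fixed number of attempts because weak callbacks can keep scheduling new work indefinitely.

#include <cstdio>

// Stand-in for "did any weak callback run during this full GC?". In the patch
// this signal ultimately comes from the weak-handle post-processing step.
static int pending_weak_callbacks = 3;  // pretend three callbacks will fire

// Hypothetical full-GC stub: returns true while another collection is still
// likely to free more memory (i.e. a weak callback was invoked this round).
static bool CollectFullGarbage() {
  if (pending_weak_callbacks > 0) {
    --pending_weak_callbacks;
    std::printf("full GC: a weak callback ran, another pass may help\n");
    return true;
  }
  std::printf("full GC: nothing more to release\n");
  return false;
}

int main() {
  // Same shape as Heap::CollectAllAvailableGarbage(): retry the full GC while
  // it reports that more garbage may become collectable, but cap the number
  // of attempts so the process cannot loop forever.
  const int kMaxNumberOfAttempts = 7;
  for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
    if (!CollectFullGarbage()) break;
  }
  return 0;
}

In the patch itself, that per-iteration signal is the boolean that CollectGarbage(space, collector) now returns.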
+
+
+bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
   // The VM is in the GC state until exiting this function.
   VMState state(GC);
 
 #ifdef DEBUG
   // Reset the allocation timeout to the GC interval, but make sure to
   // allow at least a few allocations after a collection. The reason
   // for this is that we have a lot of allocation sequences and we
   // assume that a garbage collection will allow the subsequent
   // allocation attempts to go through.
   allocation_timeout_ = Max(6, FLAG_gc_interval);
 #endif
 
+  bool next_gc_likely_to_collect_more = false;
+
   { GCTracer tracer;
     GarbageCollectionPrologue();
     // The GC count was incremented in the prologue. Tell the tracer about
     // it.
     tracer.set_gc_count(gc_count_);
 
-    GarbageCollector collector = SelectGarbageCollector(space);
     // Tell the tracer which collector we've selected.
     tracer.set_collector(collector);
 
     HistogramTimer* rate = (collector == SCAVENGER)
         ? &Counters::gc_scavenger
         : &Counters::gc_compactor;
     rate->Start();
-    PerformGarbageCollection(collector, &tracer);
+    next_gc_likely_to_collect_more =
+        PerformGarbageCollection(collector, &tracer);
     rate->Stop();
 
     GarbageCollectionEpilogue();
   }
 
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
   if (FLAG_log_gc) HeapProfiler::WriteSample();
   if (CpuProfiler::is_profiling()) CpuProfiler::ProcessMovedFunctions();
 #endif
+
+  return next_gc_likely_to_collect_more;
 }
 
 
 void Heap::PerformScavenge() {
   GCTracer tracer;
   PerformGarbageCollection(SCAVENGER, &tracer);
 }
 
 
 #ifdef DEBUG
(...skipping 166 matching lines...)
     set_survival_rate_trend(DECREASING);
   } else if (survival_rate_diff < -kYoungSurvivalRateAllowedDeviation) {
     set_survival_rate_trend(INCREASING);
   } else {
     set_survival_rate_trend(STABLE);
   }
 
   survival_rate_ = survival_rate;
 }
 
-void Heap::PerformGarbageCollection(GarbageCollector collector,
+bool Heap::PerformGarbageCollection(GarbageCollector collector,
                                     GCTracer* tracer) {
+  bool next_gc_likely_to_collect_more = false;
+
   if (collector != SCAVENGER) {
     PROFILE(CodeMovingGCEvent());
   }
 
   VerifySymbolTable();
   if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
     ASSERT(!allocation_allowed_);
     GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
     global_gc_prologue_callback_();
   }
(...skipping 45 matching lines...)
     tracer_ = NULL;
 
     UpdateSurvivalRateTrend(start_new_space_size);
   }
 
   Counters::objs_since_last_young.Set(0);
 
   if (collector == MARK_COMPACTOR) {
     DisableAssertNoAllocation allow_allocation;
     GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
-    GlobalHandles::PostGarbageCollectionProcessing();
+    next_gc_likely_to_collect_more =
+        GlobalHandles::PostGarbageCollectionProcessing();
   }
 
   // Update relocatables.
   Relocatable::PostGarbageCollectionProcessing();
 
   if (collector == MARK_COMPACTOR) {
     // Register the amount of external allocated memory.
     amount_of_external_allocated_memory_at_last_global_gc_ =
         amount_of_external_allocated_memory_;
   }
 
   GCCallbackFlags callback_flags = tracer->is_compacting()
       ? kGCCallbackFlagCompacted
       : kNoGCCallbackFlags;
   for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
     if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
       gc_epilogue_callbacks_[i].callback(gc_type, callback_flags);
     }
   }
 
   if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) {
     ASSERT(!allocation_allowed_);
     GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
     global_gc_epilogue_callback_();
   }
   VerifySymbolTable();
+
+  return next_gc_likely_to_collect_more;
 }
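For reference, the path that the new boolean takes on its way up the call chain can also be sketched in isolation. The stand-in functions below are invented for illustration and model only what this diff shows: the flag originates in the weak-handle post-processing step, is set only on the mark-compact path, and is forwarded unchanged by the outer collection entry point.

#include <cstdio>

// Hypothetical stand-ins for the real V8 types and functions; only the
// return-value threading is modeled here, not any actual collection work.
enum GarbageCollector { SCAVENGER, MARK_COMPACTOR };

// Models the weak-handle post-processing step: reports whether any weak
// callback ran, i.e. whether a follow-up full GC may reclaim more objects.
static bool PostGarbageCollectionProcessing() {
  return true;  // assume a callback ran, purely for demonstration
}

// Models the patched PerformGarbageCollection(): the flag stays false for
// scavenges and is only filled in after a mark-compact collection.
static bool PerformGarbageCollection(GarbageCollector collector) {
  bool next_gc_likely_to_collect_more = false;
  if (collector == MARK_COMPACTOR) {
    next_gc_likely_to_collect_more = PostGarbageCollectionProcessing();
  }
  return next_gc_likely_to_collect_more;
}

// Models the patched CollectGarbage(): it simply forwards the flag so that a
// caller such as CollectAllAvailableGarbage() can decide whether to retry.
static bool CollectGarbage(GarbageCollector collector) {
  return PerformGarbageCollection(collector);
}

int main() {
  std::printf("scavenge -> more to collect next time? %d\n",
              CollectGarbage(SCAVENGER));
  std::printf("mark-compact -> more to collect next time? %d\n",
              CollectGarbage(MARK_COMPACTOR));
  return 0;
}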
 
 
 void Heap::MarkCompact(GCTracer* tracer) {
   gc_state_ = MARK_COMPACT;
   LOG(ResourceEvent("markcompact", "begin"));
 
   MarkCompactCollector::Prepare(tracer);
 
   bool is_compacting = MarkCompactCollector::IsCompacting();
(...skipping 4472 matching lines...)
 void ExternalStringTable::TearDown() {
   new_space_strings_.Free();
   old_space_strings_.Free();
 }
 
 
 List<Object*> ExternalStringTable::new_space_strings_;
 List<Object*> ExternalStringTable::old_space_strings_;
 
 } }  // namespace v8::internal