OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 573 matching lines...)
584 isolate_->compilation_cache()->Clear(); | 584 isolate_->compilation_cache()->Clear(); |
585 const int kMaxNumberOfAttempts = 7; | 585 const int kMaxNumberOfAttempts = 7; |
586 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { | 586 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { |
587 if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR, gc_reason, NULL)) { | 587 if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR, gc_reason, NULL)) { |
588 break; | 588 break; |
589 } | 589 } |
590 } | 590 } |
591 mark_compact_collector()->SetFlags(kNoGCFlags); | 591 mark_compact_collector()->SetFlags(kNoGCFlags); |
592 new_space_.Shrink(); | 592 new_space_.Shrink(); |
593 UncommitFromSpace(); | 593 UncommitFromSpace(); |
594 Shrink(); | |
595 incremental_marking()->UncommitMarkingDeque(); | 594 incremental_marking()->UncommitMarkingDeque(); |
596 } | 595 } |
597 | 596 |
598 | 597 |
599 bool Heap::CollectGarbage(AllocationSpace space, | 598 bool Heap::CollectGarbage(AllocationSpace space, |
600 GarbageCollector collector, | 599 GarbageCollector collector, |
601 const char* gc_reason, | 600 const char* gc_reason, |
602 const char* collector_reason) { | 601 const char* collector_reason) { |
603 // The VM is in the GC state until exiting this function. | 602 // The VM is in the GC state until exiting this function. |
604 VMState state(isolate_, GC); | 603 VMState state(isolate_, GC); |
(...skipping 177 matching lines...)
782 // Failed to reserve the space after several attempts. | 781 // Failed to reserve the space after several attempts. |
783 V8::FatalProcessOutOfMemory("Heap::ReserveSpace"); | 782 V8::FatalProcessOutOfMemory("Heap::ReserveSpace"); |
784 } | 783 } |
785 } | 784 } |
786 | 785 |
787 | 786 |
788 void Heap::EnsureFromSpaceIsCommitted() { | 787 void Heap::EnsureFromSpaceIsCommitted() { |
789 if (new_space_.CommitFromSpaceIfNeeded()) return; | 788 if (new_space_.CommitFromSpaceIfNeeded()) return; |
790 | 789 |
791 // Committing memory to from space failed. | 790 // Committing memory to from space failed. |
792 // Try shrinking and try again. | |
793 Shrink(); | |
794 if (new_space_.CommitFromSpaceIfNeeded()) return; | |
795 | |
796 // Committing memory to from space failed again. | |
797 // Memory is exhausted and we will die. | 791 // Memory is exhausted and we will die. |
798 V8::FatalProcessOutOfMemory("Committing semi space failed."); | 792 V8::FatalProcessOutOfMemory("Committing semi space failed."); |
799 } | 793 } |
800 | 794 |
801 | 795 |
802 void Heap::ClearJSFunctionResultCaches() { | 796 void Heap::ClearJSFunctionResultCaches() { |
803 if (isolate_->bootstrapper()->IsActive()) return; | 797 if (isolate_->bootstrapper()->IsActive()) return; |
804 | 798 |
805 Object* context = native_contexts_list_; | 799 Object* context = native_contexts_list_; |
806 while (!context->IsUndefined()) { | 800 while (!context->IsUndefined()) { |
807 // Get the caches for this context. GC can happen when the context | 801 // Get the caches for this context. GC can happen when the context |
808 // is not fully initialized, so the caches can be undefined. | 802 // is not fully initialized, so the caches can be undefined. |
809 Object* caches_or_undefined = | 803 Object* caches_or_undefined = |
810 Context::cast(context)->get(Context::JSFUNCTION_RESULT_CACHES_INDEX); | 804 Context::cast(context)->get(Context::JSFUNCTION_RESULT_CACHES_INDEX); |
811 if (!caches_or_undefined->IsUndefined()) { | 805 if (!caches_or_undefined->IsUndefined()) { |
812 FixedArray* caches = FixedArray::cast(caches_or_undefined); | 806 FixedArray* caches = FixedArray::cast(caches_or_undefined); |
813 // Clear the caches: | 807 // Clear the caches: |
814 int length = caches->length(); | 808 int length = caches->length(); |
815 for (int i = 0; i < length; i++) { | 809 for (int i = 0; i < length; i++) { |
816 JSFunctionResultCache::cast(caches->get(i))->Clear(); | 810 JSFunctionResultCache::cast(caches->get(i))->Clear(); |
817 } | 811 } |
818 } | 812 } |
819 // Get the next context: | 813 // Get the next context: |
820 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | 814 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); |
821 } | 815 } |
822 } | 816 } |
823 | 817 |
824 | 818 |
825 | |
826 void Heap::ClearNormalizedMapCaches() { | 819 void Heap::ClearNormalizedMapCaches() { |
827 if (isolate_->bootstrapper()->IsActive() && | 820 if (isolate_->bootstrapper()->IsActive() && |
828 !incremental_marking()->IsMarking()) { | 821 !incremental_marking()->IsMarking()) { |
829 return; | 822 return; |
830 } | 823 } |
831 | 824 |
832 Object* context = native_contexts_list_; | 825 Object* context = native_contexts_list_; |
833 while (!context->IsUndefined()) { | 826 while (!context->IsUndefined()) { |
834 // GC can happen when the context is not fully initialized, | 827 // GC can happen when the context is not fully initialized, |
835 // so the cache can be undefined. | 828 // so the cache can be undefined. |
(...skipping 5571 matching lines...)
6407 | 6400 |
6408 store_buffer()->TearDown(); | 6401 store_buffer()->TearDown(); |
6409 incremental_marking()->TearDown(); | 6402 incremental_marking()->TearDown(); |
6410 | 6403 |
6411 isolate_->memory_allocator()->TearDown(); | 6404 isolate_->memory_allocator()->TearDown(); |
6412 | 6405 |
6413 delete relocation_mutex_; | 6406 delete relocation_mutex_; |
6414 } | 6407 } |
6415 | 6408 |
6416 | 6409 |
6417 void Heap::Shrink() { | |
6418 // Try to shrink all paged spaces. | |
6419 PagedSpaces spaces(this); | |
6420 for (PagedSpace* space = spaces.next(); | |
6421 space != NULL; | |
6422 space = spaces.next()) { | |
6423 space->ReleaseAllUnusedPages(); | |
6424 } | |
6425 } | |
6426 | |
6427 | |
6428 void Heap::AddGCPrologueCallback(GCPrologueCallback callback, GCType gc_type) { | 6410 void Heap::AddGCPrologueCallback(GCPrologueCallback callback, GCType gc_type) { |
6429 ASSERT(callback != NULL); | 6411 ASSERT(callback != NULL); |
6430 GCPrologueCallbackPair pair(callback, gc_type); | 6412 GCPrologueCallbackPair pair(callback, gc_type); |
6431 ASSERT(!gc_prologue_callbacks_.Contains(pair)); | 6413 ASSERT(!gc_prologue_callbacks_.Contains(pair)); |
6432 return gc_prologue_callbacks_.Add(pair); | 6414 return gc_prologue_callbacks_.Add(pair); |
6433 } | 6415 } |
6434 | 6416 |
6435 | 6417 |
6436 void Heap::RemoveGCPrologueCallback(GCPrologueCallback callback) { | 6418 void Heap::RemoveGCPrologueCallback(GCPrologueCallback callback) { |
6437 ASSERT(callback != NULL); | 6419 ASSERT(callback != NULL); |
(...skipping 1098 matching lines...)
7536 static_cast<int>(object_sizes_last_time_[index])); | 7518 static_cast<int>(object_sizes_last_time_[index])); |
7537 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) | 7519 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) |
7538 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7520 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
7539 | 7521 |
7540 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 7522 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
7541 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 7523 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
7542 ClearObjectStats(); | 7524 ClearObjectStats(); |
7543 } | 7525 } |
7544 | 7526 |
7545 } } // namespace v8::internal | 7527 } } // namespace v8::internal |