OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 753 matching lines...)
764 } else { | 764 } else { |
765 tracer_ = tracer; | 765 tracer_ = tracer; |
766 Scavenge(); | 766 Scavenge(); |
767 tracer_ = NULL; | 767 tracer_ = NULL; |
768 | 768 |
769 UpdateSurvivalRateTrend(start_new_space_size); | 769 UpdateSurvivalRateTrend(start_new_space_size); |
770 } | 770 } |
771 | 771 |
772 isolate_->counters()->objs_since_last_young()->Set(0); | 772 isolate_->counters()->objs_since_last_young()->Set(0); |
773 | 773 |
774 if (collector == MARK_COMPACTOR) { | 774 { DisableAssertNoAllocation allow_allocation; |
775 DisableAssertNoAllocation allow_allocation; | |
776 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); | 775 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); |
777 next_gc_likely_to_collect_more = | 776 next_gc_likely_to_collect_more = |
778 isolate_->global_handles()->PostGarbageCollectionProcessing(); | 777 isolate_->global_handles()->PostGarbageCollectionProcessing(collector); |
779 } | 778 } |
780 | 779 |
781 // Update relocatables. | 780 // Update relocatables. |
782 Relocatable::PostGarbageCollectionProcessing(); | 781 Relocatable::PostGarbageCollectionProcessing(); |
783 | 782 |
784 if (collector == MARK_COMPACTOR) { | 783 if (collector == MARK_COMPACTOR) { |
785 // Register the amount of external allocated memory. | 784 // Register the amount of external allocated memory. |
786 amount_of_external_allocated_memory_at_last_global_gc_ = | 785 amount_of_external_allocated_memory_at_last_global_gc_ = |
787 amount_of_external_allocated_memory_; | 786 amount_of_external_allocated_memory_; |
788 } | 787 } |
(...skipping 139 matching lines...)
928 if (new_space_.Capacity() < new_space_.MaximumCapacity() && | 927 if (new_space_.Capacity() < new_space_.MaximumCapacity() && |
929 survived_since_last_expansion_ > new_space_.Capacity()) { | 928 survived_since_last_expansion_ > new_space_.Capacity()) { |
930 // Grow the size of new space if there is room to grow and enough | 929 // Grow the size of new space if there is room to grow and enough |
931 // data has survived scavenge since the last expansion. | 930 // data has survived scavenge since the last expansion. |
932 new_space_.Grow(); | 931 new_space_.Grow(); |
933 survived_since_last_expansion_ = 0; | 932 survived_since_last_expansion_ = 0; |
934 } | 933 } |
935 } | 934 } |
936 | 935 |
937 | 936 |
| 937 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) { |
| 938 return heap->InNewSpace(*p) && |
| 939 !HeapObject::cast(*p)->map_word().IsForwardingAddress(); |
| 940 } |
| 941 |
| 942 |
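Note on the added predicate: IsUnscavengedHeapObject reports that an object still sits in new space and that its map word holds no forwarding address, i.e. the scavenger has not copied it yet. Below is a minimal standalone sketch of the forwarding-word idea behind that check; ToyObject, kForwardedTag and the rest are invented toy names, not V8's real object layout.

#include <cassert>
#include <cstdint>

// Toy model of a copying collector's forwarding scheme: once an object has
// been evacuated, its first word is overwritten with a tagged pointer to the
// new copy. All names are illustrative only.
struct ToyObject {
  std::uintptr_t header;  // Either a plain type word or a tagged forwarding pointer.
  int payload;
};

const std::uintptr_t kForwardedTag = 1;

bool IsForwarded(const ToyObject* obj) {
  return (obj->header & kForwardedTag) != 0;
}

ToyObject* Evacuate(ToyObject* from, ToyObject* to_space_slot) {
  *to_space_slot = *from;  // Copy the object into to-space.
  from->header = reinterpret_cast<std::uintptr_t>(to_space_slot) | kForwardedTag;
  return to_space_slot;
}

// Analogue of IsUnscavengedHeapObject: still in from-space, not yet copied.
bool IsUnscavenged(const ToyObject* obj) { return !IsForwarded(obj); }

int main() {
  ToyObject from = {0, 42};
  ToyObject to_space[1];
  assert(IsUnscavenged(&from));
  Evacuate(&from, to_space);
  assert(!IsUnscavenged(&from));
  assert(to_space[0].payload == 42);
  return 0;
}

In the toy model, as in a real copying collector, "already evacuated" and "has a forwarding pointer" are the same condition, which is exactly what the new predicate tests.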
938 void Heap::Scavenge() { | 943 void Heap::Scavenge() { |
939 #ifdef DEBUG | 944 #ifdef DEBUG |
940 if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers(); | 945 if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers(); |
941 #endif | 946 #endif |
942 | 947 |
943 gc_state_ = SCAVENGE; | 948 gc_state_ = SCAVENGE; |
944 | 949 |
945 SwitchScavengingVisitorsTableIfProfilingWasEnabled(); | 950 SwitchScavengingVisitorsTableIfProfilingWasEnabled(); |
946 | 951 |
947 Page::FlipMeaningOfInvalidatedWatermarkFlag(this); | 952 Page::FlipMeaningOfInvalidatedWatermarkFlag(this); |
(...skipping 74 matching lines...)
1022 reinterpret_cast<Address>(cell) + | 1027 reinterpret_cast<Address>(cell) + |
1023 (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag); | 1028 (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag); |
1024 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); | 1029 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); |
1025 } | 1030 } |
1026 } | 1031 } |
1027 | 1032 |
1028 // Scavenge object reachable from the global contexts list directly. | 1033 // Scavenge object reachable from the global contexts list directly. |
1029 scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_)); | 1034 scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_)); |
1030 | 1035 |
1031 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1036 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
| 1037 isolate_->global_handles()->IdentifyWeakIndependentHandles( |
| 1038 &IsUnscavengedHeapObject); |
| 1039 isolate_->global_handles()->IterateWeakIndependentRoots(&scavenge_visitor); |
| 1040 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
| 1041 |
1032 | 1042 |
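Note on the new Scavenge steps: after the first DoScavenge pass, weak independent global handles whose targets are still unscavenged are identified (using the predicate above), the weak independent roots are then visited with the scavenge visitor, and DoScavenge runs again so anything newly reachable gets copied as well. A rough sketch of that trace / consult-weak-refs / trace-again shape, using toy worklist types rather than V8's visitors (Node, WeakRef, Trace and Collect are invented names):

#include <cstdio>
#include <unordered_set>
#include <vector>

// Toy illustration of the two-pass shape: trace from strong roots, consult
// weak references afterwards, then trace again from whatever the weak pass
// decided to keep.
struct Node {
  std::vector<Node*> children;
};

struct WeakRef {
  Node* target;
  bool cleared;
};

void Trace(std::vector<Node*> worklist, std::unordered_set<Node*>* reached) {
  while (!worklist.empty()) {
    Node* n = worklist.back();
    worklist.pop_back();
    if (!reached->insert(n).second) continue;  // Already visited.
    for (Node* c : n->children) worklist.push_back(c);
  }
}

void Collect(const std::vector<Node*>& strong_roots,
             std::vector<WeakRef>* weak_refs) {
  std::unordered_set<Node*> reached;
  Trace(strong_roots, &reached);       // Pass 1: strong roots only.

  std::vector<Node*> kept;
  for (WeakRef& w : *weak_refs) {
    if (reached.count(w.target)) {
      kept.push_back(w.target);        // Target survived: keep tracing it.
    } else {
      w.cleared = true;                // Target unreached: mark for clearing.
    }
  }
  Trace(kept, &reached);               // Pass 2: drain any new work.
}

int main() {
  Node a, b, c;
  a.children.push_back(&b);
  std::vector<WeakRef> weak = {{&b, false}, {&c, false}};
  Collect({&a}, &weak);
  std::printf("weak[0] cleared: %d, weak[1] cleared: %d\n",
              weak[0].cleared, weak[1].cleared);  // Prints 0 and 1.
  return 0;
}

The actual keep-versus-clear policy lives inside GlobalHandles (IdentifyWeakIndependentHandles decides, with IsUnscavengedHeapObject as the liveness test) and is not reproduced in this sketch.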
1033 UpdateNewSpaceReferencesInExternalStringTable( | 1043 UpdateNewSpaceReferencesInExternalStringTable( |
1034 &UpdateNewSpaceReferenceInExternalStringTableEntry); | 1044 &UpdateNewSpaceReferenceInExternalStringTableEntry); |
1035 | 1045 |
1036 LiveObjectList::UpdateReferencesForScavengeGC(); | 1046 LiveObjectList::UpdateReferencesForScavengeGC(); |
1037 isolate()->runtime_profiler()->UpdateSamplesAfterScavenge(); | 1047 isolate()->runtime_profiler()->UpdateSamplesAfterScavenge(); |
1038 | 1048 |
1039 ASSERT(new_space_front == new_space_.top()); | 1049 ASSERT(new_space_front == new_space_.top()); |
1040 | 1050 |
1041 is_safe_to_read_maps_ = true; | 1051 is_safe_to_read_maps_ = true; |
(...skipping 3443 matching lines...)
4485 | 4495 |
4486 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) { | 4496 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) { |
4487 IterateStrongRoots(v, mode); | 4497 IterateStrongRoots(v, mode); |
4488 IterateWeakRoots(v, mode); | 4498 IterateWeakRoots(v, mode); |
4489 } | 4499 } |
4490 | 4500 |
4491 | 4501 |
4492 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) { | 4502 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) { |
4493 v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex])); | 4503 v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex])); |
4494 v->Synchronize("symbol_table"); | 4504 v->Synchronize("symbol_table"); |
4495 if (mode != VISIT_ALL_IN_SCAVENGE) { | 4505 if (mode != VISIT_ALL_IN_SCAVENGE && |
| 4506 mode != VISIT_ALL_IN_SWEEP_NEWSPACE) { |
4496 // Scavenge collections have special processing for this. | 4507 // Scavenge collections have special processing for this. |
4497 external_string_table_.Iterate(v); | 4508 external_string_table_.Iterate(v); |
4498 } | 4509 } |
4499 v->Synchronize("external_string_table"); | 4510 v->Synchronize("external_string_table"); |
4500 } | 4511 } |
4501 | 4512 |
4502 | 4513 |
4503 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { | 4514 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { |
4504 v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]); | 4515 v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]); |
4505 v->Synchronize("strong_root_list"); | 4516 v->Synchronize("strong_root_list"); |
(...skipping 15 matching lines...)
4521 isolate_->compilation_cache()->Iterate(v); | 4532 isolate_->compilation_cache()->Iterate(v); |
4522 v->Synchronize("compilationcache"); | 4533 v->Synchronize("compilationcache"); |
4523 | 4534 |
4524 // Iterate over local handles in handle scopes. | 4535 // Iterate over local handles in handle scopes. |
4525 isolate_->handle_scope_implementer()->Iterate(v); | 4536 isolate_->handle_scope_implementer()->Iterate(v); |
4526 v->Synchronize("handlescope"); | 4537 v->Synchronize("handlescope"); |
4527 | 4538 |
4528 // Iterate over the builtin code objects and code stubs in the | 4539 // Iterate over the builtin code objects and code stubs in the |
4529 // heap. Note that it is not necessary to iterate over code objects | 4540 // heap. Note that it is not necessary to iterate over code objects |
4530 // on scavenge collections. | 4541 // on scavenge collections. |
4531 if (mode != VISIT_ALL_IN_SCAVENGE) { | 4542 if (mode != VISIT_ALL_IN_SCAVENGE && |
| 4543 mode != VISIT_ALL_IN_SWEEP_NEWSPACE) { |
4532 isolate_->builtins()->IterateBuiltins(v); | 4544 isolate_->builtins()->IterateBuiltins(v); |
4533 } | 4545 } |
4534 v->Synchronize("builtins"); | 4546 v->Synchronize("builtins"); |
4535 | 4547 |
4536 // Iterate over global handles. | 4548 // Iterate over global handles. |
4537 if (mode == VISIT_ONLY_STRONG) { | 4549 switch (mode) { |
4538 isolate_->global_handles()->IterateStrongRoots(v); | 4550 case VISIT_ONLY_STRONG: |
4539 } else { | 4551 isolate_->global_handles()->IterateStrongRoots(v); |
4540 isolate_->global_handles()->IterateAllRoots(v); | 4552 break; |
| 4553 case VISIT_ALL_IN_SCAVENGE: |
| 4554 isolate_->global_handles()->IterateStrongAndDependentRoots(v); |
| 4555 break; |
| 4556 case VISIT_ALL_IN_SWEEP_NEWSPACE: |
| 4557 case VISIT_ALL: |
| 4558 isolate_->global_handles()->IterateAllRoots(v); |
| 4559 break; |
4541 } | 4560 } |
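Note on the VisitMode dispatch: global-handle iteration now switches on the mode, visiting only strong roots for VISIT_ONLY_STRONG, strong plus dependent roots during a scavenge, and all roots for VISIT_ALL and VISIT_ALL_IN_SWEEP_NEWSPACE. A minimal sketch of the same dispatch pattern, with invented stand-ins for the GlobalHandles calls:

#include <cstdio>

// Sketch of the mode dispatch above; the puts calls stand in for the real
// GlobalHandles iteration methods.
enum VisitMode {
  VISIT_ALL,
  VISIT_ALL_IN_SCAVENGE,
  VISIT_ALL_IN_SWEEP_NEWSPACE,
  VISIT_ONLY_STRONG
};

void IterateGlobalHandles(VisitMode mode) {
  switch (mode) {
    case VISIT_ONLY_STRONG:
      std::puts("visit strong global handles only");
      break;
    case VISIT_ALL_IN_SCAVENGE:
      std::puts("visit strong and dependent global handles");
      break;
    case VISIT_ALL_IN_SWEEP_NEWSPACE:
    case VISIT_ALL:
      std::puts("visit all global handles");
      break;
  }
}

int main() {
  IterateGlobalHandles(VISIT_ALL_IN_SCAVENGE);
  return 0;
}

Because the switch has no default, a compiler that warns on unhandled enumerators will flag any VisitMode added later without a matching case, which the previous if/else chain would not.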
4542 v->Synchronize("globalhandles"); | 4561 v->Synchronize("globalhandles"); |
4543 | 4562 |
4544 // Iterate over pointers being held by inactive threads. | 4563 // Iterate over pointers being held by inactive threads. |
4545 isolate_->thread_manager()->Iterate(v); | 4564 isolate_->thread_manager()->Iterate(v); |
4546 v->Synchronize("threadmanager"); | 4565 v->Synchronize("threadmanager"); |
4547 | 4566 |
4548 // Iterate over the pointers the Serialization/Deserialization code is | 4567 // Iterate over the pointers the Serialization/Deserialization code is |
4549 // holding. | 4568 // holding. |
4550 // During garbage collection this keeps the partial snapshot cache alive. | 4569 // During garbage collection this keeps the partial snapshot cache alive. |
(...skipping 1332 matching lines...)
5883 } | 5902 } |
5884 | 5903 |
5885 | 5904 |
5886 void ExternalStringTable::TearDown() { | 5905 void ExternalStringTable::TearDown() { |
5887 new_space_strings_.Free(); | 5906 new_space_strings_.Free(); |
5888 old_space_strings_.Free(); | 5907 old_space_strings_.Free(); |
5889 } | 5908 } |
5890 | 5909 |
5891 | 5910 |
5892 } } // namespace v8::internal | 5911 } } // namespace v8::internal |