Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 715 matching lines...) | |
| 726 // | 726 // |
| 727 // There is guaranteed to be enough room at the top of the to space | 727 // There is guaranteed to be enough room at the top of the to space |
| 728 // for the addresses of promoted objects: every object promoted | 728 // for the addresses of promoted objects: every object promoted |
| 729 // frees up its size in bytes from the top of the new space, and | 729 // frees up its size in bytes from the top of the new space, and |
| 730 // objects are at least one pointer in size. | 730 // objects are at least one pointer in size. |
| 731 Address new_space_front = new_space_.ToSpaceLow(); | 731 Address new_space_front = new_space_.ToSpaceLow(); |
| 732 promotion_queue.Initialize(new_space_.ToSpaceHigh()); | 732 promotion_queue.Initialize(new_space_.ToSpaceHigh()); |
| 733 | 733 |
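
An annotation on the queue setup above: the comment at lines 726-730 relies on a neat invariant, namely that the promotion queue grows downward from the top of to-space while evacuated objects fill to-space upward, so the unused gap between the two serves as queue storage. A minimal stand-alone sketch of that layout, with placeholder types (the real `PromotionQueue` is declared elsewhere in V8 and may differ in detail):

```cpp
#include <cassert>

// Sketch only: entries are written downward from to-space's top; the
// scavenge allocation pointer rises from the bottom, so promoted objects
// (which are never copied into to-space) leave room for their entries.
struct PromotionQueueSketch {
  void Initialize(void* to_space_high) {
    front_ = rear_ = static_cast<void**>(to_space_high);
  }
  bool is_empty() const { return front_ <= rear_; }
  void insert(void* object, void* map) {
    *(--rear_) = map;     // grows toward lower addresses
    *(--rear_) = object;
  }
  void remove(void** object, void** map) {
    assert(!is_empty());
    *map = *(--front_);   // FIFO: front_ trails rear_ downward
    *object = *(--front_);
  }
 private:
  void** front_;  // next entry to remove (higher address)
  void** rear_;   // next free slot for insert (lower address)
};
```
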
| 734 ScavengeVisitor scavenge_visitor; | 734 ScavengeVisitor scavenge_visitor; |
| 735 // Copy roots. | 735 // Copy roots. |
| 736 IterateRoots(&scavenge_visitor, VISIT_ALL); | 736 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); |
| 737 | 737 |
| 738 // Copy objects reachable from the old generation. By definition, | 738 // Copy objects reachable from the old generation. By definition, |
| 739 // there are no intergenerational pointers in code or data spaces. | 739 // there are no intergenerational pointers in code or data spaces. |
| 740 IterateRSet(old_pointer_space_, &ScavengePointer); | 740 IterateRSet(old_pointer_space_, &ScavengePointer); |
| 741 IterateRSet(map_space_, &ScavengePointer); | 741 IterateRSet(map_space_, &ScavengePointer); |
| 742 lo_space_->IterateRSet(&ScavengePointer); | 742 lo_space_->IterateRSet(&ScavengePointer); |
| 743 | 743 |
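
For orientation, a conceptual model of what the `IterateRSet` calls above do. The remembered set (RSet) keeps roughly one bit per word of a page; a set bit marks a slot that may hold a pointer into new space, and the scavenger visits exactly those slots. A hedged sketch with an assumed bitmap layout (the real iteration code lives in spaces.cc):

```cpp
#include <cstdint>

typedef void (*SlotCallback)(void** slot);

// Assumed layout: rset_bits[i / 32] bit (i % 32) covers page word i.
void IterateRSetSketch(void** page_start, const uint32_t* rset_bits,
                       int words_in_page, SlotCallback callback) {
  for (int i = 0; i < words_in_page; ++i) {
    if (rset_bits[i / 32] & (1u << (i % 32))) {
      callback(&page_start[i]);  // slot may point into new space
    }
  }
}
```
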
| 744 // Copy objects reachable from cells by scavenging cell values directly. | 744 // Copy objects reachable from cells by scavenging cell values directly. |
| 745 HeapObjectIterator cell_iterator(cell_space_); | 745 HeapObjectIterator cell_iterator(cell_space_); |
| 746 while (cell_iterator.has_next()) { | 746 while (cell_iterator.has_next()) { |
| 747 HeapObject* cell = cell_iterator.next(); | 747 HeapObject* cell = cell_iterator.next(); |
| 748 if (cell->IsJSGlobalPropertyCell()) { | 748 if (cell->IsJSGlobalPropertyCell()) { |
| 749 Address value_address = | 749 Address value_address = |
| 750 reinterpret_cast<Address>(cell) + | 750 reinterpret_cast<Address>(cell) + |
| 751 (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag); | 751 (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag); |
| 752 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); | 752 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); |
| 753 } | 753 } |
| 754 } | 754 } |
| 755 | 755 |
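
A note on the address arithmetic in the cell loop above: a `HeapObject*` in V8 is the object's real address plus a low tag bit (`kHeapObjectTag == 1`), so the raw address of a field is pointer plus field offset minus the tag. An illustrative helper (name and constant are this sketch's own, not V8's):

```cpp
#include <cstdint>

const intptr_t kHeapObjectTagSketch = 1;  // heap-object tag, 32-bit-era V8

// Raw (untagged) address of a field inside a tagged heap object.
inline char* FieldAddressSketch(void* tagged_object, int byte_offset) {
  return reinterpret_cast<char*>(tagged_object) + byte_offset
         - kHeapObjectTagSketch;
}
```
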
| 756 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | |
| 757 | |
| 758 ScavengeExternalStringTable(); | |
| 759 ASSERT(new_space_front == new_space_.top()); | |
| 760 | |
| 761 // Set age mark. | |
| 762 new_space_.set_age_mark(new_space_.top()); | |
| 763 | |
| 764 // Update how much has survived scavenge. | |
| 765 survived_since_last_expansion_ += | |
| 766 (PromotedSpaceSize() - survived_watermark) + new_space_.Size(); | |
| 767 | |
| 768 LOG(ResourceEvent("scavenge", "end")); | |
| 769 | |
| 770 gc_state_ = NOT_IN_GC; | |
| 771 } | |
| 772 | |
| 773 | |
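
The survivor accounting at the end of `Scavenge` combines bytes promoted during this collection with bytes still live in new space. A runnable worked example with hypothetical numbers:

```cpp
#include <cstdio>

int main() {
  const int survived_watermark = 10000;   // KB: promoted size before scavenge
  const int promoted_space_size = 10300;  // KB: promoted size after scavenge
  const int new_space_size = 200;         // KB: live objects left in to-space
  // Same formula as in Scavenge():
  int survived = (promoted_space_size - survived_watermark) + new_space_size;
  printf("survived this scavenge: %d KB\n", survived);  // prints 500
  return 0;
}
```
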
| 774 void Heap::ScavengeExternalStringTable() { | |
| 775 ExternalStringTable::Verify(); | |
| 776 | |
| 777 if (ExternalStringTable::new_space_strings_.is_empty()) return; | |
| 778 | |
| 779 Object** start = &ExternalStringTable::new_space_strings_[0]; | |
| 780 Object** end = start + ExternalStringTable::new_space_strings_.length(); | |
| 781 Object** last = start; | |
| 782 | |
| 783 for (Object** p = start; p < end; ++p) { | |
| 784 ASSERT(Heap::InFromSpace(*p)); | |
| 785 MapWord first_word = HeapObject::cast(*p)->map_word(); | |
| 786 | |
| 787 if (!first_word.IsForwardingAddress()) { | |
| 788 // Unreachable external string can be finalized. | |
| 789 FinalizeExternalString(String::cast(*p)); | |
| 790 continue; | |
| 791 } | |
| 792 | |
| 793 // String is still reachable. | |
| 794 String* target = String::cast(first_word.ToForwardingAddress()); | |
| 795 ASSERT(target->IsExternalString()); | |
| 796 | |
| 797 if (Heap::InNewSpace(target)) { | |
| 798 // String is still in new space. Update the table entry. | |
| 799 *last = target; | |
| 800 ++last; | |
| 801 } else { | |
| 802 // String got promoted. Move it to the old string list. | |
| 803 ExternalStringTable::AddOldString(target); | |
| 804 } | |
| 805 } | |
| 806 | |
| 807 ExternalStringTable::ShrinkNewStrings(last - start); | |
| 808 } | |
| 809 | |
| 810 | |
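
The loop in `ScavengeExternalStringTable` is the classic in-place filter: `last` trails `p`, survivors stay packed at the front, and the list is truncated afterward via `ShrinkNewStrings(last - start)`. A stand-alone sketch of the same pattern using standard containers (not V8's `List`):

```cpp
#include <vector>
#include <cstddef>

// Keep elements satisfying 'keep', preserving order, with no extra storage.
template <typename T, typename Pred>
void CompactInPlace(std::vector<T>* list, Pred keep) {
  std::size_t last = 0;
  for (std::size_t i = 0; i < list->size(); ++i) {
    if (keep((*list)[i])) (*list)[last++] = (*list)[i];
  }
  list->resize(last);  // analogous to ShrinkNewStrings
}
```
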
| 811 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor, | |
| 812 Address new_space_front) { | |
| 756 do { | 813 do { |
| 757 ASSERT(new_space_front <= new_space_.top()); | 814 ASSERT(new_space_front <= new_space_.top()); |
| 758 | 815 |
| 759 // The addresses new_space_front and new_space_.top() define a | 816 // The addresses new_space_front and new_space_.top() define a |
| 760 // queue of unprocessed copied objects. Process them until the | 817 // queue of unprocessed copied objects. Process them until the |
| 761 // queue is empty. | 818 // queue is empty. |
| 762 while (new_space_front < new_space_.top()) { | 819 while (new_space_front < new_space_.top()) { |
| 763 HeapObject* object = HeapObject::FromAddress(new_space_front); | 820 HeapObject* object = HeapObject::FromAddress(new_space_front); |
| 764 object->Iterate(&scavenge_visitor); | 821 object->Iterate(scavenge_visitor); |
| 765 new_space_front += object->Size(); | 822 new_space_front += object->Size(); |
| 766 } | 823 } |
| 767 | 824 |
| 768 // Promote and process all the to-be-promoted objects. | 825 // Promote and process all the to-be-promoted objects. |
| 769 while (!promotion_queue.is_empty()) { | 826 while (!promotion_queue.is_empty()) { |
| 770 HeapObject* source; | 827 HeapObject* source; |
| 771 Map* map; | 828 Map* map; |
| 772 promotion_queue.remove(&source, &map); | 829 promotion_queue.remove(&source, &map); |
| 773 // Copy the from-space object to its new location (given by the | 830 // Copy the from-space object to its new location (given by the |
| 774 // forwarding address) and fix its map. | 831 // forwarding address) and fix its map. |
| 775 HeapObject* target = source->map_word().ToForwardingAddress(); | 832 HeapObject* target = source->map_word().ToForwardingAddress(); |
| 776 CopyBlock(reinterpret_cast<Object**>(target->address()), | 833 CopyBlock(reinterpret_cast<Object**>(target->address()), |
| 777 reinterpret_cast<Object**>(source->address()), | 834 reinterpret_cast<Object**>(source->address()), |
| 778 source->SizeFromMap(map)); | 835 source->SizeFromMap(map)); |
| 779 target->set_map(map); | 836 target->set_map(map); |
| 780 | 837 |
| 781 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) | 838 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) |
| 782 // Update NewSpace stats if necessary. | 839 // Update NewSpace stats if necessary. |
| 783 RecordCopiedObject(target); | 840 RecordCopiedObject(target); |
| 784 #endif | 841 #endif |
| 785 // Visit the newly copied object for pointers to new space. | 842 // Visit the newly copied object for pointers to new space. |
| 786 target->Iterate(&scavenge_visitor); | 843 target->Iterate(scavenge_visitor); |
| 787 UpdateRSet(target); | 844 UpdateRSet(target); |
| 788 } | 845 } |
| 789 | 846 |
| 790 // Take another spin if there are now unswept objects in new space | 847 // Take another spin if there are now unswept objects in new space |
| 791 // (there are currently no more unswept promoted objects). | 848 // (there are currently no more unswept promoted objects). |
| 792 } while (new_space_front < new_space_.top()); | 849 } while (new_space_front < new_space_.top()); |
| 793 | 850 |
| 794 // Set age mark. | 851 return new_space_front; |
| 795 new_space_.set_age_mark(new_space_.top()); | |
| 796 | |
| 797 // Update how much has survived scavenge. | |
| 798 survived_since_last_expansion_ += | |
| 799 (PromotedSpaceSize() - survived_watermark) + new_space_.Size(); | |
| 800 | |
| 801 LOG(ResourceEvent("scavenge", "end")); | |
| 802 | |
| 803 gc_state_ = NOT_IN_GC; | |
| 804 } | 852 } |
| 805 | 853 |
| 806 | 854 |
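
The promotion loop in `DoScavenge` depends on the map word of an evacuated from-space object having been overwritten with its forwarding address. In V8 of this vintage, object addresses are word-aligned, so (as I understand the encoding) a raw address stored in the map word reads as a Smi and cannot be confused with a tagged map pointer. A simplified model with assumed tag values:

```cpp
#include <cstdint>

// Simplified model: Smi tag is 0 in the low bit, heap-object tag is 1.
// A word-aligned raw address has a clear low bit, hence "looks like a Smi".
struct MapWordSketch {
  uintptr_t value;

  bool IsForwardingAddress() const { return (value & 1) == 0; }

  void* ToForwardingAddress() const {
    return reinterpret_cast<void*>(value);
  }

  static MapWordSketch FromForwardingAddress(void* new_location) {
    return MapWordSketch{reinterpret_cast<uintptr_t>(new_location)};
  }
};
```
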
| 807 void Heap::ClearRSetRange(Address start, int size_in_bytes) { | 855 void Heap::ClearRSetRange(Address start, int size_in_bytes) { |
| 808 uint32_t start_bit; | 856 uint32_t start_bit; |
| 809 Address start_word_address = | 857 Address start_word_address = |
| 810 Page::ComputeRSetBitPosition(start, 0, &start_bit); | 858 Page::ComputeRSetBitPosition(start, 0, &start_bit); |
| 811 uint32_t end_bit; | 859 uint32_t end_bit; |
| 812 Address end_word_address = | 860 Address end_word_address = |
| 813 Page::ComputeRSetBitPosition(start + size_in_bytes - kIntSize, | 861 Page::ComputeRSetBitPosition(start + size_in_bytes - kIntSize, |
| (...skipping 2354 matching lines...) | |
| 3168 StatsTable::AddHistogramSample(paged_rset_histogram, count); | 3216 StatsTable::AddHistogramSample(paged_rset_histogram, count); |
| 3169 } | 3217 } |
| 3170 } | 3218 } |
| 3171 } | 3219 } |
| 3172 | 3220 |
| 3173 | 3221 |
| 3174 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) { | 3222 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) { |
| 3175 IterateStrongRoots(v, mode); | 3223 IterateStrongRoots(v, mode); |
| 3176 v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex])); | 3224 v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex])); |
| 3177 v->Synchronize("symbol_table"); | 3225 v->Synchronize("symbol_table"); |
| 3226 if (mode != VISIT_ALL_IN_SCAVENGE) { | |
| 3227 // Scavenge collections have special processing for this. | |
| 3228 ExternalStringTable::Iterate(v); | |
| 3229 } | |
| 3230 v->Synchronize("external_string_table"); | |
| 3178 } | 3231 } |
| 3179 | 3232 |
| 3180 | 3233 |
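
The new `VISIT_ALL_IN_SCAVENGE` mode threads through both root iterators above. The enum itself is declared in heap.h and not shown in this diff; a plausible reconstruction from the three values used in this file (ordering and comments are assumptions):

```cpp
enum VisitMode {
  VISIT_ALL,              // all roots (serialization, verification, ...)
  VISIT_ALL_IN_SCAVENGE,  // all roots, minus work the scavenger handles
                          // itself (external strings, builtins)
  VISIT_ONLY_STRONG       // strong roots only (mark-sweep marking)
};
```
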
| 3181 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { | 3234 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { |
| 3182 v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]); | 3235 v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]); |
| 3183 v->Synchronize("strong_root_list"); | 3236 v->Synchronize("strong_root_list"); |
| 3184 | 3237 |
| 3185 v->VisitPointer(bit_cast<Object**, String**>(&hidden_symbol_)); | 3238 v->VisitPointer(bit_cast<Object**, String**>(&hidden_symbol_)); |
| 3186 v->Synchronize("symbol"); | 3239 v->Synchronize("symbol"); |
| 3187 | 3240 |
| 3188 Bootstrapper::Iterate(v); | 3241 Bootstrapper::Iterate(v); |
| 3189 v->Synchronize("bootstrapper"); | 3242 v->Synchronize("bootstrapper"); |
| 3190 Top::Iterate(v); | 3243 Top::Iterate(v); |
| 3191 v->Synchronize("top"); | 3244 v->Synchronize("top"); |
| 3192 Relocatable::Iterate(v); | 3245 Relocatable::Iterate(v); |
| 3193 v->Synchronize("relocatable"); | 3246 v->Synchronize("relocatable"); |
| 3194 | 3247 |
| 3195 #ifdef ENABLE_DEBUGGER_SUPPORT | 3248 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 3196 Debug::Iterate(v); | 3249 Debug::Iterate(v); |
| 3197 #endif | 3250 #endif |
| 3198 v->Synchronize("debug"); | 3251 v->Synchronize("debug"); |
| 3199 CompilationCache::Iterate(v); | 3252 CompilationCache::Iterate(v); |
| 3200 v->Synchronize("compilationcache"); | 3253 v->Synchronize("compilationcache"); |
| 3201 | 3254 |
| 3202 // Iterate over local handles in handle scopes. | 3255 // Iterate over local handles in handle scopes. |
| 3203 HandleScopeImplementer::Iterate(v); | 3256 HandleScopeImplementer::Iterate(v); |
| 3204 v->Synchronize("handlescope"); | 3257 v->Synchronize("handlescope"); |
| 3205 | 3258 |
| 3206 // Iterate over the builtin code objects and code stubs in the heap. Note | 3259 // Iterate over the builtin code objects and code stubs in the |
| 3207 // that it is not strictly necessary to iterate over code objects on | 3260 // heap. Note that it is not necessary to iterate over code objects |
| 3208 // scavenge collections. We still do it here because this same function | 3261 // on scavenge collections. |
| 3209 // is used by the mark-sweep collector and the deserializer. | 3262 if (mode != VISIT_ALL_IN_SCAVENGE) { |
| 3210 Builtins::IterateBuiltins(v); | 3263 Builtins::IterateBuiltins(v); |
| 3264 } | |
| 3211 v->Synchronize("builtins"); | 3265 v->Synchronize("builtins"); |
| 3212 | 3266 |
| 3213 // Iterate over global handles. | 3267 // Iterate over global handles. |
| 3214 if (mode == VISIT_ONLY_STRONG) { | 3268 if (mode == VISIT_ONLY_STRONG) { |
| 3215 GlobalHandles::IterateStrongRoots(v); | 3269 GlobalHandles::IterateStrongRoots(v); |
| 3216 } else { | 3270 } else { |
| 3217 GlobalHandles::IterateAllRoots(v); | 3271 GlobalHandles::IterateAllRoots(v); |
| 3218 } | 3272 } |
| 3219 v->Synchronize("globalhandles"); | 3273 v->Synchronize("globalhandles"); |
| 3220 | 3274 |
| (...skipping 196 matching lines...) | |
| 3417 (StackGuard::jslimit() & ~kSmiTagMask) | kSmiTag); | 3471 (StackGuard::jslimit() & ~kSmiTagMask) | kSmiTag); |
| 3418 roots_[kRealStackLimitRootIndex] = | 3472 roots_[kRealStackLimitRootIndex] = |
| 3419 reinterpret_cast<Object*>( | 3473 reinterpret_cast<Object*>( |
| 3420 (StackGuard::real_jslimit() & ~kSmiTagMask) | kSmiTag); | 3474 (StackGuard::real_jslimit() & ~kSmiTagMask) | kSmiTag); |
| 3421 } | 3475 } |
| 3422 | 3476 |
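
The masking in the fragment above disguises a raw stack-limit address as a Smi so it can sit in the root array without GC root iteration following it as a heap pointer. An illustrative helper, assuming the 32-bit-era constants `kSmiTag == 0` and `kSmiTagMask == 1`:

```cpp
#include <cstdint>

// Clearing the low bit makes the aligned address read as a Smi, so the GC
// skips it instead of treating it as a heap object pointer.
inline uintptr_t DisguiseAsSmiSketch(uintptr_t raw_limit) {
  const uintptr_t kSmiTagMaskSketch = 1;  // assumed 32-bit layout
  const uintptr_t kSmiTagSketch = 0;
  return (raw_limit & ~kSmiTagMaskSketch) | kSmiTagSketch;
}
```
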
| 3423 | 3477 |
| 3424 void Heap::TearDown() { | 3478 void Heap::TearDown() { |
| 3425 GlobalHandles::TearDown(); | 3479 GlobalHandles::TearDown(); |
| 3426 | 3480 |
| 3481 ExternalStringTable::TearDown(); | |
| 3482 | |
| 3427 new_space_.TearDown(); | 3483 new_space_.TearDown(); |
| 3428 | 3484 |
| 3429 if (old_pointer_space_ != NULL) { | 3485 if (old_pointer_space_ != NULL) { |
| 3430 old_pointer_space_->TearDown(); | 3486 old_pointer_space_->TearDown(); |
| 3431 delete old_pointer_space_; | 3487 delete old_pointer_space_; |
| 3432 old_pointer_space_ = NULL; | 3488 old_pointer_space_ = NULL; |
| 3433 } | 3489 } |
| 3434 | 3490 |
| 3435 if (old_data_space_ != NULL) { | 3491 if (old_data_space_ != NULL) { |
| 3436 old_data_space_->TearDown(); | 3492 old_data_space_->TearDown(); |
| (...skipping 395 matching lines...) | |
| 3832 for (Object** p = start; p < end; p++) { | 3888 for (Object** p = start; p < end; p++) { |
| 3833 if ((*p)->IsHeapObject()) | 3889 if ((*p)->IsHeapObject()) |
| 3834 MarkRootObjectRecursively(p); | 3890 MarkRootObjectRecursively(p); |
| 3835 } | 3891 } |
| 3836 } | 3892 } |
| 3837 }; | 3893 }; |
| 3838 | 3894 |
| 3839 | 3895 |
| 3840 // Triggers a depth-first traversal of reachable objects from roots | 3896 // Triggers a depth-first traversal of reachable objects from roots |
| 3841 // and finds a path to a specific heap object and prints it. | 3897 // and finds a path to a specific heap object and prints it. |
| 3842 void Heap::TracePathToObject() { | 3898 void Heap::TracePathToObject(Object* target) { |
| 3843 search_target = NULL; | 3899 search_target = target; |
| 3844 search_for_any_global = false; | 3900 search_for_any_global = false; |
| 3845 | 3901 |
| 3846 MarkRootVisitor root_visitor; | 3902 MarkRootVisitor root_visitor; |
| 3847 IterateRoots(&root_visitor, VISIT_ONLY_STRONG); | 3903 IterateRoots(&root_visitor, VISIT_ONLY_STRONG); |
| 3848 } | 3904 } |
| 3849 | 3905 |
| 3850 | 3906 |
| 3851 // Triggers a depth-first traversal of reachable objects from roots | 3907 // Triggers a depth-first traversal of reachable objects from roots |
| 3852 // and finds a path to any global object and prints it. Useful for | 3908 // and finds a path to any global object and prints it. Useful for |
| 3853 // determining the source for leaks of global objects. | 3909 // determining the source for leaks of global objects. |
| (...skipping 130 matching lines...) | |
| 3984 void TranscendentalCache::Clear() { | 4040 void TranscendentalCache::Clear() { |
| 3985 for (int i = 0; i < kNumberOfCaches; i++) { | 4041 for (int i = 0; i < kNumberOfCaches; i++) { |
| 3986 if (caches_[i] != NULL) { | 4042 if (caches_[i] != NULL) { |
| 3987 delete caches_[i]; | 4043 delete caches_[i]; |
| 3988 caches_[i] = NULL; | 4044 caches_[i] = NULL; |
| 3989 } | 4045 } |
| 3990 } | 4046 } |
| 3991 } | 4047 } |
| 3992 | 4048 |
| 3993 | 4049 |
| 4050 void ExternalStringTable::CleanUp() { | |
| 4051 CleanUpList(true); | |
| 4052 CleanUpList(false); | |
| 4053 Verify(); | |
| 4054 } | |
| 4055 | |
| 4056 | |
| 4057 void ExternalStringTable::CleanUpList(bool new_to_old) { | |
| 4058 List<Object*>& source = new_to_old ? new_space_strings_ : old_space_strings_; | |
| 4059 if (source.is_empty()) return; | |
| 4060 List<Object*>& target = new_to_old ? old_space_strings_ : new_space_strings_; | |

> Mads Ager (chromium) 2009/12/08 16:18:40: Use old_space_strings_ as the target always? It s
> Vitaly Repeshko 2009/12/09 14:33:23: Makes sense. Done.

| 4061 int last = 0; | |
| 4062 for (int i = 0; i < source.length(); ++i) { | |
| 4063 if (source[i] == Heap::raw_unchecked_null_value()) continue; | |
| 4064 if (Heap::InNewSpace(source[i]) == new_to_old) { | |
| 4065 source[last++] = source[i]; | |
| 4066 } else { | |
| 4067 target.Add(source[i]); | |
| 4068 } | |
| 4069 } | |
| 4070 source.Rewind(last); | |
| 4071 } | |
| 4072 | |
| 4073 | |
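
Mads' comment above suggests always appending survivors to `old_space_strings_`; per the reply, that change landed in a later patch set not shown here. A hypothetical sketch of the suggested shape, reusing the names from this patch:

```cpp
// Sketch of the reviewer's suggestion: strings that left new space always
// move to old_space_strings_, so only the new-space list is compacted in
// place; the old-space list then merely needs dead (null) entries filtered.
void CleanUpNewSpaceListSketch() {
  int last = 0;
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    Object* entry = new_space_strings_[i];
    if (entry == Heap::raw_unchecked_null_value()) continue;  // finalized
    if (Heap::InNewSpace(entry)) {
      new_space_strings_[last++] = entry;  // still young: keep in place
    } else {
      old_space_strings_.Add(entry);       // promoted: move to old list
    }
  }
  new_space_strings_.Rewind(last);
}
```
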
| 4074 void ExternalStringTable::TearDown() { | |
| 4075 new_space_strings_.Free(); | |
| 4076 old_space_strings_.Free(); | |
| 4077 } | |
| 4078 | |
| 4079 | |
| 4080 List<Object*> ExternalStringTable::new_space_strings_; | |
| 4081 List<Object*> ExternalStringTable::old_space_strings_; | |
| 4082 | |
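
For orientation, the `ExternalStringTable` interface implied by the call sites in this patch. This is a reconstruction; the actual declaration lives in heap.h and the exact signatures are assumptions:

```cpp
class String;
class ObjectVisitor;
class Object;
template <typename T> class List;

class ExternalStringTable {
 public:
  static void AddOldString(String* string);    // promoted during scavenge
  static void Iterate(ObjectVisitor* v);       // non-scavenge root visits
  static void CleanUp();                       // after mark-compact
  static void TearDown();                      // free both lists
  static void Verify();                        // debug consistency check
  static void ShrinkNewStrings(int position);  // truncate survivor list

 private:
  static void CleanUpList(bool new_to_old);
  static List<Object*> new_space_strings_;
  static List<Object*> old_space_strings_;
};
```
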
| 3994 } } // namespace v8::internal | 4083 } } // namespace v8::internal |