OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 675 matching lines...)
686 } | 686 } |
687 | 687 |
688 bool Heap::PerformGarbageCollection(GarbageCollector collector, | 688 bool Heap::PerformGarbageCollection(GarbageCollector collector, |
689 GCTracer* tracer) { | 689 GCTracer* tracer) { |
690 bool next_gc_likely_to_collect_more = false; | 690 bool next_gc_likely_to_collect_more = false; |
691 | 691 |
692 if (collector != SCAVENGER) { | 692 if (collector != SCAVENGER) { |
693 PROFILE(isolate_, CodeMovingGCEvent()); | 693 PROFILE(isolate_, CodeMovingGCEvent()); |
694 } | 694 } |
695 | 695 |
696 VerifySymbolTable(); | 696 if (FLAG_verify_heap) { |
| 697 VerifySymbolTable(); |
| 698 } |
697 if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) { | 699 if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) { |
698 ASSERT(!allocation_allowed_); | 700 ASSERT(!allocation_allowed_); |
699 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); | 701 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); |
700 global_gc_prologue_callback_(); | 702 global_gc_prologue_callback_(); |
701 } | 703 } |
702 | 704 |
703 GCType gc_type = | 705 GCType gc_type = |
704 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; | 706 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; |
705 | 707 |
706 for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) { | 708 for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) { |
(...skipping 75 matching lines...)
782 if (gc_type & gc_epilogue_callbacks_[i].gc_type) { | 784 if (gc_type & gc_epilogue_callbacks_[i].gc_type) { |
783 gc_epilogue_callbacks_[i].callback(gc_type, callback_flags); | 785 gc_epilogue_callbacks_[i].callback(gc_type, callback_flags); |
784 } | 786 } |
785 } | 787 } |
786 | 788 |
787 if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) { | 789 if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) { |
788 ASSERT(!allocation_allowed_); | 790 ASSERT(!allocation_allowed_); |
789 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); | 791 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); |
790 global_gc_epilogue_callback_(); | 792 global_gc_epilogue_callback_(); |
791 } | 793 } |
792 VerifySymbolTable(); | 794 if (FLAG_verify_heap) { |
| 795 VerifySymbolTable(); |
| 796 } |
793 | 797 |
794 return next_gc_likely_to_collect_more; | 798 return next_gc_likely_to_collect_more; |
795 } | 799 } |
796 | 800 |
797 | 801 |
798 void Heap::MarkCompact(GCTracer* tracer) { | 802 void Heap::MarkCompact(GCTracer* tracer) { |
799 gc_state_ = MARK_COMPACT; | 803 gc_state_ = MARK_COMPACT; |
800 LOG(isolate_, ResourceEvent("markcompact", "begin")); | 804 LOG(isolate_, ResourceEvent("markcompact", "begin")); |
801 | 805 |
802 mark_compact_collector_.Prepare(tracer); | 806 mark_compact_collector_.Prepare(tracer); |
(...skipping 173 matching lines...)
976 store_buffer_->SetTop(start_of_current_page_); | 980 store_buffer_->SetTop(start_of_current_page_); |
977 } | 981 } |
978 } else { | 982 } else { |
979 UNREACHABLE(); | 983 UNREACHABLE(); |
980 } | 984 } |
981 } | 985 } |
982 | 986 |
983 | 987 |
984 void Heap::Scavenge() { | 988 void Heap::Scavenge() { |
985 #ifdef DEBUG | 989 #ifdef DEBUG |
986 if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers(); | 990 if (FLAG_verify_heap) VerifyNonPointerSpacePointers(); |
987 #endif | 991 #endif |
988 | 992 |
989 gc_state_ = SCAVENGE; | 993 gc_state_ = SCAVENGE; |
990 | 994 |
991 // Implements Cheney's copying algorithm | 995 // Implements Cheney's copying algorithm |
992 LOG(isolate_, ResourceEvent("scavenge", "begin")); | 996 LOG(isolate_, ResourceEvent("scavenge", "begin")); |
993 | 997 |
994 // Clear descriptor cache. | 998 // Clear descriptor cache. |
995 isolate_->descriptor_lookup_cache()->Clear(); | 999 isolate_->descriptor_lookup_cache()->Clear(); |
996 | 1000 |
(...skipping 108 matching lines...)
1105 return NULL; | 1109 return NULL; |
1106 } | 1110 } |
1107 | 1111 |
1108 // String is still reachable. | 1112 // String is still reachable. |
1109 return String::cast(first_word.ToForwardingAddress()); | 1113 return String::cast(first_word.ToForwardingAddress()); |
1110 } | 1114 } |
1111 | 1115 |
1112 | 1116 |
1113 void Heap::UpdateNewSpaceReferencesInExternalStringTable( | 1117 void Heap::UpdateNewSpaceReferencesInExternalStringTable( |
1114 ExternalStringTableUpdaterCallback updater_func) { | 1118 ExternalStringTableUpdaterCallback updater_func) { |
1115 external_string_table_.Verify(); | 1119 if (FLAG_verify_heap) { |
| 1120 external_string_table_.Verify(); |
| 1121 } |
1116 | 1122 |
1117 if (external_string_table_.new_space_strings_.is_empty()) return; | 1123 if (external_string_table_.new_space_strings_.is_empty()) return; |
1118 | 1124 |
1119 Object** start = &external_string_table_.new_space_strings_[0]; | 1125 Object** start = &external_string_table_.new_space_strings_[0]; |
1120 Object** end = start + external_string_table_.new_space_strings_.length(); | 1126 Object** end = start + external_string_table_.new_space_strings_.length(); |
1121 Object** last = start; | 1127 Object** last = start; |
1122 | 1128 |
1123 for (Object** p = start; p < end; ++p) { | 1129 for (Object** p = start; p < end; ++p) { |
1124 ASSERT(InFromSpace(*p)); | 1130 ASSERT(InFromSpace(*p)); |
1125 String* target = updater_func(this, p); | 1131 String* target = updater_func(this, p); |
(...skipping 1777 matching lines...)
2903 } else { | 2909 } else { |
2904 ASSERT(string_result->IsTwoByteRepresentation()); | 2910 ASSERT(string_result->IsTwoByteRepresentation()); |
2905 uc16* dest = SeqTwoByteString::cast(string_result)->GetChars(); | 2911 uc16* dest = SeqTwoByteString::cast(string_result)->GetChars(); |
2906 String::WriteToFlat(buffer, dest, start, end); | 2912 String::WriteToFlat(buffer, dest, start, end); |
2907 } | 2913 } |
2908 return result; | 2914 return result; |
2909 } | 2915 } |
2910 | 2916 |
2911 ASSERT(buffer->IsFlat()); | 2917 ASSERT(buffer->IsFlat()); |
2912 #if DEBUG | 2918 #if DEBUG |
2913 buffer->StringVerify(); | 2919 if (FLAG_verify_heap) { |
| 2920 buffer->StringVerify(); |
| 2921 } |
2914 #endif | 2922 #endif |
2915 | 2923 |
2916 Object* result; | 2924 Object* result; |
2917 // When slicing an indirect string we use its encoding for a newly created | 2925 // When slicing an indirect string we use its encoding for a newly created |
2918 // slice and don't check the encoding of the underlying string. This is safe | 2926 // slice and don't check the encoding of the underlying string. This is safe |
2919 // even if the encodings are different because of externalization. If an | 2927 // even if the encodings are different because of externalization. If an |
2920 // indirect ASCII string is pointing to a two-byte string, the two-byte char | 2928 // indirect ASCII string is pointing to a two-byte string, the two-byte char |
2921 // codes of the underlying string must still fit into ASCII (because | 2929 // codes of the underlying string must still fit into ASCII (because |
2922 // externalization must not change char codes). | 2930 // externalization must not change char codes). |
2923 { Map* map = buffer->IsAsciiRepresentation() | 2931 { Map* map = buffer->IsAsciiRepresentation() |
(...skipping 225 matching lines...)
3149 *(self_reference.location()) = code; | 3157 *(self_reference.location()) = code; |
3150 } | 3158 } |
3151 // Migrate generated code. | 3159 // Migrate generated code. |
3152 // The generated code can contain Object** values (typically from handles) | 3160 // The generated code can contain Object** values (typically from handles) |
3153 // that are dereferenced during the copy to point directly to the actual heap | 3161 // that are dereferenced during the copy to point directly to the actual heap |
3154 // objects. These pointers can include references to the code object itself, | 3162 // objects. These pointers can include references to the code object itself, |
3155 // through the self_reference parameter. | 3163 // through the self_reference parameter. |
3156 code->CopyFrom(desc); | 3164 code->CopyFrom(desc); |
3157 | 3165 |
3158 #ifdef DEBUG | 3166 #ifdef DEBUG |
3159 code->Verify(); | 3167 if (FLAG_verify_heap) { |
| 3168 code->Verify(); |
| 3169 } |
3160 #endif | 3170 #endif |
3161 return code; | 3171 return code; |
3162 } | 3172 } |
3163 | 3173 |
3164 | 3174 |
3165 MaybeObject* Heap::CopyCode(Code* code) { | 3175 MaybeObject* Heap::CopyCode(Code* code) { |
3166 // Allocate an object the same size as the code object. | 3176 // Allocate an object the same size as the code object. |
3167 int obj_size = code->Size(); | 3177 int obj_size = code->Size(); |
3168 MaybeObject* maybe_result; | 3178 MaybeObject* maybe_result; |
3169 if (obj_size > MaxObjectSizeInPagedSpace()) { | 3179 if (obj_size > MaxObjectSizeInPagedSpace()) { |
(...skipping 59 matching lines...)
3229 | 3239 |
3230 // Copy patched rinfo. | 3240 // Copy patched rinfo. |
3231 memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length()); | 3241 memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length()); |
3232 | 3242 |
3233 // Relocate the copy. | 3243 // Relocate the copy. |
3234 ASSERT(!isolate_->code_range()->exists() || | 3244 ASSERT(!isolate_->code_range()->exists() || |
3235 isolate_->code_range()->contains(code->address())); | 3245 isolate_->code_range()->contains(code->address())); |
3236 new_code->Relocate(new_addr - old_addr); | 3246 new_code->Relocate(new_addr - old_addr); |
3237 | 3247 |
3238 #ifdef DEBUG | 3248 #ifdef DEBUG |
3239 code->Verify(); | 3249 if (FLAG_verify_heap) { |
| 3250 code->Verify(); |
| 3251 } |
3240 #endif | 3252 #endif |
3241 return new_code; | 3253 return new_code; |
3242 } | 3254 } |
3243 | 3255 |
3244 | 3256 |
3245 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) { | 3257 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) { |
3246 ASSERT(gc_state_ == NOT_IN_GC); | 3258 ASSERT(gc_state_ == NOT_IN_GC); |
3247 ASSERT(map->instance_type() != MAP_TYPE); | 3259 ASSERT(map->instance_type() != MAP_TYPE); |
3248 // If allocation failures are disallowed, we may allocate in a different | 3260 // If allocation failures are disallowed, we may allocate in a different |
3249 // space when new space is full and the object is not a large object. | 3261 // space when new space is full and the object is not a large object. |
(...skipping 3088 matching lines...)
6338 } | 6350 } |
6339 } | 6351 } |
6340 new_space_strings_.Rewind(last); | 6352 new_space_strings_.Rewind(last); |
6341 last = 0; | 6353 last = 0; |
6342 for (int i = 0; i < old_space_strings_.length(); ++i) { | 6354 for (int i = 0; i < old_space_strings_.length(); ++i) { |
6343 if (old_space_strings_[i] == heap_->raw_unchecked_null_value()) continue; | 6355 if (old_space_strings_[i] == heap_->raw_unchecked_null_value()) continue; |
6344 ASSERT(!heap_->InNewSpace(old_space_strings_[i])); | 6356 ASSERT(!heap_->InNewSpace(old_space_strings_[i])); |
6345 old_space_strings_[last++] = old_space_strings_[i]; | 6357 old_space_strings_[last++] = old_space_strings_[i]; |
6346 } | 6358 } |
6347 old_space_strings_.Rewind(last); | 6359 old_space_strings_.Rewind(last); |
6348 Verify(); | 6360 if (FLAG_verify_heap) { |
| 6361 Verify(); |
| 6362 } |
6349 } | 6363 } |
6350 | 6364 |
6351 | 6365 |
6352 void ExternalStringTable::TearDown() { | 6366 void ExternalStringTable::TearDown() { |
6353 new_space_strings_.Free(); | 6367 new_space_strings_.Free(); |
6354 old_space_strings_.Free(); | 6368 old_space_strings_.Free(); |
6355 } | 6369 } |
6356 | 6370 |
6357 | 6371 |
6358 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) { | 6372 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) { |
(...skipping 38 matching lines...)
6397 isolate_->heap()->store_buffer()->Compact(); | 6411 isolate_->heap()->store_buffer()->Compact(); |
6398 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); | 6412 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); |
6399 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { | 6413 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { |
6400 next = chunk->next_chunk(); | 6414 next = chunk->next_chunk(); |
6401 isolate_->memory_allocator()->Free(chunk); | 6415 isolate_->memory_allocator()->Free(chunk); |
6402 } | 6416 } |
6403 chunks_queued_for_free_ = NULL; | 6417 chunks_queued_for_free_ = NULL; |
6404 } | 6418 } |
6405 | 6419 |
6406 } } // namespace v8::internal | 6420 } } // namespace v8::internal |
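
Every hunk in this change applies the same transformation: a heap-verification call that previously ran unconditionally (or under a bare #ifdef DEBUG) is now gated on the runtime flag FLAG_verify_heap. Below is a minimal standalone sketch of that gating pattern; the flag and verifier here are plain stand-ins for V8's real flag machinery and checks, not its actual declarations.

// flag_gate_sketch.cc -- illustrative only. In V8, FLAG_verify_heap is
// declared through the flag-definition macros; here it is a plain bool.
#include <cstdio>

static bool FLAG_verify_heap = false;  // off by default, like a normal run

// Stand-in for an expensive consistency check such as VerifySymbolTable().
static void VerifySymbolTable() {
  std::printf("walking the symbol table...\n");
}

static void PerformGarbageCollection() {
  // Before this change: the verification call ran on every collection.
  // After: it only runs when verification is explicitly requested, so
  // ordinary collections skip the cost entirely.
  if (FLAG_verify_heap) {
    VerifySymbolTable();
  }
  std::printf("collecting...\n");
  if (FLAG_verify_heap) {
    VerifySymbolTable();
  }
}

int main() {
  PerformGarbageCollection();  // fast path: no verification
  FLAG_verify_heap = true;     // e.g. passing --verify-heap at startup
  PerformGarbageCollection();  // opt-in checks before and after the GC work
  return 0;
}

Note that several hunks place the new guard inside an existing #ifdef DEBUG block (around code->Verify() and buffer->StringVerify()), so in those spots the flag narrows verification within debug builds rather than enabling it in release builds; that reading is an inference from the hunks shown here.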