Chromium Code Reviews

Unified diff: src/heap.cc

Issue 8404030: Version 3.7.1 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 9 years, 1 month ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 675 matching lines...)
 }
 
 bool Heap::PerformGarbageCollection(GarbageCollector collector,
                                     GCTracer* tracer) {
   bool next_gc_likely_to_collect_more = false;
 
   if (collector != SCAVENGER) {
     PROFILE(isolate_, CodeMovingGCEvent());
   }
 
-  VerifySymbolTable();
+  if (FLAG_verify_heap) {
+    VerifySymbolTable();
+  }
   if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
     ASSERT(!allocation_allowed_);
     GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
     global_gc_prologue_callback_();
   }
 
   GCType gc_type =
       collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge;
 
   for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
(...skipping 75 matching lines...)
     if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
       gc_epilogue_callbacks_[i].callback(gc_type, callback_flags);
     }
   }
 
   if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) {
     ASSERT(!allocation_allowed_);
     GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
     global_gc_epilogue_callback_();
   }
-  VerifySymbolTable();
+  if (FLAG_verify_heap) {
+    VerifySymbolTable();
+  }
 
   return next_gc_likely_to_collect_more;
 }
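
Note: the recurring change in this patch set gates heap verification behind the runtime flag FLAG_verify_heap instead of running it unconditionally. A minimal standalone sketch of the pattern follows; the flag and the verifier here are stand-ins for illustration, not V8's actual declarations.

    #include <cstdio>

    // Stand-in for V8's FLAG_verify_heap (generated by flag macros in V8).
    static bool FLAG_verify_heap = false;

    static void VerifySymbolTable() {
      // An O(heap)-cost consistency walk; only worth paying when requested.
      std::printf("verifying symbol table\n");
    }

    static void PerformGarbageCollection() {
      if (FLAG_verify_heap) {
        VerifySymbolTable();  // Check before the collection mutates the heap.
      }
      // ... collect garbage ...
      if (FLAG_verify_heap) {
        VerifySymbolTable();  // Check again once the heap is quiescent.
      }
    }

    int main() {
      PerformGarbageCollection();  // Default: no verification overhead.
      FLAG_verify_heap = true;
      PerformGarbageCollection();  // Opt-in: verification runs twice.
    }

The same gating is applied below to external_string_table_.Verify(), buffer->StringVerify(), and code->Verify().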
 
 
 void Heap::MarkCompact(GCTracer* tracer) {
   gc_state_ = MARK_COMPACT;
   LOG(isolate_, ResourceEvent("markcompact", "begin"));
 
   mark_compact_collector_.Prepare(tracer);
(...skipping 173 matching lines...)
       store_buffer_->SetTop(start_of_current_page_);
     }
   } else {
     UNREACHABLE();
   }
 }
 
 
 void Heap::Scavenge() {
 #ifdef DEBUG
-  if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers();
+  if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
 #endif
 
   gc_state_ = SCAVENGE;
 
   // Implements Cheney's copying algorithm
   LOG(isolate_, ResourceEvent("scavenge", "begin"));
 
   // Clear descriptor cache.
   isolate_->descriptor_lookup_cache()->Clear();
 
(...skipping 108 matching lines...)
     return NULL;
   }
 
   // String is still reachable.
   return String::cast(first_word.ToForwardingAddress());
 }
 
 
 void Heap::UpdateNewSpaceReferencesInExternalStringTable(
     ExternalStringTableUpdaterCallback updater_func) {
-  external_string_table_.Verify();
+  if (FLAG_verify_heap) {
+    external_string_table_.Verify();
+  }
 
   if (external_string_table_.new_space_strings_.is_empty()) return;
 
   Object** start = &external_string_table_.new_space_strings_[0];
   Object** end = start + external_string_table_.new_space_strings_.length();
   Object** last = start;
 
   for (Object** p = start; p < end; ++p) {
     ASSERT(InFromSpace(*p));
     String* target = updater_func(this, p);
(...skipping 310 matching lines...)
   }
 
   return target;
 }
 
 template<ObjectContents object_contents, SizeRestriction size_restriction>
 static inline void EvacuateObject(Map* map,
                                   HeapObject** slot,
                                   HeapObject* object,
                                   int object_size) {
-  ASSERT((size_restriction != SMALL) ||
-         (object_size <= Page::kMaxHeapObjectSize));
-  ASSERT(object->Size() == object_size);
+  SLOW_ASSERT((size_restriction != SMALL) ||
+              (object_size <= Page::kMaxHeapObjectSize));
+  SLOW_ASSERT(object->Size() == object_size);
 
   Heap* heap = map->GetHeap();
   if (heap->ShouldBePromoted(object->address(), object_size)) {
     MaybeObject* maybe_result;
 
     if ((size_restriction != SMALL) &&
         (object_size > Page::kMaxHeapObjectSize)) {
       maybe_result = heap->lo_space()->AllocateRaw(object_size,
                                                    NOT_EXECUTABLE);
     } else {
(...skipping 212 matching lines...)
       scavenging_visitors_table_.Register(
           StaticVisitorBase::kVisitShortcutCandidate,
           scavenging_visitors_table_.GetVisitorById(
               StaticVisitorBase::kVisitConsString));
     }
   }
 }
 
 
 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
-  ASSERT(HEAP->InFromSpace(object));
+  SLOW_ASSERT(HEAP->InFromSpace(object));
   MapWord first_word = object->map_word();
-  ASSERT(!first_word.IsForwardingAddress());
+  SLOW_ASSERT(!first_word.IsForwardingAddress());
   Map* map = first_word.ToMap();
   map->GetHeap()->DoScavengeObject(map, p, object);
 }
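
Note: the hottest scavenger checks move from ASSERT to SLOW_ASSERT so they cost nothing unless slow assertions are explicitly enabled. The macro below is a plausible shape for that gating, assuming the usual debug-build plus runtime-flag combination; it is a sketch, not V8's actual definition.

    #include <cassert>

    // Stand-in for V8's FLAG_enable_slow_asserts.
    static bool FLAG_enable_slow_asserts = false;

    #ifdef DEBUG
    // Like ASSERT, but additionally gated on a runtime flag, so hot paths
    // such as ScavengeObjectSlow stay cheap in ordinary debug runs.
    #define SLOW_ASSERT(cond) \
      do { if (FLAG_enable_slow_asserts) assert(cond); } while (false)
    #else
    #define SLOW_ASSERT(cond) ((void)0)
    #endif

    int main() {
      FLAG_enable_slow_asserts = true;
      SLOW_ASSERT(1 + 1 == 2);  // Evaluated only when the flag is on.
    }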
 
 
 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
                                       int instance_size) {
   Object* result;
   { MaybeObject* maybe_result = AllocateRawMap();
     if (!maybe_result->ToObject(&result)) return maybe_result;
(...skipping 1209 matching lines...)
     } else {
       ASSERT(string_result->IsTwoByteRepresentation());
       uc16* dest = SeqTwoByteString::cast(string_result)->GetChars();
       String::WriteToFlat(buffer, dest, start, end);
     }
     return result;
   }
 
   ASSERT(buffer->IsFlat());
 #if DEBUG
-  buffer->StringVerify();
+  if (FLAG_verify_heap) {
+    buffer->StringVerify();
+  }
 #endif
 
   Object* result;
   // When slicing an indirect string we use its encoding for a newly created
   // slice and don't check the encoding of the underlying string. This is safe
   // even if the encodings are different because of externalization. If an
   // indirect ASCII string is pointing to a two-byte string, the two-byte char
   // codes of the underlying string must still fit into ASCII (because
   // externalization must not change char codes).
   { Map* map = buffer->IsAsciiRepresentation()
(...skipping 225 matching lines...)
     *(self_reference.location()) = code;
   }
   // Migrate generated code.
   // The generated code can contain Object** values (typically from handles)
   // that are dereferenced during the copy to point directly to the actual heap
   // objects. These pointers can include references to the code object itself,
   // through the self_reference parameter.
   code->CopyFrom(desc);
 
 #ifdef DEBUG
-  code->Verify();
+  if (FLAG_verify_heap) {
+    code->Verify();
+  }
 #endif
   return code;
 }
 
 
 MaybeObject* Heap::CopyCode(Code* code) {
   // Allocate an object the same size as the code object.
   int obj_size = code->Size();
   MaybeObject* maybe_result;
   if (obj_size > MaxObjectSizeInPagedSpace()) {
(...skipping 59 matching lines...)
 
   // Copy patched rinfo.
   memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length());
 
   // Relocate the copy.
   ASSERT(!isolate_->code_range()->exists() ||
          isolate_->code_range()->contains(code->address()));
   new_code->Relocate(new_addr - old_addr);
 
 #ifdef DEBUG
-  code->Verify();
+  if (FLAG_verify_heap) {
+    code->Verify();
+  }
 #endif
   return new_code;
 }
 
 
 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
   ASSERT(gc_state_ == NOT_IN_GC);
   ASSERT(map->instance_type() != MAP_TYPE);
   // If allocation failures are disallowed, we may allocate in a different
   // space when new space is full and the object is not a large object.
(...skipping 12 matching lines...)
 void Heap::InitializeFunction(JSFunction* function,
                               SharedFunctionInfo* shared,
                               Object* prototype) {
   ASSERT(!prototype->IsMap());
   function->initialize_properties();
   function->initialize_elements();
   function->set_shared(shared);
   function->set_code(shared->code());
   function->set_prototype_or_initial_map(prototype);
   function->set_context(undefined_value());
-  function->set_literals(empty_fixed_array());
+  function->set_literals_or_bindings(empty_fixed_array());
   function->set_next_function_link(undefined_value());
 }
 
 
 MaybeObject* Heap::AllocateFunctionPrototype(JSFunction* function) {
   // Allocate the prototype. Make sure to use the object function
   // from the function's context, since the function can be from a
   // different context.
   JSFunction* object_function =
       function->context()->global_context()->object_function();
(...skipping 144 matching lines...)
   // cannot be constructed without having these properties. Guard by
   // the inline_new flag so we only change the map if we generate a
   // specialized construct stub.
   ASSERT(in_object_properties <= Map::kMaxPreAllocatedPropertyFields);
   if (fun->shared()->CanGenerateInlineConstructor(prototype)) {
     int count = fun->shared()->this_property_assignments_count();
     if (count > in_object_properties) {
       // Inline constructor can only handle inobject properties.
       fun->shared()->ForbidInlineConstructor();
     } else {
-      Object* descriptors_obj;
+      DescriptorArray* descriptors;
       { MaybeObject* maybe_descriptors_obj = DescriptorArray::Allocate(count);
-        if (!maybe_descriptors_obj->ToObject(&descriptors_obj)) {
+        if (!maybe_descriptors_obj->To<DescriptorArray>(&descriptors)) {
           return maybe_descriptors_obj;
         }
       }
-      DescriptorArray* descriptors = DescriptorArray::cast(descriptors_obj);
+      DescriptorArray::WhitenessWitness witness(descriptors);
       for (int i = 0; i < count; i++) {
         String* name = fun->shared()->GetThisPropertyAssignmentName(i);
         ASSERT(name->IsSymbol());
         FieldDescriptor field(name, i, NONE);
         field.SetEnumerationIndex(i);
-        descriptors->Set(i, &field);
+        descriptors->Set(i, &field, witness);
       }
       descriptors->SetNextEnumerationIndex(count);
-      descriptors->SortUnchecked();
+      descriptors->SortUnchecked(witness);
 
       // The descriptors may contain duplicates because the compiler does not
       // guarantee the uniqueness of property names (it would have required
       // quadratic time). Once the descriptors are sorted we can check for
       // duplicates in linear time.
       if (HasDuplicates(descriptors)) {
         fun->shared()->ForbidInlineConstructor();
       } else {
         map->set_instance_descriptors(descriptors);
         map->set_pre_allocated_property_fields(count);
(...skipping 218 matching lines...)
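
Note: besides the WhitenessWitness threaded through Set() and SortUnchecked(), the hunk above replaces the untyped ToObject-then-cast sequence with a typed To<DescriptorArray>() extraction. A toy model of that extraction, using simplified stand-in types rather than V8's tagged heap objects:

    #include <cstdio>

    struct DescriptorArray { int count; };

    // Simplified stand-in for V8's MaybeObject; null models allocation
    // failure (retry-after-GC), and the real To<T>() also type-checks.
    struct MaybeObject {
      void* value;
      template <typename T>
      bool To(T** out) {
        if (value == nullptr) return false;
        *out = static_cast<T*>(value);
        return true;
      }
    };

    static MaybeObject AllocateDescriptors(int count, DescriptorArray* mem) {
      mem->count = count;
      return MaybeObject{mem};
    }

    int main() {
      DescriptorArray storage;
      DescriptorArray* descriptors;
      MaybeObject maybe = AllocateDescriptors(3, &storage);
      // Typed extraction: no separate Object* local, no cast at the use site.
      if (!maybe.To<DescriptorArray>(&descriptors)) return 1;
      std::printf("descriptor count: %d\n", descriptors->count);
    }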
   // Make sure result is a global object with properties in dictionary.
   ASSERT(global->IsGlobalObject());
   ASSERT(!global->HasFastProperties());
   return global;
 }
 
 
 MaybeObject* Heap::CopyJSObject(JSObject* source) {
   // Never used to copy functions. If functions need to be copied we
   // have to be careful to clear the literals array.
-  ASSERT(!source->IsJSFunction());
+  SLOW_ASSERT(!source->IsJSFunction());
 
   // Make the clone.
   Map* map = source->map();
   int object_size = map->instance_size();
   Object* clone;
 
+  WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
+
   // If we're forced to always allocate, we use the general allocation
   // functions which may leave us with an object in old space.
   if (always_allocate()) {
     { MaybeObject* maybe_clone =
           AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
       if (!maybe_clone->ToObject(&clone)) return maybe_clone;
     }
     Address clone_address = HeapObject::cast(clone)->address();
     CopyBlock(clone_address,
               source->address(),
               object_size);
     // Update write barrier for all fields that lie beyond the header.
     RecordWrites(clone_address,
                  JSObject::kHeaderSize,
                  (object_size - JSObject::kHeaderSize) / kPointerSize);
   } else {
+    wb_mode = SKIP_WRITE_BARRIER;
     { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
       if (!maybe_clone->ToObject(&clone)) return maybe_clone;
     }
-    ASSERT(InNewSpace(clone));
+    SLOW_ASSERT(InNewSpace(clone));
     // Since we know the clone is allocated in new space, we can copy
     // the contents without worrying about updating the write barrier.
     CopyBlock(HeapObject::cast(clone)->address(),
               source->address(),
               object_size);
   }
 
-  ASSERT(JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind());
+  SLOW_ASSERT(
+      JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind());
   FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
   FixedArray* properties = FixedArray::cast(source->properties());
   // Update elements if necessary.
   if (elements->length() > 0) {
     Object* elem;
     { MaybeObject* maybe_elem;
       if (elements->map() == fixed_cow_array_map()) {
         maybe_elem = FixedArray::cast(elements);
       } else if (source->HasFastDoubleElements()) {
         maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements));
       } else {
         maybe_elem = CopyFixedArray(FixedArray::cast(elements));
       }
       if (!maybe_elem->ToObject(&elem)) return maybe_elem;
     }
-    JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem));
+    JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem), wb_mode);
   }
   // Update properties if necessary.
   if (properties->length() > 0) {
     Object* prop;
     { MaybeObject* maybe_prop = CopyFixedArray(properties);
       if (!maybe_prop->ToObject(&prop)) return maybe_prop;
     }
-    JSObject::cast(clone)->set_properties(FixedArray::cast(prop));
+    JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode);
   }
   // Return the new clone.
   return clone;
 }
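
Note: CopyJSObject now chooses a WriteBarrierMode up front: a clone allocated in new space can skip per-field barrier updates, because the scavenger scans new-space objects wholesale. An illustrative sketch of that decision; only the enum constants mirror V8, the rest is a toy.

    #include <cstdio>

    enum WriteBarrierMode { UPDATE_WRITE_BARRIER, SKIP_WRITE_BARRIER };

    static void set_field(void** slot, void* value, WriteBarrierMode mode) {
      *slot = value;
      if (mode == UPDATE_WRITE_BARRIER) {
        // Record the store so the GC can later find old-to-new pointers.
        std::printf("store recorded in write barrier\n");
      }
    }

    int main() {
      void* slot = nullptr;
      int payload = 42;
      bool clone_in_new_space = true;  // e.g. new-space allocation succeeded.
      WriteBarrierMode wb_mode =
          clone_in_new_space ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
      set_field(&slot, &payload, wb_mode);  // Silent: barrier skipped.
    }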
 
 
 MaybeObject* Heap::ReinitializeJSReceiver(
     JSReceiver* object, InstanceType type, int size) {
   ASSERT(type >= FIRST_JS_OBJECT_TYPE);
 
(...skipping 1035 matching lines...)
     // If the store buffer becomes overfull we mark pages as being exempt from
     // the store buffer. These pages are scanned to find pointers that point
     // to the new space. In that case we may hit newly promoted objects and
     // fix the pointers before the promotion queue gets to them. Thus the 'if'.
     if (object->IsHeapObject()) {
       if (Heap::InFromSpace(object)) {
         callback(reinterpret_cast<HeapObject**>(slot),
                  HeapObject::cast(object));
         Object* new_object = *slot;
         if (InNewSpace(new_object)) {
-          ASSERT(Heap::InToSpace(new_object));
-          ASSERT(new_object->IsHeapObject());
+          SLOW_ASSERT(Heap::InToSpace(new_object));
+          SLOW_ASSERT(new_object->IsHeapObject());
           store_buffer_.EnterDirectlyIntoStoreBuffer(
               reinterpret_cast<Address>(slot));
         }
-        ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(new_object));
+        SLOW_ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(new_object));
       } else if (record_slots &&
                  MarkCompactCollector::IsOnEvacuationCandidate(object)) {
         mark_compact_collector()->RecordSlot(slot, slot, object);
       }
     }
     slot_address += kPointerSize;
   }
 }
 
 
(...skipping 533 matching lines...)
   List<Object*> object_stack_;
   Heap* heap_;
 
   friend class Heap;
 };
 
 #endif
 
 bool Heap::Setup(bool create_heap_objects) {
 #ifdef DEBUG
+  allocation_timeout_ = FLAG_gc_interval;
   debug_utils_ = new HeapDebugUtils(this);
 #endif
 
   // Initialize heap spaces and initial maps and objects. Whenever something
   // goes wrong, just return false. The caller should check the results and
   // call Heap::TearDown() to release allocated memory.
   //
   // If the heap is not yet configured (eg, through the API), configure it.
   // Configuration is based on the flags new-space-size (really the semispace
   // size) and old-space-size if set or the initial values of semispace_size_
(...skipping 65 matching lines...)
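
Note: Heap::Setup now seeds allocation_timeout_ from FLAG_gc_interval, so a --gc-interval stress GC can fire from the very first allocations rather than only after the counter is first reset. A simplified model of that countdown; in V8 the check sits in the debug allocation path, and the names below mirror it only loosely.

    #include <cstdio>

    static int FLAG_gc_interval = -1;   // -1 means "never force a GC".
    static int allocation_timeout_ = 0;

    static void CollectGarbage() { std::printf("forced stress GC\n"); }

    static void AllocateRaw() {
      // Count down once per allocation; force a collection when it hits 0.
      if (FLAG_gc_interval >= 0 && --allocation_timeout_ <= 0) {
        CollectGarbage();
        allocation_timeout_ = FLAG_gc_interval;
      }
    }

    int main() {
      FLAG_gc_interval = 2;
      allocation_timeout_ = FLAG_gc_interval;  // What Setup now initializes.
      for (int i = 0; i < 5; i++) AllocateRaw();  // GCs on the 2nd and 4th.
    }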
   if (!map_space_->Setup()) return false;
 
   // Initialize global property cell space.
   cell_space_ = new CellSpace(this, max_old_generation_size_, CELL_SPACE);
   if (cell_space_ == NULL) return false;
   if (!cell_space_->Setup()) return false;
 
   // The large object code space may contain code or data. We set the memory
   // to be non-executable here for safety, but this means we need to enable it
   // explicitly when allocating large code objects.
-  lo_space_ = new LargeObjectSpace(this, LO_SPACE);
+  lo_space_ = new LargeObjectSpace(this, max_old_generation_size_, LO_SPACE);
   if (lo_space_ == NULL) return false;
   if (!lo_space_->Setup()) return false;
   if (create_heap_objects) {
     // Create initial maps.
     if (!CreateInitialMaps()) return false;
     if (!CreateApiObjects()) return false;
 
     // Create initial objects
     if (!CreateInitialObjects()) return false;
 
(...skipping 295 matching lines...)
 class HeapObjectsFilter {
  public:
   virtual ~HeapObjectsFilter() {}
   virtual bool SkipObject(HeapObject* object) = 0;
 };
 
 
 class UnreachableObjectsFilter : public HeapObjectsFilter {
  public:
   UnreachableObjectsFilter() {
-    MarkUnreachableObjects();
+    MarkReachableObjects();
+  }
+
+  ~UnreachableObjectsFilter() {
+    Isolate::Current()->heap()->mark_compact_collector()->ClearMarkbits();
   }
 
   bool SkipObject(HeapObject* object) {
-    if (IntrusiveMarking::IsMarked(object)) {
-      IntrusiveMarking::ClearMark(object);
-      return true;
-    } else {
-      return false;
-    }
+    MarkBit mark_bit = Marking::MarkBitFrom(object);
+    return !mark_bit.Get();
   }
 
  private:
-  class UnmarkingVisitor : public ObjectVisitor {
+  class MarkingVisitor : public ObjectVisitor {
    public:
-    UnmarkingVisitor() : list_(10) {}
+    MarkingVisitor() : marking_stack_(10) {}
 
     void VisitPointers(Object** start, Object** end) {
       for (Object** p = start; p < end; p++) {
         if (!(*p)->IsHeapObject()) continue;
         HeapObject* obj = HeapObject::cast(*p);
-        if (IntrusiveMarking::IsMarked(obj)) {
-          IntrusiveMarking::ClearMark(obj);
-          list_.Add(obj);
+        MarkBit mark_bit = Marking::MarkBitFrom(obj);
+        if (!mark_bit.Get()) {
+          mark_bit.Set();
+          marking_stack_.Add(obj);
         }
       }
     }
 
-    bool can_process() { return !list_.is_empty(); }
-
-    void ProcessNext() {
-      HeapObject* obj = list_.RemoveLast();
-      obj->Iterate(this);
+    void TransitiveClosure() {
+      while (!marking_stack_.is_empty()) {
+        HeapObject* obj = marking_stack_.RemoveLast();
+        obj->Iterate(this);
+      }
     }
 
    private:
-    List<HeapObject*> list_;
+    List<HeapObject*> marking_stack_;
   };
 
-  void MarkUnreachableObjects() {
-    HeapIterator iterator;
-    for (HeapObject* obj = iterator.next();
-         obj != NULL;
-         obj = iterator.next()) {
-      IntrusiveMarking::SetMark(obj);
-    }
-    UnmarkingVisitor visitor;
-    HEAP->IterateRoots(&visitor, VISIT_ALL);
-    while (visitor.can_process())
-      visitor.ProcessNext();
+  void MarkReachableObjects() {
+    Heap* heap = Isolate::Current()->heap();
+    MarkingVisitor visitor;
+    heap->IterateRoots(&visitor, VISIT_ALL);
+    visitor.TransitiveClosure();
   }
 
   AssertNoAllocation no_alloc;
 };
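
Note: the filter's strategy is inverted here: instead of marking every heap object and then unmarking the reachable ones, the new code marks only what is reachable from the roots (a transitive closure over an explicit work list) and clears the mark bits afterwards via the mark-compact collector. A toy model of the mark-from-roots pass; the types and names are illustrative, not V8's.

    #include <cstdio>
    #include <vector>

    struct Obj {
      bool marked = false;
      std::vector<Obj*> refs;  // Outgoing pointers, like VisitPointers sees.
    };

    static void MarkReachable(const std::vector<Obj*>& roots) {
      std::vector<Obj*> stack;
      for (Obj* r : roots) {
        if (!r->marked) { r->marked = true; stack.push_back(r); }
      }
      while (!stack.empty()) {        // TransitiveClosure over the work list.
        Obj* obj = stack.back();
        stack.pop_back();
        for (Obj* child : obj->refs) {
          if (!child->marked) {       // Mark each object exactly once...
            child->marked = true;
            stack.push_back(child);   // ...and queue it for scanning.
          }
        }
      }
    }

    int main() {
      Obj a, b, c;            // a -> b; c is unreachable from the root a.
      a.refs.push_back(&b);
      MarkReachable({&a});
      std::printf("a:%d b:%d c:%d\n", a.marked, b.marked, c.marked);  // 1 1 0
    }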
 
 
 HeapIterator::HeapIterator()
     : filtering_(HeapIterator::kNoFiltering),
       filter_(NULL) {
   Init();
 }
 
 
 HeapIterator::HeapIterator(HeapIterator::HeapObjectsFiltering filtering)
     : filtering_(filtering),
       filter_(NULL) {
   Init();
 }
 
 
 HeapIterator::~HeapIterator() {
   Shutdown();
 }
 
 
 void HeapIterator::Init() {
   // Start the iteration.
-  space_iterator_ = filtering_ == kNoFiltering ? new SpaceIterator :
-      new SpaceIterator(Isolate::Current()->heap()->
-                        GcSafeSizeOfOldObjectFunction());
+  space_iterator_ = new SpaceIterator;
   switch (filtering_) {
-    case kFilterFreeListNodes:
-      // TODO(gc): Not handled.
-      break;
     case kFilterUnreachable:
       filter_ = new UnreachableObjectsFilter;
       break;
     default:
       break;
   }
   object_iterator_ = space_iterator_->next();
 }
 
 
(...skipping 484 matching lines...)
     }
   }
   new_space_strings_.Rewind(last);
   last = 0;
   for (int i = 0; i < old_space_strings_.length(); ++i) {
     if (old_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
     ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
     old_space_strings_[last++] = old_space_strings_[i];
   }
   old_space_strings_.Rewind(last);
-  Verify();
+  if (FLAG_verify_heap) {
+    Verify();
+  }
 }
 
 
 void ExternalStringTable::TearDown() {
   new_space_strings_.Free();
   old_space_strings_.Free();
 }
 
 
 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) {
(...skipping 38 matching lines...)
   isolate_->heap()->store_buffer()->Compact();
   isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED);
   for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) {
     next = chunk->next_chunk();
     isolate_->memory_allocator()->Free(chunk);
   }
   chunks_queued_for_free_ = NULL;
 }
 
 } }  // namespace v8::internal