OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 3677 matching lines...)
3688 MaybeObject* Heap::CopyJSObject(JSObject* source) { | 3688 MaybeObject* Heap::CopyJSObject(JSObject* source) { |
3689 // Never used to copy functions. If functions need to be copied we | 3689 // Never used to copy functions. If functions need to be copied we |
3690 // have to be careful to clear the literals array. | 3690 // have to be careful to clear the literals array. |
3691 SLOW_ASSERT(!source->IsJSFunction()); | 3691 SLOW_ASSERT(!source->IsJSFunction()); |
3692 | 3692 |
3693 // Make the clone. | 3693 // Make the clone. |
3694 Map* map = source->map(); | 3694 Map* map = source->map(); |
3695 int object_size = map->instance_size(); | 3695 int object_size = map->instance_size(); |
3696 Object* clone; | 3696 Object* clone; |
3697 | 3697 |
| 3698 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; |
| 3699 |
3698 // If we're forced to always allocate, we use the general allocation | 3700 // If we're forced to always allocate, we use the general allocation |
3699 // functions which may leave us with an object in old space. | 3701 // functions which may leave us with an object in old space. |
3700 if (always_allocate()) { | 3702 if (always_allocate()) { |
3701 { MaybeObject* maybe_clone = | 3703 { MaybeObject* maybe_clone = |
3702 AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE); | 3704 AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE); |
3703 if (!maybe_clone->ToObject(&clone)) return maybe_clone; | 3705 if (!maybe_clone->ToObject(&clone)) return maybe_clone; |
3704 } | 3706 } |
3705 Address clone_address = HeapObject::cast(clone)->address(); | 3707 Address clone_address = HeapObject::cast(clone)->address(); |
3706 CopyBlock(clone_address, | 3708 CopyBlock(clone_address, |
3707 source->address(), | 3709 source->address(), |
3708 object_size); | 3710 object_size); |
3709 // Update write barrier for all fields that lie beyond the header. | 3711 // Update write barrier for all fields that lie beyond the header. |
3710 RecordWrites(clone_address, | 3712 RecordWrites(clone_address, |
3711 JSObject::kHeaderSize, | 3713 JSObject::kHeaderSize, |
3712 (object_size - JSObject::kHeaderSize) / kPointerSize); | 3714 (object_size - JSObject::kHeaderSize) / kPointerSize); |
3713 } else { | 3715 } else { |
| 3716 wb_mode = SKIP_WRITE_BARRIER; |
3714 { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size); | 3717 { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size); |
3715 if (!maybe_clone->ToObject(&clone)) return maybe_clone; | 3718 if (!maybe_clone->ToObject(&clone)) return maybe_clone; |
3716 } | 3719 } |
3717 SLOW_ASSERT(InNewSpace(clone)); | 3720 SLOW_ASSERT(InNewSpace(clone)); |
3718 // Since we know the clone is allocated in new space, we can copy | 3721 // Since we know the clone is allocated in new space, we can copy |
3719 // the contents without worrying about updating the write barrier. | 3722 // the contents without worrying about updating the write barrier. |
3720 CopyBlock(HeapObject::cast(clone)->address(), | 3723 CopyBlock(HeapObject::cast(clone)->address(), |
3721 source->address(), | 3724 source->address(), |
3722 object_size); | 3725 object_size); |
3723 } | 3726 } |
3724 | 3727 |
3725 SLOW_ASSERT( | 3728 SLOW_ASSERT( |
3726 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); | 3729 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); |
3727 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); | 3730 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); |
3728 FixedArray* properties = FixedArray::cast(source->properties()); | 3731 FixedArray* properties = FixedArray::cast(source->properties()); |
3729 // Update elements if necessary. | 3732 // Update elements if necessary. |
3730 if (elements->length() > 0) { | 3733 if (elements->length() > 0) { |
3731 Object* elem; | 3734 Object* elem; |
3732 { MaybeObject* maybe_elem; | 3735 { MaybeObject* maybe_elem; |
3733 if (elements->map() == fixed_cow_array_map()) { | 3736 if (elements->map() == fixed_cow_array_map()) { |
3734 maybe_elem = FixedArray::cast(elements); | 3737 maybe_elem = FixedArray::cast(elements); |
3735 } else if (source->HasFastDoubleElements()) { | 3738 } else if (source->HasFastDoubleElements()) { |
3736 maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements)); | 3739 maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements)); |
3737 } else { | 3740 } else { |
3738 maybe_elem = CopyFixedArray(FixedArray::cast(elements)); | 3741 maybe_elem = CopyFixedArray(FixedArray::cast(elements)); |
3739 } | 3742 } |
3740 if (!maybe_elem->ToObject(&elem)) return maybe_elem; | 3743 if (!maybe_elem->ToObject(&elem)) return maybe_elem; |
3741 } | 3744 } |
3742 JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem)); | 3745 JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem), wb_mode); |
3743 } | 3746 } |
3744 // Update properties if necessary. | 3747 // Update properties if necessary. |
3745 if (properties->length() > 0) { | 3748 if (properties->length() > 0) { |
3746 Object* prop; | 3749 Object* prop; |
3747 { MaybeObject* maybe_prop = CopyFixedArray(properties); | 3750 { MaybeObject* maybe_prop = CopyFixedArray(properties); |
3748 if (!maybe_prop->ToObject(&prop)) return maybe_prop; | 3751 if (!maybe_prop->ToObject(&prop)) return maybe_prop; |
3749 } | 3752 } |
3750 JSObject::cast(clone)->set_properties(FixedArray::cast(prop)); | 3753 JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode); |
3751 } | 3754 } |
3752 // Return the new clone. | 3755 // Return the new clone. |
3753 return clone; | 3756 return clone; |
3754 } | 3757 } |
3755 | 3758 |
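Note (not part of the reviewed change): the new wb_mode flag lets CopyJSObject tell the field setters whether a write barrier is still required. A minimal, self-contained sketch of that pattern, using hypothetical standalone types rather than V8's real slot/store-buffer interfaces:

// Hypothetical illustration of a mode-aware pointer store. The enum names
// mirror the ones in the diff; Slot, StoreBuffer and SetPointerField are
// made up for this sketch.
enum WriteBarrierMode { SKIP_WRITE_BARRIER, UPDATE_WRITE_BARRIER };

struct Slot { void* value; };

struct StoreBuffer {
  void Record(Slot* slot) { /* remembered-set insertion elided */ }
};

void SetPointerField(Slot* field, void* new_value,
                     StoreBuffer* store_buffer,
                     WriteBarrierMode mode) {
  field->value = new_value;
  // Old-to-new pointers must be recorded so the scavenger can find them.
  // A store into an object that itself lives in new space can never create
  // an old-to-new slot, so such callers may pass SKIP_WRITE_BARRIER.
  if (mode == UPDATE_WRITE_BARRIER) {
    store_buffer->Record(field);
  }
}

This matches the comment in the new-space branch above: the clone was just allocated in new space, so its elements and properties pointers need no remembered-set entries.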
3756 | 3759 |
3757 MaybeObject* Heap::ReinitializeJSReceiver( | 3760 MaybeObject* Heap::ReinitializeJSReceiver( |
3758 JSReceiver* object, InstanceType type, int size) { | 3761 JSReceiver* object, InstanceType type, int size) { |
3759 ASSERT(type >= FIRST_JS_OBJECT_TYPE); | 3762 ASSERT(type >= FIRST_JS_OBJECT_TYPE); |
3760 | 3763 |
(...skipping 2642 matching lines...)
6403 isolate_->heap()->store_buffer()->Compact(); | 6406 isolate_->heap()->store_buffer()->Compact(); |
6404 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); | 6407 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); |
6405 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { | 6408 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { |
6406 next = chunk->next_chunk(); | 6409 next = chunk->next_chunk(); |
6407 isolate_->memory_allocator()->Free(chunk); | 6410 isolate_->memory_allocator()->Free(chunk); |
6408 } | 6411 } |
6409 chunks_queued_for_free_ = NULL; | 6412 chunks_queued_for_free_ = NULL; |
6410 } | 6413 } |
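The loop above releases each queued chunk while walking the list it is stored in. A standalone sketch of that traverse-then-free pattern (hypothetical types; not the real MemoryChunk or MemoryAllocator interfaces):

// The successor pointer lives inside the chunk being released, so it must
// be captured before the chunk is freed.
struct MemoryChunk {
  MemoryChunk* next_chunk;
};

void FreeQueuedChunks(MemoryChunk* queued) {
  MemoryChunk* next = nullptr;
  for (MemoryChunk* chunk = queued; chunk != nullptr; chunk = next) {
    next = chunk->next_chunk;  // read the link first
    delete chunk;              // then release the node
  }
}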
6411 | 6414 |
6412 } } // namespace v8::internal | 6415 } } // namespace v8::internal |