OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4292 matching lines...)
4303 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); | 4303 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); |
4304 int size = map->instance_size() + AllocationMemento::kSize; | 4304 int size = map->instance_size() + AllocationMemento::kSize; |
4305 Object* result; | 4305 Object* result; |
4306 MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); | 4306 MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); |
4307 if (!maybe_result->ToObject(&result)) return maybe_result; | 4307 if (!maybe_result->ToObject(&result)) return maybe_result; |
4308 // No need for write barrier since object is white and map is in old space. | 4308 // No need for write barrier since object is white and map is in old space. |
4309 HeapObject::cast(result)->set_map_no_write_barrier(map); | 4309 HeapObject::cast(result)->set_map_no_write_barrier(map); |
4310 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( | 4310 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( |
4311 reinterpret_cast<Address>(result) + map->instance_size()); | 4311 reinterpret_cast<Address>(result) + map->instance_size()); |
4312 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); | 4312 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); |
4313 | |
4314 // TODO(mvstanton): To diagnose bug 284577, some extra checks | |
4315 CHECK(allocation_site->map() == allocation_site_map()); | |
4316 | |
4317 alloc_memento->set_allocation_site(*allocation_site, SKIP_WRITE_BARRIER); | 4313 alloc_memento->set_allocation_site(*allocation_site, SKIP_WRITE_BARRIER); |
4318 return result; | 4314 return result; |
4319 } | 4315 } |
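
Note: the hunk above (old lines 4303-4319) drops the temporary CHECK(allocation_site->map() == allocation_site_map()) diagnostic added for bug 284577, while keeping the layout it was guarding: one raw allocation of map->instance_size() + AllocationMemento::kSize bytes holds the object body followed directly by its memento trailer. A minimal sketch of that trailer arithmetic in plain C++ follows; the stand-in struct and sizes are assumptions for illustration, not V8's real types.

#include <cstdint>
#include <cstdio>

// Stand-in for v8::internal::AllocationMemento; fields are hypothetical.
struct MementoSketch {
  void* map;
  void* allocation_site;
};

int main() {
  const int kInstanceSize = 32;  // assumed value of map->instance_size()

  // One raw allocation covers the object body plus the memento trailer,
  // matching "int size = map->instance_size() + AllocationMemento::kSize".
  alignas(8) uint8_t block[kInstanceSize + sizeof(MementoSketch)];

  // The memento starts right after the object body, the same arithmetic as
  // "reinterpret_cast<Address>(result) + map->instance_size()".
  auto* memento = reinterpret_cast<MementoSketch*>(block + kInstanceSize);
  memento->map = nullptr;              // set_map_no_write_barrier(...)
  memento->allocation_site = nullptr;  // set_allocation_site(..., SKIP_WRITE_BARRIER)

  std::printf("memento offset: %td\n",
              reinterpret_cast<uint8_t*>(memento) - block);
  return 0;
}
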
4320 | 4316 |
4321 | 4317 |
4322 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) { | 4318 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) { |
4323 ASSERT(gc_state_ == NOT_IN_GC); | 4319 ASSERT(gc_state_ == NOT_IN_GC); |
4324 ASSERT(map->instance_type() != MAP_TYPE); | 4320 ASSERT(map->instance_type() != MAP_TYPE); |
4325 // If allocation failures are disallowed, we may allocate in a different | 4321 // If allocation failures are disallowed, we may allocate in a different |
4326 // space when new space is full and the object is not a large object. | 4322 // space when new space is full and the object is not a large object. |
(...skipping 723 matching lines...)
5050 } | 5046 } |
5051 | 5047 |
5052 // Track allocation site information, if we failed to allocate it inline. | 5048 // Track allocation site information, if we failed to allocate it inline. |
5053 if (InNewSpace(clone) && | 5049 if (InNewSpace(clone) && |
5054 adjusted_object_size == object_size) { | 5050 adjusted_object_size == object_size) { |
5055 MaybeObject* maybe_alloc_memento = | 5051 MaybeObject* maybe_alloc_memento = |
5056 AllocateStruct(ALLOCATION_MEMENTO_TYPE); | 5052 AllocateStruct(ALLOCATION_MEMENTO_TYPE); |
5057 AllocationMemento* alloc_memento; | 5053 AllocationMemento* alloc_memento; |
5058 if (maybe_alloc_memento->To(&alloc_memento)) { | 5054 if (maybe_alloc_memento->To(&alloc_memento)) { |
5059 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); | 5055 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); |
5060 | |
5061 // TODO(mvstanton): To diagnose bug 284577, some extra checks | |
5062 CHECK(site->map() == allocation_site_map()); | |
5063 | |
5064 alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER); | 5056 alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER); |
5065 } | 5057 } |
5066 } | 5058 } |
5067 } else { | 5059 } else { |
5068 wb_mode = SKIP_WRITE_BARRIER; | 5060 wb_mode = SKIP_WRITE_BARRIER; |
5069 adjusted_object_size += AllocationMemento::kSize; | 5061 adjusted_object_size += AllocationMemento::kSize; |
5070 | 5062 |
5071 { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size); | 5063 { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size); |
5072 if (!maybe_clone->ToObject(&clone)) return maybe_clone; | 5064 if (!maybe_clone->ToObject(&clone)) return maybe_clone; |
5073 } | 5065 } |
5074 SLOW_ASSERT(InNewSpace(clone)); | 5066 SLOW_ASSERT(InNewSpace(clone)); |
5075 // Since we know the clone is allocated in new space, we can copy | 5067 // Since we know the clone is allocated in new space, we can copy |
5076 // the contents without worrying about updating the write barrier. | 5068 // the contents without worrying about updating the write barrier. |
5077 CopyBlock(HeapObject::cast(clone)->address(), | 5069 CopyBlock(HeapObject::cast(clone)->address(), |
5078 source->address(), | 5070 source->address(), |
5079 object_size); | 5071 object_size); |
5080 } | 5072 } |
5081 | 5073 |
5082 if (adjusted_object_size > object_size) { | 5074 if (adjusted_object_size > object_size) { |
5083 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( | 5075 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( |
5084 reinterpret_cast<Address>(clone) + object_size); | 5076 reinterpret_cast<Address>(clone) + object_size); |
5085 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); | 5077 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); |
5086 | |
5087 // TODO(mvstanton): To diagnose bug 284577, some extra checks | |
5088 CHECK(site->map() == allocation_site_map()); | |
5089 | |
5090 alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER); | 5078 alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER); |
5091 } | 5079 } |
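
Note: in the clone path above, the memento is attached in one of two ways: out of line via AllocateStruct(ALLOCATION_MEMENTO_TYPE) when no extra space was reserved (old lines 5053-5065), or inline as a trailer when adjusted_object_size included the extra AllocationMemento::kSize bytes. In both paths the bug-284577 CHECK goes away, and the site pointer is written with SKIP_WRITE_BARRIER, which is safe because the clone is known to be in new space. A minimal sketch of the inline copy-then-trailer flow, with hypothetical types and parameters rather than V8's API:

#include <cstdint>
#include <cstring>

// Stand-in for v8::internal::AllocationMemento; fields are hypothetical.
struct MementoSketch {
  void* map;
  void* allocation_site;
};

// Copy the source object into a block that was allocated with room for a
// trailing memento, then fill in the trailer by hand.
void CloneWithMemento(const uint8_t* source, int object_size,
                      uint8_t* clone,  // object_size + sizeof(MementoSketch)
                      void* memento_map, void* site) {
  std::memcpy(clone, source, object_size);  // CopyBlock(clone, source, size)
  auto* m = reinterpret_cast<MementoSketch*>(clone + object_size);
  m->map = memento_map;  // set_map_no_write_barrier(allocation_memento_map())
  // SKIP_WRITE_BARRIER: the clone lives in new space, so storing a pointer
  // into it never creates an old-to-new reference the barrier would record.
  m->allocation_site = site;
}
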
5092 | 5080 |
5093 SLOW_ASSERT( | 5081 SLOW_ASSERT( |
5094 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); | 5082 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); |
5095 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); | 5083 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); |
5096 FixedArray* properties = FixedArray::cast(source->properties()); | 5084 FixedArray* properties = FixedArray::cast(source->properties()); |
5097 // Update elements if necessary. | 5085 // Update elements if necessary. |
5098 if (elements->length() > 0) { | 5086 if (elements->length() > 0) { |
5099 Object* elem; | 5087 Object* elem; |
(...skipping 3013 matching lines...)
8113 if (FLAG_concurrent_recompilation) { | 8101 if (FLAG_concurrent_recompilation) { |
8114 heap_->relocation_mutex_->Lock(); | 8102 heap_->relocation_mutex_->Lock(); |
8115 #ifdef DEBUG | 8103 #ifdef DEBUG |
8116 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 8104 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
8117 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 8105 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
8118 #endif // DEBUG | 8106 #endif // DEBUG |
8119 } | 8107 } |
8120 } | 8108 } |
8121 | 8109 |
8122 } } // namespace v8::internal | 8110 } } // namespace v8::internal |
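
Note: the final hunk is the tail of Heap::RelocationLock's constructor: the relocation mutex is taken only when FLAG_concurrent_recompilation is on, and in DEBUG builds it records whether the locker is the optimizer thread so later code can assert on ownership. A rough RAII sketch of that pattern using std::mutex; the class name and the bool parameter are assumptions, not V8's interface:

#include <mutex>

// Hypothetical guard: lock on construction, unlock on destruction, and in
// debug builds remember who took the lock so assertions can check ownership.
class RelocationLockSketch {
 public:
  // A null mutex stands in for FLAG_concurrent_recompilation being off.
  RelocationLockSketch(std::mutex* mu, bool is_optimizer_thread) : mu_(mu) {
    if (mu_ != nullptr) {
      mu_->lock();
      (void)is_optimizer_thread;  // unused in release builds
#ifdef DEBUG
      locked_by_optimizer_thread_ = is_optimizer_thread;
#endif
    }
  }
  ~RelocationLockSketch() {
    if (mu_ != nullptr) mu_->unlock();
  }

 private:
  std::mutex* mu_;
#ifdef DEBUG
  bool locked_by_optimizer_thread_ = false;
#endif
};
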