OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4292 matching lines...)
4303 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); | 4303 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); |
4304 int size = map->instance_size() + AllocationMemento::kSize; | 4304 int size = map->instance_size() + AllocationMemento::kSize; |
4305 Object* result; | 4305 Object* result; |
4306 MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); | 4306 MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); |
4307 if (!maybe_result->ToObject(&result)) return maybe_result; | 4307 if (!maybe_result->ToObject(&result)) return maybe_result; |
4308 // No need for write barrier since object is white and map is in old space. | 4308 // No need for write barrier since object is white and map is in old space. |
4309 HeapObject::cast(result)->set_map_no_write_barrier(map); | 4309 HeapObject::cast(result)->set_map_no_write_barrier(map); |
4310 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( | 4310 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( |
4311 reinterpret_cast<Address>(result) + map->instance_size()); | 4311 reinterpret_cast<Address>(result) + map->instance_size()); |
4312 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); | 4312 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); |
4313 | 4313 ASSERT(allocation_site->map() == allocation_site_map()); |
4314 // TODO(mvstanton): To diagnose bug 284577, some extra checks | |
4315 CHECK(allocation_site->map() == allocation_site_map()); | |
4316 | |
4317 alloc_memento->set_allocation_site(*allocation_site, SKIP_WRITE_BARRIER); | 4314 alloc_memento->set_allocation_site(*allocation_site, SKIP_WRITE_BARRIER); |
4318 return result; | 4315 return result; |
4319 } | 4316 } |
4320 | 4317 |
4321 | 4318 |
4322 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) { | 4319 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) { |
4323 ASSERT(gc_state_ == NOT_IN_GC); | 4320 ASSERT(gc_state_ == NOT_IN_GC); |
4324 ASSERT(map->instance_type() != MAP_TYPE); | 4321 ASSERT(map->instance_type() != MAP_TYPE); |
4325 // If allocation failures are disallowed, we may allocate in a different | 4322 // If allocation failures are disallowed, we may allocate in a different |
4326 // space when new space is full and the object is not a large object. | 4323 // space when new space is full and the object is not a large object. |
(...skipping 723 matching lines...)
5050 } | 5047 } |
5051 | 5048 |
5052 // Track allocation site information, if we failed to allocate it inline. | 5049 // Track allocation site information, if we failed to allocate it inline. |
5053 if (InNewSpace(clone) && | 5050 if (InNewSpace(clone) && |
5054 adjusted_object_size == object_size) { | 5051 adjusted_object_size == object_size) { |
5055 MaybeObject* maybe_alloc_memento = | 5052 MaybeObject* maybe_alloc_memento = |
5056 AllocateStruct(ALLOCATION_MEMENTO_TYPE); | 5053 AllocateStruct(ALLOCATION_MEMENTO_TYPE); |
5057 AllocationMemento* alloc_memento; | 5054 AllocationMemento* alloc_memento; |
5058 if (maybe_alloc_memento->To(&alloc_memento)) { | 5055 if (maybe_alloc_memento->To(&alloc_memento)) { |
5059 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); | 5056 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); |
5060 | 5057 ASSERT(site->map() == allocation_site_map()); |
5061 // TODO(mvstanton): To diagnose bug 284577, some extra checks | |
5062 CHECK(site->map() == allocation_site_map()); | |
5063 | |
5064 alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER); | 5058 alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER); |
5065 } | 5059 } |
5066 } | 5060 } |
5067 } else { | 5061 } else { |
5068 wb_mode = SKIP_WRITE_BARRIER; | 5062 wb_mode = SKIP_WRITE_BARRIER; |
5069 adjusted_object_size += AllocationMemento::kSize; | 5063 adjusted_object_size += AllocationMemento::kSize; |
5070 | 5064 |
5071 { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size); | 5065 { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size); |
5072 if (!maybe_clone->ToObject(&clone)) return maybe_clone; | 5066 if (!maybe_clone->ToObject(&clone)) return maybe_clone; |
5073 } | 5067 } |
5074 SLOW_ASSERT(InNewSpace(clone)); | 5068 SLOW_ASSERT(InNewSpace(clone)); |
5075 // Since we know the clone is allocated in new space, we can copy | 5069 // Since we know the clone is allocated in new space, we can copy |
5076 // the contents without worrying about updating the write barrier. | 5070 // the contents without worrying about updating the write barrier. |
5077 CopyBlock(HeapObject::cast(clone)->address(), | 5071 CopyBlock(HeapObject::cast(clone)->address(), |
5078 source->address(), | 5072 source->address(), |
5079 object_size); | 5073 object_size); |
5080 } | 5074 } |
5081 | 5075 |
5082 if (adjusted_object_size > object_size) { | 5076 if (adjusted_object_size > object_size) { |
5083 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( | 5077 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( |
5084 reinterpret_cast<Address>(clone) + object_size); | 5078 reinterpret_cast<Address>(clone) + object_size); |
5085 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); | 5079 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); |
5086 | 5080 ASSERT(site->map() == allocation_site_map()); |
5087 // TODO(mvstanton): To diagnose bug 284577, some extra checks | |
5088 CHECK(site->map() == allocation_site_map()); | |
5089 | |
5090 alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER); | 5081 alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER); |
5091 } | 5082 } |
5092 | 5083 |
5093 SLOW_ASSERT( | 5084 SLOW_ASSERT( |
5094 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); | 5085 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); |
5095 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); | 5086 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); |
5096 FixedArray* properties = FixedArray::cast(source->properties()); | 5087 FixedArray* properties = FixedArray::cast(source->properties()); |
5097 // Update elements if necessary. | 5088 // Update elements if necessary. |
5098 if (elements->length() > 0) { | 5089 if (elements->length() > 0) { |
5099 Object* elem; | 5090 Object* elem; |
(...skipping 3013 matching lines...)
8113 if (FLAG_concurrent_recompilation) { | 8104 if (FLAG_concurrent_recompilation) { |
8114 heap_->relocation_mutex_->Lock(); | 8105 heap_->relocation_mutex_->Lock(); |
8115 #ifdef DEBUG | 8106 #ifdef DEBUG |
8116 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 8107 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
8117 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 8108 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
8118 #endif // DEBUG | 8109 #endif // DEBUG |
8119 } | 8110 } |
8120 } | 8111 } |
8121 | 8112 |
8122 } } // namespace v8::internal | 8113 } } // namespace v8::internal |