OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4292 matching lines...)
4303 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); | 4303 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); |
4304 int size = map->instance_size() + AllocationMemento::kSize; | 4304 int size = map->instance_size() + AllocationMemento::kSize; |
4305 Object* result; | 4305 Object* result; |
4306 MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); | 4306 MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); |
4307 if (!maybe_result->ToObject(&result)) return maybe_result; | 4307 if (!maybe_result->ToObject(&result)) return maybe_result; |
4308 // No need for write barrier since object is white and map is in old space. | 4308 // No need for write barrier since object is white and map is in old space. |
4309 HeapObject::cast(result)->set_map_no_write_barrier(map); | 4309 HeapObject::cast(result)->set_map_no_write_barrier(map); |
4310 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( | 4310 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( |
4311 reinterpret_cast<Address>(result) + map->instance_size()); | 4311 reinterpret_cast<Address>(result) + map->instance_size()); |
4312 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); | 4312 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); |
| 4313 |
| 4314 // TODO(mvstanton): Some extra checks to diagnose bug 284577. |
| 4315 CHECK(allocation_site->map() == allocation_site_map()); |
| 4316 |
4313 alloc_memento->set_allocation_site(*allocation_site, SKIP_WRITE_BARRIER); | 4317 alloc_memento->set_allocation_site(*allocation_site, SKIP_WRITE_BARRIER); |
4314 return result; | 4318 return result; |
4315 } | 4319 } |
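
Reviewer note on the layout this hunk relies on (a hedged sketch with made-up types, not V8's real ones): AllocateRaw is asked for map->instance_size() + AllocationMemento::kSize bytes in a single allocation, and the AllocationMemento is then materialized by casting the address just past the object proper, which is also how it can be found again later. A minimal stand-alone model of that address arithmetic:

#include <cassert>
#include <cstdint>

// FakeObject/FakeMemento are hypothetical stand-ins for a heap object of
// instance_size bytes and for AllocationMemento.
struct FakeObject  { uint8_t payload[32]; };
struct FakeMemento { const void* allocation_site; };

int main() {
  // One raw allocation big enough for both, as in
  // AllocateRaw(instance_size + AllocationMemento::kSize, ...).
  alignas(FakeMemento) uint8_t raw[sizeof(FakeObject) + sizeof(FakeMemento)];

  auto* object = reinterpret_cast<FakeObject*>(raw);
  // The memento lives immediately after the object, mirroring
  // reinterpret_cast<Address>(result) + map->instance_size().
  auto* memento = reinterpret_cast<FakeMemento*>(
      reinterpret_cast<uint8_t*>(object) + sizeof(FakeObject));

  static int site;                  // stands in for the AllocationSite
  memento->allocation_site = &site;

  // Anyone who knows the object address and size can recover the memento.
  assert(reinterpret_cast<FakeMemento*>(raw + sizeof(FakeObject))
             ->allocation_site == &site);
  return 0;
}
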
4316 | 4320 |
4317 | 4321 |
4318 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) { | 4322 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) { |
4319 ASSERT(gc_state_ == NOT_IN_GC); | 4323 ASSERT(gc_state_ == NOT_IN_GC); |
4320 ASSERT(map->instance_type() != MAP_TYPE); | 4324 ASSERT(map->instance_type() != MAP_TYPE); |
4321 // If allocation failures are disallowed, we may allocate in a different | 4325 // If allocation failures are disallowed, we may allocate in a different |
4322 // space when new space is full and the object is not a large object. | 4326 // space when new space is full and the object is not a large object. |
(...skipping 723 matching lines...)
5046 } | 5050 } |
5047 | 5051 |
5048 // Track allocation site information, if we failed to allocate it inline. | 5052 // Track allocation site information, if we failed to allocate it inline. |
5049 if (InNewSpace(clone) && | 5053 if (InNewSpace(clone) && |
5050 adjusted_object_size == object_size) { | 5054 adjusted_object_size == object_size) { |
5051 MaybeObject* maybe_alloc_memento = | 5055 MaybeObject* maybe_alloc_memento = |
5052 AllocateStruct(ALLOCATION_MEMENTO_TYPE); | 5056 AllocateStruct(ALLOCATION_MEMENTO_TYPE); |
5053 AllocationMemento* alloc_memento; | 5057 AllocationMemento* alloc_memento; |
5054 if (maybe_alloc_memento->To(&alloc_memento)) { | 5058 if (maybe_alloc_memento->To(&alloc_memento)) { |
5055 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); | 5059 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); |
| 5060 |
| 5061 // TODO(mvstanton): Some extra checks to diagnose bug 284577. |
| 5062 CHECK(site->map() == allocation_site_map()); |
| 5063 |
5056 alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER); | 5064 alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER); |
5057 } | 5065 } |
5058 } | 5066 } |
5059 } else { | 5067 } else { |
5060 wb_mode = SKIP_WRITE_BARRIER; | 5068 wb_mode = SKIP_WRITE_BARRIER; |
5061 adjusted_object_size += AllocationMemento::kSize; | 5069 adjusted_object_size += AllocationMemento::kSize; |
5062 | 5070 |
5063 { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size); | 5071 { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size); |
5064 if (!maybe_clone->ToObject(&clone)) return maybe_clone; | 5072 if (!maybe_clone->ToObject(&clone)) return maybe_clone; |
5065 } | 5073 } |
5066 SLOW_ASSERT(InNewSpace(clone)); | 5074 SLOW_ASSERT(InNewSpace(clone)); |
5067 // Since we know the clone is allocated in new space, we can copy | 5075 // Since we know the clone is allocated in new space, we can copy |
5068 // the contents without worrying about updating the write barrier. | 5076 // the contents without worrying about updating the write barrier. |
5069 CopyBlock(HeapObject::cast(clone)->address(), | 5077 CopyBlock(HeapObject::cast(clone)->address(), |
5070 source->address(), | 5078 source->address(), |
5071 object_size); | 5079 object_size); |
5072 } | 5080 } |
5073 | 5081 |
5074 if (adjusted_object_size > object_size) { | 5082 if (adjusted_object_size > object_size) { |
5075 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( | 5083 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( |
5076 reinterpret_cast<Address>(clone) + object_size); | 5084 reinterpret_cast<Address>(clone) + object_size); |
5077 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); | 5085 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); |
| 5086 |
| 5087 // TODO(mvstanton): Some extra checks to diagnose bug 284577. |
| 5088 CHECK(site->map() == allocation_site_map()); |
| 5089 |
5078 alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER); | 5090 alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER); |
5079 } | 5091 } |
5080 | 5092 |
5081 SLOW_ASSERT( | 5093 SLOW_ASSERT( |
5082 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); | 5094 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); |
5083 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); | 5095 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); |
5084 FixedArray* properties = FixedArray::cast(source->properties()); | 5096 FixedArray* properties = FixedArray::cast(source->properties()); |
5085 // Update elements if necessary. | 5097 // Update elements if necessary. |
5086 if (elements->length() > 0) { | 5098 if (elements->length() > 0) { |
5087 Object* elem; | 5099 Object* elem; |
(...skipping 3013 matching lines...)
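
Reviewer note on the two tracking paths in the Heap::CopyJSObject hunk above: when the clone went to old space, the memento could not be folded into the allocation, so it is allocated separately via AllocateStruct and a failure there is simply tolerated; when the clone is in new space, the allocation is instead padded by AllocationMemento::kSize and the memento written in place. A hedged sketch of the best-effort path, using std::optional in place of MaybeObject::To (the names below are made up for illustration):

#include <cstdio>
#include <optional>

struct Memento { const void* site = nullptr; };

// Hypothetical stand-in for AllocateStruct(ALLOCATION_MEMENTO_TYPE); the
// `fail` flag just simulates an allocation failure for this sketch.
std::optional<Memento*> TryAllocateMemento(bool fail) {
  static Memento storage;
  if (fail) return std::nullopt;
  return &storage;
}

int main() {
  int site = 0;
  // Mirrors: if (maybe_alloc_memento->To(&alloc_memento)) { ... }
  if (auto memento = TryAllocateMemento(/*fail=*/false)) {
    (*memento)->site = &site;  // wire up tracking only if allocation worked
  }
  // On failure the clone is still usable, just without site tracking.
  std::printf("clone is returned either way\n");
  return 0;
}
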
8101 if (FLAG_concurrent_recompilation) { | 8113 if (FLAG_concurrent_recompilation) { |
8102 heap_->relocation_mutex_->Lock(); | 8114 heap_->relocation_mutex_->Lock(); |
8103 #ifdef DEBUG | 8115 #ifdef DEBUG |
8104 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 8116 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
8105 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 8117 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
8106 #endif // DEBUG | 8118 #endif // DEBUG |
8107 } | 8119 } |
8108 } | 8120 } |
8109 | 8121 |
8110 } } // namespace v8::internal | 8122 } } // namespace v8::internal |
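
Reviewer note on the last hunk: RelocationLock's constructor takes the relocation mutex only when concurrent recompilation is enabled and, in debug builds, records whether the locker was the optimizer thread. A stand-alone RAII sketch of that pattern, with std::mutex and made-up flag/thread-query names in place of V8's own:

#include <cassert>
#include <mutex>

struct FakeHeap {
  std::mutex relocation_mutex;              // heap_->relocation_mutex_
  bool locked_by_optimizer_thread = false;  // DEBUG-only field in the real code
};

// Made-up stand-ins for FLAG_concurrent_recompilation and for
// the optimizing-compiler-thread query.
static bool concurrent_recompilation_enabled = true;
static bool IsOptimizerThread() { return false; }

class RelocationLockSketch {
 public:
  explicit RelocationLockSketch(FakeHeap* heap) : heap_(heap) {
    if (concurrent_recompilation_enabled) {
      heap_->relocation_mutex.lock();
      // The real code wraps this bookkeeping in #ifdef DEBUG.
      heap_->locked_by_optimizer_thread = IsOptimizerThread();
    }
  }
  // The matching unlock is not shown in the hunk; assumed here to live in
  // the destructor so the lock is scope-bound.
  ~RelocationLockSketch() {
    if (concurrent_recompilation_enabled) heap_->relocation_mutex.unlock();
  }
  RelocationLockSketch(const RelocationLockSketch&) = delete;
  RelocationLockSketch& operator=(const RelocationLockSketch&) = delete;

 private:
  FakeHeap* heap_;
};

int main() {
  FakeHeap heap;
  {
    RelocationLockSketch lock(&heap);          // locks when the flag is on
    assert(!heap.locked_by_optimizer_thread);  // main thread, not the optimizer
  }                                            // unlocks on scope exit
  return 0;
}
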