OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2070 matching lines...)
2081 if (alignment != kObjectAlignment) { | 2081 if (alignment != kObjectAlignment) { |
2082 ASSERT(alignment == kDoubleAlignment); | 2082 ASSERT(alignment == kDoubleAlignment); |
2083 allocation_size += kPointerSize; | 2083 allocation_size += kPointerSize; |
2084 } | 2084 } |
2085 | 2085 |
2086 Heap* heap = map->GetHeap(); | 2086 Heap* heap = map->GetHeap(); |
2087 if (heap->ShouldBePromoted(object->address(), object_size)) { | 2087 if (heap->ShouldBePromoted(object->address(), object_size)) { |
2088 MaybeObject* maybe_result; | 2088 MaybeObject* maybe_result; |
2089 | 2089 |
2090 if (object_contents == DATA_OBJECT) { | 2090 if (object_contents == DATA_OBJECT) { |
| 2091 // TODO(mstarzinger): Turn this check into a regular assert soon! |
| 2092 CHECK(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE)); |
2091 maybe_result = heap->old_data_space()->AllocateRaw(allocation_size); | 2093 maybe_result = heap->old_data_space()->AllocateRaw(allocation_size); |
2092 } else { | 2094 } else { |
2093 maybe_result = | 2095 // TODO(mstarzinger): Turn this check into a regular assert soon! |
2094 heap->old_pointer_space()->AllocateRaw(allocation_size); | 2096 CHECK(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE)); |
| 2097 maybe_result = heap->old_pointer_space()->AllocateRaw(allocation_size); |
2095 } | 2098 } |
2096 | 2099 |
2097 Object* result = NULL; // Initialization to please compiler. | 2100 Object* result = NULL; // Initialization to please compiler. |
2098 if (maybe_result->ToObject(&result)) { | 2101 if (maybe_result->ToObject(&result)) { |
2099 HeapObject* target = HeapObject::cast(result); | 2102 HeapObject* target = HeapObject::cast(result); |
2100 | 2103 |
2101 if (alignment != kObjectAlignment) { | 2104 if (alignment != kObjectAlignment) { |
2102 target = EnsureDoubleAligned(heap, target, allocation_size); | 2105 target = EnsureDoubleAligned(heap, target, allocation_size); |
2103 } | 2106 } |
2104 | 2107 |
2105 // Order is important: slot might be inside of the target if target | 2108 // Order is important: slot might be inside of the target if target |
2106 // was allocated over a dead object and slot comes from the store | 2109 // was allocated over a dead object and slot comes from the store |
2107 // buffer. | 2110 // buffer. |
2108 *slot = target; | 2111 *slot = target; |
2109 MigrateObject(heap, object, target, object_size); | 2112 MigrateObject(heap, object, target, object_size); |
2110 | 2113 |
2111 if (object_contents == POINTER_OBJECT) { | 2114 if (object_contents == POINTER_OBJECT) { |
2112 if (map->instance_type() == JS_FUNCTION_TYPE) { | 2115 if (map->instance_type() == JS_FUNCTION_TYPE) { |
2113 heap->promotion_queue()->insert( | 2116 heap->promotion_queue()->insert( |
2114 target, JSFunction::kNonWeakFieldsEndOffset); | 2117 target, JSFunction::kNonWeakFieldsEndOffset); |
2115 } else { | 2118 } else { |
2116 heap->promotion_queue()->insert(target, object_size); | 2119 heap->promotion_queue()->insert(target, object_size); |
2117 } | 2120 } |
2118 } | 2121 } |
2119 | 2122 |
2120 heap->tracer()->increment_promoted_objects_size(object_size); | 2123 heap->tracer()->increment_promoted_objects_size(object_size); |
2121 return; | 2124 return; |
2122 } | 2125 } |
2123 } | 2126 } |
| 2127 // TODO(mstarzinger): Turn this check into a regular assert soon! |
| 2128 CHECK(heap->AllowedToBeMigrated(object, NEW_SPACE)); |
2124 MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size); | 2129 MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size); |
2125 heap->promotion_queue()->SetNewLimit(heap->new_space()->top()); | 2130 heap->promotion_queue()->SetNewLimit(heap->new_space()->top()); |
2126 Object* result = allocation->ToObjectUnchecked(); | 2131 Object* result = allocation->ToObjectUnchecked(); |
2127 HeapObject* target = HeapObject::cast(result); | 2132 HeapObject* target = HeapObject::cast(result); |
2128 | 2133 |
2129 if (alignment != kObjectAlignment) { | 2134 if (alignment != kObjectAlignment) { |
2130 target = EnsureDoubleAligned(heap, target, allocation_size); | 2135 target = EnsureDoubleAligned(heap, target, allocation_size); |
2131 } | 2136 } |
2132 | 2137 |
2133 // Order is important: slot might be inside of the target if target | 2138 // Order is important: slot might be inside of the target if target |
(...skipping 5911 matching lines...)
8045 if (FLAG_parallel_recompilation) { | 8050 if (FLAG_parallel_recompilation) { |
8046 heap_->relocation_mutex_->Lock(); | 8051 heap_->relocation_mutex_->Lock(); |
8047 #ifdef DEBUG | 8052 #ifdef DEBUG |
8048 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 8053 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
8049 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 8054 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
8050 #endif // DEBUG | 8055 #endif // DEBUG |
8051 } | 8056 } |
8052 } | 8057 } |
8053 | 8058 |
8054 } } // namespace v8::internal | 8059 } } // namespace v8::internal |
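
The first hunk above guards every allocation target chosen during scavenge promotion with CHECK(heap->AllowedToBeMigrated(object, SPACE)) before the corresponding AllocateRaw call: old data space for data objects, old pointer space for pointer objects, and new space on the fallback path. The following is a minimal, self-contained sketch of that control flow, not V8 code: FakeHeap, ChooseTargetSpace, and their members are hypothetical stand-ins, and the real AllowedToBeMigrated consults V8-internal invariants rather than the trivial check used here.

// Sketch only: simplified stand-ins illustrating "check the target space
// before allocating there", mirroring the CHECKs added in the patch.
#include <cassert>
#include <cstdio>

enum AllocationSpace { NEW_SPACE, OLD_POINTER_SPACE, OLD_DATA_SPACE };
enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };

// Hypothetical stand-in for v8::internal::Heap.
struct FakeHeap {
  // In V8 this consults per-space migration invariants; here we merely
  // accept the three spaces the scavenger promotes or copies into.
  bool AllowedToBeMigrated(AllocationSpace target) const {
    return target == NEW_SPACE || target == OLD_POINTER_SPACE ||
           target == OLD_DATA_SPACE;
  }
  bool ShouldBePromoted() const { return promote_; }
  bool promote_;
};

// Mirrors the patched EvacuateObject flow: pick the promotion target by
// object contents, assert it is a legal migration target, else fall back
// to new space (also asserted), before any allocation would happen.
AllocationSpace ChooseTargetSpace(const FakeHeap& heap,
                                  ObjectContents contents) {
  if (heap.ShouldBePromoted()) {
    AllocationSpace target =
        (contents == DATA_OBJECT) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
    assert(heap.AllowedToBeMigrated(target));  // CHECK(...) in the patch.
    return target;
  }
  assert(heap.AllowedToBeMigrated(NEW_SPACE));
  return NEW_SPACE;
}

int main() {
  FakeHeap heap{true};
  std::printf("promoted data object -> space %d\n",
              ChooseTargetSpace(heap, DATA_OBJECT));
  heap.promote_ = false;
  std::printf("young pointer object -> space %d\n",
              ChooseTargetSpace(heap, POINTER_OBJECT));
  return 0;
}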