OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "v8.h" | 5 #include "v8.h" |
6 | 6 |
7 #include "code-stubs.h" | 7 #include "code-stubs.h" |
8 #include "compilation-cache.h" | 8 #include "compilation-cache.h" |
9 #include "cpu-profiler.h" | 9 #include "cpu-profiler.h" |
10 #include "deoptimizer.h" | 10 #include "deoptimizer.h" |
11 #include "execution.h" | 11 #include "execution.h" |
12 #include "gdb-jit.h" | 12 #include "gdb-jit.h" |
13 #include "global-handles.h" | 13 #include "global-handles.h" |
14 #include "heap-profiler.h" | 14 #include "heap-profiler.h" |
15 #include "ic-inl.h" | 15 #include "ic-inl.h" |
16 #include "incremental-marking.h" | 16 #include "incremental-marking.h" |
17 #include "mark-compact.h" | 17 #include "mark-compact.h" |
18 #include "objects-visiting.h" | 18 #include "objects-visiting.h" |
19 #include "objects-visiting-inl.h" | 19 #include "objects-visiting-inl.h" |
| 20 #include "spaces-inl.h" |
20 #include "stub-cache.h" | 21 #include "stub-cache.h" |
21 #include "sweeper-thread.h" | 22 #include "sweeper-thread.h" |
22 | 23 |
23 namespace v8 { | 24 namespace v8 { |
24 namespace internal { | 25 namespace internal { |
25 | 26 |
26 | 27 |
27 const char* Marking::kWhiteBitPattern = "00"; | 28 const char* Marking::kWhiteBitPattern = "00"; |
28 const char* Marking::kBlackBitPattern = "10"; | 29 const char* Marking::kBlackBitPattern = "10"; |
29 const char* Marking::kGreyBitPattern = "11"; | 30 const char* Marking::kGreyBitPattern = "11"; |
(...skipping 2022 matching lines...)
2052 Heap::UpdateAllocationSiteFeedback(object, Heap::RECORD_SCRATCHPAD_SLOT); | 2053 Heap::UpdateAllocationSiteFeedback(object, Heap::RECORD_SCRATCHPAD_SLOT); |
2053 | 2054 |
2054 offset++; | 2055 offset++; |
2055 current_cell >>= 1; | 2056 current_cell >>= 1; |
2056 // Aggressively promote young survivors to the old space. | 2057 // Aggressively promote young survivors to the old space. |
2057 if (TryPromoteObject(object, size)) { | 2058 if (TryPromoteObject(object, size)) { |
2058 continue; | 2059 continue; |
2059 } | 2060 } |
2060 | 2061 |
2061 // Promotion failed. Just migrate object to another semispace. | 2062 // Promotion failed. Just migrate object to another semispace. |
2062 MaybeObject* allocation = new_space->AllocateRaw(size); | 2063 AllocationResult allocation = new_space->AllocateRaw(size); |
2063 if (allocation->IsFailure()) { | 2064 if (allocation.IsRetry()) { |
2064 if (!new_space->AddFreshPage()) { | 2065 if (!new_space->AddFreshPage()) { |
2065 // Shouldn't happen. We are sweeping linearly, and to-space | 2066 // Shouldn't happen. We are sweeping linearly, and to-space |
2066 // has the same number of pages as from-space, so there is | 2067 // has the same number of pages as from-space, so there is |
2067 // always room. | 2068 // always room. |
2068 UNREACHABLE(); | 2069 UNREACHABLE(); |
2069 } | 2070 } |
2070 allocation = new_space->AllocateRaw(size); | 2071 allocation = new_space->AllocateRaw(size); |
2071 ASSERT(!allocation->IsFailure()); | 2072 ASSERT(!allocation.IsRetry()); |
2072 } | 2073 } |
2073 Object* target = allocation->ToObjectUnchecked(); | 2074 Object* target = allocation.ToObjectChecked(); |
2074 | 2075 |
2075 MigrateObject(HeapObject::cast(target), | 2076 MigrateObject(HeapObject::cast(target), |
2076 object, | 2077 object, |
2077 size, | 2078 size, |
2078 NEW_SPACE); | 2079 NEW_SPACE); |
2079 } | 2080 } |
2080 *cells = 0; | 2081 *cells = 0; |
2081 } | 2082 } |
2082 return survivors_size; | 2083 return survivors_size; |
2083 } | 2084 } |
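
A note on the pattern in the hunk above: the rewritten evacuation path calls AllocateRaw(), treats an IsRetry() result as "current to-space page exhausted", adds a fresh page, and allocates again, which must succeed because to-space mirrors from-space page for page. Below is a minimal standalone sketch of that retry flow; ToySpace and this cut-down AllocationResult are illustrative stand-ins for V8's real NewSpace and AllocationResult, not their actual implementations.

// Minimal sketch of the allocate / retry-on-fresh-page flow
// (assumed, simplified semantics; not V8's real classes).
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

// Stand-in for v8::internal::AllocationResult.
class AllocationResult {
 public:
  static AllocationResult Retry() { return AllocationResult(nullptr); }
  static AllocationResult Of(void* object) { return AllocationResult(object); }
  bool IsRetry() const { return object_ == nullptr; }
  void* ToObjectChecked() const { assert(!IsRetry()); return object_; }
 private:
  explicit AllocationResult(void* object) : object_(object) {}
  void* object_;
};

// Stand-in for a bump-pointer space made of fixed-size pages.
class ToySpace {
 public:
  AllocationResult AllocateRaw(std::size_t size) {
    if (pages_.empty() || top_ + size > kPageSize) return AllocationResult::Retry();
    void* result = &pages_.back()[top_];
    top_ += size;
    return AllocationResult::Of(result);
  }
  bool AddFreshPage() {
    pages_.push_back(std::vector<std::uint8_t>(kPageSize));
    top_ = 0;
    return true;
  }
 private:
  static const std::size_t kPageSize = 4096;
  std::vector<std::vector<std::uint8_t>> pages_;
  std::size_t top_ = 0;
};

int main() {
  ToySpace new_space;
  std::size_t size = 64;
  AllocationResult allocation = new_space.AllocateRaw(size);
  if (allocation.IsRetry()) {
    // Mirrors the evacuation loop: grab a fresh page, then the retry must succeed.
    if (!new_space.AddFreshPage()) return 1;  // cannot happen in this toy space
    allocation = new_space.AllocateRaw(size);
    assert(!allocation.IsRetry());
  }
  void* target = allocation.ToObjectChecked();
  std::printf("allocated %zu bytes at %p\n", size, target);
  return 0;
}
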
(...skipping 983 matching lines...)
3067 | 3068 |
3068 | 3069 |
3069 bool MarkCompactCollector::TryPromoteObject(HeapObject* object, | 3070 bool MarkCompactCollector::TryPromoteObject(HeapObject* object, |
3070 int object_size) { | 3071 int object_size) { |
3071 ASSERT(object_size <= Page::kMaxRegularHeapObjectSize); | 3072 ASSERT(object_size <= Page::kMaxRegularHeapObjectSize); |
3072 | 3073 |
3073 OldSpace* target_space = heap()->TargetSpace(object); | 3074 OldSpace* target_space = heap()->TargetSpace(object); |
3074 | 3075 |
3075 ASSERT(target_space == heap()->old_pointer_space() || | 3076 ASSERT(target_space == heap()->old_pointer_space() || |
3076 target_space == heap()->old_data_space()); | 3077 target_space == heap()->old_data_space()); |
3077 Object* result; | 3078 HeapObject* target; |
3078 MaybeObject* maybe_result = target_space->AllocateRaw(object_size); | 3079 AllocationResult allocation = target_space->AllocateRaw(object_size); |
3079 if (maybe_result->ToObject(&result)) { | 3080 if (allocation.To(&target)) { |
3080 HeapObject* target = HeapObject::cast(result); | |
3081 MigrateObject(target, | 3081 MigrateObject(target, |
3082 object, | 3082 object, |
3083 object_size, | 3083 object_size, |
3084 target_space->identity()); | 3084 target_space->identity()); |
3085 heap()->mark_compact_collector()->tracer()-> | 3085 heap()->mark_compact_collector()->tracer()-> |
3086 increment_promoted_objects_size(object_size); | 3086 increment_promoted_objects_size(object_size); |
3087 return true; | 3087 return true; |
3088 } | 3088 } |
3089 | 3089 |
3090 return false; | 3090 return false; |
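
The To() call in TryPromoteObject above is the other half of the new API: it reports whether the allocation succeeded and, on success, stores the result already typed, which is why the CL can drop the separate ToObject() plus HeapObject::cast() step. A simplified sketch of that assumed behaviour (not V8's actual implementation):

#include <cstdio>

struct HeapObjectStub { int size_in_bytes; };  // hypothetical placeholder object

// Stand-in for AllocationResult::To(T** out) with assumed semantics.
class AllocationResult {
 public:
  static AllocationResult Retry() { return AllocationResult(nullptr); }
  static AllocationResult Of(HeapObjectStub* object) { return AllocationResult(object); }
  template <typename T>
  bool To(T** out) const {
    if (object_ == nullptr) return false;  // allocation failed; *out is untouched
    *out = static_cast<T*>(object_);
    return true;
  }
 private:
  explicit AllocationResult(HeapObjectStub* object) : object_(object) {}
  HeapObjectStub* object_;
};

// Shaped like TryPromoteObject: succeed only if the target-space allocation did.
bool TryPromote(const AllocationResult& allocation) {
  HeapObjectStub* target = nullptr;
  if (allocation.To(&target)) {
    std::printf("promoted object of %d bytes\n", target->size_in_bytes);
    return true;
  }
  return false;  // caller falls back to keeping the object in new space
}

int main() {
  HeapObjectStub obj{48};
  bool first = TryPromote(AllocationResult::Of(&obj));   // succeeds
  bool second = TryPromote(AllocationResult::Retry());   // fails cleanly
  std::printf("first=%d second=%d\n", first, second);
  return 0;
}
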
(...skipping 50 matching lines...)
3141 if (*cell == 0) continue; | 3141 if (*cell == 0) continue; |
3142 | 3142 |
3143 int live_objects = MarkWordToObjectStarts(*cell, offsets); | 3143 int live_objects = MarkWordToObjectStarts(*cell, offsets); |
3144 for (int i = 0; i < live_objects; i++) { | 3144 for (int i = 0; i < live_objects; i++) { |
3145 Address object_addr = cell_base + offsets[i] * kPointerSize; | 3145 Address object_addr = cell_base + offsets[i] * kPointerSize; |
3146 HeapObject* object = HeapObject::FromAddress(object_addr); | 3146 HeapObject* object = HeapObject::FromAddress(object_addr); |
3147 ASSERT(Marking::IsBlack(Marking::MarkBitFrom(object))); | 3147 ASSERT(Marking::IsBlack(Marking::MarkBitFrom(object))); |
3148 | 3148 |
3149 int size = object->Size(); | 3149 int size = object->Size(); |
3150 | 3150 |
3151 MaybeObject* target = space->AllocateRaw(size); | 3151 HeapObject* target_object; |
3152 if (target->IsFailure()) { | 3152 AllocationResult allocation = space->AllocateRaw(size); |
| 3153 if (!allocation.To(&target_object)) { |
3153 // OS refused to give us memory. | 3154 // OS refused to give us memory. |
3154 V8::FatalProcessOutOfMemory("Evacuation"); | 3155 V8::FatalProcessOutOfMemory("Evacuation"); |
3155 return; | 3156 return; |
3156 } | 3157 } |
3157 | 3158 |
3158 Object* target_object = target->ToObjectUnchecked(); | 3159 MigrateObject(target_object, object, size, space->identity()); |
3159 | |
3160 MigrateObject(HeapObject::cast(target_object), | |
3161 object, | |
3162 size, | |
3163 space->identity()); | |
3164 ASSERT(object->map_word().IsForwardingAddress()); | 3160 ASSERT(object->map_word().IsForwardingAddress()); |
3165 } | 3161 } |
3166 | 3162 |
3167 // Clear marking bits for current cell. | 3163 // Clear marking bits for current cell. |
3168 *cell = 0; | 3164 *cell = 0; |
3169 } | 3165 } |
3170 p->ResetLiveBytes(); | 3166 p->ResetLiveBytes(); |
3171 } | 3167 } |
3172 | 3168 |
3173 | 3169 |
(...skipping 1364 matching lines...)
4538 while (buffer != NULL) { | 4534 while (buffer != NULL) { |
4539 SlotsBuffer* next_buffer = buffer->next(); | 4535 SlotsBuffer* next_buffer = buffer->next(); |
4540 DeallocateBuffer(buffer); | 4536 DeallocateBuffer(buffer); |
4541 buffer = next_buffer; | 4537 buffer = next_buffer; |
4542 } | 4538 } |
4543 *buffer_address = NULL; | 4539 *buffer_address = NULL; |
4544 } | 4540 } |
4545 | 4541 |
4546 | 4542 |
4547 } } // namespace v8::internal | 4543 } } // namespace v8::internal |