OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/heap/incremental-marking.h" | 7 #include "src/heap/incremental-marking.h" |
8 | 8 |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
(...skipping 22 matching lines...) |
33 weak_closure_approximation_rounds_(0), | 33 weak_closure_approximation_rounds_(0), |
34 request_type_(COMPLETE_MARKING) {} | 34 request_type_(COMPLETE_MARKING) {} |
35 | 35 |
36 | 36 |
37 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, | 37 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, |
38 Object* value) { | 38 Object* value) { |
39 if (BaseRecordWrite(obj, slot, value) && slot != NULL) { | 39 if (BaseRecordWrite(obj, slot, value) && slot != NULL) { |
40 MarkBit obj_bit = Marking::MarkBitFrom(obj); | 40 MarkBit obj_bit = Marking::MarkBitFrom(obj); |
41 if (Marking::IsBlack(obj_bit)) { | 41 if (Marking::IsBlack(obj_bit)) { |
42 // Object is not going to be rescanned, so we need to record the slot. | 42 // Object is not going to be rescanned, so we need to record the slot. |
43 heap_->mark_compact_collector()->RecordSlot(HeapObject::RawField(obj, 0), | 43 heap_->mark_compact_collector()->RecordSlot(obj, slot, value); |
44 slot, value); | |
45 } | 44 } |
46 } | 45 } |
47 } | 46 } |
48 | 47 |
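Note on the RecordSlot change above: the slot is now recorded against the host object itself rather than against `HeapObject::RawField(obj, 0)` used as an anchor. The invariant being serviced is the usual tri-color one: a black object will not be rescanned, so a pointer stored into it must be remembered or it could be missed. Below is a minimal, self-contained sketch of that rule with toy types; `remembered_set` and `WriteField` are illustrative stand-ins, not V8 API.

    #include <cstdio>
    #include <utility>
    #include <vector>

    enum class Color { kWhite, kGrey, kBlack };

    struct ToyObject {
      Color color = Color::kWhite;
      ToyObject* field = nullptr;
    };

    // Illustrative stand-in for the collector's slot buffer: (host, slot)
    // pairs that will be revisited before marking finishes.
    std::vector<std::pair<ToyObject*, ToyObject**>> remembered_set;

    // Toy write barrier: after a store, a black host will not be rescanned,
    // so the slot has to be recorded (the role RecordWriteSlow plays above).
    void WriteField(ToyObject* host, ToyObject** slot, ToyObject* value) {
      *slot = value;
      if (host->color == Color::kBlack) {
        remembered_set.emplace_back(host, slot);
      }
    }

    int main() {
      ToyObject host, value;       // value starts white
      host.color = Color::kBlack;  // host already fully scanned
      WriteField(&host, &host.field, &value);
      std::printf("recorded slots: %zu\n", remembered_set.size());  // 1
      return 0;
    }

Keying the pair by the host rather than by an anchor slot is also what lets the visitor code later in this file drop its separate anchor-taking overload.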
49 | 48 |
50 void IncrementalMarking::RecordWriteFromCode(HeapObject* obj, Object** slot, | 49 void IncrementalMarking::RecordWriteFromCode(HeapObject* obj, Object** slot, |
51 Isolate* isolate) { | 50 Isolate* isolate) { |
52 DCHECK(obj->IsHeapObject()); | 51 DCHECK(obj->IsHeapObject()); |
53 IncrementalMarking* marking = isolate->heap()->incremental_marking(); | 52 IncrementalMarking* marking = isolate->heap()->incremental_marking(); |
54 | 53 |
(...skipping 30 matching lines...) |
85 } | 84 } |
86 } | 85 } |
87 | 86 |
88 | 87 |
89 void IncrementalMarking::RecordWriteOfCodeEntrySlow(JSFunction* host, | 88 void IncrementalMarking::RecordWriteOfCodeEntrySlow(JSFunction* host, |
90 Object** slot, | 89 Object** slot, |
91 Code* value) { | 90 Code* value) { |
92 if (BaseRecordWrite(host, slot, value)) { | 91 if (BaseRecordWrite(host, slot, value)) { |
93 DCHECK(slot != NULL); | 92 DCHECK(slot != NULL); |
94 heap_->mark_compact_collector()->RecordCodeEntrySlot( | 93 heap_->mark_compact_collector()->RecordCodeEntrySlot( |
95 reinterpret_cast<Address>(slot), value); | 94 host, reinterpret_cast<Address>(slot), value); |
96 } | 95 } |
97 } | 96 } |
98 | 97 |
99 | 98 |
100 void IncrementalMarking::RecordWriteIntoCodeSlow(HeapObject* obj, | 99 void IncrementalMarking::RecordWriteIntoCodeSlow(HeapObject* obj, |
101 RelocInfo* rinfo, | 100 RelocInfo* rinfo, |
102 Object* value) { | 101 Object* value) { |
103 MarkBit value_bit = Marking::MarkBitFrom(HeapObject::cast(value)); | 102 MarkBit value_bit = Marking::MarkBitFrom(HeapObject::cast(value)); |
104 if (Marking::IsWhite(value_bit)) { | 103 if (Marking::IsWhite(value_bit)) { |
105 MarkBit obj_bit = Marking::MarkBitFrom(obj); | 104 MarkBit obj_bit = Marking::MarkBitFrom(obj); |
(...skipping 64 matching lines...) |
170 // fully scanned. Fall back to scanning it through to the end in case this | 169 // fully scanned. Fall back to scanning it through to the end in case this |
171 // fails because of a full deque. | 170 // fails because of a full deque. |
172 int object_size = FixedArray::BodyDescriptor::SizeOf(map, object); | 171 int object_size = FixedArray::BodyDescriptor::SizeOf(map, object); |
173 int start_offset = | 172 int start_offset = |
174 Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar()); | 173 Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar()); |
175 int end_offset = | 174 int end_offset = |
176 Min(object_size, start_offset + kProgressBarScanningChunk); | 175 Min(object_size, start_offset + kProgressBarScanningChunk); |
177 int already_scanned_offset = start_offset; | 176 int already_scanned_offset = start_offset; |
178 bool scan_until_end = false; | 177 bool scan_until_end = false; |
179 do { | 178 do { |
180 VisitPointersWithAnchor(heap, HeapObject::RawField(object, 0), | 179 VisitPointers(heap, object, HeapObject::RawField(object, start_offset), |
181 HeapObject::RawField(object, start_offset), | 180 HeapObject::RawField(object, end_offset)); |
182 HeapObject::RawField(object, end_offset)); | |
183 start_offset = end_offset; | 181 start_offset = end_offset; |
184 end_offset = Min(object_size, end_offset + kProgressBarScanningChunk); | 182 end_offset = Min(object_size, end_offset + kProgressBarScanningChunk); |
185 scan_until_end = | 183 scan_until_end = |
186 heap->mark_compact_collector()->marking_deque()->IsFull(); | 184 heap->mark_compact_collector()->marking_deque()->IsFull(); |
187 } while (scan_until_end && start_offset < object_size); | 185 } while (scan_until_end && start_offset < object_size); |
188 chunk->set_progress_bar(start_offset); | 186 chunk->set_progress_bar(start_offset); |
189 if (start_offset < object_size) { | 187 if (start_offset < object_size) { |
190 if (Marking::IsGrey(Marking::MarkBitFrom(object))) { | 188 if (Marking::IsGrey(Marking::MarkBitFrom(object))) { |
191 heap->mark_compact_collector()->marking_deque()->UnshiftGrey(object); | 189 heap->mark_compact_collector()->marking_deque()->UnshiftGrey(object); |
192 } else { | 190 } else { |
(...skipping 14 matching lines...) |
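Note on the loop above: a large FixedArray is scanned in kProgressBarScanningChunk-sized slices; normally the object is re-pushed grey with the resume offset parked in the page's progress bar, but if the marking deque is full (so the object cannot be re-pushed) the loop falls back to scanning through to the end. A runnable toy of the same resumption scheme, with the deque reduced to a fill counter and all names illustrative:

    #include <algorithm>
    #include <cstdio>

    constexpr int kChunk = 4;          // stand-in for kProgressBarScanningChunk
    constexpr int kDequeCapacity = 6;  // stand-in for the marking deque limit

    int deque_load = 0;  // how much work the "deque" currently holds
    bool DequeIsFull() { return deque_load >= kDequeCapacity; }

    // Pretend each scanned slot greys one object, i.e. pushes one work item.
    void ScanRange(int start, int end) { deque_load += end - start; }

    // Scans in chunks and returns the offset to resume from; object_size
    // means fully scanned. Mirrors the do/while structure in the diff above.
    int ScanWithProgressBar(int object_size, int progress_bar) {
      int start = std::max(0, progress_bar);
      int end = std::min(object_size, start + kChunk);
      bool scan_until_end = false;
      do {
        ScanRange(start, end);
        start = end;
        end = std::min(object_size, end + kChunk);
        // If the deque filled up, the object cannot be re-pushed grey, so
        // keep scanning through to the end instead of parking the offset.
        scan_until_end = DequeIsFull();
      } while (scan_until_end && start < object_size);
      return start;
    }

    int main() {
      int bar = ScanWithProgressBar(10, 0);
      std::printf("progress bar parked at %d of 10\n", bar);  // parked at 4
      return 0;
    }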
207 // We will mark cache black with a separate pass when we finish marking. | 205 // We will mark cache black with a separate pass when we finish marking. |
208 // Note that GC can happen when the context is not fully initialized, | 206 // Note that GC can happen when the context is not fully initialized, |
209 // so the cache can be undefined. | 207 // so the cache can be undefined. |
210 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX); | 208 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX); |
211 if (!cache->IsUndefined()) { | 209 if (!cache->IsUndefined()) { |
212 MarkObjectGreyDoNotEnqueue(cache); | 210 MarkObjectGreyDoNotEnqueue(cache); |
213 } | 211 } |
214 VisitNativeContext(map, context); | 212 VisitNativeContext(map, context); |
215 } | 213 } |
216 | 214 |
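Note on MarkObjectGreyDoNotEnqueue above: the cache is greyed so it survives the cycle, but it is deliberately kept off the marking deque; as the comment says, a separate pass blackens it once marking finishes. A toy rendering of that deferral (types and the finishing pass are illustrative assumptions, not V8's implementation):

    #include <cstdio>
    #include <deque>
    #include <vector>

    enum class Color { kWhite, kGrey, kBlack };
    struct ToyObject { Color color = Color::kWhite; };

    std::deque<ToyObject*> marking_deque;  // drained incrementally
    std::vector<ToyObject*> deferred;      // greyed but never enqueued

    // Normal protocol: grey the object and push it so its fields get traced.
    void MarkGreyAndPush(ToyObject* o) {
      if (o->color != Color::kWhite) return;
      o->color = Color::kGrey;
      marking_deque.push_back(o);
    }

    // The MarkObjectGreyDoNotEnqueue pattern: the object survives the cycle,
    // but its contents are not traced now; a finishing pass blackens it.
    void MarkGreyDoNotEnqueue(ToyObject* o) {
      if (o->color != Color::kWhite) return;
      o->color = Color::kGrey;
      deferred.push_back(o);
    }

    void FinishMarking() {
      for (ToyObject* o : deferred) o->color = Color::kBlack;
      deferred.clear();
    }

    int main() {
      ToyObject normal, cache;
      MarkGreyAndPush(&normal);
      MarkGreyDoNotEnqueue(&cache);
      std::printf("deque holds %zu object(s)\n", marking_deque.size());   // 1
      FinishMarking();
      std::printf("cache is black: %d\n", cache.color == Color::kBlack);  // 1
      return 0;
    }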
217 INLINE(static void VisitPointer(Heap* heap, Object** p)) { | 215 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) { |
218 Object* obj = *p; | 216 Object* target = *p; |
219 if (obj->IsHeapObject()) { | 217 if (target->IsHeapObject()) { |
220 heap->mark_compact_collector()->RecordSlot(p, p, obj); | 218 heap->mark_compact_collector()->RecordSlot(object, p, target); |
221 MarkObject(heap, obj); | 219 MarkObject(heap, target); |
222 } | 220 } |
223 } | 221 } |
224 | 222 |
225 INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) { | 223 INLINE(static void VisitPointers(Heap* heap, HeapObject* object, |
| 224 Object** start, Object** end)) { |
226 for (Object** p = start; p < end; p++) { | 225 for (Object** p = start; p < end; p++) { |
227 Object* obj = *p; | 226 Object* target = *p; |
228 if (obj->IsHeapObject()) { | 227 if (target->IsHeapObject()) { |
229 heap->mark_compact_collector()->RecordSlot(start, p, obj); | 228 heap->mark_compact_collector()->RecordSlot(object, p, target); |
230 MarkObject(heap, obj); | 229 MarkObject(heap, target); |
231 } | 230 } |
232 } | 231 } |
233 } | 232 } |
234 | |
235 INLINE(static void VisitPointersWithAnchor(Heap* heap, Object** anchor, | |
236 Object** start, Object** end)) { | |
237 for (Object** p = start; p < end; p++) { | |
238 Object* obj = *p; | |
239 if (obj->IsHeapObject()) { | |
240 heap->mark_compact_collector()->RecordSlot(anchor, p, obj); | |
241 MarkObject(heap, obj); | |
242 } | |
243 } | |
244 } | |
245 | 233 |
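Note on the deletion above: once RecordSlot is keyed by the host object, VisitPointersWithAnchor has nothing left to add; the plain VisitPointers overload can start anywhere inside the object, which is exactly what the progress-bar loop earlier in the file now relies on. A compact sketch of that unified visitor with toy types (the real MarkObject work is reduced to a counter):

    #include <cstdio>
    #include <utility>
    #include <vector>

    struct ToyObject {
      ToyObject* fields[4] = {nullptr, nullptr, nullptr, nullptr};
    };

    std::vector<std::pair<ToyObject*, ToyObject**>> slot_buffer;
    int marked = 0;

    void MarkObject(ToyObject*) { marked++; }  // toy: just count

    // One visitor for whole-object and partial (progress-bar) scans alike:
    // each slot is recorded against the host object, so the range no longer
    // has to start at the object's first field.
    void VisitPointers(ToyObject* host, ToyObject** start, ToyObject** end) {
      for (ToyObject** p = start; p < end; p++) {
        if (*p != nullptr) {  // toy stand-in for IsHeapObject()
          slot_buffer.emplace_back(host, p);
          MarkObject(*p);
        }
      }
    }

    int main() {
      ToyObject host, a, b;
      host.fields[1] = &a;
      host.fields[3] = &b;
      // Partial scan of fields [2, 4): only the slot holding &b is visited.
      VisitPointers(&host, &host.fields[2], &host.fields[4]);
      std::printf("recorded %zu slot(s), marked %d object(s)\n",
                  slot_buffer.size(), marked);  // 1 and 1
      return 0;
    }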
246 // Marks the object grey and pushes it on the marking stack. | 234 // Marks the object grey and pushes it on the marking stack. |
247 INLINE(static void MarkObject(Heap* heap, Object* obj)) { | 235 INLINE(static void MarkObject(Heap* heap, Object* obj)) { |
248 IncrementalMarking::MarkObject(heap, HeapObject::cast(obj)); | 236 IncrementalMarking::MarkObject(heap, HeapObject::cast(obj)); |
249 } | 237 } |
250 | 238 |
251 // Marks the object black without pushing it on the marking stack. | 239 // Marks the object black without pushing it on the marking stack. |
252 // Returns true if object needed marking and false otherwise. | 240 // Returns true if object needed marking and false otherwise. |
253 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { | 241 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { |
254 HeapObject* heap_object = HeapObject::cast(obj); | 242 HeapObject* heap_object = HeapObject::cast(obj); |
(...skipping 766 matching lines...) |
1021 void IncrementalMarking::IncrementIdleMarkingDelayCounter() { | 1009 void IncrementalMarking::IncrementIdleMarkingDelayCounter() { |
1022 idle_marking_delay_counter_++; | 1010 idle_marking_delay_counter_++; |
1023 } | 1011 } |
1024 | 1012 |
1025 | 1013 |
1026 void IncrementalMarking::ClearIdleMarkingDelayCounter() { | 1014 void IncrementalMarking::ClearIdleMarkingDelayCounter() { |
1027 idle_marking_delay_counter_ = 0; | 1015 idle_marking_delay_counter_ = 0; |
1028 } | 1016 } |
1029 } // namespace internal | 1017 } // namespace internal |
1030 } // namespace v8 | 1018 } // namespace v8 |