OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/scavenger.h" | 5 #include "src/heap/scavenger.h" |
6 | 6 |
7 #include "src/contexts.h" | 7 #include "src/contexts.h" |
8 #include "src/heap/heap.h" | 8 #include "src/heap/heap.h" |
9 #include "src/heap/objects-visiting-inl.h" | 9 #include "src/heap/objects-visiting-inl.h" |
10 #include "src/heap/scavenger-inl.h" | 10 #include "src/heap/scavenger-inl.h" |
(...skipping 175 matching lines...)
186 MigrateObject(heap, object, target, object_size); | 186 MigrateObject(heap, object, target, object_size); |
187 | 187 |
188 // Update slot to new target using CAS. A concurrent sweeper thread may | 188 // Update slot to new target using CAS. A concurrent sweeper thread may |
189 // filter the slot concurrently. | 189 // filter the slot concurrently. |
190 HeapObject* old = *slot; | 190 HeapObject* old = *slot; |
191 base::Release_CompareAndSwap(reinterpret_cast<base::AtomicWord*>(slot), | 191 base::Release_CompareAndSwap(reinterpret_cast<base::AtomicWord*>(slot), |
192 reinterpret_cast<base::AtomicWord>(old), | 192 reinterpret_cast<base::AtomicWord>(old), |
193 reinterpret_cast<base::AtomicWord>(target)); | 193 reinterpret_cast<base::AtomicWord>(target)); |
194 | 194 |
195 if (object_contents == POINTER_OBJECT) { | 195 if (object_contents == POINTER_OBJECT) { |
196 heap->promotion_queue()->insert( | 196 heap->promotion_queue()->insert(target, object_size, |
197 target, object_size, | 197 ObjectMarking::IsBlack(object)); |
198 Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); | |
199 } | 198 } |
200 heap->IncrementPromotedObjectsSize(object_size); | 199 heap->IncrementPromotedObjectsSize(object_size); |
201 return true; | 200 return true; |
202 } | 201 } |
203 return false; | 202 return false; |
204 } | 203 } |
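
Note on the CAS above: the scavenger publishes the forwarding pointer with release semantics, and if a concurrent sweeper has already filtered (rewritten) the slot, the failed CAS simply keeps the sweeper's value. A minimal standalone sketch of the same release-publish pattern, using std::atomic rather than V8's base:: primitives (all names here are illustrative, not V8 API):

#include <atomic>

struct HeapObject;  // opaque stand-in for v8::internal::HeapObject

// Install `target` into the slot only if it still holds `old_value`.
// Release ordering makes the copied object's contents visible before the
// new pointer is; on failure we keep whatever the racing thread wrote.
inline void UpdateSlotReleaseCas(std::atomic<HeapObject*>* slot,
                                 HeapObject* old_value,
                                 HeapObject* target) {
  slot->compare_exchange_strong(old_value, target,
                                std::memory_order_release,
                                std::memory_order_relaxed);
}
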
205 | 204 |
206 template <ObjectContents object_contents, AllocationAlignment alignment> | 205 template <ObjectContents object_contents, AllocationAlignment alignment> |
207 static inline void EvacuateObject(Map* map, HeapObject** slot, | 206 static inline void EvacuateObject(Map* map, HeapObject** slot, |
208 HeapObject* object, int object_size) { | 207 HeapObject* object, int object_size) { |
(...skipping 23 matching lines...)
232 static inline void EvacuateJSFunction(Map* map, HeapObject** slot, | 231 static inline void EvacuateJSFunction(Map* map, HeapObject** slot, |
233 HeapObject* object) { | 232 HeapObject* object) { |
234 ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object); | 233 ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object); |
235 | 234 |
236 if (marks_handling == IGNORE_MARKS) return; | 235 if (marks_handling == IGNORE_MARKS) return; |
237 | 236 |
238 MapWord map_word = object->map_word(); | 237 MapWord map_word = object->map_word(); |
239 DCHECK(map_word.IsForwardingAddress()); | 238 DCHECK(map_word.IsForwardingAddress()); |
240 HeapObject* target = map_word.ToForwardingAddress(); | 239 HeapObject* target = map_word.ToForwardingAddress(); |
241 | 240 |
242 MarkBit mark_bit = ObjectMarking::MarkBitFrom(target); | 241 if (ObjectMarking::IsBlack(target)) { |
243 if (Marking::IsBlack(mark_bit)) { | |
244 // This object is black and it might not be rescanned by marker. | 242 // This object is black and it might not be rescanned by marker. |
245 // We should explicitly record code entry slot for compaction because | 243 // We should explicitly record code entry slot for compaction because |
246 // promotion queue processing (IteratePromotedObjectPointers) will | 244 // promotion queue processing (IteratePromotedObjectPointers) will |
247 // miss it as it is not HeapObject-tagged. | 245 // miss it as it is not HeapObject-tagged. |
248 Address code_entry_slot = | 246 Address code_entry_slot = |
249 target->address() + JSFunction::kCodeEntryOffset; | 247 target->address() + JSFunction::kCodeEntryOffset; |
250 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); | 248 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); |
251 map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot( | 249 map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot( |
252 target, code_entry_slot, code); | 250 target, code_entry_slot, code); |
253 } | 251 } |
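
To make the comment above concrete: a pointer visitor such as IteratePromotedObjectPointers walks only HeapObject-tagged fields, so a raw code-entry address embedded in a JSFunction is invisible to it and must be recorded out of band for the compactor. A standalone sketch of that gap (every name below is illustrative, not V8 code):

#include <cstdint>
#include <vector>

using Address = uint8_t*;

struct FakeJSFunction {
  void* tagged_field;  // visited: an ordinary tagged pointer slot
  Address code_entry;  // skipped: a raw, untagged code address
};

// A visitor in the spirit of IteratePromotedObjectPointers: it reports
// only the slots known to hold tagged pointers.
template <typename F>
void IterateTaggedPointers(FakeJSFunction* fn, F visit) {
  visit(&fn->tagged_field);
  // fn->code_entry is deliberately not visited.
}

// Hence the explicit record, analogous to RecordCodeEntrySlot in the diff
// above, so the compactor can still update the raw slot later.
std::vector<Address*> recorded_code_entry_slots;
void RecordRawCodeEntrySlot(FakeJSFunction* fn) {
  recorded_code_entry_slots.push_back(&fn->code_entry);
}
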
(...skipping 189 matching lines...)
443 void ScavengeVisitor::ScavengePointer(Object** p) { | 441 void ScavengeVisitor::ScavengePointer(Object** p) { |
444 Object* object = *p; | 442 Object* object = *p; |
445 if (!heap_->InNewSpace(object)) return; | 443 if (!heap_->InNewSpace(object)) return; |
446 | 444 |
447 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p), | 445 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p), |
448 reinterpret_cast<HeapObject*>(object)); | 446 reinterpret_cast<HeapObject*>(object)); |
449 } | 447 } |
450 | 448 |
451 } // namespace internal | 449 } // namespace internal |
452 } // namespace v8 | 450 } // namespace v8 |