OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/scavenger.h" | 5 #include "src/heap/scavenger.h" |
6 | 6 |
7 #include "src/contexts.h" | 7 #include "src/contexts.h" |
8 #include "src/heap/heap.h" | 8 #include "src/heap/heap.h" |
9 #include "src/heap/objects-visiting-inl.h" | 9 #include "src/heap/objects-visiting-inl.h" |
10 #include "src/heap/scavenger-inl.h" | 10 #include "src/heap/scavenger-inl.h" |
(...skipping 182 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
193 MigrateObject(heap, object, target, object_size); | 193 MigrateObject(heap, object, target, object_size); |
194 | 194 |
195 // Update slot to new target using CAS. A concurrent sweeper thread may | 195 // Update slot to new target using CAS. A concurrent sweeper thread may |
196 // filter the slot concurrently. | 196 // filter the slot concurrently. |
197 HeapObject* old = *slot; | 197 HeapObject* old = *slot; |
198 base::Release_CompareAndSwap(reinterpret_cast<base::AtomicWord*>(slot), | 198 base::Release_CompareAndSwap(reinterpret_cast<base::AtomicWord*>(slot), |
199 reinterpret_cast<base::AtomicWord>(old), | 199 reinterpret_cast<base::AtomicWord>(old), |
200 reinterpret_cast<base::AtomicWord>(target)); | 200 reinterpret_cast<base::AtomicWord>(target)); |
201 | 201 |
202 if (object_contents == POINTER_OBJECT) { | 202 if (object_contents == POINTER_OBJECT) { |
203 heap->promotion_queue()->insert(target, object_size, | 203 heap->promotion_queue()->insert( |
204 ObjectMarking::IsBlack(object)); | 204 target, object_size, |
| 205 Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
205 } | 206 } |
206 heap->IncrementPromotedObjectsSize(object_size); | 207 heap->IncrementPromotedObjectsSize(object_size); |
207 return true; | 208 return true; |
208 } | 209 } |
209 return false; | 210 return false; |
210 } | 211 } |
211 | 212 |
212 template <ObjectContents object_contents, AllocationAlignment alignment> | 213 template <ObjectContents object_contents, AllocationAlignment alignment> |
213 static inline void EvacuateObject(Map* map, HeapObject** slot, | 214 static inline void EvacuateObject(Map* map, HeapObject** slot, |
214 HeapObject* object, int object_size) { | 215 HeapObject* object, int object_size) { |
(...skipping 23 matching lines...) Expand all Loading... |
238 static inline void EvacuateJSFunction(Map* map, HeapObject** slot, | 239 static inline void EvacuateJSFunction(Map* map, HeapObject** slot, |
239 HeapObject* object) { | 240 HeapObject* object) { |
240 ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object); | 241 ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object); |
241 | 242 |
242 if (marks_handling == IGNORE_MARKS) return; | 243 if (marks_handling == IGNORE_MARKS) return; |
243 | 244 |
244 MapWord map_word = object->map_word(); | 245 MapWord map_word = object->map_word(); |
245 DCHECK(map_word.IsForwardingAddress()); | 246 DCHECK(map_word.IsForwardingAddress()); |
246 HeapObject* target = map_word.ToForwardingAddress(); | 247 HeapObject* target = map_word.ToForwardingAddress(); |
247 | 248 |
248 if (ObjectMarking::IsBlack(target)) { | 249 MarkBit mark_bit = ObjectMarking::MarkBitFrom(target); |
| 250 if (Marking::IsBlack(mark_bit)) { |
249 // This object is black and it might not be rescanned by marker. | 251 // This object is black and it might not be rescanned by marker. |
250 // We should explicitly record code entry slot for compaction because | 252 // We should explicitly record code entry slot for compaction because |
251 // promotion queue processing (IteratePromotedObjectPointers) will | 253 // promotion queue processing (IteratePromotedObjectPointers) will |
252 // miss it as it is not HeapObject-tagged. | 254 // miss it as it is not HeapObject-tagged. |
253 Address code_entry_slot = | 255 Address code_entry_slot = |
254 target->address() + JSFunction::kCodeEntryOffset; | 256 target->address() + JSFunction::kCodeEntryOffset; |
255 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); | 257 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); |
256 map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot( | 258 map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot( |
257 target, code_entry_slot, code); | 259 target, code_entry_slot, code); |
258 } | 260 } |
(...skipping 209 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
468 void ScavengeVisitor::ScavengePointer(Object** p) { | 470 void ScavengeVisitor::ScavengePointer(Object** p) { |
469 Object* object = *p; | 471 Object* object = *p; |
470 if (!heap_->InNewSpace(object)) return; | 472 if (!heap_->InNewSpace(object)) return; |
471 | 473 |
472 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p), | 474 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p), |
473 reinterpret_cast<HeapObject*>(object)); | 475 reinterpret_cast<HeapObject*>(object)); |
474 } | 476 } |
475 | 477 |
476 } // namespace internal | 478 } // namespace internal |
477 } // namespace v8 | 479 } // namespace v8 |
OLD | NEW |