| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/scavenger.h" | 5 #include "src/heap/scavenger.h" |
| 6 | 6 |
| 7 #include "src/contexts.h" | 7 #include "src/contexts.h" |
| 8 #include "src/heap/heap.h" | 8 #include "src/heap/heap.h" |
| 9 #include "src/heap/objects-visiting-inl.h" | 9 #include "src/heap/objects-visiting-inl.h" |
| 10 #include "src/heap/scavenger-inl.h" | 10 #include "src/heap/scavenger-inl.h" |
| (...skipping 120 matching lines...) |
| 131 // Set the forwarding address. | 131 // Set the forwarding address. |
| 132 source->set_map_word(MapWord::FromForwardingAddress(target)); | 132 source->set_map_word(MapWord::FromForwardingAddress(target)); |
| 133 | 133 |
| 134 if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) { | 134 if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) { |
| 135 // Update NewSpace stats if necessary. | 135 // Update NewSpace stats if necessary. |
| 136 RecordCopiedObject(heap, target); | 136 RecordCopiedObject(heap, target); |
| 137 heap->OnMoveEvent(target, source, size); | 137 heap->OnMoveEvent(target, source, size); |
| 138 } | 138 } |
| 139 | 139 |
| 140 if (marks_handling == TRANSFER_MARKS) { | 140 if (marks_handling == TRANSFER_MARKS) { |
| 141 if (Marking::TransferColor(source, target)) { | 141 if (IncrementalMarking::TransferColor(source, target)) { |
| 142 MemoryChunk::IncrementLiveBytesFromGC(target, size); | 142 MemoryChunk::IncrementLiveBytesFromGC(target, size); |
| 143 } | 143 } |
| 144 } | 144 } |
| 145 } | 145 } |
| 146 | 146 |
| 147 template <AllocationAlignment alignment> | 147 template <AllocationAlignment alignment> |
| 148 static inline bool SemiSpaceCopyObject(Map* map, HeapObject** slot, | 148 static inline bool SemiSpaceCopyObject(Map* map, HeapObject** slot, |
| 149 HeapObject* object, int object_size) { | 149 HeapObject* object, int object_size) { |
| 150 Heap* heap = map->GetHeap(); | 150 Heap* heap = map->GetHeap(); |
| 151 | 151 |
| (...skipping 32 matching lines...) |
| 184 HeapObject* target = NULL; // Initialization to please compiler. | 184 HeapObject* target = NULL; // Initialization to please compiler. |
| 185 if (allocation.To(&target)) { | 185 if (allocation.To(&target)) { |
| 186 MigrateObject(heap, object, target, object_size); | 186 MigrateObject(heap, object, target, object_size); |
| 187 | 187 |
| 188 // Update slot to new target. | 188 // Update slot to new target. |
| 189 *slot = target; | 189 *slot = target; |
| 190 | 190 |
| 191 if (object_contents == POINTER_OBJECT) { | 191 if (object_contents == POINTER_OBJECT) { |
| 192 heap->promotion_queue()->insert( | 192 heap->promotion_queue()->insert( |
| 193 target, object_size, | 193 target, object_size, |
| 194 Marking::IsBlack(Marking::MarkBitFrom(object))); | 194 Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
| 195 } | 195 } |
| 196 heap->IncrementPromotedObjectsSize(object_size); | 196 heap->IncrementPromotedObjectsSize(object_size); |
| 197 return true; | 197 return true; |
| 198 } | 198 } |
| 199 return false; | 199 return false; |
| 200 } | 200 } |
| 201 | 201 |
| 202 template <ObjectContents object_contents, AllocationAlignment alignment> | 202 template <ObjectContents object_contents, AllocationAlignment alignment> |
| 203 static inline void EvacuateObject(Map* map, HeapObject** slot, | 203 static inline void EvacuateObject(Map* map, HeapObject** slot, |
| 204 HeapObject* object, int object_size) { | 204 HeapObject* object, int object_size) { |
| (...skipping 26 matching lines...) |
| 231 static inline void EvacuateJSFunction(Map* map, HeapObject** slot, | 231 static inline void EvacuateJSFunction(Map* map, HeapObject** slot, |
| 232 HeapObject* object) { | 232 HeapObject* object) { |
| 233 ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object); | 233 ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object); |
| 234 | 234 |
| 235 if (marks_handling == IGNORE_MARKS) return; | 235 if (marks_handling == IGNORE_MARKS) return; |
| 236 | 236 |
| 237 MapWord map_word = object->map_word(); | 237 MapWord map_word = object->map_word(); |
| 238 DCHECK(map_word.IsForwardingAddress()); | 238 DCHECK(map_word.IsForwardingAddress()); |
| 239 HeapObject* target = map_word.ToForwardingAddress(); | 239 HeapObject* target = map_word.ToForwardingAddress(); |
| 240 | 240 |
| 241 MarkBit mark_bit = Marking::MarkBitFrom(target); | 241 MarkBit mark_bit = ObjectMarking::MarkBitFrom(target); |
| 242 if (Marking::IsBlack(mark_bit)) { | 242 if (Marking::IsBlack(mark_bit)) { |
| 243 // This object is black and it might not be rescanned by marker. | 243 // This object is black and it might not be rescanned by marker. |
| 244 // We should explicitly record code entry slot for compaction because | 244 // We should explicitly record code entry slot for compaction because |
| 245 // promotion queue processing (IteratePromotedObjectPointers) will | 245 // promotion queue processing (IteratePromotedObjectPointers) will |
| 246 // miss it as it is not HeapObject-tagged. | 246 // miss it as it is not HeapObject-tagged. |
| 247 Address code_entry_slot = | 247 Address code_entry_slot = |
| 248 target->address() + JSFunction::kCodeEntryOffset; | 248 target->address() + JSFunction::kCodeEntryOffset; |
| 249 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); | 249 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); |
| 250 map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot( | 250 map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot( |
| 251 target, code_entry_slot, code); | 251 target, code_entry_slot, code); |
| (...skipping 192 matching lines...) |
| 444 void ScavengeVisitor::ScavengePointer(Object** p) { | 444 void ScavengeVisitor::ScavengePointer(Object** p) { |
| 445 Object* object = *p; | 445 Object* object = *p; |
| 446 if (!heap_->InNewSpace(object)) return; | 446 if (!heap_->InNewSpace(object)) return; |
| 447 | 447 |
| 448 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p), | 448 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p), |
| 449 reinterpret_cast<HeapObject*>(object)); | 449 reinterpret_cast<HeapObject*>(object)); |
| 450 } | 450 } |
| 451 | 451 |
| 452 } // namespace internal | 452 } // namespace internal |
| 453 } // namespace v8 | 453 } // namespace v8 |
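
The unchanged context above shows the heart of the scavenger's copy step: MigrateObject copies the object and then overwrites the old copy's map word with a forwarding address (lines 131-132), so any later visit of a slot that still points at the old copy is redirected to the new location instead of copying the object twice. As a rough illustration only, here is a minimal standalone C++ sketch of that forwarding-address idea using toy types; the names (ToyObject, Migrate, kForwardingTag) are hypothetical and do not correspond to V8's MapWord/HeapObject API.

```cpp
// Minimal sketch (hypothetical types, not V8's API) of the forwarding-address
// pattern: after an object is copied, the old copy's first word is overwritten
// with the tagged address of the new copy, so later visitors update their
// slots instead of copying again.
#include <cstdint>
#include <cstring>
#include <iostream>

struct ToyObject {
  uintptr_t first_word;  // Either a "map" pointer or a tagged forwarding address.
  int payload;
};

constexpr uintptr_t kForwardingTag = 1;

// Copy |source| into |target| and leave a forwarding address behind.
void Migrate(ToyObject* source, ToyObject* target) {
  std::memcpy(target, source, sizeof(ToyObject));
  source->first_word = reinterpret_cast<uintptr_t>(target) | kForwardingTag;
}

bool IsForwarded(const ToyObject* object) {
  return (object->first_word & kForwardingTag) != 0;
}

ToyObject* ForwardingAddress(const ToyObject* object) {
  return reinterpret_cast<ToyObject*>(object->first_word & ~kForwardingTag);
}

int main() {
  ToyObject from{0xCAFE0, 42};
  ToyObject to{};
  Migrate(&from, &to);
  // A later visitor holding a slot that still points at |from| can now
  // redirect it through the forwarding address, much as ScavengeObject does
  // for already-copied objects.
  ToyObject* slot = &from;
  if (IsForwarded(slot)) slot = ForwardingAddress(slot);
  std::cout << slot->payload << "\n";  // Prints 42 from the new copy.
  return 0;
}
```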