// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/scavenger.h"

#include "src/contexts.h"
#include "src/cpu-profiler.h"
#include "src/heap/heap.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/scavenger-inl.h"
#include "src/isolate.h"
#include "src/log.h"

namespace v8 {
namespace internal {

enum LoggingAndProfiling {
  LOGGING_AND_PROFILING_ENABLED,
  LOGGING_AND_PROFILING_DISABLED
};


enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS };

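// The visitor is specialized at compile time on whether incremental-marking
// mark bits have to be transferred and on whether logging/profiling hooks
// need to fire, so the common scavenge path pays for neither.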
template <MarksHandling marks_handling,
          LoggingAndProfiling logging_and_profiling_mode>
class ScavengingVisitor : public StaticVisitorBase {
 public:
  static void Initialize() {
    table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString);
    table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
    table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
    table_.Register(kVisitByteArray, &EvacuateByteArray);
    table_.Register(kVisitFixedArray, &EvacuateFixedArray);
    table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
    table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
    table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
    table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);

    table_.Register(
        kVisitNativeContext,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            Context::kSize>);

    table_.Register(
        kVisitConsString,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            ConsString::kSize>);

    table_.Register(
        kVisitSlicedString,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            SlicedString::kSize>);

    table_.Register(
        kVisitSymbol,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            Symbol::kSize>);

    table_.Register(
        kVisitSharedFunctionInfo,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            SharedFunctionInfo::kSize>);

    table_.Register(kVisitJSWeakCollection,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(kVisitJSTypedArray,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(kVisitJSDataView,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(kVisitJSRegExp,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    if (marks_handling == IGNORE_MARKS) {
      table_.Register(
          kVisitJSFunction,
          &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
              JSFunction::kSize>);
    } else {
      table_.Register(kVisitJSFunction, &EvacuateJSFunction);
    }

    table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
                                   kVisitDataObject, kVisitDataObjectGeneric>();

    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
                                   kVisitJSObject, kVisitJSObjectGeneric>();

    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
                                   kVisitStruct, kVisitStructGeneric>();
  }

  static VisitorDispatchTable<ScavengingCallback>* GetTable() {
    return &table_;
  }

 private:
  enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };

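  // Updates the new-space allocation/promotion counters for a copied object
  // when heap stats (debug builds) or --log-gc are enabled.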
  static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
    bool should_record = false;
#ifdef DEBUG
    should_record = FLAG_heap_stats;
#endif
    should_record = should_record || FLAG_log_gc;
    if (should_record) {
      if (heap->new_space()->Contains(obj)) {
        heap->new_space()->RecordAllocation(obj);
      } else {
        heap->new_space()->RecordPromotion(obj);
      }
    }
  }

  // Helper function used by SemiSpaceCopyObject and PromoteObject to copy
  // |source| to the freshly allocated |target| and to install a forwarding
  // address in the source object's map word.
  INLINE(static void MigrateObject(Heap* heap, HeapObject* source,
                                   HeapObject* target, int size)) {
    // If we migrate into to-space, then the to-space top pointer should be
    // right after the target object. Incorporate double alignment
    // over-allocation.
    DCHECK(!heap->InToSpace(target) ||
           target->address() + size == heap->new_space()->top() ||
           target->address() + size + kPointerSize == heap->new_space()->top());

    // Make sure that we do not overwrite the promotion queue which is at
    // the end of to-space.
    DCHECK(!heap->InToSpace(target) ||
           heap->promotion_queue()->IsBelowPromotionQueue(
               heap->new_space()->top()));

    // Copy the content of source to target.
    heap->CopyBlock(target->address(), source->address(), size);

    // Set the forwarding address.
    source->set_map_word(MapWord::FromForwardingAddress(target));

    if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
      // Update NewSpace stats if necessary.
      RecordCopiedObject(heap, target);
      heap->OnMoveEvent(target, source, size);
    }

    if (marks_handling == TRANSFER_MARKS) {
      if (Marking::TransferColor(source, target)) {
        MemoryChunk::IncrementLiveBytesFromGC(target, size);
      }
    }
  }

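  // Tries to copy |object| within new space (from-space to to-space).
  // Returns false if the to-space allocation fails, e.g. due to
  // fragmentation, leaving promotion as the fallback.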
  template <AllocationAlignment alignment>
  static inline bool SemiSpaceCopyObject(Map* map, HeapObject** slot,
                                         HeapObject* object, int object_size) {
    Heap* heap = map->GetHeap();

    DCHECK(heap->AllowedToBeMigrated(object, NEW_SPACE));
    AllocationResult allocation =
        heap->new_space()->AllocateRaw(object_size, alignment);

    HeapObject* target = NULL;  // Initialization to please compiler.
    if (allocation.To(&target)) {
      // Order is important here: Set the promotion limit before storing a
      // filler for double alignment or migrating the object. Otherwise we
      // may end up overwriting promotion queue entries when we migrate the
      // object.
      heap->promotion_queue()->SetNewLimit(heap->new_space()->top());

      MigrateObject(heap, object, target, object_size);

      // Update slot to new target.
      *slot = target;

      heap->IncrementSemiSpaceCopiedObjectSize(object_size);
      return true;
    }
    return false;
  }


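  // Tries to move |object| into old space. Pointer-containing objects are
  // pushed onto the promotion queue so their fields are scavenged later; for
  // JSFunctions only the non-weak field prefix is queued.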
  template <ObjectContents object_contents, AllocationAlignment alignment>
  static inline bool PromoteObject(Map* map, HeapObject** slot,
                                   HeapObject* object, int object_size) {
    Heap* heap = map->GetHeap();

    AllocationResult allocation =
        heap->old_space()->AllocateRaw(object_size, alignment);

    HeapObject* target = NULL;  // Initialization to please compiler.
    if (allocation.To(&target)) {
      MigrateObject(heap, object, target, object_size);

      // Update slot to new target.
      *slot = target;

      if (object_contents == POINTER_OBJECT) {
        if (map->instance_type() == JS_FUNCTION_TYPE) {
          heap->promotion_queue()->insert(target,
                                          JSFunction::kNonWeakFieldsEndOffset);
        } else {
          heap->promotion_queue()->insert(target, object_size);
        }
      }
      heap->IncrementPromotedObjectsSize(object_size);
      return true;
    }
    return false;
  }


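  // Central evacuation policy: copy within new space unless the object has
  // survived long enough to be promoted; if the preferred strategy fails,
  // fall back to the other one. One of the two must succeed.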
  template <ObjectContents object_contents, AllocationAlignment alignment>
  static inline void EvacuateObject(Map* map, HeapObject** slot,
                                    HeapObject* object, int object_size) {
    SLOW_DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
    SLOW_DCHECK(object->Size() == object_size);
    Heap* heap = map->GetHeap();

    if (!heap->ShouldBePromoted(object->address(), object_size)) {
      // A semi-space copy may fail due to fragmentation. In that case, we
      // try to promote the object.
      if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) {
        return;
      }
    }

    if (PromoteObject<object_contents, alignment>(map, slot, object,
                                                  object_size)) {
      return;
    }

    // If promotion failed, we try to copy the object to the other semi-space.
    if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return;

    UNREACHABLE();
  }


  static inline void EvacuateJSFunction(Map* map, HeapObject** slot,
                                        HeapObject* object) {
    ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
        JSFunction::kSize>(map, slot, object);

    MapWord map_word = object->map_word();
    DCHECK(map_word.IsForwardingAddress());
    HeapObject* target = map_word.ToForwardingAddress();

    MarkBit mark_bit = Marking::MarkBitFrom(target);
    if (Marking::IsBlack(mark_bit)) {
      // This object is black and it might not be rescanned by the marker.
      // We should explicitly record the code entry slot for compaction
      // because promotion queue processing (IterateAndMarkPointersToFromSpace)
      // will miss it as it is not HeapObject-tagged.
      Address code_entry_slot =
          target->address() + JSFunction::kCodeEntryOffset;
      Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
      map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot(
          target, code_entry_slot, code);
    }
  }


  static inline void EvacuateFixedArray(Map* map, HeapObject** slot,
                                        HeapObject* object) {
    int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
                                                 object_size);
  }


  static inline void EvacuateFixedDoubleArray(Map* map, HeapObject** slot,
                                              HeapObject* object) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    int object_size = FixedDoubleArray::SizeFor(length);
    EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size);
  }


  static inline void EvacuateFixedTypedArray(Map* map, HeapObject** slot,
                                             HeapObject* object) {
    int object_size = reinterpret_cast<FixedTypedArrayBase*>(object)->size();
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);

    MapWord map_word = object->map_word();
    DCHECK(map_word.IsForwardingAddress());
    FixedTypedArrayBase* target =
        reinterpret_cast<FixedTypedArrayBase*>(map_word.ToForwardingAddress());
    if (target->base_pointer() != Smi::FromInt(0))
      target->set_base_pointer(target, SKIP_WRITE_BARRIER);
  }


  static inline void EvacuateFixedFloat64Array(Map* map, HeapObject** slot,
                                               HeapObject* object) {
    int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size();
    EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size);

    MapWord map_word = object->map_word();
    DCHECK(map_word.IsForwardingAddress());
    FixedTypedArrayBase* target =
        reinterpret_cast<FixedTypedArrayBase*>(map_word.ToForwardingAddress());
    if (target->base_pointer() != Smi::FromInt(0))
      target->set_base_pointer(target, SKIP_WRITE_BARRIER);
  }


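  // If the JSArrayBuffer was promoted out of new space, the array buffer
  // tracker has to be notified so it can update its bookkeeping.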
  static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
                                           HeapObject* object) {
    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);

    Heap* heap = map->GetHeap();
    MapWord map_word = object->map_word();
    DCHECK(map_word.IsForwardingAddress());
    HeapObject* target = map_word.ToForwardingAddress();
    if (!heap->InNewSpace(target)) {
      heap->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target));
    }
  }


  static inline void EvacuateByteArray(Map* map, HeapObject** slot,
                                       HeapObject* object) {
    int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
  }


  static inline void EvacuateSeqOneByteString(Map* map, HeapObject** slot,
                                              HeapObject* object) {
    int object_size = SeqOneByteString::cast(object)
                          ->SeqOneByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
  }


  static inline void EvacuateSeqTwoByteString(Map* map, HeapObject** slot,
                                              HeapObject* object) {
    int object_size = SeqTwoByteString::cast(object)
                          ->SeqTwoByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
  }


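  // A cons string whose second part is the empty string can be
  // short-circuited to its first part, collapsing the pair to a single
  // reference instead of copying the wrapper. Short-circuiting is only
  // performed while marks are ignored.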
  static inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
                                               HeapObject* object) {
    DCHECK(IsShortcutCandidate(map->instance_type()));

    Heap* heap = map->GetHeap();

    if (marks_handling == IGNORE_MARKS &&
        ConsString::cast(object)->unchecked_second() == heap->empty_string()) {
      HeapObject* first =
          HeapObject::cast(ConsString::cast(object)->unchecked_first());

      *slot = first;

      if (!heap->InNewSpace(first)) {
        object->set_map_word(MapWord::FromForwardingAddress(first));
        return;
      }

      MapWord first_word = first->map_word();
      if (first_word.IsForwardingAddress()) {
        HeapObject* target = first_word.ToForwardingAddress();

        *slot = target;
        object->set_map_word(MapWord::FromForwardingAddress(target));
        return;
      }

      Scavenger::ScavengeObjectSlow(slot, first);
      object->set_map_word(MapWord::FromForwardingAddress(*slot));
      return;
    }

    int object_size = ConsString::kSize;
    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
                                                 object_size);
  }

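  // Generic evacuation strategy: VisitSpecialized is used when the instance
  // size is a compile-time constant, Visit when it has to be read from the
  // map at runtime.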
  template <ObjectContents object_contents>
  class ObjectEvacuationStrategy {
   public:
    template <int object_size>
    static inline void VisitSpecialized(Map* map, HeapObject** slot,
                                        HeapObject* object) {
      EvacuateObject<object_contents, kWordAligned>(map, slot, object,
                                                    object_size);
    }

    static inline void Visit(Map* map, HeapObject** slot, HeapObject* object) {
      int object_size = map->instance_size();
      EvacuateObject<object_contents, kWordAligned>(map, slot, object,
                                                    object_size);
    }
  };

  static VisitorDispatchTable<ScavengingCallback> table_;
};


template <MarksHandling marks_handling,
          LoggingAndProfiling logging_and_profiling_mode>
VisitorDispatchTable<ScavengingCallback>
    ScavengingVisitor<marks_handling, logging_and_profiling_mode>::table_;


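// Populates the dispatch tables of all four visitor specializations so that
// SelectScavengingVisitorsTable can copy from any of them.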
// static
void Scavenger::Initialize() {
  ScavengingVisitor<TRANSFER_MARKS,
                    LOGGING_AND_PROFILING_DISABLED>::Initialize();
  ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize();
  ScavengingVisitor<TRANSFER_MARKS,
                    LOGGING_AND_PROFILING_ENABLED>::Initialize();
  ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize();
}


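// Slow path: the object has not been forwarded yet, so dispatch on its map
// to the currently selected evacuation callback.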
// static
void Scavenger::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
  SLOW_DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
  MapWord first_word = object->map_word();
  SLOW_DCHECK(!first_word.IsForwardingAddress());
  Map* map = first_word.ToMap();
  Scavenger* scavenger = map->GetHeap()->scavenge_collector_;
  scavenger->scavenging_visitors_table_.GetVisitor(map)(map, p, object);
}


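// Picks the visitor table matching the current heap state: marks are
// transferred only while incremental marking is active, and the
// logging/profiling variants are used only when an observer is attached.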
void Scavenger::SelectScavengingVisitorsTable() {
  bool logging_and_profiling =
      FLAG_verify_predictable || isolate()->logger()->is_logging() ||
      isolate()->cpu_profiler()->is_profiling() ||
      (isolate()->heap_profiler() != NULL &&
       isolate()->heap_profiler()->is_tracking_object_moves());

  if (!heap()->incremental_marking()->IsMarking()) {
    if (!logging_and_profiling) {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<IGNORE_MARKS,
                            LOGGING_AND_PROFILING_DISABLED>::GetTable());
    } else {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<IGNORE_MARKS,
                            LOGGING_AND_PROFILING_ENABLED>::GetTable());
    }
  } else {
    if (!logging_and_profiling) {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<TRANSFER_MARKS,
                            LOGGING_AND_PROFILING_DISABLED>::GetTable());
    } else {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<TRANSFER_MARKS,
                            LOGGING_AND_PROFILING_ENABLED>::GetTable());
    }

    if (heap()->incremental_marking()->IsCompacting()) {
      // When compacting, forbid short-circuiting of cons strings: the
      // scavenger relies on the fact that a new-space object cannot be
      // evacuated into an evacuation candidate, and short-circuiting would
      // violate this assumption.
      scavenging_visitors_table_.Register(
          StaticVisitorBase::kVisitShortcutCandidate,
          scavenging_visitors_table_.GetVisitorById(
              StaticVisitorBase::kVisitConsString));
    }
  }
}


Isolate* Scavenger::isolate() { return heap()->isolate(); }


void ScavengeVisitor::VisitPointer(Object** p) { ScavengePointer(p); }


void ScavengeVisitor::VisitPointers(Object** start, Object** end) {
  // Copy all HeapObject pointers in [start, end)
  for (Object** p = start; p < end; p++) ScavengePointer(p);
}


void ScavengeVisitor::ScavengePointer(Object** p) {
  Object* object = *p;
  if (!heap_->InNewSpace(object)) return;
  Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p),
                            reinterpret_cast<HeapObject*>(object));
}

}  // namespace internal
}  // namespace v8