Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/scavenger.h" | 5 #include "src/heap/scavenger.h" |
| 6 | 6 |
| 7 #include "src/contexts.h" | 7 #include "src/contexts.h" |
| 8 #include "src/heap/heap.h" | 8 #include "src/heap/heap.h" |
| 9 #include "src/heap/objects-visiting-inl.h" | 9 #include "src/heap/objects-visiting-inl.h" |
| 10 #include "src/heap/scavenger-inl.h" | 10 #include "src/heap/scavenger-inl.h" |
| (...skipping 12 matching lines...) | |
| 23 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS }; | 23 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS }; |
| 24 | 24 |
| 25 template <MarksHandling marks_handling, | 25 template <MarksHandling marks_handling, |
| 26 LoggingAndProfiling logging_and_profiling_mode> | 26 LoggingAndProfiling logging_and_profiling_mode> |
| 27 class ScavengingVisitor : public StaticVisitorBase { | 27 class ScavengingVisitor : public StaticVisitorBase { |
| 28 public: | 28 public: |
| 29 static void Initialize() { | 29 static void Initialize() { |
| 30 table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString); | 30 table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString); |
| 31 table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString); | 31 table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString); |
| 32 table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate); | 32 table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate); |
| 33 table_.Register(kVisitThinString, &EvacuateThinString); | |
| 33 table_.Register(kVisitByteArray, &EvacuateByteArray); | 34 table_.Register(kVisitByteArray, &EvacuateByteArray); |
| 34 table_.Register(kVisitFixedArray, &EvacuateFixedArray); | 35 table_.Register(kVisitFixedArray, &EvacuateFixedArray); |
| 35 table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray); | 36 table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray); |
| 36 table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray); | 37 table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray); |
| 37 table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array); | 38 table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array); |
| 38 table_.Register(kVisitJSArrayBuffer, | 39 table_.Register(kVisitJSArrayBuffer, |
| 39 &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit); | 40 &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit); |
| 40 | 41 |
| 41 table_.Register( | 42 table_.Register( |
| 42 kVisitNativeContext, | 43 kVisitNativeContext, |
| (...skipping 39 matching lines...) | |
| 82 kVisitJSApiObject, kVisitJSApiObjectGeneric>(); | 83 kVisitJSApiObject, kVisitJSApiObjectGeneric>(); |
| 83 | 84 |
| 84 table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>, | 85 table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>, |
| 85 kVisitStruct, kVisitStructGeneric>(); | 86 kVisitStruct, kVisitStructGeneric>(); |
| 86 } | 87 } |
| 87 | 88 |
| 88 static VisitorDispatchTable<ScavengingCallback>* GetTable() { | 89 static VisitorDispatchTable<ScavengingCallback>* GetTable() { |
| 89 return &table_; | 90 return &table_; |
| 90 } | 91 } |
| 91 | 92 |
| 93 static void EvacuateThinStringNoShortcut(Map* map, HeapObject** slot, | |
| 94 HeapObject* object) { | |
| 95 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, | |
| 96 ThinString::kSize); | |
| 97 } | |
| 98 | |
| 92 private: | 99 private: |
| 93 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT }; | 100 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT }; |
| 94 | 101 |
| 95 static void RecordCopiedObject(Heap* heap, HeapObject* obj) { | 102 static void RecordCopiedObject(Heap* heap, HeapObject* obj) { |
| 96 bool should_record = false; | 103 bool should_record = false; |
| 97 #ifdef DEBUG | 104 #ifdef DEBUG |
| 98 should_record = FLAG_heap_stats; | 105 should_record = FLAG_heap_stats; |
| 99 #endif | 106 #endif |
| 100 should_record = should_record || FLAG_log_gc; | 107 should_record = should_record || FLAG_log_gc; |
| 101 if (should_record) { | 108 if (should_record) { |
| (...skipping 226 matching lines...) | |
| 328 Scavenger::ScavengeObjectSlow(slot, first); | 335 Scavenger::ScavengeObjectSlow(slot, first); |
| 329 object->set_map_word(MapWord::FromForwardingAddress(*slot)); | 336 object->set_map_word(MapWord::FromForwardingAddress(*slot)); |
| 330 return; | 337 return; |
| 331 } | 338 } |
| 332 | 339 |
| 333 int object_size = ConsString::kSize; | 340 int object_size = ConsString::kSize; |
| 334 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, | 341 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, |
| 335 object_size); | 342 object_size); |
| 336 } | 343 } |
| 337 | 344 |
| 345 static inline void EvacuateThinString(Map* map, HeapObject** slot, | |
| 346 HeapObject* object) { | |
| 347 if (marks_handling == IGNORE_MARKS) { | |
| 348 HeapObject* actual = ThinString::cast(object)->actual(); | |
| 349 *slot = actual; | |
| 350 // ThinStrings always refer to internalized strings, which are | |
| 351 // always in old space. | |
| 352 DCHECK(!map->GetHeap()->InNewSpace(actual)); | |
| 353 object->set_map_word(MapWord::FromForwardingAddress(actual)); | |
| 354 return; | |
| 355 } | |
| 356 | |
| 357 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, | |
| 358 ThinString::kSize); | |
| 359 } | |
| 360 | |
| 338 template <ObjectContents object_contents> | 361 template <ObjectContents object_contents> |
| 339 class ObjectEvacuationStrategy { | 362 class ObjectEvacuationStrategy { |
| 340 public: | 363 public: |
| 341 template <int object_size> | 364 template <int object_size> |
| 342 static inline void VisitSpecialized(Map* map, HeapObject** slot, | 365 static inline void VisitSpecialized(Map* map, HeapObject** slot, |
| 343 HeapObject* object) { | 366 HeapObject* object) { |
| 344 EvacuateObject<object_contents, kWordAligned>(map, slot, object, | 367 EvacuateObject<object_contents, kWordAligned>(map, slot, object, |
| 345 object_size); | 368 object_size); |
| 346 } | 369 } |
| 347 | 370 |
| (...skipping 64 matching lines...) | |
| 412 | 435 |
| 413 if (heap()->incremental_marking()->IsCompacting()) { | 436 if (heap()->incremental_marking()->IsCompacting()) { |
| 414 // When compacting forbid short-circuiting of cons-strings. | 437 // When compacting forbid short-circuiting of cons-strings. |
| 415 // Scavenging code relies on the fact that new space object | 438 // Scavenging code relies on the fact that new space object |
| 416 // can't be evacuated into evacuation candidate but | 439 // can't be evacuated into evacuation candidate but |
| 417 // short-circuiting violates this assumption. | 440 // short-circuiting violates this assumption. |
| 418 scavenging_visitors_table_.Register( | 441 scavenging_visitors_table_.Register( |
| 419 StaticVisitorBase::kVisitShortcutCandidate, | 442 StaticVisitorBase::kVisitShortcutCandidate, |
| 420 scavenging_visitors_table_.GetVisitorById( | 443 scavenging_visitors_table_.GetVisitorById( |
| 421 StaticVisitorBase::kVisitConsString)); | 444 StaticVisitorBase::kVisitConsString)); |
| 445 scavenging_visitors_table_.Register( | |
| 446 StaticVisitorBase::kVisitThinString, | |
| 447 &ScavengingVisitor<TRANSFER_MARKS, LOGGING_AND_PROFILING_DISABLED>:: | |
| 448 EvacuateThinStringNoShortcut); | |

Review comments on this hunk:

Hannes Payer (out of office), 2016/12/21 09:53:14:
Instead of using a special visitor when compacting …

Jakob Kummerow, 2017/01/04 12:45:06:
I don't understand what you mean here, can you ple…

Jakob Kummerow, 2017/01/09 17:42:31:
As discussed offline, this appears to be much hard…

| 422 } | 449 } |
| 423 } | 450 } |
| 424 } | 451 } |
| 425 | 452 |
| 426 | 453 |
| 427 Isolate* Scavenger::isolate() { return heap()->isolate(); } | 454 Isolate* Scavenger::isolate() { return heap()->isolate(); } |
| 428 | 455 |
| 429 | 456 |
| 430 void ScavengeVisitor::VisitPointer(Object** p) { ScavengePointer(p); } | 457 void ScavengeVisitor::VisitPointer(Object** p) { ScavengePointer(p); } |
| 431 | 458 |
| 432 | 459 |
| 433 void ScavengeVisitor::VisitPointers(Object** start, Object** end) { | 460 void ScavengeVisitor::VisitPointers(Object** start, Object** end) { |
| 434 // Copy all HeapObject pointers in [start, end) | 461 // Copy all HeapObject pointers in [start, end) |
| 435 for (Object** p = start; p < end; p++) ScavengePointer(p); | 462 for (Object** p = start; p < end; p++) ScavengePointer(p); |
| 436 } | 463 } |
| 437 | 464 |
| 438 | 465 |
| 439 void ScavengeVisitor::ScavengePointer(Object** p) { | 466 void ScavengeVisitor::ScavengePointer(Object** p) { |
| 440 Object* object = *p; | 467 Object* object = *p; |
| 441 if (!heap_->InNewSpace(object)) return; | 468 if (!heap_->InNewSpace(object)) return; |
| 442 | 469 |
| 443 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p), | 470 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p), |
| 444 reinterpret_cast<HeapObject*>(object)); | 471 reinterpret_cast<HeapObject*>(object)); |
| 445 } | 472 } |
| 446 | 473 |
| 447 } // namespace internal | 474 } // namespace internal |
| 448 } // namespace v8 | 475 } // namespace v8 |
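
For readers skimming the CL, here is a minimal, self-contained sketch of the shortcut that `EvacuateThinString` performs under `IGNORE_MARKS`: instead of copying the ThinString into to-space, the referring slot is rewritten to point directly at the internalized target string, which the new `DCHECK` asserts already lives in old space. This is not V8 code; all type and function names below are invented stand-ins for illustration only.

```cpp
// Hypothetical, simplified model of the ThinString shortcut in this CL.
// The *Model types below are stand-ins, not the real V8 heap classes.
#include <cassert>
#include <cstdio>
#include <string>

// Stand-in base for anything living on the model heap.
struct HeapObjectModel {
  virtual ~HeapObjectModel() = default;
};

// An already-internalized string; in the real heap it lives in old space.
struct InternalizedStringModel : HeapObjectModel {
  explicit InternalizedStringModel(std::string s) : payload(std::move(s)) {}
  std::string payload;
};

// A thin wrapper that only forwards to an internalized string.
struct ThinStringModel : HeapObjectModel {
  explicit ThinStringModel(InternalizedStringModel* a) : actual(a) {}
  InternalizedStringModel* actual;
};

// Analogous to EvacuateThinString under IGNORE_MARKS: rather than copying
// the ThinString, rewrite the slot so it points straight at the internalized
// target, which is already outside new space and needs no evacuation.
void ShortcutThinStringSlot(HeapObjectModel** slot) {
  if (auto* thin = dynamic_cast<ThinStringModel*>(*slot)) {
    *slot = thin->actual;
  }
}

int main() {
  InternalizedStringModel internalized("hello");
  ThinStringModel thin(&internalized);
  HeapObjectModel* slot = &thin;

  ShortcutThinStringSlot(&slot);

  // The slot now bypasses the ThinString entirely.
  assert(slot == &internalized);
  std::printf("slot resolves to: %s\n",
              static_cast<InternalizedStringModel*>(slot)->payload.c_str());
  return 0;
}
```

When the incremental marker is compacting, the CL mirrors the existing cons-string handling and registers `EvacuateThinStringNoShortcut` instead, so the slot rewrite above is skipped and the ThinString is evacuated as an ordinary pointer object.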