OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/scavenger.h" | 5 #include "src/heap/scavenger.h" |
6 | 6 |
7 #include "src/contexts.h" | 7 #include "src/contexts.h" |
8 #include "src/heap/heap.h" | 8 #include "src/heap/heap.h" |
9 #include "src/heap/objects-visiting-inl.h" | 9 #include "src/heap/objects-visiting-inl.h" |
10 #include "src/heap/scavenger-inl.h" | 10 #include "src/heap/scavenger-inl.h" |
(...skipping 12 matching lines...) |
23 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS }; | 23 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS }; |
24 | 24 |
25 template <MarksHandling marks_handling, | 25 template <MarksHandling marks_handling, |
26 LoggingAndProfiling logging_and_profiling_mode> | 26 LoggingAndProfiling logging_and_profiling_mode> |
27 class ScavengingVisitor : public StaticVisitorBase { | 27 class ScavengingVisitor : public StaticVisitorBase { |
28 public: | 28 public: |
29 static void Initialize() { | 29 static void Initialize() { |
30 table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString); | 30 table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString); |
31 table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString); | 31 table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString); |
32 table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate); | 32 table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate); |
| 33 table_.Register(kVisitThinString, &EvacuateThinString); |
33 table_.Register(kVisitByteArray, &EvacuateByteArray); | 34 table_.Register(kVisitByteArray, &EvacuateByteArray); |
34 table_.Register(kVisitFixedArray, &EvacuateFixedArray); | 35 table_.Register(kVisitFixedArray, &EvacuateFixedArray); |
35 table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray); | 36 table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray); |
36 table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray); | 37 table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray); |
37 table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array); | 38 table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array); |
38 table_.Register(kVisitJSArrayBuffer, | 39 table_.Register(kVisitJSArrayBuffer, |
39 &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit); | 40 &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit); |
40 | 41 |
41 table_.Register( | 42 table_.Register( |
42 kVisitNativeContext, | 43 kVisitNativeContext, |
(...skipping 39 matching lines...) |
82 kVisitJSApiObject, kVisitJSApiObjectGeneric>(); | 83 kVisitJSApiObject, kVisitJSApiObjectGeneric>(); |
83 | 84 |
84 table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>, | 85 table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>, |
85 kVisitStruct, kVisitStructGeneric>(); | 86 kVisitStruct, kVisitStructGeneric>(); |
86 } | 87 } |
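
Initialize() above is the heart of the scavenger's static dispatch: every visitor id (one per object shape) is bound once to an evacuation callback, and the hot loop then pays only a single indexed, indirect call per object. Below is a minimal, self-contained sketch of that table pattern; DispatchTable, VisitorId, and the two callbacks are hypothetical stand-ins, not V8's actual VisitorDispatchTable API.

```cpp
#include <array>
#include <cstdio>

enum VisitorId { kVisitSeqOneByteString, kVisitThinString, kVisitorIdCount };

struct HeapObject;  // opaque stand-in for this sketch
using ScavengingCallback = void (*)(HeapObject** slot, HeapObject* object);

class DispatchTable {
 public:
  void Register(VisitorId id, ScavengingCallback callback) {
    callbacks_[id] = callback;
  }
  void Call(VisitorId id, HeapObject** slot, HeapObject* object) {
    callbacks_[id](slot, object);  // one indirect call per visited object
  }

 private:
  std::array<ScavengingCallback, kVisitorIdCount> callbacks_{};
};

static void EvacuateSeqOneByteString(HeapObject**, HeapObject*) {
  std::puts("copy a one-byte string");
}
static void EvacuateThinString(HeapObject**, HeapObject*) {
  std::puts("short-circuit a thin string");
}

int main() {
  DispatchTable table;
  table.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString);
  table.Register(kVisitThinString, &EvacuateThinString);
  table.Call(kVisitThinString, nullptr, nullptr);  // prints the thin-string line
}
```
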
87 | 88 |
88 static VisitorDispatchTable<ScavengingCallback>* GetTable() { | 89 static VisitorDispatchTable<ScavengingCallback>* GetTable() { |
89 return &table_; | 90 return &table_; |
90 } | 91 } |
91 | 92 |
| 93 static void EvacuateThinStringNoShortcut(Map* map, HeapObject** slot, |
| 94 HeapObject* object) { |
| 95 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, |
| 96 ThinString::kSize); |
| 97 } |
| 98 |
92 private: | 99 private: |
93 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT }; | 100 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT }; |
94 | 101 |
95 static void RecordCopiedObject(Heap* heap, HeapObject* obj) { | 102 static void RecordCopiedObject(Heap* heap, HeapObject* obj) { |
96 bool should_record = false; | 103 bool should_record = false; |
97 #ifdef DEBUG | 104 #ifdef DEBUG |
98 should_record = FLAG_heap_stats; | 105 should_record = FLAG_heap_stats; |
99 #endif | 106 #endif |
100 should_record = should_record || FLAG_log_gc; | 107 should_record = should_record || FLAG_log_gc; |
101 if (should_record) { | 108 if (should_record) { |
(...skipping 230 matching lines...) |
332 Scavenger::ScavengeObjectSlow(slot, first); | 339 Scavenger::ScavengeObjectSlow(slot, first); |
333 object->set_map_word(MapWord::FromForwardingAddress(*slot)); | 340 object->set_map_word(MapWord::FromForwardingAddress(*slot)); |
334 return; | 341 return; |
335 } | 342 } |
336 | 343 |
337 int object_size = ConsString::kSize; | 344 int object_size = ConsString::kSize; |
338 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, | 345 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, |
339 object_size); | 346 object_size); |
340 } | 347 } |
341 | 348 |
| 349 static inline void EvacuateThinString(Map* map, HeapObject** slot, |
| 350 HeapObject* object) { |
| 351 if (marks_handling == IGNORE_MARKS) { |
| 352 HeapObject* actual = ThinString::cast(object)->actual(); |
| 353 *slot = actual; |
| 354 // ThinStrings always refer to internalized strings, which are |
| 355 // always in old space. |
| 356 DCHECK(!map->GetHeap()->InNewSpace(actual)); |
| 357 object->set_map_word(MapWord::FromForwardingAddress(actual)); |
| 358 return; |
| 359 } |
| 360 |
| 361 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, |
| 362 ThinString::kSize); |
| 363 } |
| 364 |
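
The new EvacuateThinString above is the interesting case: when marks can be ignored, the scavenger does not copy the ThinString wrapper at all. It rewrites the slot to point straight at the wrapped internalized string (which is guaranteed to be in old space, hence the DCHECK) and stores a forwarding address in the old object's map word, so every later visit resolves to the same target. A minimal sketch of that short-circuit, with hypothetical stand-in types (a plain pointer doubles as the map word):

```cpp
#include <cassert>
#include <cstdio>

struct HeapObject {
  HeapObject* forwarding = nullptr;  // stand-in for the map word
  bool in_new_space = false;
};

struct ThinString : HeapObject {
  HeapObject* actual = nullptr;  // the internalized string being wrapped
};

void EvacuateThinString(HeapObject** slot, ThinString* object) {
  HeapObject* actual = object->actual;
  *slot = actual;  // the slot now bypasses the wrapper entirely
  // ThinStrings wrap internalized strings, which live in old space, so the
  // target itself never needs to be copied by the scavenger.
  assert(!actual->in_new_space);
  // Leave a forwarding address so later visits of the old wrapper resolve
  // to the same target instead of copying it.
  object->forwarding = actual;
}

int main() {
  HeapObject internalized;  // pretend: an old-space internalized string
  ThinString thin;
  thin.actual = &internalized;
  HeapObject* slot = &thin;
  EvacuateThinString(&slot, &thin);
  std::printf("slot bypasses the wrapper: %s\n",
              slot == &internalized ? "yes" : "no");
}
```
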
342 template <ObjectContents object_contents> | 365 template <ObjectContents object_contents> |
343 class ObjectEvacuationStrategy { | 366 class ObjectEvacuationStrategy { |
344 public: | 367 public: |
345 template <int object_size> | 368 template <int object_size> |
346 static inline void VisitSpecialized(Map* map, HeapObject** slot, | 369 static inline void VisitSpecialized(Map* map, HeapObject** slot, |
347 HeapObject* object) { | 370 HeapObject* object) { |
348 EvacuateObject<object_contents, kWordAligned>(map, slot, object, | 371 EvacuateObject<object_contents, kWordAligned>(map, slot, object, |
349 object_size); | 372 object_size); |
350 } | 373 } |
351 | 374 |
(...skipping 64 matching lines...) |
416 | 439 |
417 if (heap()->incremental_marking()->IsCompacting()) { | 440 if (heap()->incremental_marking()->IsCompacting()) { |
418 // When compacting, forbid short-circuiting of cons-strings. | 441 // When compacting, forbid short-circuiting of cons-strings. |
419 // Scavenging code relies on the fact that a new space object | 442 // Scavenging code relies on the fact that a new space object |
420 // can't be evacuated into an evacuation candidate, but | 443 // can't be evacuated into an evacuation candidate, but |
421 // short-circuiting violates this assumption. | 444 // short-circuiting violates this assumption. |
422 scavenging_visitors_table_.Register( | 445 scavenging_visitors_table_.Register( |
423 StaticVisitorBase::kVisitShortcutCandidate, | 446 StaticVisitorBase::kVisitShortcutCandidate, |
424 scavenging_visitors_table_.GetVisitorById( | 447 scavenging_visitors_table_.GetVisitorById( |
425 StaticVisitorBase::kVisitConsString)); | 448 StaticVisitorBase::kVisitConsString)); |
| 449 scavenging_visitors_table_.Register( |
| 450 StaticVisitorBase::kVisitThinString, |
| 451 &ScavengingVisitor<TRANSFER_MARKS, LOGGING_AND_PROFILING_DISABLED>:: |
| 452 EvacuateThinStringNoShortcut); |
426 } | 453 } |
427 } | 454 } |
428 } | 455 } |
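
The registration above mirrors what the code already does for cons strings: while incremental marking is compacting, short-circuiting is forbidden, so kVisitThinString is repointed at EvacuateThinStringNoShortcut and the wrapper is copied whole. Otherwise a scavenged slot could end up pointing at an object on an evacuation candidate page, violating the invariant the comment describes. A minimal sketch of this mode-dependent table swap, with hypothetical stand-ins for the visitor ids and callbacks:

```cpp
#include <cstdio>

enum VisitorId {
  kVisitShortcutCandidate,
  kVisitConsString,
  kVisitThinString,
  kIdCount
};
using Callback = void (*)();

static void EvacuateShortcutCandidate()    { std::puts("shortcut cons string"); }
static void EvacuateConsString()           { std::puts("copy cons string whole"); }
static void EvacuateThinString()           { std::puts("shortcut thin string"); }
static void EvacuateThinStringNoShortcut() { std::puts("copy thin string whole"); }

int main() {
  Callback table[kIdCount] = {&EvacuateShortcutCandidate, &EvacuateConsString,
                              &EvacuateThinString};

  bool compacting = true;  // stand-in for incremental_marking()->IsCompacting()
  if (compacting) {
    // Forbid short-circuiting: a shortcut would let a scavenged slot end up
    // pointing at an object on an evacuation candidate page, which the
    // scavenger assumes cannot happen.
    table[kVisitShortcutCandidate] = table[kVisitConsString];
    table[kVisitThinString] = &EvacuateThinStringNoShortcut;
  }
  table[kVisitThinString]();  // prints "copy thin string whole"
}
```
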
429 | 456 |
430 | 457 |
431 Isolate* Scavenger::isolate() { return heap()->isolate(); } | 458 Isolate* Scavenger::isolate() { return heap()->isolate(); } |
432 | 459 |
433 | 460 |
434 void ScavengeVisitor::VisitPointer(Object** p) { ScavengePointer(p); } | 461 void ScavengeVisitor::VisitPointer(Object** p) { ScavengePointer(p); } |
435 | 462 |
436 | 463 |
437 void ScavengeVisitor::VisitPointers(Object** start, Object** end) { | 464 void ScavengeVisitor::VisitPointers(Object** start, Object** end) { |
438 // Copy all HeapObject pointers in [start, end) | 465 // Copy all HeapObject pointers in [start, end) |
439 for (Object** p = start; p < end; p++) ScavengePointer(p); | 466 for (Object** p = start; p < end; p++) ScavengePointer(p); |
440 } | 467 } |
441 | 468 |
442 | 469 |
443 void ScavengeVisitor::ScavengePointer(Object** p) { | 470 void ScavengeVisitor::ScavengePointer(Object** p) { |
444 Object* object = *p; | 471 Object* object = *p; |
445 if (!heap_->InNewSpace(object)) return; | 472 if (!heap_->InNewSpace(object)) return; |
446 | 473 |
447 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p), | 474 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p), |
448 reinterpret_cast<HeapObject*>(object)); | 475 reinterpret_cast<HeapObject*>(object)); |
449 } | 476 } |
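
ScavengePointer is the scavenger's cheap filter: anything not in new space (old-space objects, and in real V8 also Smis) is ignored, and only genuine new-space pointers reach ScavengeObject, which copies the object or follows an existing forwarding address. A minimal sketch of that filter, with hypothetical stand-in types:

```cpp
#include <cstdio>

struct Object {
  bool in_new_space = false;
};

static bool InNewSpace(const Object* o) {
  return o != nullptr && o->in_new_space;
}

static void ScavengeObject(Object** slot) {
  // Stand-in: the real scavenger copies the object or follows its
  // forwarding address, then updates *slot.
  std::puts("would copy or forward this new-space object");
  (void)slot;
}

static void ScavengePointer(Object** p) {
  Object* object = *p;
  if (!InNewSpace(object)) return;  // old-space objects and immediates: no-op
  ScavengeObject(p);
}

int main() {
  Object young;
  young.in_new_space = true;
  Object old_obj;  // stays false: "old space"
  Object* slot_a = &young;
  Object* slot_b = &old_obj;
  ScavengePointer(&slot_a);  // passes the filter, gets scavenged
  ScavengePointer(&slot_b);  // filtered out, nothing happens
}
```
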
450 | 477 |
451 } // namespace internal | 478 } // namespace internal |
452 } // namespace v8 | 479 } // namespace v8 |