OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/scavenger.h" | 5 #include "src/heap/scavenger.h" |
6 | 6 |
7 #include "src/contexts.h" | 7 #include "src/contexts.h" |
8 #include "src/heap/heap.h" | 8 #include "src/heap/heap.h" |
9 #include "src/heap/objects-visiting-inl.h" | 9 #include "src/heap/objects-visiting-inl.h" |
10 #include "src/heap/scavenger-inl.h" | 10 #include "src/heap/scavenger-inl.h" |
(...skipping 12 matching lines...) |
23 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS }; | 23 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS }; |
24 | 24 |
25 template <MarksHandling marks_handling, | 25 template <MarksHandling marks_handling, |
26 LoggingAndProfiling logging_and_profiling_mode> | 26 LoggingAndProfiling logging_and_profiling_mode> |
27 class ScavengingVisitor : public StaticVisitorBase { | 27 class ScavengingVisitor : public StaticVisitorBase { |
28 public: | 28 public: |
29 static void Initialize() { | 29 static void Initialize() { |
30 table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString); | 30 table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString); |
31 table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString); | 31 table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString); |
32 table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate); | 32 table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate); |
33 table_.Register(kVisitThinString, &EvacuateThinString); | |
34 table_.Register(kVisitByteArray, &EvacuateByteArray); | 33 table_.Register(kVisitByteArray, &EvacuateByteArray); |
35 table_.Register(kVisitFixedArray, &EvacuateFixedArray); | 34 table_.Register(kVisitFixedArray, &EvacuateFixedArray); |
36 table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray); | 35 table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray); |
37 table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray); | 36 table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray); |
38 table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array); | 37 table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array); |
39 table_.Register(kVisitJSArrayBuffer, | 38 table_.Register(kVisitJSArrayBuffer, |
40 &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit); | 39 &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit); |
41 | 40 |
42 table_.Register( | 41 table_.Register( |
43 kVisitNativeContext, | 42 kVisitNativeContext, |
(...skipping 39 matching lines...) |
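For readers skimming the registrations above: `table_` is a static dispatch table that maps a visitor id (derived from an object's map) to an evacuation callback, and `Initialize()` fills it once per template instantiation. A minimal, self-contained sketch of that pattern follows; the names (`DispatchTable`, `Callback`, the visitor ids) are illustrative stand-ins, not V8's actual `VisitorDispatchTable` API.

```cpp
#include <array>
#include <cstdio>

// Hypothetical visitor ids, loosely mirroring kVisitSeqOneByteString etc.
enum VisitorId { kVisitString, kVisitArray, kVisitorIdCount };

struct HeapObjectStub {};  // stand-in for v8::internal::HeapObject
using Callback = void (*)(HeapObjectStub*);

// A table of callbacks indexed by visitor id, as in ScavengingVisitor.
class DispatchTable {
 public:
  void Register(VisitorId id, Callback cb) { table_[id] = cb; }
  Callback GetVisitorById(VisitorId id) const { return table_[id]; }
  void Call(VisitorId id, HeapObjectStub* obj) { table_[id](obj); }

 private:
  std::array<Callback, kVisitorIdCount> table_{};
};

void EvacuateString(HeapObjectStub*) { std::puts("string visitor"); }
void EvacuateArray(HeapObjectStub*) { std::puts("array visitor"); }

int main() {
  DispatchTable table;
  table.Register(kVisitString, &EvacuateString);
  table.Register(kVisitArray, &EvacuateArray);
  HeapObjectStub obj;
  table.Call(kVisitString, &obj);  // dispatches to EvacuateString
}
```

The real table is additionally populated with per-size template specializations via `RegisterSpecializations`; the sketch keeps only the id-to-callback indirection.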
83 kVisitJSApiObject, kVisitJSApiObjectGeneric>(); | 82 kVisitJSApiObject, kVisitJSApiObjectGeneric>(); |
84 | 83 |
85 table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>, | 84 table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>, |
86 kVisitStruct, kVisitStructGeneric>(); | 85 kVisitStruct, kVisitStructGeneric>(); |
87 } | 86 } |
88 | 87 |
89 static VisitorDispatchTable<ScavengingCallback>* GetTable() { | 88 static VisitorDispatchTable<ScavengingCallback>* GetTable() { |
90 return &table_; | 89 return &table_; |
91 } | 90 } |
92 | 91 |
93 static void EvacuateThinStringNoShortcut(Map* map, HeapObject** slot, | |
94 HeapObject* object) { | |
95 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, | |
96 ThinString::kSize); | |
97 } | |
98 | |
99 private: | 92 private: |
100 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT }; | 93 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT }; |
101 | 94 |
102 static void RecordCopiedObject(Heap* heap, HeapObject* obj) { | 95 static void RecordCopiedObject(Heap* heap, HeapObject* obj) { |
103 bool should_record = false; | 96 bool should_record = false; |
104 #ifdef DEBUG | 97 #ifdef DEBUG |
105 should_record = FLAG_heap_stats; | 98 should_record = FLAG_heap_stats; |
106 #endif | 99 #endif |
107 should_record = should_record || FLAG_log_gc; | 100 should_record = should_record || FLAG_log_gc; |
108 if (should_record) { | 101 if (should_record) { |
(...skipping 230 matching lines...) |
339 Scavenger::ScavengeObjectSlow(slot, first); | 332 Scavenger::ScavengeObjectSlow(slot, first); |
340 object->set_map_word(MapWord::FromForwardingAddress(*slot)); | 333 object->set_map_word(MapWord::FromForwardingAddress(*slot)); |
341 return; | 334 return; |
342 } | 335 } |
343 | 336 |
344 int object_size = ConsString::kSize; | 337 int object_size = ConsString::kSize; |
345 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, | 338 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, |
346 object_size); | 339 object_size); |
347 } | 340 } |
348 | 341 |
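The visible tail of `EvacuateShortcutCandidate` above handles the non-shortcut case: the first part is scavenged and the cons wrapper is left behind as a forwarding stub. The shortcut itself (in the elided lines) applies when the cons string's second part is empty, so a slot holding the cons can point directly at the first part. A hedged sketch of that decision, with simplified stand-in types rather than V8's real string classes:

```cpp
#include <cassert>
#include <string>

// Illustrative stand-ins: a cons string logically concatenates
// first + second without copying either part.
struct Str { std::string chars; };
struct ConsStr {
  Str* first;
  Str* second;  // an empty second part marks a shortcut candidate
};

// When the second part is empty, the cons string is logically identical
// to its first part, so a slot holding the cons can be redirected
// ("shortcut") to first and the wrapper need not be evacuated at all.
Str* ShortcutTarget(ConsStr* cons) {
  return cons->second->chars.empty() ? cons->first : nullptr;
}

int main() {
  Str hello{"hello"}, empty{""};
  ConsStr cons{&hello, &empty};
  assert(ShortcutTarget(&cons) == &hello);  // slot would now point at "hello"
}
```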
349 static inline void EvacuateThinString(Map* map, HeapObject** slot, | |
350 HeapObject* object) { | |
351 if (marks_handling == IGNORE_MARKS) { | |
352 HeapObject* actual = ThinString::cast(object)->actual(); | |
353 *slot = actual; | |
354 // ThinStrings always refer to internalized strings, which are | |
355 // always in old space. | |
356 DCHECK(!map->GetHeap()->InNewSpace(actual)); | |
357 object->set_map_word(MapWord::FromForwardingAddress(actual)); | |
358 return; | |
359 } | |
360 | |
361 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, | |
362 ThinString::kSize); | |
363 } | |
364 | |
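Both the cons-string and thin-string paths above end the same way: the object's map slot is overwritten via `set_map_word(MapWord::FromForwardingAddress(...))`, so any later visit of the same object resolves to the new location instead of copying again. A minimal sketch of that encoding trick; the layout is hypothetical (the real `MapWord` uses V8's pointer tagging), with the low bit standing in as the forwarding tag:

```cpp
#include <cassert>
#include <cstdint>

// Hypothetical object header: the first word either points to the object's
// map or, after evacuation, encodes a forwarding address.
struct Obj {
  uintptr_t map_word;
};

constexpr uintptr_t kForwardingTag = 1;  // low bit is free: objects are aligned

void SetForwardingAddress(Obj* from, Obj* to) {
  from->map_word = reinterpret_cast<uintptr_t>(to) | kForwardingTag;
}

bool IsForwarded(const Obj* o) { return o->map_word & kForwardingTag; }

Obj* ForwardingAddress(const Obj* o) {
  return reinterpret_cast<Obj*>(o->map_word & ~kForwardingTag);
}

int main() {
  Obj old_copy{0x1000};  // pretend 0x1000 is a map pointer
  Obj new_copy{0x1000};
  SetForwardingAddress(&old_copy, &new_copy);
  assert(IsForwarded(&old_copy));
  assert(ForwardingAddress(&old_copy) == &new_copy);
}
```

Reusing the map slot costs nothing extra per object, which is why the scavenger can leave forwarding stubs behind in from-space for free.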
365 template <ObjectContents object_contents> | 342 template <ObjectContents object_contents> |
366 class ObjectEvacuationStrategy { | 343 class ObjectEvacuationStrategy { |
367 public: | 344 public: |
368 template <int object_size> | 345 template <int object_size> |
369 static inline void VisitSpecialized(Map* map, HeapObject** slot, | 346 static inline void VisitSpecialized(Map* map, HeapObject** slot, |
370 HeapObject* object) { | 347 HeapObject* object) { |
371 EvacuateObject<object_contents, kWordAligned>(map, slot, object, | 348 EvacuateObject<object_contents, kWordAligned>(map, slot, object, |
372 object_size); | 349 object_size); |
373 } | 350 } |
374 | 351 |
(...skipping 64 matching lines...) |
439 | 416 |
440 if (heap()->incremental_marking()->IsCompacting()) { | 417 if (heap()->incremental_marking()->IsCompacting()) { |
441 // When compacting, forbid short-circuiting of cons strings. | 418 // When compacting, forbid short-circuiting of cons strings. |
442 // Scavenging code relies on the fact that a new space object | 419 // Scavenging code relies on the fact that a new space object |
443 // can't be evacuated into an evacuation candidate, but | 420 // can't be evacuated into an evacuation candidate, but |
444 // short-circuiting violates this assumption. | 421 // short-circuiting violates this assumption. |
445 scavenging_visitors_table_.Register( | 422 scavenging_visitors_table_.Register( |
446 StaticVisitorBase::kVisitShortcutCandidate, | 423 StaticVisitorBase::kVisitShortcutCandidate, |
447 scavenging_visitors_table_.GetVisitorById( | 424 scavenging_visitors_table_.GetVisitorById( |
448 StaticVisitorBase::kVisitConsString)); | 425 StaticVisitorBase::kVisitConsString)); |
449 scavenging_visitors_table_.Register( | |
450 StaticVisitorBase::kVisitThinString, | |
451 &ScavengingVisitor<TRANSFER_MARKS, LOGGING_AND_PROFILING_DISABLED>:: | |
452 EvacuateThinStringNoShortcut); | |
453 } | 426 } |
454 } | 427 } |
455 } | 428 } |
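The re-registration above swaps a single dispatch-table entry at runtime: while the incremental marker is compacting, shortcut candidates are routed to the plain cons-string visitor, so no slot is ever redirected into an evacuation candidate page. A hedged sketch of that one-slot override, again with illustrative names rather than V8's API:

```cpp
#include <cstdio>

enum VisitorId { kVisitShortcutCandidate, kVisitConsString, kIdCount };
using Callback = void (*)();

void VisitConsNoShortcut() { std::puts("evacuate cons as-is"); }
void VisitWithShortcut() { std::puts("shortcut to first part"); }

// When compaction is active, alias the shortcut-candidate entry to the
// conservative cons-string visitor; only one table slot changes.
void ConfigureTable(Callback (&table)[kIdCount], bool compacting) {
  table[kVisitConsString] = &VisitConsNoShortcut;
  table[kVisitShortcutCandidate] =
      compacting ? table[kVisitConsString] : &VisitWithShortcut;
}

int main() {
  Callback table[kIdCount];
  ConfigureTable(table, /*compacting=*/true);
  table[kVisitShortcutCandidate]();  // prints "evacuate cons as-is"
}
```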
456 | 429 |
457 | 430 |
458 Isolate* Scavenger::isolate() { return heap()->isolate(); } | 431 Isolate* Scavenger::isolate() { return heap()->isolate(); } |
459 | 432 |
460 | 433 |
461 void ScavengeVisitor::VisitPointer(Object** p) { ScavengePointer(p); } | 434 void ScavengeVisitor::VisitPointer(Object** p) { ScavengePointer(p); } |
462 | 435 |
463 | 436 |
464 void ScavengeVisitor::VisitPointers(Object** start, Object** end) { | 437 void ScavengeVisitor::VisitPointers(Object** start, Object** end) { |
465 // Copy all HeapObject pointers in [start, end) | 438 // Copy all HeapObject pointers in [start, end) |
466 for (Object** p = start; p < end; p++) ScavengePointer(p); | 439 for (Object** p = start; p < end; p++) ScavengePointer(p); |
467 } | 440 } |
468 | 441 |
469 | 442 |
470 void ScavengeVisitor::ScavengePointer(Object** p) { | 443 void ScavengeVisitor::ScavengePointer(Object** p) { |
471 Object* object = *p; | 444 Object* object = *p; |
472 if (!heap_->InNewSpace(object)) return; | 445 if (!heap_->InNewSpace(object)) return; |
473 | 446 |
474 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p), | 447 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p), |
475 reinterpret_cast<HeapObject*>(object)); | 448 reinterpret_cast<HeapObject*>(object)); |
476 } | 449 } |
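`ScavengePointer` above delegates to `Scavenger::ScavengeObject`, whose usual shape (defined in scavenger-inl.h, not shown in this diff) is a fast/slow split: if the object already carries a forwarding address, only the slot is updated; otherwise the slow path copies the object and installs one. A hedged sketch of that control flow, reusing the tagged-word idea from the earlier forwarding example; this is an illustration, not V8's actual code:

```cpp
#include <cassert>
#include <cstdint>

struct Obj { uintptr_t map_word; };
constexpr uintptr_t kForwardingTag = 1;

bool IsForwarded(const Obj* o) { return o->map_word & kForwardingTag; }
Obj* ForwardingAddress(const Obj* o) {
  return reinterpret_cast<Obj*>(o->map_word & ~kForwardingTag);
}

// Slow-path stand-in: "copy" the object and install a forwarding address.
Obj* EvacuateSlow(Obj* object) {
  static Obj to_space_copy;  // pretend this slot lives in to-space
  to_space_copy = *object;
  object->map_word =
      reinterpret_cast<uintptr_t>(&to_space_copy) | kForwardingTag;
  return &to_space_copy;
}

// Fast/slow split mirroring Scavenger::ScavengeObject: a forwarded object
// only needs its slot updated; everything else is evacuated exactly once.
void ScavengeObjectSketch(Obj** slot, Obj* object) {
  if (IsForwarded(object)) {
    *slot = ForwardingAddress(object);
    return;
  }
  *slot = EvacuateSlow(object);
}

int main() {
  Obj o{0x1000};
  Obj* slot = &o;
  ScavengeObjectSketch(&slot, &o);   // first visit: copies the object
  Obj* first_copy = slot;
  Obj* slot2 = &o;
  ScavengeObjectSketch(&slot2, &o);  // second visit: follows the forwarding
  assert(slot2 == first_copy);
}
```

This is why every evacuation path in the diff ends by writing a forwarding map word: it is the contract the fast path depends on.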
477 | 450 |
478 } // namespace internal | 451 } // namespace internal |
479 } // namespace v8 | 452 } // namespace v8 |