OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/scavenger.h" | 5 #include "src/heap/scavenger.h" |
6 | 6 |
7 #include "src/contexts.h" | 7 #include "src/contexts.h" |
8 #include "src/heap/heap-inl.h" | 8 #include "src/heap/heap-inl.h" |
9 #include "src/heap/incremental-marking.h" | 9 #include "src/heap/incremental-marking.h" |
10 #include "src/heap/objects-visiting-inl.h" | 10 #include "src/heap/objects-visiting-inl.h" |
(...skipping 18 matching lines...) |
29 class ScavengingVisitor : public StaticVisitorBase { | 29 class ScavengingVisitor : public StaticVisitorBase { |
30 public: | 30 public: |
31 static void Initialize() { | 31 static void Initialize() { |
32 table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString); | 32 table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString); |
33 table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString); | 33 table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString); |
34 table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate); | 34 table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate); |
35 table_.Register(kVisitThinString, &EvacuateThinString); | 35 table_.Register(kVisitThinString, &EvacuateThinString); |
36 table_.Register(kVisitByteArray, &EvacuateByteArray); | 36 table_.Register(kVisitByteArray, &EvacuateByteArray); |
37 table_.Register(kVisitFixedArray, &EvacuateFixedArray); | 37 table_.Register(kVisitFixedArray, &EvacuateFixedArray); |
38 table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray); | 38 table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray); |
39 table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray); | 39 table_.Register(kVisitFixedTypedArrayBase, &EvacuateFixedTypedArray); |
40 table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array); | 40 table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array); |
41 table_.Register(kVisitJSArrayBuffer, | 41 table_.Register(kVisitJSArrayBuffer, |
42 &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit); | 42 &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit); |
43 | 43 |
44 table_.Register( | 44 table_.Register( |
45 kVisitNativeContext, | 45 kVisitNativeContext, |
46 &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized< | 46 &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized< |
47 Context::kSize>); | 47 Context::kSize>); |
48 | 48 |
49 table_.Register( | 49 table_.Register( |
(...skipping 389 matching lines...) |
439 ScavengingVisitor<TRANSFER_MARKS, | 439 ScavengingVisitor<TRANSFER_MARKS, |
440 LOGGING_AND_PROFILING_ENABLED>::GetTable()); | 440 LOGGING_AND_PROFILING_ENABLED>::GetTable()); |
441 } | 441 } |
442 | 442 |
443 if (heap()->incremental_marking()->IsCompacting()) { | 443 if (heap()->incremental_marking()->IsCompacting()) { |
444 // When compacting, forbid short-circuiting of cons-strings. | 444 // When compacting, forbid short-circuiting of cons-strings. |
445 // Scavenging code relies on the fact that new space objects | 445 // Scavenging code relies on the fact that new space objects |
446 // can't be evacuated into evacuation candidates, but | 446 // can't be evacuated into evacuation candidates, but |
447 // short-circuiting violates this assumption. | 447 // short-circuiting violates this assumption. |
448 scavenging_visitors_table_.Register( | 448 scavenging_visitors_table_.Register( |
449 StaticVisitorBase::kVisitShortcutCandidate, | 449 kVisitShortcutCandidate, |
450 scavenging_visitors_table_.GetVisitorById( | 450 scavenging_visitors_table_.GetVisitorById(kVisitConsString)); |
451 StaticVisitorBase::kVisitConsString)); | |
452 scavenging_visitors_table_.Register( | 451 scavenging_visitors_table_.Register( |
453 StaticVisitorBase::kVisitThinString, | 452 kVisitThinString, |
454 &ScavengingVisitor<TRANSFER_MARKS, LOGGING_AND_PROFILING_DISABLED>:: | 453 &ScavengingVisitor<TRANSFER_MARKS, LOGGING_AND_PROFILING_DISABLED>:: |
455 EvacuateThinStringNoShortcut); | 454 EvacuateThinStringNoShortcut); |
456 } | 455 } |
457 } | 456 } |
458 } | 457 } |
459 | 458 |
460 | 459 |
461 Isolate* Scavenger::isolate() { return heap()->isolate(); } | 460 Isolate* Scavenger::isolate() { return heap()->isolate(); } |
462 | 461 |
463 void RootScavengeVisitor::VisitRootPointer(Root root, Object** p) { | 462 void RootScavengeVisitor::VisitRootPointer(Root root, Object** p) { |
464 ScavengePointer(p); | 463 ScavengePointer(p); |
465 } | 464 } |
466 | 465 |
467 void RootScavengeVisitor::VisitRootPointers(Root root, Object** start, | 466 void RootScavengeVisitor::VisitRootPointers(Root root, Object** start, |
468 Object** end) { | 467 Object** end) { |
469 // Copy all HeapObject pointers in [start, end) | 468 // Copy all HeapObject pointers in [start, end) |
470 for (Object** p = start; p < end; p++) ScavengePointer(p); | 469 for (Object** p = start; p < end; p++) ScavengePointer(p); |
471 } | 470 } |
472 | 471 |
473 void RootScavengeVisitor::ScavengePointer(Object** p) { | 472 void RootScavengeVisitor::ScavengePointer(Object** p) { |
474 Object* object = *p; | 473 Object* object = *p; |
475 if (!heap_->InNewSpace(object)) return; | 474 if (!heap_->InNewSpace(object)) return; |
476 | 475 |
477 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p), | 476 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p), |
478 reinterpret_cast<HeapObject*>(object)); | 477 reinterpret_cast<HeapObject*>(object)); |
479 } | 478 } |
480 | 479 |
481 } // namespace internal | 480 } // namespace internal |
482 } // namespace v8 | 481 } // namespace v8 |
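The hunk around kVisitShortcutCandidate above swaps the shortcut-candidate entry for the plain cons-string visitor while the incremental marker is compacting, so shortcut candidates are copied like ordinary cons strings instead of being short-circuited. A minimal sketch of that dispatch-table pattern follows; the VisitorTable class, visitor ids, and EvacuateConsString callback are simplified stand-ins for illustration, not V8's actual StaticVisitorBase/VisitorDispatchTable types, though the Register/GetVisitorById calls mirror the ones in the diff.

// Minimal sketch (assumed simplified types, not V8's implementation): a
// function-pointer table keyed by visitor id, analogous to
// table_.Register(kVisitFoo, &Callback) in the diff above.
#include <cstdio>

namespace sketch {

enum VisitorId { kVisitConsString, kVisitShortcutCandidate, kVisitorIdCount };

struct HeapObject;  // opaque placeholder for the object being scavenged

using Callback = void (*)(HeapObject* object);

class VisitorTable {
 public:
  void Register(VisitorId id, Callback callback) { callbacks_[id] = callback; }
  Callback GetVisitorById(VisitorId id) const { return callbacks_[id]; }
  void Visit(VisitorId id, HeapObject* object) const { callbacks_[id](object); }

 private:
  Callback callbacks_[kVisitorIdCount] = {};
};

void EvacuateConsString(HeapObject*) { std::puts("evacuate cons string"); }

}  // namespace sketch

int main() {
  sketch::VisitorTable table;
  table.Register(sketch::kVisitConsString, &sketch::EvacuateConsString);
  // While compacting, alias the shortcut-candidate entry to the plain
  // cons-string visitor so no short-circuiting happens (mirrors the
  // kVisitShortcutCandidate re-registration in the hunk above).
  table.Register(sketch::kVisitShortcutCandidate,
                 table.GetVisitorById(sketch::kVisitConsString));
  table.Visit(sketch::kVisitShortcutCandidate, nullptr);
  return 0;
}

A flat table of function pointers indexed by visitor id gives constant-time dispatch per object without virtual calls, and it lets the scavenger retarget individual entries (as the compaction branch does) without touching the visitors themselves.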