| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_OBJECTS_VISITING_INL_H_ | 5 #ifndef V8_OBJECTS_VISITING_INL_H_ |
| 6 #define V8_OBJECTS_VISITING_INL_H_ | 6 #define V8_OBJECTS_VISITING_INL_H_ |
| 7 | 7 |
| 8 #include "src/heap/objects-visiting.h" | 8 #include "src/heap/objects-visiting.h" |
| 9 | 9 |
| 10 namespace v8 { | 10 namespace v8 { |
| (...skipping 119 matching lines...) |
| 130 &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit); | 130 &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit); |
| 131 | 131 |
| 132 table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit); | 132 table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit); |
| 133 | 133 |
| 134 table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit); | 134 table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit); |
| 135 | 135 |
| 136 table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit); | 136 table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit); |
| 137 | 137 |
| 138 table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit); | 138 table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit); |
| 139 | 139 |
| 140 table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray); |
| 141 |
| 140 table_.Register(kVisitNativeContext, &VisitNativeContext); | 142 table_.Register(kVisitNativeContext, &VisitNativeContext); |
| 141 | 143 |
| 142 table_.Register(kVisitAllocationSite, &VisitAllocationSite); | 144 table_.Register(kVisitAllocationSite, &VisitAllocationSite); |
| 143 | 145 |
| 144 table_.Register(kVisitByteArray, &DataObjectVisitor::Visit); | 146 table_.Register(kVisitByteArray, &DataObjectVisitor::Visit); |
| 145 | 147 |
| 146 table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit); | 148 table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit); |
| 147 | 149 |
| 148 table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit); | 150 table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit); |
| 149 | 151 |
| (...skipping 289 matching lines...) |
| 439 // Flush optimized code map on major GCs without code flushing, | 441 // Flush optimized code map on major GCs without code flushing, |
| 440 // needed because cached code doesn't contain breakpoints. | 442 // needed because cached code doesn't contain breakpoints. |
| 441 shared->ClearOptimizedCodeMap(); | 443 shared->ClearOptimizedCodeMap(); |
| 442 } | 444 } |
| 443 } | 445 } |
| 444 VisitSharedFunctionInfoStrongCode(heap, object); | 446 VisitSharedFunctionInfoStrongCode(heap, object); |
| 445 } | 447 } |
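The tail of VisitSharedFunctionInfo above drops the optimized code map whenever a major GC runs with code flushing disabled, because code cached there has no breakpoints patched in. A minimal sketch of that guard, assuming (as the comment suggests) it sits in the branch where the collector reports flushing disabled; the surrounding function body is elided in this hunk:

// Sketch only, under the assumption stated above.
if (!heap->mark_compact_collector()->is_code_flushing_enabled()) {
  // Cached optimized code carries no breakpoints, so it cannot be kept
  // across a major GC when the flusher will not rebuild it.
  shared->ClearOptimizedCodeMap();
}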
| 446 | 448 |
| 447 | 449 |
| 448 template <typename StaticVisitor> | 450 template <typename StaticVisitor> |
| 451 void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray( |
| 452 Map* map, HeapObject* object) { |
| 453 Heap* heap = map->GetHeap(); |
| 454 ConstantPoolArray* array = ConstantPoolArray::cast(object); |
| 455 ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR); |
| 456 while (!code_iter.is_finished()) { |
| 457 Address code_entry = reinterpret_cast<Address>( |
| 458 array->RawFieldOfElementAt(code_iter.next_index())); |
| 459 StaticVisitor::VisitCodeEntry(heap, code_entry); |
| 460 } |
| 461 |
| 462 ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR); |
| 463 while (!heap_iter.is_finished()) { |
| 464 Object** slot = array->RawFieldOfElementAt(heap_iter.next_index()); |
| 465 HeapObject* object = HeapObject::cast(*slot); |
| 466 heap->mark_compact_collector()->RecordSlot(slot, slot, object); |
| 467 bool is_weak_object = |
| 468 (array->get_weak_object_state() == |
| 469 ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE && |
| 470 Code::IsWeakObjectInOptimizedCode(object)); |
| 471 if (!is_weak_object) { |
| 472 StaticVisitor::MarkObject(heap, object); |
| 473 } |
| 474 } |
| 475 } |
| 476 |
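The new VisitConstantPoolArray above makes two passes: CODE_PTR entries are forwarded to VisitCodeEntry as raw code-entry addresses, while HEAP_PTR entries are recorded as slots and then marked, unless the pool is in the WEAK_OBJECTS_IN_OPTIMIZED_CODE state and the target qualifies as a weak object in optimized code. A minimal sketch of the iterator pattern it relies on, using only the Iterator API visible in this hunk (DumpHeapPointerIndices is a hypothetical helper, not part of the patch):

// Hypothetical helper mirroring the loop shape above: report the element
// index of every HEAP_PTR entry in a constant pool.
static void DumpHeapPointerIndices(ConstantPoolArray* array) {
  ConstantPoolArray::Iterator it(array, ConstantPoolArray::HEAP_PTR);
  while (!it.is_finished()) {
    int index = it.next_index();  // yields the current index and advances
    PrintF("heap pointer entry at index %d\n", index);
  }
}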
| 477 |
| 478 template <typename StaticVisitor> |
| 449 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map, | 479 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map, |
| 450 HeapObject* object) { | 480 HeapObject* object) { |
| 451 Heap* heap = map->GetHeap(); | 481 Heap* heap = map->GetHeap(); |
| 452 JSFunction* function = JSFunction::cast(object); | 482 JSFunction* function = JSFunction::cast(object); |
| 453 MarkCompactCollector* collector = heap->mark_compact_collector(); | 483 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 454 if (collector->is_code_flushing_enabled()) { | 484 if (collector->is_code_flushing_enabled()) { |
| 455 if (IsFlushable(heap, function)) { | 485 if (IsFlushable(heap, function)) { |
| 456 // This function's code looks flushable. But we have to postpone | 486 // This function's code looks flushable. But we have to postpone |
| 457 // the decision until we see all functions that point to the same | 487 // the decision until we see all functions that point to the same |
| 458 // SharedFunctionInfo because some of them might be optimized. | 488 // SharedFunctionInfo because some of them might be optimized. |
| (...skipping 330 matching lines...) |
| 789 RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) | | 819 RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) | |
| 790 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY); | 820 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY); |
| 791 | 821 |
| 792 // There are two places where we iterate code bodies: here and the | 822 // There are two places where we iterate code bodies: here and the |
| 793 // templated CodeIterateBody (below). They should be kept in sync. | 823 // templated CodeIterateBody (below). They should be kept in sync. |
| 794 IteratePointer(v, kRelocationInfoOffset); | 824 IteratePointer(v, kRelocationInfoOffset); |
| 795 IteratePointer(v, kHandlerTableOffset); | 825 IteratePointer(v, kHandlerTableOffset); |
| 796 IteratePointer(v, kDeoptimizationDataOffset); | 826 IteratePointer(v, kDeoptimizationDataOffset); |
| 797 IteratePointer(v, kTypeFeedbackInfoOffset); | 827 IteratePointer(v, kTypeFeedbackInfoOffset); |
| 798 IterateNextCodeLink(v, kNextCodeLinkOffset); | 828 IterateNextCodeLink(v, kNextCodeLinkOffset); |
| 829 IteratePointer(v, kConstantPoolOffset); |
| 799 | 830 |
| 800 RelocIterator it(this, mode_mask); | 831 RelocIterator it(this, mode_mask); |
| 801 Isolate* isolate = this->GetIsolate(); | 832 Isolate* isolate = this->GetIsolate(); |
| 802 for (; !it.done(); it.next()) { | 833 for (; !it.done(); it.next()) { |
| 803 it.rinfo()->Visit(isolate, v); | 834 it.rinfo()->Visit(isolate, v); |
| 804 } | 835 } |
| 805 } | 836 } |
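The mode_mask assembled at the top of this function decides which relocation entries the RelocIterator stops on: RelocInfo::ModeMask turns one RelocInfo::Mode into a single bit, and the mask is the OR of those bits. A short illustration (the full mode list is truncated by the skipped lines above):

// Sketch: a RelocIterator built with this mask visits only DEBUG_BREAK_SLOT
// and RUNTIME_ENTRY relocation entries.
int mode_mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
// A mode is included exactly when its bit is set in the mask:
bool visits_runtime_entries =
    (mode_mask & RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY)) != 0;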
| 806 | 837 |
| 807 | 838 |
| 808 template <typename StaticVisitor> | 839 template <typename StaticVisitor> |
| (...skipping 16 matching lines...) |
| 825 StaticVisitor::VisitPointer( | 856 StaticVisitor::VisitPointer( |
| 826 heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset)); | 857 heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset)); |
| 827 StaticVisitor::VisitPointer( | 858 StaticVisitor::VisitPointer( |
| 828 heap, | 859 heap, |
| 829 reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset)); | 860 reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset)); |
| 830 StaticVisitor::VisitPointer( | 861 StaticVisitor::VisitPointer( |
| 831 heap, | 862 heap, |
| 832 reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset)); | 863 reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset)); |
| 833 StaticVisitor::VisitNextCodeLink( | 864 StaticVisitor::VisitNextCodeLink( |
| 834 heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset)); | 865 heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset)); |
| 866 StaticVisitor::VisitPointer( |
| 867 heap, reinterpret_cast<Object**>(this->address() + kConstantPoolOffset)); |
| 835 | 868 |
| 836 | 869 |
| 837 RelocIterator it(this, mode_mask); | 870 RelocIterator it(this, mode_mask); |
| 838 for (; !it.done(); it.next()) { | 871 for (; !it.done(); it.next()) { |
| 839 it.rinfo()->template Visit<StaticVisitor>(heap); | 872 it.rinfo()->template Visit<StaticVisitor>(heap); |
| 840 } | 873 } |
| 841 } | 874 } |
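Both iterations, the static-visitor one here and the ObjectVisitor one earlier, rely on the same raw-slot pattern, which is why the patch adds a kConstantPoolOffset line to each and why the comment warns they must stay in sync: a fixed header offset is added to the Code object's address and reinterpreted as an Object** before being handed to the visitor. A sketch of that pattern with a hypothetical offset name:

// Sketch (kSomeFieldOffset is hypothetical): turn a header offset into a
// visitable slot, as done for every field in both iterations above.
Object** slot =
    reinterpret_cast<Object**>(this->address() + kSomeFieldOffset);
StaticVisitor::VisitPointer(heap, slot);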
| 842 } | 875 } |
| 843 } // namespace v8::internal | 876 } // namespace v8::internal |
| 844 | 877 |
| 845 #endif // V8_OBJECTS_VISITING_INL_H_ | 878 #endif // V8_OBJECTS_VISITING_INL_H_ |