| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_OBJECTS_VISITING_INL_H_ | 5 #ifndef V8_OBJECTS_VISITING_INL_H_ |
| 6 #define V8_OBJECTS_VISITING_INL_H_ | 6 #define V8_OBJECTS_VISITING_INL_H_ |
| 7 | 7 |
| 8 | 8 |
| 9 namespace v8 { | 9 namespace v8 { |
| 10 namespace internal { | 10 namespace internal { |
| (...skipping 118 matching lines...) |
| 129 &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit); | 129 &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit); |
| 130 | 130 |
| 131 table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit); | 131 table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit); |
| 132 | 132 |
| 133 table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit); | 133 table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit); |
| 134 | 134 |
| 135 table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit); | 135 table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit); |
| 136 | 136 |
| 137 table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit); | 137 table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit); |
| 138 | 138 |
| 139 table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray); | |
| 140 | |
| 141 table_.Register(kVisitNativeContext, &VisitNativeContext); | 139 table_.Register(kVisitNativeContext, &VisitNativeContext); |
| 142 | 140 |
| 143 table_.Register(kVisitAllocationSite, &VisitAllocationSite); | 141 table_.Register(kVisitAllocationSite, &VisitAllocationSite); |
| 144 | 142 |
| 145 table_.Register(kVisitByteArray, &DataObjectVisitor::Visit); | 143 table_.Register(kVisitByteArray, &DataObjectVisitor::Visit); |
| 146 | 144 |
| 147 table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit); | 145 table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit); |
| 148 | 146 |
| 149 table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit); | 147 table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit); |
| 150 | 148 |
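
The table_.Register() calls above populate a per-type dispatch table keyed by visitor id, so marking can jump straight to the callback for each object's map without any branching on object type. A minimal sketch of that pattern, using simplified stand-in names (DispatchTable, VisitorId) rather than V8's actual types:

    // Simplified sketch of the dispatch-table pattern; names are
    // illustrative stand-ins, not V8's actual VisitorDispatchTable API.
    #include <array>
    #include <cstdio>

    enum VisitorId { kVisitFixedArray, kVisitByteArray, kVisitorIdCount };

    struct HeapObject {};  // opaque payload for the sketch
    using Callback = void (*)(HeapObject*);

    class DispatchTable {
     public:
      void Register(VisitorId id, Callback callback) { callbacks_[id] = callback; }
      void Visit(VisitorId id, HeapObject* object) const { callbacks_[id](object); }

     private:
      std::array<Callback, kVisitorIdCount> callbacks_{};
    };

    void VisitFixedArray(HeapObject*) { std::puts("visited a fixed array"); }

    int main() {
      DispatchTable table;
      table.Register(kVisitFixedArray, &VisitFixedArray);
      HeapObject fixed_array;
      table.Visit(kVisitFixedArray, &fixed_array);  // dispatches by visitor id
    }
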
| (...skipping 288 matching lines...) |
| 439 // Flush optimized code map on major GCs without code flushing, | 437 // Flush optimized code map on major GCs without code flushing, |
| 440 // needed because cached code doesn't contain breakpoints. | 438 // needed because cached code doesn't contain breakpoints. |
| 441 shared->ClearOptimizedCodeMap(); | 439 shared->ClearOptimizedCodeMap(); |
| 442 } | 440 } |
| 443 } | 441 } |
| 444 VisitSharedFunctionInfoStrongCode(heap, object); | 442 VisitSharedFunctionInfoStrongCode(heap, object); |
| 445 } | 443 } |
| 446 | 444 |
| 447 | 445 |
| 448 template <typename StaticVisitor> | 446 template <typename StaticVisitor> |
| 449 void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray( | |
| 450 Map* map, HeapObject* object) { | |
| 451 Heap* heap = map->GetHeap(); | |
| 452 ConstantPoolArray* array = ConstantPoolArray::cast(object); | |
| 453 ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR); | |
| 454 while (!code_iter.is_finished()) { | |
| 455 Address code_entry = reinterpret_cast<Address>( | |
| 456 array->RawFieldOfElementAt(code_iter.next_index())); | |
| 457 StaticVisitor::VisitCodeEntry(heap, code_entry); | |
| 458 } | |
| 459 | |
| 460 ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR); | |
| 461 while (!heap_iter.is_finished()) { | |
| 462 Object** slot = array->RawFieldOfElementAt(heap_iter.next_index()); | |
| 463 HeapObject* object = HeapObject::cast(*slot); | |
| 464 heap->mark_compact_collector()->RecordSlot(slot, slot, object); | |
| 465 bool is_weak_object = | |
| 466 (array->get_weak_object_state() == | |
| 467 ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE && | |
| 468 Code::IsWeakObjectInOptimizedCode(object)); | |
| 469 if (!is_weak_object) { | |
| 470 StaticVisitor::MarkObject(heap, object); | |
| 471 } | |
| 472 } | |
| 473 } | |
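
The deleted VisitConstantPoolArray above walked CODE_PTR entries as code entries and HEAP_PTR entries as object slots, recording every slot for later pointer updating but marking a referent only when it was not a weak object in optimized code. A minimal sketch of that record-then-conditionally-mark pattern, with simplified stand-in names rather than V8's real API:

    // Every slot is recorded so the collector can update the pointer after
    // compaction, but only strong referents are marked live here.
    #include <vector>

    struct Obj {
      bool is_weak_in_optimized_code = false;
      bool marked = false;
    };

    void VisitSlots(std::vector<Obj*>& slots, std::vector<Obj**>* recorded) {
      for (Obj*& slot : slots) {
        recorded->push_back(&slot);  // record the slot for pointer updating
        if (!slot->is_weak_in_optimized_code) {
          slot->marked = true;  // strong entries keep their referent alive
        }
        // Weak entries stay unmarked; they die unless reached elsewhere.
      }
    }

    int main() {
      Obj strong_obj, weak_obj{/*is_weak_in_optimized_code=*/true};
      std::vector<Obj*> slots = {&strong_obj, &weak_obj};
      std::vector<Obj**> recorded;
      VisitSlots(slots, &recorded);
      return strong_obj.marked && !weak_obj.marked ? 0 : 1;
    }
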
| 474 | |
| 475 | |
| 476 template <typename StaticVisitor> | |
| 477 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map, | 447 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map, |
| 478 HeapObject* object) { | 448 HeapObject* object) { |
| 479 Heap* heap = map->GetHeap(); | 449 Heap* heap = map->GetHeap(); |
| 480 JSFunction* function = JSFunction::cast(object); | 450 JSFunction* function = JSFunction::cast(object); |
| 481 MarkCompactCollector* collector = heap->mark_compact_collector(); | 451 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 482 if (collector->is_code_flushing_enabled()) { | 452 if (collector->is_code_flushing_enabled()) { |
| 483 if (IsFlushable(heap, function)) { | 453 if (IsFlushable(heap, function)) { |
| 484 // This function's code looks flushable. But we have to postpone | 454 // This function's code looks flushable. But we have to postpone |
| 485 // the decision until we see all functions that point to the same | 455 // the decision until we see all functions that point to the same |
| 486 // SharedFunctionInfo because some of them might be optimized. | 456 // SharedFunctionInfo because some of them might be optimized. |
| (...skipping 333 matching lines...) |
| 820 RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) | | 790 RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) | |
| 821 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY); | 791 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY); |
| 822 | 792 |
| 823 // There are two places where we iterate code bodies: here and the | 793 // There are two places where we iterate code bodies: here and the |
| 824 // templated CodeIterateBody (below). They should be kept in sync. | 794 // templated CodeIterateBody (below). They should be kept in sync. |
| 825 IteratePointer(v, kRelocationInfoOffset); | 795 IteratePointer(v, kRelocationInfoOffset); |
| 826 IteratePointer(v, kHandlerTableOffset); | 796 IteratePointer(v, kHandlerTableOffset); |
| 827 IteratePointer(v, kDeoptimizationDataOffset); | 797 IteratePointer(v, kDeoptimizationDataOffset); |
| 828 IteratePointer(v, kTypeFeedbackInfoOffset); | 798 IteratePointer(v, kTypeFeedbackInfoOffset); |
| 829 IterateNextCodeLink(v, kNextCodeLinkOffset); | 799 IterateNextCodeLink(v, kNextCodeLinkOffset); |
| 830 IteratePointer(v, kConstantPoolOffset); | |
| 831 | 800 |
| 832 RelocIterator it(this, mode_mask); | 801 RelocIterator it(this, mode_mask); |
| 833 Isolate* isolate = this->GetIsolate(); | 802 Isolate* isolate = this->GetIsolate(); |
| 834 for (; !it.done(); it.next()) { | 803 for (; !it.done(); it.next()) { |
| 835 it.rinfo()->Visit(isolate, v); | 804 it.rinfo()->Visit(isolate, v); |
| 836 } | 805 } |
| 837 } | 806 } |
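
Both iteration sites build mode_mask as an OR of per-mode bits and let RelocIterator skip every relocation entry whose mode is not selected. A minimal sketch of that bitmask filtering, with illustrative modes rather than V8's actual relocation formats:

    // A bitmask selects which relocation modes the visitor should see.
    #include <cstdio>
    #include <vector>

    enum RelocMode { kCodeTarget, kEmbeddedObject, kRuntimeEntry };
    constexpr int ModeMask(RelocMode mode) { return 1 << mode; }

    struct RelocEntry {
      RelocMode mode;
      const char* description;
    };

    int main() {
      std::vector<RelocEntry> relocs = {{kCodeTarget, "call target"},
                                        {kEmbeddedObject, "heap constant"},
                                        {kRuntimeEntry, "runtime stub"}};
      int mode_mask = ModeMask(kCodeTarget) | ModeMask(kEmbeddedObject);
      for (const RelocEntry& entry : relocs) {
        if (mode_mask & ModeMask(entry.mode)) {
          std::printf("visit %s\n", entry.description);  // selected modes only
        }
      }
    }
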
| 838 | 807 |
| 839 | 808 |
| 840 template <typename StaticVisitor> | 809 template <typename StaticVisitor> |
| (...skipping 16 matching lines...) |
| 857 StaticVisitor::VisitPointer( | 826 StaticVisitor::VisitPointer( |
| 858 heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset)); | 827 heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset)); |
| 859 StaticVisitor::VisitPointer( | 828 StaticVisitor::VisitPointer( |
| 860 heap, | 829 heap, |
| 861 reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset)); | 830 reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset)); |
| 862 StaticVisitor::VisitPointer( | 831 StaticVisitor::VisitPointer( |
| 863 heap, | 832 heap, |
| 864 reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset)); | 833 reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset)); |
| 865 StaticVisitor::VisitNextCodeLink( | 834 StaticVisitor::VisitNextCodeLink( |
| 866 heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset)); | 835 heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset)); |
| 867 StaticVisitor::VisitPointer( | |
| 868 heap, reinterpret_cast<Object**>(this->address() + kConstantPoolOffset)); | |
| 869 | 836 |
| 870 | 837 |
| 871 RelocIterator it(this, mode_mask); | 838 RelocIterator it(this, mode_mask); |
| 872 for (; !it.done(); it.next()) { | 839 for (; !it.done(); it.next()) { |
| 873 it.rinfo()->template Visit<StaticVisitor>(heap); | 840 it.rinfo()->template Visit<StaticVisitor>(heap); |
| 874 } | 841 } |
| 875 } | 842 } |
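
The templated variant above invokes StaticVisitor::VisitPointer and friends directly, so the visitor is chosen at compile time rather than through virtual dispatch; that is why the two code-body iterators must be kept in sync by hand. A minimal sketch of that static-visitor pattern, with illustrative names only:

    // The callee invokes static members of the template parameter, so
    // dispatch is resolved at compile time with no virtual calls.
    #include <cstdio>

    struct Heap {};

    template <typename StaticVisitor>
    void IterateBody(Heap* heap, void** slot) {
      StaticVisitor::VisitPointer(heap, slot);  // bound at compile time
    }

    struct MarkingVisitor {
      static void VisitPointer(Heap*, void** slot) {
        std::printf("marking slot %p\n", static_cast<void*>(slot));
      }
    };

    int main() {
      Heap heap;
      void* object = nullptr;
      IterateBody<MarkingVisitor>(&heap, &object);
    }
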
| 876 } | 843 } |
| 877 } // namespace v8::internal | 844 } // namespace v8::internal |
| 878 | 845 |
| 879 #endif // V8_OBJECTS_VISITING_INL_H_ | 846 #endif // V8_OBJECTS_VISITING_INL_H_ |