OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_OBJECTS_VISITING_INL_H_ | 5 #ifndef V8_OBJECTS_VISITING_INL_H_ |
6 #define V8_OBJECTS_VISITING_INL_H_ | 6 #define V8_OBJECTS_VISITING_INL_H_ |
7 | 7 |
8 #include "src/heap/array-buffer-tracker.h" | 8 #include "src/heap/array-buffer-tracker.h" |
9 #include "src/heap/objects-visiting.h" | 9 #include "src/heap/objects-visiting.h" |
10 #include "src/ic/ic-state.h" | 10 #include "src/ic/ic-state.h" |
(...skipping 45 matching lines...)
56 | 56 |
57 table_.Register( | 57 table_.Register( |
58 kVisitSharedFunctionInfo, | 58 kVisitSharedFunctionInfo, |
59 &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor, | 59 &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor, |
60 int>::Visit); | 60 int>::Visit); |
61 | 61 |
62 table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString); | 62 table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString); |
63 | 63 |
64 table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString); | 64 table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString); |
65 | 65 |
66 table_.Register(kVisitJSFunction, &VisitJSFunction); | 66 // Don't visit code entry. We are using this visitor only during scavenges. |
| 67 table_.Register( |
| 68 kVisitJSFunction, |
| 69 &FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode, |
| 70 int>::Visit); |
67 | 71 |
68 table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer); | 72 table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer); |
69 | 73 |
70 table_.Register(kVisitJSTypedArray, &VisitJSTypedArray); | 74 table_.Register(kVisitJSTypedArray, &VisitJSTypedArray); |
71 | 75 |
72 table_.Register(kVisitJSDataView, &VisitJSDataView); | 76 table_.Register(kVisitJSDataView, &VisitJSDataView); |
73 | 77 |
74 table_.Register(kVisitFreeSpace, &VisitFreeSpace); | 78 table_.Register(kVisitFreeSpace, &VisitFreeSpace); |
75 | 79 |
76 table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit); | 80 table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit); |
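For context, table_ is a function-pointer dispatch table indexed by visitor id, so each Register call above binds one object shape to one visit routine. A minimal sketch of the pattern, with simplified names (VisitorDispatchTable and kVisitorIdCount are illustrative, not V8's exact classes):

    // Sketch of the dispatch-table idea behind table_.Register(...).
    template <typename ReturnType>
    class VisitorDispatchTable {
     public:
      typedef ReturnType (*Callback)(Map* map, HeapObject* object);
      // Bind a visitor id (e.g. kVisitJSFunction) to its visit routine.
      void Register(int id, Callback callback) { callbacks_[id] = callback; }
      // Dispatch on the visitor id cached for the object's map.
      ReturnType Visit(int id, Map* map, HeapObject* object) {
        return callbacks_[id](map, object);
      }
     private:
      Callback callbacks_[kVisitorIdCount];  // illustrative table size
    };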
(...skipping 423 matching lines...)
500 collector->code_flusher()->AddCandidate(function); | 504 collector->code_flusher()->AddCandidate(function); |
501 // Visit shared function info immediately to avoid double checking | 505 // Visit shared function info immediately to avoid double checking |
502 // of its flushability later. This is just an optimization because | 506 // of its flushability later. This is just an optimization because |
503 // the shared function info would eventually be visited. | 507 // the shared function info would eventually be visited. |
504 SharedFunctionInfo* shared = function->shared(); | 508 SharedFunctionInfo* shared = function->shared(); |
505 if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) { | 509 if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) { |
506 StaticVisitor::MarkObject(heap, shared->map()); | 510 StaticVisitor::MarkObject(heap, shared->map()); |
507 VisitSharedFunctionInfoWeakCode(heap, shared); | 511 VisitSharedFunctionInfoWeakCode(heap, shared); |
508 } | 512 } |
509 // Treat the reference to the code object weakly. | 513 // Treat the reference to the code object weakly. |
510 VisitJSFunctionWeakCode(heap, object); | 514 VisitJSFunctionWeakCode(map, object); |
511 return; | 515 return; |
512 } else { | 516 } else { |
513 // Visit all unoptimized code objects to prevent flushing them. | 517 // Visit all unoptimized code objects to prevent flushing them. |
514 StaticVisitor::MarkObject(heap, function->shared()->code()); | 518 StaticVisitor::MarkObject(heap, function->shared()->code()); |
515 if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) { | 519 if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) { |
516 MarkInlinedFunctionsCode(heap, function->code()); | 520 MarkInlinedFunctionsCode(heap, function->code()); |
517 } | 521 } |
518 } | 522 } |
519 } | 523 } |
520 VisitJSFunctionStrongCode(heap, object); | 524 VisitJSFunctionStrongCode(map, object); |
521 } | 525 } |
522 | 526 |
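The branch structure above is easy to lose in the diff. In outline, the marking policy for a JSFunction is the following (a simplified sketch; code_flushing_enabled and IsFlushable stand in for the real checks and are not the actual call sequence):

    // Sketch of the flushing policy in VisitJSFunction, not verbatim code.
    if (code_flushing_enabled && IsFlushable(heap, function)) {
      // Remember the function as a flushing candidate and reference its
      // code only weakly, so unused compiled code can be flushed later.
      collector->code_flusher()->AddCandidate(function);
      VisitJSFunctionWeakCode(map, object);
    } else {
      // Keep the unoptimized code strongly marked so it cannot be flushed.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      VisitJSFunctionStrongCode(map, object);
    }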
523 | 527 |
524 template <typename StaticVisitor> | 528 template <typename StaticVisitor> |
525 void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map, | 529 void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map, |
526 HeapObject* object) { | 530 HeapObject* object) { |
527 int last_property_offset = | 531 int last_property_offset = |
528 JSRegExp::kSize + kPointerSize * map->GetInObjectProperties(); | 532 JSRegExp::kSize + kPointerSize * map->GetInObjectProperties(); |
529 StaticVisitor::VisitPointers( | 533 StaticVisitor::VisitPointers( |
530 map->GetHeap(), object, | 534 map->GetHeap(), object, |
(...skipping 264 matching lines...)
795 Object** start_slot = | 799 Object** start_slot = |
796 HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset); | 800 HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset); |
797 Object** end_slot = HeapObject::RawField( | 801 Object** end_slot = HeapObject::RawField( |
798 object, SharedFunctionInfo::BodyDescriptor::kEndOffset); | 802 object, SharedFunctionInfo::BodyDescriptor::kEndOffset); |
799 StaticVisitor::VisitPointers(heap, object, start_slot, end_slot); | 803 StaticVisitor::VisitPointers(heap, object, start_slot, end_slot); |
800 } | 804 } |
801 | 805 |
802 | 806 |
803 template <typename StaticVisitor> | 807 template <typename StaticVisitor> |
804 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode( | 808 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode( |
805 Heap* heap, HeapObject* object) { | 809 Map* map, HeapObject* object) { |
806 Object** start_slot = | 810 typedef FlexibleBodyVisitor<StaticVisitor, |
807 HeapObject::RawField(object, JSFunction::kPropertiesOffset); | 811 JSFunction::BodyDescriptorStrongCode, |
808 Object** end_slot = | 812 void> JSFunctionStrongCodeBodyVisitor; |
809 HeapObject::RawField(object, JSFunction::kCodeEntryOffset); | 813 JSFunctionStrongCodeBodyVisitor::Visit(map, object); |
810 StaticVisitor::VisitPointers(heap, object, start_slot, end_slot); | |
811 | |
812 VisitCodeEntry(heap, object, | |
813 object->address() + JSFunction::kCodeEntryOffset); | |
814 STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize == | |
815 JSFunction::kPrototypeOrInitialMapOffset); | |
816 | |
817 start_slot = | |
818 HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset); | |
819 end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset); | |
820 StaticVisitor::VisitPointers(heap, object, start_slot, end_slot); | |
821 } | 814 } |
822 | 815 |
823 | 816 |
824 template <typename StaticVisitor> | 817 template <typename StaticVisitor> |
825 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode( | 818 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode( |
826 Heap* heap, HeapObject* object) { | 819 Map* map, HeapObject* object) { |
827 Object** start_slot = | 820 typedef FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode, |
828 HeapObject::RawField(object, JSFunction::kPropertiesOffset); | 821 void> JSFunctionWeakCodeBodyVisitor; |
829 Object** end_slot = | 822 JSFunctionWeakCodeBodyVisitor::Visit(map, object); |
830 HeapObject::RawField(object, JSFunction::kCodeEntryOffset); | |
831 StaticVisitor::VisitPointers(heap, object, start_slot, end_slot); | |
832 | |
833 // Skip visiting kCodeEntryOffset as it is treated weakly here. | |
834 STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize == | |
835 JSFunction::kPrototypeOrInitialMapOffset); | |
836 | |
837 start_slot = | |
838 HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset); | |
839 end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset); | |
840 StaticVisitor::VisitPointers(heap, object, start_slot, end_slot); | |
841 } | 823 } |
842 | 824 |
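Both rewrites above replace hand-written slot ranges with a FlexibleBodyVisitor over a body descriptor: the descriptor names which slot ranges of a JSFunction get visited, and BodyDescriptorWeakCode simply omits the code entry slot that BodyDescriptorStrongCode includes. A rough sketch of the division of labor (descriptor interface simplified here, not the exact V8 signatures):

    // Sketch only: a body descriptor declares the pointer range to visit,
    // and the flexible visitor walks it, keeping visitors free of raw
    // offset arithmetic and STATIC_ASSERTs about field layout.
    class ExampleBodyDescriptor {
     public:
      static const int kStartOffset = JSFunction::kPropertiesOffset;
      template <typename StaticVisitor>
      static inline void IterateBody(HeapObject* object, int object_size) {
        // Visit every tagged slot from kStartOffset to the object's end; a
        // weak-code descriptor would skip JSFunction::kCodeEntryOffset here.
        StaticVisitor::VisitPointers(
            object->GetHeap(), object,
            HeapObject::RawField(object, kStartOffset),
            HeapObject::RawField(object, object_size));
      }
    };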
843 | 825 |
844 void Code::CodeIterateBody(ObjectVisitor* v) { | 826 void Code::CodeIterateBody(ObjectVisitor* v) { |
845 int mode_mask = RelocInfo::kCodeTargetMask | | 827 int mode_mask = RelocInfo::kCodeTargetMask | |
846 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | | 828 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | |
847 RelocInfo::ModeMask(RelocInfo::CELL) | | 829 RelocInfo::ModeMask(RelocInfo::CELL) | |
848 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | | 830 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | |
849 RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) | | 831 RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) | |
850 RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) | | 832 RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) | |
(...skipping 47 matching lines...)
898 | 880 |
899 RelocIterator it(this, mode_mask); | 881 RelocIterator it(this, mode_mask); |
900 for (; !it.done(); it.next()) { | 882 for (; !it.done(); it.next()) { |
901 it.rinfo()->template Visit<StaticVisitor>(heap); | 883 it.rinfo()->template Visit<StaticVisitor>(heap); |
902 } | 884 } |
903 } | 885 } |
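A note on the idiom above: RelocInfo::ModeMask contributes one bit per relocation mode, and RelocIterator only yields entries whose mode bit is set in the mask, so the loop visits exactly the relocation kinds that were ORed together. A compressed usage sketch mirroring the loop above (assumes a Code* named code in hand and the same template context):

    // Illustrative: iterate only two relocation kinds of a code object.
    int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                    RelocInfo::ModeMask(RelocInfo::CELL);
    RelocIterator it(code, mode_mask);
    for (; !it.done(); it.next()) {
      // Only EMBEDDED_OBJECT and CELL entries reach this point.
      it.rinfo()->template Visit<StaticVisitor>(heap);
    }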
904 } // namespace internal | 886 } // namespace internal |
905 } // namespace v8 | 887 } // namespace v8 |
906 | 888 |
907 #endif // V8_OBJECTS_VISITING_INL_H_ | 889 #endif // V8_OBJECTS_VISITING_INL_H_ |