Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_OBJECTS_VISITING_INL_H_ | 5 #ifndef V8_OBJECTS_VISITING_INL_H_ |
| 6 #define V8_OBJECTS_VISITING_INL_H_ | 6 #define V8_OBJECTS_VISITING_INL_H_ |
| 7 | 7 |
| 8 #include "src/heap/array-buffer-tracker.h" | 8 #include "src/heap/array-buffer-tracker.h" |
| 9 #include "src/heap/objects-visiting.h" | 9 #include "src/heap/objects-visiting.h" |
| 10 #include "src/ic/ic-state.h" | 10 #include "src/ic/ic-state.h" |
| (...skipping 45 matching lines...) | |
| 56 | 56 |
| 57 table_.Register( | 57 table_.Register( |
| 58 kVisitSharedFunctionInfo, | 58 kVisitSharedFunctionInfo, |
| 59 &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor, | 59 &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor, |
| 60 int>::Visit); | 60 int>::Visit); |
| 61 | 61 |
| 62 table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString); | 62 table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString); |
| 63 | 63 |
| 64 table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString); | 64 table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString); |
| 65 | 65 |
| 66 table_.Register(kVisitJSFunction, &VisitJSFunction); | 66 // Don't visit code entry. We are using this visitor only during scavenges. |
| | 67 table_.Register( |
| | 68 kVisitJSFunction, |
| | 69 &FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode, |
| | 70 int>::Visit); |
| 67 | 71 |
| 68 table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer); | 72 table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer); |
| 69 | 73 |
| 70 table_.Register(kVisitJSTypedArray, &VisitJSTypedArray); | 74 table_.Register(kVisitJSTypedArray, &VisitJSTypedArray); |
| 71 | 75 |
| 72 table_.Register(kVisitJSDataView, &VisitJSDataView); | 76 table_.Register(kVisitJSDataView, &VisitJSDataView); |
| 73 | 77 |
| 74 table_.Register(kVisitFreeSpace, &VisitFreeSpace); | 78 table_.Register(kVisitFreeSpace, &VisitFreeSpace); |
| 75 | 79 |
| 76 table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit); | 80 table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit); |
| (...skipping 402 matching lines...) | |
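
For context, the `table_.Register(...)` calls above populate a function-pointer table indexed by visitor id, so the scavenger can dispatch on an object's type without virtual calls; the change for `kVisitJSFunction` swaps in a `FlexibleBodyVisitor` that skips the code entry. A minimal self-contained sketch of that dispatch pattern, assuming simplified stand-ins for `VisitorId`, `Map`, and `HeapObject` (these are not the real V8 declarations):

```cpp
#include <cstdio>

// Simplified stand-ins for V8's visitor ids and heap objects.
enum VisitorId { kVisitSeqOneByteString, kVisitJSFunction, kVisitorIdCount };
struct Map { VisitorId visitor_id; };
struct HeapObject { Map* map; };

// A table of static callbacks, indexed by visitor id. Registering a
// different callback for kVisitJSFunction (as the new code does with
// FlexibleBodyVisitor<...>::Visit) changes the behavior without touching
// any dispatch site.
template <typename ReturnType>
class VisitorDispatchTable {
 public:
  typedef ReturnType (*Callback)(Map* map, HeapObject* object);
  void Register(VisitorId id, Callback callback) { callbacks_[id] = callback; }
  ReturnType Visit(Map* map, HeapObject* object) {
    return callbacks_[map->visitor_id](map, object);
  }

 private:
  Callback callbacks_[kVisitorIdCount];
};

static int VisitString(Map*, HeapObject*) { std::puts("string"); return 0; }
static int VisitFunction(Map*, HeapObject*) { std::puts("function"); return 0; }

int main() {
  VisitorDispatchTable<int> table;
  table.Register(kVisitSeqOneByteString, &VisitString);
  table.Register(kVisitJSFunction, &VisitFunction);
  Map fn_map{kVisitJSFunction};
  HeapObject fn{&fn_map};
  return table.Visit(fn.map, &fn);  // dispatches to VisitFunction
}
```
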
| 479 shared->ClearOptimizedCodeMap(); | 483 shared->ClearOptimizedCodeMap(); |
| 480 } | 484 } |
| 481 } | 485 } |
| 482 VisitSharedFunctionInfoStrongCode(heap, object); | 486 VisitSharedFunctionInfoStrongCode(heap, object); |
| 483 } | 487 } |
| 484 | 488 |
| 485 | 489 |
| 486 template <typename StaticVisitor> | 490 template <typename StaticVisitor> |
| 487 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map, | 491 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map, |
| 488 HeapObject* object) { | 492 HeapObject* object) { |
| | 493 typedef FlexibleBodyVisitor<StaticVisitor, |

> Michael Starzinger (2015/11/10 10:40:32): I would vote for keeping delegator methods as ment…
>
> Igor Sheludko (2015/11/10 10:58:11): Done.
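
Assuming the (truncated) suggestion is about wrapping the typedef'd visitors in named methods, a delegator here would be a one-line static method that forwards to the templated visitor, keeping a readable name at the call site. A hypothetical self-contained sketch of that shape, with all types simplified stand-ins for the ones in this patch:

```cpp
#include <cstdio>

struct Map {};
struct HeapObject {};

// Simplified model of a body visitor parameterized by a body descriptor.
template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
struct FlexibleBodyVisitor {
  static ReturnType Visit(Map*, HeapObject* object) {
    BodyDescriptor::IterateBody(object);
    return ReturnType();
  }
};

struct BodyDescriptorWeakCode {
  static void IterateBody(HeapObject*) { std::puts("weak-code body"); }
};

template <typename StaticVisitor>
struct StaticMarkingVisitor {
  // Delegator in the spirit of the review comment: the call site reads
  // VisitJSFunctionWeakCode(...) instead of spelling out the template.
  static void VisitJSFunctionWeakCode(Map* map, HeapObject* object) {
    FlexibleBodyVisitor<StaticVisitor, BodyDescriptorWeakCode, void>::Visit(
        map, object);
  }
};

struct MarkingVisitor : StaticMarkingVisitor<MarkingVisitor> {};

int main() {
  Map map;
  HeapObject object;
  MarkingVisitor::VisitJSFunctionWeakCode(&map, &object);
}
```
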
| | 494 JSFunction::BodyDescriptorStrongCode, |
| | 495 void> JSFunctionStrongCodeBodyVisitor; |
| | 496 |
| | 497 typedef FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode, |
| | 498 void> JSFunctionWeakCodeBodyVisitor; |
| | 499 |
| 489 Heap* heap = map->GetHeap(); | 500 Heap* heap = map->GetHeap(); |
| 490 JSFunction* function = JSFunction::cast(object); | 501 JSFunction* function = JSFunction::cast(object); |
| 491 MarkCompactCollector* collector = heap->mark_compact_collector(); | 502 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 492 if (collector->is_code_flushing_enabled()) { | 503 if (collector->is_code_flushing_enabled()) { |
| 493 if (IsFlushable(heap, function)) { | 504 if (IsFlushable(heap, function)) { |
| 494 // This function's code looks flushable. But we have to postpone | 505 // This function's code looks flushable. But we have to postpone |
| 495 // the decision until we see all functions that point to the same | 506 // the decision until we see all functions that point to the same |
| 496 // SharedFunctionInfo because some of them might be optimized. | 507 // SharedFunctionInfo because some of them might be optimized. |
| 497 // That would also make the non-optimized version of the code | 508 // That would also make the non-optimized version of the code |
| 498 // non-flushable, because it is required for bailing out from | 509 // non-flushable, because it is required for bailing out from |
| 499 // optimized code. | 510 // optimized code. |
| 500 collector->code_flusher()->AddCandidate(function); | 511 collector->code_flusher()->AddCandidate(function); |
| 501 // Visit shared function info immediately to avoid double checking | 512 // Visit shared function info immediately to avoid double checking |
| 502 // of its flushability later. This is just an optimization because | 513 // of its flushability later. This is just an optimization because |
| 503 // the shared function info would eventually be visited. | 514 // the shared function info would eventually be visited. |
| 504 SharedFunctionInfo* shared = function->shared(); | 515 SharedFunctionInfo* shared = function->shared(); |
| 505 if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) { | 516 if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) { |
| 506 StaticVisitor::MarkObject(heap, shared->map()); | 517 StaticVisitor::MarkObject(heap, shared->map()); |
| 507 VisitSharedFunctionInfoWeakCode(heap, shared); | 518 VisitSharedFunctionInfoWeakCode(heap, shared); |
| 508 } | 519 } |
| 509 // Treat the reference to the code object weakly. | 520 // Treat the reference to the code object weakly. |
| 510 VisitJSFunctionWeakCode(heap, object); | 521 JSFunctionWeakCodeBodyVisitor::Visit(map, object); |
| 511 return; | 522 return; |
| 512 } else { | 523 } else { |
| 513 // Visit all unoptimized code objects to prevent flushing them. | 524 // Visit all unoptimized code objects to prevent flushing them. |
| 514 StaticVisitor::MarkObject(heap, function->shared()->code()); | 525 StaticVisitor::MarkObject(heap, function->shared()->code()); |
| 515 if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) { | 526 if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) { |
| 516 MarkInlinedFunctionsCode(heap, function->code()); | 527 MarkInlinedFunctionsCode(heap, function->code()); |
| 517 } | 528 } |
| 518 } | 529 } |
| 519 } | 530 } |
| 520 VisitJSFunctionStrongCode(heap, object); | 531 JSFunctionStrongCodeBodyVisitor::Visit(map, object); |
| 521 } | 532 } |
| 522 | 533 |
| 523 | 534 |
| 524 template <typename StaticVisitor> | 535 template <typename StaticVisitor> |
| 525 void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map, | 536 void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map, |
| 526 HeapObject* object) { | 537 HeapObject* object) { |
| 527 int last_property_offset = | 538 int last_property_offset = |
| 528 JSRegExp::kSize + kPointerSize * map->GetInObjectProperties(); | 539 JSRegExp::kSize + kPointerSize * map->GetInObjectProperties(); |
| 529 StaticVisitor::VisitPointers( | 540 StaticVisitor::VisitPointers( |
| 530 map->GetHeap(), object, | 541 map->GetHeap(), object, |
| (...skipping 262 matching lines...) | |
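
The flushing logic in `VisitJSFunction` above defers the flush/keep decision: every JSFunction that looks flushable is only recorded as a candidate, and the real decision happens after marking, once every function pointing at the same SharedFunctionInfo has been seen. A much-simplified sketch of that two-phase pattern (the `CodeFlusher` internals here are assumptions for illustration, not the real implementation):

```cpp
#include <cstdio>
#include <initializer_list>
#include <unordered_set>
#include <vector>

// Simplified stand-ins: a shared info may be pinned by any of the
// functions that point to it (e.g. because one of them is optimized).
struct SharedInfo { const char* name; };
struct Function { SharedInfo* shared; bool optimized; };

class CodeFlusher {
 public:
  // Phase 1 (during marking): record candidates; pin shared infos whose
  // unoptimized code some optimized function still needs for bailouts.
  void AddCandidate(Function* f) { candidates_.push_back(f); }
  void PinShared(SharedInfo* s) { pinned_.insert(s); }

  // Phase 2 (after marking): flush only candidates whose shared info was
  // never pinned by another function.
  void ProcessCandidates() {
    for (Function* f : candidates_) {
      if (pinned_.count(f->shared) == 0) {
        std::printf("flush code of %s\n", f->shared->name);
      } else {
        std::printf("keep code of %s\n", f->shared->name);
      }
    }
    candidates_.clear();
  }

 private:
  std::vector<Function*> candidates_;
  std::unordered_set<SharedInfo*> pinned_;
};

int main() {
  SharedInfo a{"a"}, b{"b"};
  Function f1{&a, false}, f2{&a, true}, f3{&b, false};
  CodeFlusher flusher;
  for (Function* f : {&f1, &f2, &f3}) {
    if (f->optimized) flusher.PinShared(f->shared);  // "a" cannot be flushed
    else flusher.AddCandidate(f);
  }
  flusher.ProcessCandidates();  // keeps "a", flushes "b"
}
```
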
| 793 SharedFunctionInfo::kOptimizedCodeMapOffset); | 804 SharedFunctionInfo::kOptimizedCodeMapOffset); |
| 794 | 805 |
| 795 Object** start_slot = | 806 Object** start_slot = |
| 796 HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset); | 807 HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset); |
| 797 Object** end_slot = HeapObject::RawField( | 808 Object** end_slot = HeapObject::RawField( |
| 798 object, SharedFunctionInfo::BodyDescriptor::kEndOffset); | 809 object, SharedFunctionInfo::BodyDescriptor::kEndOffset); |
| 799 StaticVisitor::VisitPointers(heap, object, start_slot, end_slot); | 810 StaticVisitor::VisitPointers(heap, object, start_slot, end_slot); |
| 800 } | 811 } |
| 801 | 812 |
| 802 | 813 |
| 803 template <typename StaticVisitor> | |
| 804 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode( | |
| 805 Heap* heap, HeapObject* object) { | |
| 806 Object** start_slot = | |
| 807 HeapObject::RawField(object, JSFunction::kPropertiesOffset); | |
| 808 Object** end_slot = | |
| 809 HeapObject::RawField(object, JSFunction::kCodeEntryOffset); | |
| 810 StaticVisitor::VisitPointers(heap, object, start_slot, end_slot); | |
| 811 | |
| 812 VisitCodeEntry(heap, object, | |
| 813 object->address() + JSFunction::kCodeEntryOffset); | |
| 814 STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize == | |
| 815 JSFunction::kPrototypeOrInitialMapOffset); | |
| 816 | |
| 817 start_slot = | |
| 818 HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset); | |
| 819 end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset); | |
| 820 StaticVisitor::VisitPointers(heap, object, start_slot, end_slot); | |
| 821 } | |
| 822 | |
| 823 | |
| 824 template <typename StaticVisitor> | |
| 825 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode( | |
| 826 Heap* heap, HeapObject* object) { | |
| 827 Object** start_slot = | |
| 828 HeapObject::RawField(object, JSFunction::kPropertiesOffset); | |
| 829 Object** end_slot = | |
| 830 HeapObject::RawField(object, JSFunction::kCodeEntryOffset); | |
| 831 StaticVisitor::VisitPointers(heap, object, start_slot, end_slot); | |
| 832 | |
| 833 // Skip visiting kCodeEntryOffset as it is treated weakly here. | |
| 834 STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize == | |
| 835 JSFunction::kPrototypeOrInitialMapOffset); | |
| 836 | |
| 837 start_slot = | |
| 838 HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset); | |
| 839 end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset); | |
| 840 StaticVisitor::VisitPointers(heap, object, start_slot, end_slot); | |
| 841 } | |
| 842 | |
| 843 | |
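
The two deleted methods above hard-coded the same slot layout: visit `[kPropertiesOffset, kCodeEntryOffset)`, handle or skip the code entry, then visit `[kPrototypeOrInitialMapOffset, kNonWeakFieldsEndOffset)`, with the `STATIC_ASSERT` guaranteeing that the skipped field is exactly one pointer wide. Moving that layout into a body descriptor is what lets `FlexibleBodyVisitor` replace both methods. A simplified sketch of a descriptor-driven visit; the offsets and descriptor shape are illustrative assumptions, not V8's actual declarations:

```cpp
#include <cstdio>

const int kPointerSize = sizeof(void*);

// Illustrative offsets mirroring the deleted code's layout: the code
// entry is a single pointer-sized field between the two visited ranges.
const int kPropertiesOffset = 0 * kPointerSize;
const int kCodeEntryOffset = 2 * kPointerSize;
const int kPrototypeOrInitialMapOffset = kCodeEntryOffset + kPointerSize;
const int kNonWeakFieldsEndOffset = 5 * kPointerSize;

// A weak-code body descriptor: enumerates every strong slot and simply
// omits the code entry, so the visitor treats that reference as weak.
struct BodyDescriptorWeakCode {
  template <typename Visitor>
  static void IterateBody() {
    Visitor::VisitPointers(kPropertiesOffset, kCodeEntryOffset);
    // kCodeEntryOffset intentionally skipped: the code reference is weak.
    Visitor::VisitPointers(kPrototypeOrInitialMapOffset,
                           kNonWeakFieldsEndOffset);
  }
};

struct PrintingVisitor {
  static void VisitPointers(int start, int end) {
    for (int offset = start; offset < end; offset += kPointerSize)
      std::printf("visit slot at offset %d\n", offset);
  }
};

int main() {
  BodyDescriptorWeakCode::IterateBody<PrintingVisitor>();
}
```
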
| 844 void Code::CodeIterateBody(ObjectVisitor* v) { | 814 void Code::CodeIterateBody(ObjectVisitor* v) { |
| 845 int mode_mask = RelocInfo::kCodeTargetMask | | 815 int mode_mask = RelocInfo::kCodeTargetMask | |
| 846 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | | 816 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | |
| 847 RelocInfo::ModeMask(RelocInfo::CELL) | | 817 RelocInfo::ModeMask(RelocInfo::CELL) | |
| 848 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | | 818 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | |
| 849 RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) | | 819 RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) | |
| 850 RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) | | 820 RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) | |
| 851 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) | | 821 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) | |
| 852 RelocInfo::kDebugBreakSlotMask; | 822 RelocInfo::kDebugBreakSlotMask; |
| 853 | 823 |
| (...skipping 44 matching lines...) | |
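
`CodeIterateBody` above builds `mode_mask` by OR-ing one bit per relocation mode, and the iterator below visits only entries whose mode bit is set. The idiom is a single bit per mode (`1 << mode`); a tiny self-contained illustration, with the enum and entry list made up for the example:

```cpp
#include <cstdio>

// Made-up relocation modes; the single-bit-per-mode encoding mirrors how
// mode_mask is assembled from RelocInfo::ModeMask(...) terms above.
enum Mode { CODE_TARGET, EMBEDDED_OBJECT, CELL, RUNTIME_ENTRY };
inline int ModeMask(Mode mode) { return 1 << mode; }

int main() {
  const int mode_mask = ModeMask(CODE_TARGET) | ModeMask(EMBEDDED_OBJECT);
  const Mode entries[] = {CODE_TARGET, CELL, EMBEDDED_OBJECT, RUNTIME_ENTRY};
  for (Mode mode : entries) {
    // Mirrors what the iterator does: skip entries whose mode bit
    // is not in the requested mask.
    if ((mode_mask & ModeMask(mode)) == 0) continue;
    std::printf("visit entry with mode %d\n", mode);
  }
}
```
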
| 898 | 868 |
| 899 RelocIterator it(this, mode_mask); | 869 RelocIterator it(this, mode_mask); |
| 900 for (; !it.done(); it.next()) { | 870 for (; !it.done(); it.next()) { |
| 901 it.rinfo()->template Visit<StaticVisitor>(heap); | 871 it.rinfo()->template Visit<StaticVisitor>(heap); |
| 902 } | 872 } |
| 903 } | 873 } |
| 904 } // namespace internal | 874 } // namespace internal |
| 905 } // namespace v8 | 875 } // namespace v8 |
| 906 | 876 |
| 907 #endif // V8_OBJECTS_VISITING_INL_H_ | 877 #endif // V8_OBJECTS_VISITING_INL_H_ |