| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 406 matching lines...) |
| 417 void>::Visit); | 417 void>::Visit); |
| 418 | 418 |
| 419 table_.Register(kVisitCode, &VisitCode); | 419 table_.Register(kVisitCode, &VisitCode); |
| 420 | 420 |
| 421 table_.Register(kVisitSharedFunctionInfo, | 421 table_.Register(kVisitSharedFunctionInfo, |
| 422 &VisitSharedFunctionInfoAndFlushCode); | 422 &VisitSharedFunctionInfoAndFlushCode); |
| 423 | 423 |
| 424 table_.Register(kVisitJSFunction, | 424 table_.Register(kVisitJSFunction, |
| 425 &VisitJSFunctionAndFlushCode); | 425 &VisitJSFunctionAndFlushCode); |
| 426 | 426 |
| 427 table_.Register(kVisitJSRegExp, |
| 428 &VisitRegExpAndFlushCode); |
| 429 |
| 427 table_.Register(kVisitPropertyCell, | 430 table_.Register(kVisitPropertyCell, |
| 428 &FixedBodyVisitor<StaticMarkingVisitor, | 431 &FixedBodyVisitor<StaticMarkingVisitor, |
| 429 JSGlobalPropertyCell::BodyDescriptor, | 432 JSGlobalPropertyCell::BodyDescriptor, |
| 430 void>::Visit); | 433 void>::Visit); |
| 431 | 434 |
| 432 table_.RegisterSpecializations<DataObjectVisitor, | 435 table_.RegisterSpecializations<DataObjectVisitor, |
| 433 kVisitDataObject, | 436 kVisitDataObject, |
| 434 kVisitDataObjectGeneric>(); | 437 kVisitDataObjectGeneric>(); |
| 435 | 438 |
| 436 table_.RegisterSpecializations<JSObjectVisitor, | 439 table_.RegisterSpecializations<JSObjectVisitor, |
| (...skipping 120 matching lines...) |
| 557 reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>( | 560 reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>( |
| 558 map->heap()); | 561 map->heap()); |
| 559 } | 562 } |
| 560 | 563 |
| 561 // Code flushing support. | 564 // Code flushing support. |
| 562 | 565 |
| 563 // How many collections newly compiled code object will survive before being | 566 // How many collections newly compiled code object will survive before being |
| 564 // flushed. | 567 // flushed. |
| 565 static const int kCodeAgeThreshold = 5; | 568 static const int kCodeAgeThreshold = 5; |
| 566 | 569 |
| 570 static const int kRegExpCodeThreshold = 5; |
| 571 |
| 567 inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) { | 572 inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) { |
| 568 Object* undefined = heap->raw_unchecked_undefined_value(); | 573 Object* undefined = heap->raw_unchecked_undefined_value(); |
| 569 return (info->script() != undefined) && | 574 return (info->script() != undefined) && |
| 570 (reinterpret_cast<Script*>(info->script())->source() != undefined); | 575 (reinterpret_cast<Script*>(info->script())->source() != undefined); |
| 571 } | 576 } |
| 572 | 577 |
| 573 | 578 |
| 574 inline static bool IsCompiled(JSFunction* function) { | 579 inline static bool IsCompiled(JSFunction* function) { |
| 575 return function->unchecked_code() != | 580 return function->unchecked_code() != |
| 576 function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile); | 581 function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile); |
| (...skipping 116 matching lines...) |
| 693 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object); | 698 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object); |
| 694 | 699 |
| 695 if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap(); | 700 if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap(); |
| 696 | 701 |
| 697 FixedBodyVisitor<StaticMarkingVisitor, | 702 FixedBodyVisitor<StaticMarkingVisitor, |
| 698 SharedFunctionInfo::BodyDescriptor, | 703 SharedFunctionInfo::BodyDescriptor, |
| 699 void>::Visit(map, object); | 704 void>::Visit(map, object); |
| 700 } | 705 } |
| 701 | 706 |
| 702 | 707 |
| 708 static void UpdateRegExpCodeAgeAndFlush(Heap* heap, |
| 709 JSRegExp* re, |
| 710 bool is_ascii) { |
| 711 // Make sure that the fixed array is in fact initialized on the RegExp. |
| 712 // We could potentially trigger a GC when initializing the RegExp. |
| 713 if (SafeMap(re->data())->instance_type() != FIXED_ARRAY_TYPE) return; |
| 714 |
| 715 // Make sure this is a RegExp that actually contains code. |
| 716 if (re->TypeTagUnchecked() != JSRegExp::IRREGEXP) return; |
| 717 |
| 718 Object* code = re->DataAtUnchecked(JSRegExp::code_index(is_ascii)); |
| 719 if (!code->IsSmi() && SafeMap(code)->instance_type() == CODE_TYPE) { |
| 720 // Save a copy that can be reinstated if we need the code again. |
| 721 re->SetDataAtUnchecked(JSRegExp::saved_code_index(is_ascii), |
| 722 code, |
| 723 heap); |
| 724 // Set a number in the 0-255 range to guarantee no smi overflow. |
| 725 re->SetDataAtUnchecked(JSRegExp::code_index(is_ascii), |
| 726 Smi::FromInt(heap->sweep_generation() & 0xff), |
| 727 heap); |
| 728 } else if (code->IsSmi()) { |
| 729 int value = Smi::cast(code)->value(); |
| 730 // The regexp has not been compiled yet or there was a compilation error. |
| 731 if (value == JSRegExp::kUninitializedValue || |
| 732 value == JSRegExp::kCompilationErrorValue) { |
| 733 return; |
| 734 } |
| 735 |
| 736 // Check if we should flush now. |
| 737 if (value == ((heap->sweep_generation() - kRegExpCodeThreshold) & 0xff)) { |
| 738 re->SetDataAtUnchecked(JSRegExp::code_index(is_ascii), |
| 739 Smi::FromInt(JSRegExp::kUninitializedValue), |
| 740 heap); |
| 741 re->SetDataAtUnchecked(JSRegExp::saved_code_index(is_ascii), |
| 742 Smi::FromInt(JSRegExp::kUninitializedValue), |
| 743 heap); |
| 744 } |
| 745 } |
| 746 } |
| 747 |
| 748 |
| 749 // Works by setting the current sweep_generation (as a smi) in the |
| 750 // code object place in the data array of the RegExp and keeps a copy |
| 751 // around that can be reinstated if we reuse the RegExp before flushing. |
| 752 // If we did not use the code for kRegExpCodeThreshold mark sweep GCs |
| 753 // we flush the code. |
| 754 static void VisitRegExpAndFlushCode(Map* map, HeapObject* object) { |
| 755 Heap* heap = map->heap(); |
| 756 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 757 if (!collector->is_code_flushing_enabled()) { |
| 758 VisitJSRegExpFields(map, object); |
| 759 return; |
| 760 } |
| 761 JSRegExp* re = reinterpret_cast<JSRegExp*>(object); |
| 762 // Flush code or set age on both ascii and two byte code. |
| 763 UpdateRegExpCodeAgeAndFlush(heap, re, true); |
| 764 UpdateRegExpCodeAgeAndFlush(heap, re, false); |
| 765 // Visit the fields of the RegExp, including the updated FixedArray. |
| 766 VisitJSRegExpFields(map, object); |
| 767 } |
| 768 |
| 769 |
| 703 static void VisitSharedFunctionInfoAndFlushCode(Map* map, | 770 static void VisitSharedFunctionInfoAndFlushCode(Map* map, |
| 704 HeapObject* object) { | 771 HeapObject* object) { |
| 705 MarkCompactCollector* collector = map->heap()->mark_compact_collector(); | 772 MarkCompactCollector* collector = map->heap()->mark_compact_collector(); |
| 706 if (!collector->is_code_flushing_enabled()) { | 773 if (!collector->is_code_flushing_enabled()) { |
| 707 VisitSharedFunctionInfoGeneric(map, object); | 774 VisitSharedFunctionInfoGeneric(map, object); |
| 708 return; | 775 return; |
| 709 } | 776 } |
| 710 VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false); | 777 VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false); |
| 711 } | 778 } |
| 712 | 779 |
| (...skipping 110 matching lines...) |
| 823 } | 890 } |
| 824 | 891 |
| 825 VisitPointers(heap, | 892 VisitPointers(heap, |
| 826 SLOT_ADDR(object, | 893 SLOT_ADDR(object, |
| 827 JSFunction::kCodeEntryOffset + kPointerSize), | 894 JSFunction::kCodeEntryOffset + kPointerSize), |
| 828 SLOT_ADDR(object, JSFunction::kNonWeakFieldsEndOffset)); | 895 SLOT_ADDR(object, JSFunction::kNonWeakFieldsEndOffset)); |
| 829 | 896 |
| 830 // Don't visit the next function list field as it is a weak reference. | 897 // Don't visit the next function list field as it is a weak reference. |
| 831 } | 898 } |
| 832 | 899 |
| 900 static inline void VisitJSRegExpFields(Map* map, |
| 901 HeapObject* object) { |
| 902 int last_property_offset = |
| 903 JSRegExp::kSize + kPointerSize * map->inobject_properties(); |
| 904 VisitPointers(map->heap(), |
| 905 SLOT_ADDR(object, JSRegExp::kPropertiesOffset), |
| 906 SLOT_ADDR(object, last_property_offset)); |
| 907 } |
| 908 |
| 833 | 909 |
| 834 static void VisitSharedFunctionInfoFields(Heap* heap, | 910 static void VisitSharedFunctionInfoFields(Heap* heap, |
| 835 HeapObject* object, | 911 HeapObject* object, |
| 836 bool flush_code_candidate) { | 912 bool flush_code_candidate) { |
| 837 VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kNameOffset)); | 913 VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kNameOffset)); |
| 838 | 914 |
| 839 if (!flush_code_candidate) { | 915 if (!flush_code_candidate) { |
| 840 VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset)); | 916 VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset)); |
| 841 } | 917 } |
| 842 | 918 |
| (...skipping 2294 matching lines...) |
| 3137 } | 3213 } |
| 3138 | 3214 |
| 3139 | 3215 |
| 3140 void MarkCompactCollector::Initialize() { | 3216 void MarkCompactCollector::Initialize() { |
| 3141 StaticPointersToNewGenUpdatingVisitor::Initialize(); | 3217 StaticPointersToNewGenUpdatingVisitor::Initialize(); |
| 3142 StaticMarkingVisitor::Initialize(); | 3218 StaticMarkingVisitor::Initialize(); |
| 3143 } | 3219 } |
| 3144 | 3220 |
| 3145 | 3221 |
| 3146 } } // namespace v8::internal | 3222 } } // namespace v8::internal |
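Reviewer note: the sketch below restates, as plain standalone C++, the aging scheme that the new UpdateRegExpCodeAgeAndFlush and VisitRegExpAndFlushCode hunks introduce. The names RegExpSlots and SweepRegExpCode and the raw pointers are illustrative stand-ins for the JSRegExp data-array slots (code_index / saved_code_index); they are not part of the patch, and only the stamp-then-flush logic is meant to match.

#include <cstdint>

const int kRegExpCodeThreshold = 5;  // Collections of non-use before flushing.
const int kUninitialized = -1;       // Stands in for JSRegExp::kUninitializedValue.

// Hypothetical stand-ins for the two data-array slots the patch touches,
// plus the age stamp it stores in place of the code.
struct RegExpSlots {
  void* code = nullptr;            // Compiled code, or nullptr once it is aged.
  void* saved_code = nullptr;      // Copy kept so reuse can reinstate the code.
  int age_stamp = kUninitialized;  // sweep_generation & 0xff at the last visit.
};

// Called once per mark-sweep GC, mirroring UpdateRegExpCodeAgeAndFlush.
void SweepRegExpCode(RegExpSlots* re, uint32_t sweep_generation) {
  if (re->code != nullptr) {
    // Live code was (re)installed since the last GC: stash a copy and
    // replace it with an age stamp.  Masking to 0-255 keeps it smi-sized.
    re->saved_code = re->code;
    re->code = nullptr;
    re->age_stamp = static_cast<int>(sweep_generation & 0xff);
  } else if (re->age_stamp != kUninitialized &&
             re->age_stamp == static_cast<int>(
                 (sweep_generation - kRegExpCodeThreshold) & 0xff)) {
    // Not used for kRegExpCodeThreshold collections: drop the saved copy too.
    re->saved_code = nullptr;
    re->age_stamp = kUninitialized;
  }
}

The net effect is that a regexp whose compiled code goes unused for kRegExpCodeThreshold consecutive mark-sweep collections has both the code slot and the saved copy reset to the uninitialized value, while any reuse in between reinstates the code and restarts the aging cycle.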