| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 404 matching lines...) |
| 415 void>::Visit); | 415 void>::Visit); |
| 416 | 416 |
| 417 table_.Register(kVisitCode, &VisitCode); | 417 table_.Register(kVisitCode, &VisitCode); |
| 418 | 418 |
| 419 table_.Register(kVisitSharedFunctionInfo, | 419 table_.Register(kVisitSharedFunctionInfo, |
| 420 &VisitSharedFunctionInfoAndFlushCode); | 420 &VisitSharedFunctionInfoAndFlushCode); |
| 421 | 421 |
| 422 table_.Register(kVisitJSFunction, | 422 table_.Register(kVisitJSFunction, |
| 423 &VisitJSFunctionAndFlushCode); | 423 &VisitJSFunctionAndFlushCode); |
| 424 | 424 |
| 425 table_.Register(kVisitJSRegExp, |
| 426 &VisitRegExpAndFlushCode); |
| 427 |
| 425 table_.Register(kVisitPropertyCell, | 428 table_.Register(kVisitPropertyCell, |
| 426 &FixedBodyVisitor<StaticMarkingVisitor, | 429 &FixedBodyVisitor<StaticMarkingVisitor, |
| 427 JSGlobalPropertyCell::BodyDescriptor, | 430 JSGlobalPropertyCell::BodyDescriptor, |
| 428 void>::Visit); | 431 void>::Visit); |
| 429 | 432 |
| 430 table_.RegisterSpecializations<DataObjectVisitor, | 433 table_.RegisterSpecializations<DataObjectVisitor, |
| 431 kVisitDataObject, | 434 kVisitDataObject, |
| 432 kVisitDataObjectGeneric>(); | 435 kVisitDataObjectGeneric>(); |
| 433 | 436 |
| 434 table_.RegisterSpecializations<JSObjectVisitor, | 437 table_.RegisterSpecializations<JSObjectVisitor, |
| (...skipping 120 matching lines...) |
| 555 reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>( | 558 reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>( |
| 556 map->heap()); | 559 map->heap()); |
| 557 } | 560 } |
| 558 | 561 |
| 559 // Code flushing support. | 562 // Code flushing support. |
| 560 | 563 |
| 561 // How many collections a newly compiled code object will survive before being | 564 // How many collections a newly compiled code object will survive before being |
| 562 // flushed. | 565 // flushed. |
| 563 static const int kCodeAgeThreshold = 5; | 566 static const int kCodeAgeThreshold = 5; |
| 564 | 567 |
| 568 static const int kRegExpCodeThreshold = 5;  // Unused regexp code is flushed after this many mark-sweep GCs. |
| 569 |
| 565 inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) { | 570 inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) { |
| 566 Object* undefined = heap->raw_unchecked_undefined_value(); | 571 Object* undefined = heap->raw_unchecked_undefined_value(); |
| 567 return (info->script() != undefined) && | 572 return (info->script() != undefined) && |
| 568 (reinterpret_cast<Script*>(info->script())->source() != undefined); | 573 (reinterpret_cast<Script*>(info->script())->source() != undefined); |
| 569 } | 574 } |
| 570 | 575 |
| 571 | 576 |
| 572 inline static bool IsCompiled(JSFunction* function) { | 577 inline static bool IsCompiled(JSFunction* function) { |
| 573 return function->unchecked_code() != | 578 return function->unchecked_code() != |
| 574 function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile); | 579 function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile); |
| (...skipping 115 matching lines...) |
| 690 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object); | 695 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object); |
| 691 | 696 |
| 692 if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap(); | 697 if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap(); |
| 693 | 698 |
| 694 FixedBodyVisitor<StaticMarkingVisitor, | 699 FixedBodyVisitor<StaticMarkingVisitor, |
| 695 SharedFunctionInfo::BodyDescriptor, | 700 SharedFunctionInfo::BodyDescriptor, |
| 696 void>::Visit(map, object); | 701 void>::Visit(map, object); |
| 697 } | 702 } |
| 698 | 703 |
| 699 | 704 |
| 705 static void UpdateRegExpCodeAgeAndFlush(Heap* heap, |
| 706 JSRegExp* re, |
| 707 bool is_ascii) { |
| 708 // Make sure that the fixed array is in fact initialized on the RegExp. |
| 709 // We could potentially trigger a GC when initializing the RegExp. |
| 710 if (SafeMap(re->data())->instance_type() != FIXED_ARRAY_TYPE) return; |
| 711 |
| 712 // Make sure this is a RegExp that actually contains code. |
| 713 if (re->TypeTagUnchecked() != JSRegExp::IRREGEXP) return; |
| 714 |
| 715 Object* code = re->DataAtUnchecked(JSRegExp::code_index(is_ascii)); |
| 716 if (!code->IsSmi() && SafeMap(code)->instance_type() == CODE_TYPE) { |
| 717 // Save a copy that can be reinstated if we need the code again. |
| 718 re->SetDataAtUnchecked(JSRegExp::saved_code_index(is_ascii), |
| 719 code, |
| 720 heap); |
| 721 // Set a number in the 0-255 range to guarantee no smi overflow. |
| 722 re->SetDataAtUnchecked(JSRegExp::code_index(is_ascii), |
| 723 Smi::FromInt(heap->sweep_generation() & 0xff), |
| 724 heap); |
| 725 } else if (code->IsSmi()) { |
| 726 int value = Smi::cast(code)->value(); |
| 727 // The regexp has not been compiled yet or there was a compilation error. |
| 728 if (value == JSRegExp::kUninitializedValue || |
| 729 value == JSRegExp::kCompilationErrorValue) { |
| 730 return; |
| 731 } |
| 732 |
| 733 // Check if we should flush now. |
| 734 if (value == ((heap->sweep_generation() - kRegExpCodeThreshold) & 0xff)) { |
| 735 re->SetDataAtUnchecked(JSRegExp::code_index(is_ascii), |
| 736 Smi::FromInt(JSRegExp::kUninitializedValue), |
| 737 heap); |
| 738 re->SetDataAtUnchecked(JSRegExp::saved_code_index(is_ascii), |
| 739 Smi::FromInt(JSRegExp::kUninitializedValue), |
| 740 heap); |
| 741 } |
| 742 } |
| 743 } |
| 744 |
| 745 |
| 746 // Works by setting the current sweep_generation (as a smi) in the |
| 747 // code slot of the RegExp's data array, keeping a copy around that |
| 748 // can be reinstated if the RegExp is used again before flushing. |
| 749 // If the code has not been used for kRegExpCodeThreshold mark-sweep |
| 750 // GCs, it is flushed. |
| 751 static void VisitRegExpAndFlushCode(Map* map, HeapObject* object) { |
| 752 Heap* heap = map->heap(); |
| 753 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 754 if (!collector->is_code_flushing_enabled()) { |
| 755 VisitJSRegExpFields(map, object); |
| 756 return; |
| 757 } |
| 758 JSRegExp* re = reinterpret_cast<JSRegExp*>(object); |
| 759 // Flush code or set age on both the ASCII and two-byte code. |
| 760 UpdateRegExpCodeAgeAndFlush(heap, re, true); |
| 761 UpdateRegExpCodeAgeAndFlush(heap, re, false); |
| 762 // Visit the fields of the RegExp, including the updated FixedArray. |
| 763 VisitJSRegExpFields(map, object); |
| 764 } |
| 765 |
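// A minimal, self-contained sketch (not part of this change; names such as
// ShouldFlush and aged_at are invented for illustration) of the 8-bit aging
// check used by UpdateRegExpCodeAgeAndFlush above: the regexp's code slot
// holds the sweep generation (masked to 0-255) at which the code was last
// aged, and the code is flushed once kRegExpCodeThreshold further mark-sweep
// collections pass without the code being reinstated.
#include <cstdio>

static const int kRegExpCodeThreshold = 5;  // Mirrors the constant above.

// True when the stored age (generation & 0xff) lies exactly
// kRegExpCodeThreshold collections behind the current sweep generation.
static bool ShouldFlush(int sweep_generation, int age) {
  return age == ((sweep_generation - kRegExpCodeThreshold) & 0xff);
}

int main() {
  int aged_at = 300;         // Sweep generation when the code was aged.
  int age = aged_at & 0xff;  // 44 is the smi value stored in the code slot.
  for (int gen = aged_at; gen <= aged_at + 6; ++gen) {
    std::printf("generation %d: flush=%d\n", gen, ShouldFlush(gen, age));
  }
  // Only generation 305 (aged_at + kRegExpCodeThreshold) reports flush=1;
  // the & 0xff mask keeps the stored age inside the guaranteed smi range.
  return 0;
}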
| 766 |
| 700 static void VisitSharedFunctionInfoAndFlushCode(Map* map, | 767 static void VisitSharedFunctionInfoAndFlushCode(Map* map, |
| 701 HeapObject* object) { | 768 HeapObject* object) { |
| 702 MarkCompactCollector* collector = map->heap()->mark_compact_collector(); | 769 MarkCompactCollector* collector = map->heap()->mark_compact_collector(); |
| 703 if (!collector->is_code_flushing_enabled()) { | 770 if (!collector->is_code_flushing_enabled()) { |
| 704 VisitSharedFunctionInfoGeneric(map, object); | 771 VisitSharedFunctionInfoGeneric(map, object); |
| 705 return; | 772 return; |
| 706 } | 773 } |
| 707 VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false); | 774 VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false); |
| 708 } | 775 } |
| 709 | 776 |
| (...skipping 110 matching lines...) |
| 820 } | 887 } |
| 821 | 888 |
| 822 VisitPointers(heap, | 889 VisitPointers(heap, |
| 823 SLOT_ADDR(object, | 890 SLOT_ADDR(object, |
| 824 JSFunction::kCodeEntryOffset + kPointerSize), | 891 JSFunction::kCodeEntryOffset + kPointerSize), |
| 825 SLOT_ADDR(object, JSFunction::kNonWeakFieldsEndOffset)); | 892 SLOT_ADDR(object, JSFunction::kNonWeakFieldsEndOffset)); |
| 826 | 893 |
| 827 // Don't visit the next function list field as it is a weak reference. | 894 // Don't visit the next function list field as it is a weak reference. |
| 828 } | 895 } |
| 829 | 896 |
| 897 static inline void VisitJSRegExpFields(Map* map, |
| 898 HeapObject* object) { |
| 899 int last_property_offset = |
| 900 JSRegExp::kSize + kPointerSize * map->inobject_properties(); |
| 901 VisitPointers(map->heap(), |
| 902 SLOT_ADDR(object, JSRegExp::kPropertiesOffset), |
| 903 SLOT_ADDR(object, last_property_offset)); |
| 904 } |
| 905 |
| 830 | 906 |
| 831 static void VisitSharedFunctionInfoFields(Heap* heap, | 907 static void VisitSharedFunctionInfoFields(Heap* heap, |
| 832 HeapObject* object, | 908 HeapObject* object, |
| 833 bool flush_code_candidate) { | 909 bool flush_code_candidate) { |
| 834 VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kNameOffset)); | 910 VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kNameOffset)); |
| 835 | 911 |
| 836 if (!flush_code_candidate) { | 912 if (!flush_code_candidate) { |
| 837 VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset)); | 913 VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset)); |
| 838 } | 914 } |
| 839 | 915 |
| (...skipping 2294 matching lines...) |
| 3134 } | 3210 } |
| 3135 | 3211 |
| 3136 | 3212 |
| 3137 void MarkCompactCollector::Initialize() { | 3213 void MarkCompactCollector::Initialize() { |
| 3138 StaticPointersToNewGenUpdatingVisitor::Initialize(); | 3214 StaticPointersToNewGenUpdatingVisitor::Initialize(); |
| 3139 StaticMarkingVisitor::Initialize(); | 3215 StaticMarkingVisitor::Initialize(); |
| 3140 } | 3216 } |
| 3141 | 3217 |
| 3142 | 3218 |
| 3143 } } // namespace v8::internal | 3219 } } // namespace v8::internal |