| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 843 matching lines...) |
| 854 // the marking stack. Instead, we mark them as both marked and overflowed. | 854 // the marking stack. Instead, we mark them as both marked and overflowed. |
| 855 // When the stack is in the overflowed state, objects marked as overflowed | 855 // When the stack is in the overflowed state, objects marked as overflowed |
| 856 // have been reached and marked but their children have not been visited yet. | 856 // have been reached and marked but their children have not been visited yet. |
| 857 // After emptying the marking stack, we clear the overflow flag and traverse | 857 // After emptying the marking stack, we clear the overflow flag and traverse |
| 858 // the heap looking for objects marked as overflowed, push them on the stack, | 858 // the heap looking for objects marked as overflowed, push them on the stack, |
| 859 // and continue with marking. This process repeats until all reachable | 859 // and continue with marking. This process repeats until all reachable |
| 860 // objects have been marked. | 860 // objects have been marked. |
| 861 | 861 |
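To make the overflow scheme described in the comment above concrete, here is a small self-contained toy (not V8 code: the Node struct, the capacity, and the helper lambdas are invented for illustration). A bounded stack stands in for the marking stack; objects that do not fit are flagged as overflowed, and a later traversal over all objects pushes them back, repeating until the overflow flag stays clear.

    #include <cstdio>
    #include <vector>

    struct Node {
      bool marked = false;
      bool overflowed = false;
      std::vector<Node*> children;
    };

    int main() {
      // A tiny object graph: root -> {a, b}, a -> {c}.
      Node root, a, b, c;
      root.children = {&a, &b};
      a.children = {&c};
      std::vector<Node*> heap = {&root, &a, &b, &c};

      std::vector<Node*> stack;
      const size_t kCapacity = 1;   // deliberately tiny to force an overflow
      bool stack_overflowed = false;

      // Mark an object and push it, or flag it as overflowed if it does not fit.
      auto mark_and_push = [&](Node* n) {
        if (n->marked) return;
        n->marked = true;
        if (stack.size() < kCapacity) {
          stack.push_back(n);
        } else {
          n->overflowed = true;       // marked, but children not visited yet
          stack_overflowed = true;
        }
      };

      // Drain the stack, marking children transitively.
      auto drain = [&]() {
        while (!stack.empty()) {
          Node* n = stack.back();
          stack.pop_back();
          for (Node* child : n->children) mark_and_push(child);
        }
      };

      mark_and_push(&root);
      drain();
      while (stack_overflowed) {
        stack_overflowed = false;
        // Traverse the heap looking for overflowed objects and push them back.
        for (Node* n : heap) {
          if (!n->overflowed) continue;
          if (stack.size() < kCapacity) {
            n->overflowed = false;
            stack.push_back(n);
          } else {
            stack_overflowed = true;  // still too many; repeat the scan
          }
        }
        drain();
      }

      for (Node* n : heap) std::printf("marked=%d overflowed=%d\n", n->marked, n->overflowed);
      return 0;
    }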
| 862 void CodeFlusher::ProcessJSFunctionCandidates() { | 862 void CodeFlusher::ProcessJSFunctionCandidates() { |
| 863 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile); | 863 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile); |
| | 864 Object* undefined = isolate_->heap()->undefined_value(); |
| 864 | 865 |
| 865 JSFunction* candidate = jsfunction_candidates_head_; | 866 JSFunction* candidate = jsfunction_candidates_head_; |
| 866 JSFunction* next_candidate; | 867 JSFunction* next_candidate; |
| 867 while (candidate != NULL) { | 868 while (candidate != NULL) { |
| 868 next_candidate = GetNextCandidate(candidate); | 869 next_candidate = GetNextCandidate(candidate); |
| | 870 ClearNextCandidate(candidate, undefined); |
| 869 | 871 |
| 870 SharedFunctionInfo* shared = candidate->shared(); | 872 SharedFunctionInfo* shared = candidate->shared(); |
| 871 | 873 |
| 872 Code* code = shared->code(); | 874 Code* code = shared->code(); |
| 873 MarkBit code_mark = Marking::MarkBitFrom(code); | 875 MarkBit code_mark = Marking::MarkBitFrom(code); |
| 874 if (!code_mark.Get()) { | 876 if (!code_mark.Get()) { |
| 875 shared->set_code(lazy_compile); | 877 shared->set_code(lazy_compile); |
| 876 candidate->set_code(lazy_compile); | 878 candidate->set_code(lazy_compile); |
| 877 } else { | 879 } else if (code == lazy_compile) { |
| 878 candidate->set_code(shared->code()); | 880 candidate->set_code(lazy_compile); |
| 879 } | 881 } |
| 880 | 882 |
| 881 // We are in the middle of a GC cycle so the write barrier in the code | 883 // We are in the middle of a GC cycle so the write barrier in the code |
| 882 // setter did not record the slot update and we have to do that manually. | 884 // setter did not record the slot update and we have to do that manually. |
| 883 Address slot = candidate->address() + JSFunction::kCodeEntryOffset; | 885 Address slot = candidate->address() + JSFunction::kCodeEntryOffset; |
| 884 Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot)); | 886 Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot)); |
| 885 isolate_->heap()->mark_compact_collector()-> | 887 isolate_->heap()->mark_compact_collector()-> |
| 886 RecordCodeEntrySlot(slot, target); | 888 RecordCodeEntrySlot(slot, target); |
| 887 | 889 |
| 888 Object** shared_code_slot = | 890 Object** shared_code_slot = |
| 889 HeapObject::RawField(shared, SharedFunctionInfo::kCodeOffset); | 891 HeapObject::RawField(shared, SharedFunctionInfo::kCodeOffset); |
| 890 isolate_->heap()->mark_compact_collector()-> | 892 isolate_->heap()->mark_compact_collector()-> |
| 891 RecordSlot(shared_code_slot, shared_code_slot, *shared_code_slot); | 893 RecordSlot(shared_code_slot, shared_code_slot, *shared_code_slot); |
| 892 | 894 |
| 893 candidate = next_candidate; | 895 candidate = next_candidate; |
| 894 } | 896 } |
| 895 | 897 |
| 896 jsfunction_candidates_head_ = NULL; | 898 jsfunction_candidates_head_ = NULL; |
| 897 } | 899 } |
| 898 | 900 |
| 899 | 901 |
| 900 void CodeFlusher::ProcessSharedFunctionInfoCandidates() { | 902 void CodeFlusher::ProcessSharedFunctionInfoCandidates() { |
| 901 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile); | 903 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile); |
| 902 | 904 |
| 903 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; | 905 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; |
| 904 SharedFunctionInfo* next_candidate; | 906 SharedFunctionInfo* next_candidate; |
| 905 while (candidate != NULL) { | 907 while (candidate != NULL) { |
| 906 next_candidate = GetNextCandidate(candidate); | 908 next_candidate = GetNextCandidate(candidate); |
| 907 SetNextCandidate(candidate, NULL); | 909 ClearNextCandidate(candidate); |
| 908 | 910 |
| 909 Code* code = candidate->code(); | 911 Code* code = candidate->code(); |
| 910 MarkBit code_mark = Marking::MarkBitFrom(code); | 912 MarkBit code_mark = Marking::MarkBitFrom(code); |
| 911 if (!code_mark.Get()) { | 913 if (!code_mark.Get()) { |
| 912 candidate->set_code(lazy_compile); | 914 candidate->set_code(lazy_compile); |
| 913 } | 915 } |
| 914 | 916 |
| 915 Object** code_slot = | 917 Object** code_slot = |
| 916 HeapObject::RawField(candidate, SharedFunctionInfo::kCodeOffset); | 918 HeapObject::RawField(candidate, SharedFunctionInfo::kCodeOffset); |
| 917 isolate_->heap()->mark_compact_collector()-> | 919 isolate_->heap()->mark_compact_collector()-> |
| 918 RecordSlot(code_slot, code_slot, *code_slot); | 920 RecordSlot(code_slot, code_slot, *code_slot); |
| 919 | 921 |
| 920 candidate = next_candidate; | 922 candidate = next_candidate; |
| 921 } | 923 } |
| 922 | 924 |
| 923 shared_function_info_candidates_head_ = NULL; | 925 shared_function_info_candidates_head_ = NULL; |
| 924 } | 926 } |
| 925 | 927 |
| 926 | 928 |
| | 929 void CodeFlusher::EvictCandidate(JSFunction* function) { |
| | 930 ASSERT(!function->next_function_link()->IsUndefined()); |
| | 931 Object* undefined = isolate_->heap()->undefined_value(); |
| | 932 |
| | 933 JSFunction* candidate = jsfunction_candidates_head_; |
| | 934 JSFunction* next_candidate; |
| | 935 if (candidate == function) { |
| | 936 next_candidate = GetNextCandidate(function); |
| | 937 jsfunction_candidates_head_ = next_candidate; |
| | 938 ClearNextCandidate(function, undefined); |
| | 939 } else { |
| | 940 while (candidate != NULL) { |
| | 941 next_candidate = GetNextCandidate(candidate); |
| | 942 |
| | 943 if (next_candidate == function) { |
| | 944 next_candidate = GetNextCandidate(function); |
| | 945 SetNextCandidate(candidate, next_candidate); |
| | 946 ClearNextCandidate(function, undefined); |
| | 947 } |
| | 948 |
| | 949 candidate = next_candidate; |
| | 950 } |
| | 951 } |
| | 952 } |
| | 953 |
| | 954 |
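For readers skimming the new EvictCandidate above: the flushing candidates form an intrusive singly linked list (a head pointer plus a next link stored in each candidate, reset to undefined when an entry is removed), so eviction is the usual head-or-predecessor unlink. Below is a minimal standalone sketch of that pattern with invented types and names rather than the V8 ones:

    #include <cassert>
    #include <cstdio>

    // Invented stand-in for a candidate threaded through an intrusive list.
    struct Candidate {
      int id;
      Candidate* next;   // plays the role of the next-candidate link
    };

    // Unlink 'victim' from the list rooted at *head, mirroring the shape of the
    // EvictCandidate loop: special-case the head, otherwise walk the list and
    // splice around the victim, then clear its link.
    void Evict(Candidate** head, Candidate* victim) {
      if (*head == victim) {
        *head = victim->next;
        victim->next = nullptr;
        return;
      }
      for (Candidate* c = *head; c != nullptr; c = c->next) {
        if (c->next == victim) {
          c->next = victim->next;
          victim->next = nullptr;
          return;
        }
      }
    }

    int main() {
      Candidate c{3, nullptr};
      Candidate b{2, &c};
      Candidate a{1, &b};
      Candidate* head = &a;

      Evict(&head, &b);                      // remove the middle entry
      for (Candidate* p = head; p != nullptr; p = p->next) std::printf("%d\n", p->id);
      assert(b.next == nullptr);             // the evicted entry's link is cleared
      return 0;
    }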
| 927 MarkCompactCollector::~MarkCompactCollector() { | 955 MarkCompactCollector::~MarkCompactCollector() { |
| 928 if (code_flusher_ != NULL) { | 956 if (code_flusher_ != NULL) { |
| 929 delete code_flusher_; | 957 delete code_flusher_; |
| 930 code_flusher_ = NULL; | 958 code_flusher_ = NULL; |
| 931 } | 959 } |
| 932 } | 960 } |
| 933 | 961 |
| 934 | 962 |
| 935 static inline HeapObject* ShortCircuitConsString(Object** p) { | 963 static inline HeapObject* ShortCircuitConsString(Object** p) { |
| 936 // Optimization: If the heap object pointed to by p is a non-symbol | 964 // Optimization: If the heap object pointed to by p is a non-symbol |
| (...skipping 484 matching lines...) |
| 1421 MarkCompactMarkingVisitor::MarkInlinedFunctionsCode(heap(), | 1449 MarkCompactMarkingVisitor::MarkInlinedFunctionsCode(heap(), |
| 1422 frame->LookupCode()); | 1450 frame->LookupCode()); |
| 1423 } | 1451 } |
| 1424 } | 1452 } |
| 1425 } | 1453 } |
| 1426 | 1454 |
| 1427 | 1455 |
| 1428 void MarkCompactCollector::PrepareForCodeFlushing() { | 1456 void MarkCompactCollector::PrepareForCodeFlushing() { |
| 1429 ASSERT(heap() == Isolate::Current()->heap()); | 1457 ASSERT(heap() == Isolate::Current()->heap()); |
| 1430 | 1458 |
| 1431 // TODO(1609) Currently incremental marker does not support code flushing. | 1459 // If code flushing is disabled, there is no need to prepare for it. |
| 1432 if (!FLAG_flush_code || was_marked_incrementally_) { | 1460 if (!is_code_flushing_enabled()) return; |
| 1433 EnableCodeFlushing(false); | |
| 1434 return; | |
| 1435 } | |
| 1436 | |
| 1437 #ifdef ENABLE_DEBUGGER_SUPPORT | |
| 1438 if (heap()->isolate()->debug()->IsLoaded() || | |
| 1439 heap()->isolate()->debug()->has_break_points()) { | |
| 1440 EnableCodeFlushing(false); | |
| 1441 return; | |
| 1442 } | |
| 1443 #endif | |
| 1444 | |
| 1445 EnableCodeFlushing(true); | |
| 1446 | 1461 |
| 1447 // Ensure that empty descriptor array is marked. Method MarkDescriptorArray | 1462 // Ensure that empty descriptor array is marked. Method MarkDescriptorArray |
| 1448 // relies on it being marked before any other descriptor array. | 1463 // relies on it being marked before any other descriptor array. |
| 1449 HeapObject* descriptor_array = heap()->empty_descriptor_array(); | 1464 HeapObject* descriptor_array = heap()->empty_descriptor_array(); |
| 1450 MarkBit descriptor_array_mark = Marking::MarkBitFrom(descriptor_array); | 1465 MarkBit descriptor_array_mark = Marking::MarkBitFrom(descriptor_array); |
| 1451 MarkObject(descriptor_array, descriptor_array_mark); | 1466 MarkObject(descriptor_array, descriptor_array_mark); |
| 1452 | 1467 |
| 1453 // Make sure we are not referencing the code from the stack. | 1468 // Make sure we are not referencing the code from the stack. |
| 1454 ASSERT(this == heap()->mark_compact_collector()); | 1469 ASSERT(this == heap()->mark_compact_collector()); |
| 1455 PrepareThreadForCodeFlushing(heap()->isolate(), | 1470 PrepareThreadForCodeFlushing(heap()->isolate(), |
| (...skipping 540 matching lines...) |
| 1996 MarkCompactWeakObjectRetainer mark_compact_object_retainer; | 2011 MarkCompactWeakObjectRetainer mark_compact_object_retainer; |
| 1997 heap()->ProcessWeakReferences(&mark_compact_object_retainer); | 2012 heap()->ProcessWeakReferences(&mark_compact_object_retainer); |
| 1998 | 2013 |
| 1999 // Remove object groups after marking phase. | 2014 // Remove object groups after marking phase. |
| 2000 heap()->isolate()->global_handles()->RemoveObjectGroups(); | 2015 heap()->isolate()->global_handles()->RemoveObjectGroups(); |
| 2001 heap()->isolate()->global_handles()->RemoveImplicitRefGroups(); | 2016 heap()->isolate()->global_handles()->RemoveImplicitRefGroups(); |
| 2002 | 2017 |
| 2003 // Flush code from collected candidates. | 2018 // Flush code from collected candidates. |
| 2004 if (is_code_flushing_enabled()) { | 2019 if (is_code_flushing_enabled()) { |
| 2005 code_flusher_->ProcessCandidates(); | 2020 code_flusher_->ProcessCandidates(); |
| 2006 // TODO(1609) Currently incremental marker does not support code flushing, | |
| 2007 // we need to disable it before incremental marking steps for next cycle. | |
| 2008 EnableCodeFlushing(false); | |
| 2009 } | 2021 } |
| 2010 | 2022 |
| 2011 if (!FLAG_watch_ic_patching) { | 2023 if (!FLAG_watch_ic_patching) { |
| 2012 // Clean up dead objects from the runtime profiler. | 2024 // Clean up dead objects from the runtime profiler. |
| 2013 heap()->isolate()->runtime_profiler()->RemoveDeadSamples(); | 2025 heap()->isolate()->runtime_profiler()->RemoveDeadSamples(); |
| 2014 } | 2026 } |
| 2015 | 2027 |
| 2016 if (FLAG_track_gc_object_stats) { | 2028 if (FLAG_track_gc_object_stats) { |
| 2017 heap()->CheckpointObjectStats(); | 2029 heap()->CheckpointObjectStats(); |
| 2018 } | 2030 } |
| (...skipping 1734 matching lines...) |
| 3753 while (buffer != NULL) { | 3765 while (buffer != NULL) { |
| 3754 SlotsBuffer* next_buffer = buffer->next(); | 3766 SlotsBuffer* next_buffer = buffer->next(); |
| 3755 DeallocateBuffer(buffer); | 3767 DeallocateBuffer(buffer); |
| 3756 buffer = next_buffer; | 3768 buffer = next_buffer; |
| 3757 } | 3769 } |
| 3758 *buffer_address = NULL; | 3770 *buffer_address = NULL; |
| 3759 } | 3771 } |
| 3760 | 3772 |
| 3761 | 3773 |
| 3762 } } // namespace v8::internal | 3774 } } // namespace v8::internal |