OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
(...skipping 412 matching lines...)
423 DCHECK(!AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC); | 423 DCHECK(!AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC); |
424 | 424 |
425 if (FLAG_gc_verbose) Print(); | 425 if (FLAG_gc_verbose) Print(); |
426 | 426 |
427 ReportStatisticsBeforeGC(); | 427 ReportStatisticsBeforeGC(); |
428 #endif // DEBUG | 428 #endif // DEBUG |
429 | 429 |
430 store_buffer()->GCPrologue(); | 430 store_buffer()->GCPrologue(); |
431 | 431 |
432 if (isolate()->concurrent_osr_enabled()) { | 432 if (isolate()->concurrent_osr_enabled()) { |
433 isolate()->optimizing_compiler_thread()->AgeBufferedOsrJobs(); | 433 isolate()->optimizing_compile_dispatcher()->AgeBufferedOsrJobs(); |
434 } | 434 } |
435 | 435 |
436 if (new_space_.IsAtMaximumCapacity()) { | 436 if (new_space_.IsAtMaximumCapacity()) { |
437 maximum_size_scavenges_++; | 437 maximum_size_scavenges_++; |
438 } else { | 438 } else { |
439 maximum_size_scavenges_ = 0; | 439 maximum_size_scavenges_ = 0; |
440 } | 440 } |
441 CheckNewSpaceExpansionCriteria(); | 441 CheckNewSpaceExpansionCriteria(); |
442 } | 442 } |
443 | 443 |
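Note: the counter update in the hunk above (lines 436-440) tracks how many consecutive scavenges started with the new space already at maximum capacity, resetting the count whenever a scavenge starts below capacity. A standalone sketch of that counting pattern follows; all names are invented for illustration and this is not V8 code.

#include <cstdio>

// Hypothetical stand-in for the maximum_size_scavenges_ bookkeeping above:
// count consecutive events where a condition held, reset when it did not.
struct ScavengeCounter {
  unsigned consecutive_at_max_capacity = 0;

  void OnScavengeStart(bool new_space_at_max_capacity) {
    if (new_space_at_max_capacity) {
      ++consecutive_at_max_capacity;
    } else {
      consecutive_at_max_capacity = 0;
    }
  }
};

int main() {
  ScavengeCounter counter;
  counter.OnScavengeStart(true);
  counter.OnScavengeStart(true);
  counter.OnScavengeStart(false);  // a scavenge below capacity resets the streak
  std::printf("%u\n", counter.consecutive_at_max_capacity);  // prints 0
  return 0;
}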
(...skipping 316 matching lines...)
760 // handles, but won't collect weakly reachable objects until next | 760 // handles, but won't collect weakly reachable objects until next |
761 // major GC. Therefore if we collect aggressively and weak handle callback | 761 // major GC. Therefore if we collect aggressively and weak handle callback |
762 // has been invoked, we rerun major GC to release objects which become | 762 // has been invoked, we rerun major GC to release objects which become |
763 // garbage. | 763 // garbage. |
764 // Note: as weak callbacks can execute arbitrary code, we cannot | 764 // Note: as weak callbacks can execute arbitrary code, we cannot |
765 // hope that eventually there will be no weak callback invocations. | 765 // hope that eventually there will be no weak callback invocations. |
766 // Therefore stop recollecting after several attempts. | 766 // Therefore stop recollecting after several attempts. |
767 if (isolate()->concurrent_recompilation_enabled()) { | 767 if (isolate()->concurrent_recompilation_enabled()) { |
768 // The optimizing compiler may be unnecessarily holding on to memory. | 768 // The optimizing compiler may be unnecessarily holding on to memory. |
769 DisallowHeapAllocation no_recursive_gc; | 769 DisallowHeapAllocation no_recursive_gc; |
770 isolate()->optimizing_compiler_thread()->Flush(); | 770 isolate()->optimizing_compile_dispatcher()->Flush(); |
771 } | 771 } |
772 isolate()->ClearSerializerData(); | 772 isolate()->ClearSerializerData(); |
773 mark_compact_collector()->SetFlags(kMakeHeapIterableMask | | 773 mark_compact_collector()->SetFlags(kMakeHeapIterableMask | |
774 kReduceMemoryFootprintMask); | 774 kReduceMemoryFootprintMask); |
775 isolate_->compilation_cache()->Clear(); | 775 isolate_->compilation_cache()->Clear(); |
776 const int kMaxNumberOfAttempts = 7; | 776 const int kMaxNumberOfAttempts = 7; |
777 const int kMinNumberOfAttempts = 2; | 777 const int kMinNumberOfAttempts = 2; |
778 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { | 778 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { |
779 if (!CollectGarbage(MARK_COMPACTOR, gc_reason, NULL) && | 779 if (!CollectGarbage(MARK_COMPACTOR, gc_reason, NULL) && |
780 attempt + 1 >= kMinNumberOfAttempts) { | 780 attempt + 1 >= kMinNumberOfAttempts) { |
(...skipping 97 matching lines...)
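The comment block above (lines 760-766) explains why CollectAllAvailableGarbage reruns the mark-compactor: weak handle callbacks run by one major GC can themselves turn more objects into garbage, but since callbacks may keep executing arbitrary code the process might never converge, so the number of attempts is bounded. A standalone sketch of that bounded-retry control flow, mirroring the loop that begins at line 778; CollectOnce is a made-up stand-in for CollectGarbage, which in the real code reports whether a further collection is likely to reclaim more memory.

#include <cstdio>

// Hypothetical stand-in for Heap::CollectGarbage(MARK_COMPACTOR, ...):
// returns true while a further collection is still expected to make progress.
static bool CollectOnce(int attempt) { return attempt < 3; }

int main() {
  const int kMaxNumberOfAttempts = 7;
  const int kMinNumberOfAttempts = 2;
  for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
    std::printf("major GC attempt %d\n", attempt + 1);
    // Stop once a collection reports no further progress, but only after the
    // minimum number of attempts has been made.
    if (!CollectOnce(attempt) && attempt + 1 >= kMinNumberOfAttempts) {
      break;
    }
  }
  return 0;
}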
878 } | 878 } |
879 | 879 |
880 | 880 |
881 int Heap::NotifyContextDisposed(bool dependant_context) { | 881 int Heap::NotifyContextDisposed(bool dependant_context) { |
882 if (!dependant_context) { | 882 if (!dependant_context) { |
883 tracer()->ResetSurvivalEvents(); | 883 tracer()->ResetSurvivalEvents(); |
884 old_generation_size_configured_ = false; | 884 old_generation_size_configured_ = false; |
885 } | 885 } |
886 if (isolate()->concurrent_recompilation_enabled()) { | 886 if (isolate()->concurrent_recompilation_enabled()) { |
887 // Flush the queued recompilation tasks. | 887 // Flush the queued recompilation tasks. |
888 isolate()->optimizing_compiler_thread()->Flush(); | 888 isolate()->optimizing_compile_dispatcher()->Flush(); |
889 } | 889 } |
890 AgeInlineCaches(); | 890 AgeInlineCaches(); |
891 set_retained_maps(ArrayList::cast(empty_fixed_array())); | 891 set_retained_maps(ArrayList::cast(empty_fixed_array())); |
892 tracer()->AddContextDisposalTime(base::OS::TimeCurrentMillis()); | 892 tracer()->AddContextDisposalTime(base::OS::TimeCurrentMillis()); |
893 return ++contexts_disposed_; | 893 return ++contexts_disposed_; |
894 } | 894 } |
895 | 895 |
896 | 896 |
897 void Heap::MoveElements(FixedArray* array, int dst_index, int src_index, | 897 void Heap::MoveElements(FixedArray* array, int dst_index, int src_index, |
898 int len) { | 898 int len) { |
(...skipping 5407 matching lines...)
6306 static_cast<int>(object_sizes_last_time_[index])); | 6306 static_cast<int>(object_sizes_last_time_[index])); |
6307 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 6307 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
6308 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 6308 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
6309 | 6309 |
6310 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 6310 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
6311 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 6311 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
6312 ClearObjectStats(); | 6312 ClearObjectStats(); |
6313 } | 6313 } |
6314 } | 6314 } |
6315 } // namespace v8::internal | 6315 } // namespace v8::internal |