OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 633 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
644 tracer.set_collector(collector); | 644 tracer.set_collector(collector); |
645 | 645 |
646 { | 646 { |
647 HistogramTimerScope histogram_timer_scope( | 647 HistogramTimerScope histogram_timer_scope( |
648 (collector == SCAVENGER) ? isolate_->counters()->gc_scavenger() | 648 (collector == SCAVENGER) ? isolate_->counters()->gc_scavenger() |
649 : isolate_->counters()->gc_compactor()); | 649 : isolate_->counters()->gc_compactor()); |
650 next_gc_likely_to_collect_more = | 650 next_gc_likely_to_collect_more = |
651 PerformGarbageCollection(collector, &tracer); | 651 PerformGarbageCollection(collector, &tracer); |
652 } | 652 } |
653 | 653 |
654 ASSERT(collector == SCAVENGER || incremental_marking()->IsStopped()); | |
655 | |
656 // This can do debug callbacks and restart incremental marking. | |
657 GarbageCollectionEpilogue(); | 654 GarbageCollectionEpilogue(); |
658 } | 655 } |
659 | 656 |
660 // Start incremental marking for the next cycle. The heap snapshot | 657 // Start incremental marking for the next cycle. The heap snapshot |
661 // generator needs incremental marking to stay off after it aborted. | 658 // generator needs incremental marking to stay off after it aborted. |
662 if (!mark_compact_collector()->abort_incremental_marking() && | 659 if (!mark_compact_collector()->abort_incremental_marking() && |
663 incremental_marking()->IsStopped() && | 660 incremental_marking()->IsStopped() && |
664 incremental_marking()->WorthActivating() && | 661 incremental_marking()->WorthActivating() && |
665 NextGCIsLikelyToBeFull()) { | 662 NextGCIsLikelyToBeFull()) { |
666 incremental_marking()->Start(); | 663 incremental_marking()->Start(); |
(...skipping 286 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
953 } | 950 } |
954 } | 951 } |
955 | 952 |
956 if (new_space_high_promotion_mode_active_ && | 953 if (new_space_high_promotion_mode_active_ && |
957 new_space_.Capacity() > new_space_.InitialCapacity()) { | 954 new_space_.Capacity() > new_space_.InitialCapacity()) { |
958 new_space_.Shrink(); | 955 new_space_.Shrink(); |
959 } | 956 } |
960 | 957 |
961 isolate_->counters()->objs_since_last_young()->Set(0); | 958 isolate_->counters()->objs_since_last_young()->Set(0); |
962 | 959 |
| 960 // Callbacks that fire after this point might trigger nested GCs and |
 | 961 // restart incremental marking, so the assertion can't be moved down. |
| 962 ASSERT(collector == SCAVENGER || incremental_marking()->IsStopped()); |
| 963 |
963 gc_post_processing_depth_++; | 964 gc_post_processing_depth_++; |
964 { DisableAssertNoAllocation allow_allocation; | 965 { DisableAssertNoAllocation allow_allocation; |
965 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); | 966 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); |
966 next_gc_likely_to_collect_more = | 967 next_gc_likely_to_collect_more = |
967 isolate_->global_handles()->PostGarbageCollectionProcessing(collector); | 968 isolate_->global_handles()->PostGarbageCollectionProcessing(collector); |
968 } | 969 } |
969 gc_post_processing_depth_--; | 970 gc_post_processing_depth_--; |
970 | 971 |
971 // Update relocatables. | 972 // Update relocatables. |
972 Relocatable::PostGarbageCollectionProcessing(); | 973 Relocatable::PostGarbageCollectionProcessing(); |
(...skipping 6359 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
7332 static_cast<int>(object_sizes_last_time_[index])); | 7333 static_cast<int>(object_sizes_last_time_[index])); |
7333 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) | 7334 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) |
7334 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7335 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
7335 | 7336 |
7336 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 7337 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
7337 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 7338 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
7338 ClearObjectStats(); | 7339 ClearObjectStats(); |
7339 } | 7340 } |
7340 | 7341 |
7341 } } // namespace v8::internal | 7342 } } // namespace v8::internal |
OLD | NEW |