OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 750 matching lines...) |
761 | 761 |
762 #ifdef DEBUG | 762 #ifdef DEBUG |
763 // Reset the allocation timeout to the GC interval, but make sure to | 763 // Reset the allocation timeout to the GC interval, but make sure to |
764 // allow at least a few allocations after a collection. The reason | 764 // allow at least a few allocations after a collection. The reason |
765 // for this is that we have a lot of allocation sequences and we | 765 // for this is that we have a lot of allocation sequences and we |
766 // assume that a garbage collection will allow the subsequent | 766 // assume that a garbage collection will allow the subsequent |
767 // allocation attempts to go through. | 767 // allocation attempts to go through. |
768 allocation_timeout_ = Max(6, FLAG_gc_interval); | 768 allocation_timeout_ = Max(6, FLAG_gc_interval); |
769 #endif | 769 #endif |
770 | 770 |
| 771 // There may be an allocation memento behind every object in new space. |
| 772 // If we evacuate a new space that is not full, or if we are on the last |
| 773 // page of the new space, then there may be uninitialized memory behind |
| 774 // the top pointer of the new space page. We store a filler object there |
| 775 // to identify the unused space. |
| 776 Address from_top = new_space_.top(); |
| 777 Address from_limit = new_space_.limit(); |
| 778 if (from_top < from_limit) { |
| 779 int remaining_in_page = static_cast<int>(from_limit - from_top); |
| 780 CreateFillerObjectAt(from_top, remaining_in_page); |
| 781 } |
| 782 |
771 if (collector == SCAVENGER && !incremental_marking()->IsStopped()) { | 783 if (collector == SCAVENGER && !incremental_marking()->IsStopped()) { |
772 if (FLAG_trace_incremental_marking) { | 784 if (FLAG_trace_incremental_marking) { |
773 PrintF("[IncrementalMarking] Scavenge during marking.\n"); | 785 PrintF("[IncrementalMarking] Scavenge during marking.\n"); |
774 } | 786 } |
775 } | 787 } |
776 | 788 |
777 if (collector == MARK_COMPACTOR && | 789 if (collector == MARK_COMPACTOR && |
778 !mark_compact_collector()->abort_incremental_marking() && | 790 !mark_compact_collector()->abort_incremental_marking() && |
779 !incremental_marking()->IsStopped() && | 791 !incremental_marking()->IsStopped() && |
780 !incremental_marking()->should_hurry() && | 792 !incremental_marking()->should_hurry() && |
(...skipping 6873 matching lines...) |
7654 static_cast<int>(object_sizes_last_time_[index])); | 7666 static_cast<int>(object_sizes_last_time_[index])); |
7655 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 7667 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
7656 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7668 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
7657 | 7669 |
7658 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 7670 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
7659 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 7671 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
7660 ClearObjectStats(); | 7672 ClearObjectStats(); |
7661 } | 7673 } |
7662 | 7674 |
7663 } } // namespace v8::internal | 7675 } } // namespace v8::internal |