Chromium Code Reviews

Unified Diff: src/heap.cc

Issue 167423004: Add filler at the new space top when forcing scavenge. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Remove PerformScavenge Created 6 years, 10 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 755 matching lines...)
       break;
     }
   }
   mark_compact_collector()->SetFlags(kNoGCFlags);
   new_space_.Shrink();
   UncommitFromSpace();
   incremental_marking()->UncommitMarkingDeque();
 }


+void Heap::EnsureFillerObjectAtTop() {
+  // There may be an allocation memento behind every object in new space.
+  // If we evacuate a not full new space or if we are on the last page of
+  // the new space, then there may be uninitialized memory behind the top
+  // pointer of the new space page. We store a filler object there to
+  // identify the unused space.
+  Address from_top = new_space_.top();
+  Address from_limit = new_space_.limit();
+  if (from_top < from_limit) {
+    int remaining_in_page = static_cast<int>(from_limit - from_top);
+    CreateFillerObjectAt(from_top, remaining_in_page);
+  }
+}
+
+
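The new helper covers the gap between the new-space top pointer and the page limit with a filler, so code that inspects memory directly behind allocated objects (such as the allocation-memento check mentioned in the comment) only ever sees valid object layouts. The snippet below is a minimal standalone sketch of that bump-pointer invariant, not V8 code: PageModel, kFillerTag, ensure_filler_at_top() and the header layout are hypothetical names used for illustration only.

// Standalone illustration (not V8 code) of the filler-at-top invariant: a
// bump-pointer "page" where the gap between top and limit is covered by a
// filler record, so a linear walk from the page start to the limit only
// ever sees records that some allocation or filler actually wrote.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <iostream>
#include <vector>

namespace sketch {

constexpr uint32_t kObjectTag = 0x0b1ec7;  // hypothetical "real object" marker
constexpr uint32_t kFillerTag = 0xf111e4;  // hypothetical filler marker

// Every record starts with a tag and its total size in bytes.
struct Header {
  uint32_t tag;
  uint32_t size;
};

struct PageModel {
  std::vector<uint8_t> bytes;
  size_t top = 0;  // bump pointer: everything below top was written by allocate()

  explicit PageModel(size_t capacity) : bytes(capacity) {}

  size_t limit() const { return bytes.size(); }

  // Bump-allocate a record of `size` bytes (header included).
  bool allocate(uint32_t size) {
    if (top + size > limit()) return false;
    Header h{kObjectTag, size};
    std::memcpy(&bytes[top], &h, sizeof(h));
    top += size;
    return true;
  }

  // Analogous in spirit to Heap::EnsureFillerObjectAtTop(): if top < limit,
  // write a filler record that exactly covers the rest of the page.
  void ensure_filler_at_top() {
    if (top < limit()) {
      Header h{kFillerTag, static_cast<uint32_t>(limit() - top)};
      std::memcpy(&bytes[top], &h, sizeof(h));
    }
  }

  // A walk that assumes every byte up to limit() belongs to some record.
  void walk() const {
    size_t offset = 0;
    while (offset < limit()) {
      Header h;
      std::memcpy(&h, &bytes[offset], sizeof(h));
      assert(h.tag == kObjectTag || h.tag == kFillerTag);
      std::cout << (h.tag == kFillerTag ? "filler " : "object ")
                << h.size << " bytes at offset " << offset << "\n";
      offset += h.size;
    }
  }
};

}  // namespace sketch

int main() {
  sketch::PageModel page(64);
  page.allocate(16);
  page.allocate(24);
  // Without the filler, walk() would step into bytes that no record owns
  // (zero-filled here, genuinely uninitialized in a real heap page) and the
  // tag check would fail.
  page.ensure_filler_at_top();
  page.walk();
  return 0;
}

In the patch itself, CreateFillerObjectAt plays the role of ensure_filler_at_top(): it writes a filler object over the unused tail between top and limit so the subsequent collection can treat the whole page up to the limit as initialized.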
 bool Heap::CollectGarbage(GarbageCollector collector,
                           const char* gc_reason,
                           const char* collector_reason,
                           const v8::GCCallbackFlags gc_callback_flags) {
   // The VM is in the GC state until exiting this function.
   VMState<GC> state(isolate_);

 #ifdef DEBUG
   // Reset the allocation timeout to the GC interval, but make sure to
   // allow at least a few allocations after a collection. The reason
   // for this is that we have a lot of allocation sequences and we
   // assume that a garbage collection will allow the subsequent
   // allocation attempts to go through.
   allocation_timeout_ = Max(6, FLAG_gc_interval);
 #endif

-  // There may be an allocation memento behind every object in new space.
-  // If we evacuate a not full new space or if we are on the last page of
-  // the new space, then there may be uninitialized memory behind the top
-  // pointer of the new space page. We store a filler object there to
-  // identify the unused space.
-  Address from_top = new_space_.top();
-  Address from_limit = new_space_.limit();
-  if (from_top < from_limit) {
-    int remaining_in_page = static_cast<int>(from_limit - from_top);
-    CreateFillerObjectAt(from_top, remaining_in_page);
-  }
+  EnsureFillerObjectAtTop();

   if (collector == SCAVENGER && !incremental_marking()->IsStopped()) {
     if (FLAG_trace_incremental_marking) {
       PrintF("[IncrementalMarking] Scavenge during marking.\n");
     }
   }

   if (collector == MARK_COMPACTOR &&
       !mark_compact_collector()->abort_incremental_marking() &&
       !incremental_marking()->IsStopped() &&
(...skipping 53 matching lines...)
   if (isolate()->concurrent_recompilation_enabled()) {
     // Flush the queued recompilation tasks.
     isolate()->optimizing_compiler_thread()->Flush();
   }
   flush_monomorphic_ics_ = true;
   AgeInlineCaches();
   return ++contexts_disposed_;
 }


-void Heap::PerformScavenge() {
-  GCTracer tracer(this, NULL, NULL);
-  if (incremental_marking()->IsStopped()) {
-    PerformGarbageCollection(SCAVENGER, &tracer);
-  } else {
-    PerformGarbageCollection(MARK_COMPACTOR, &tracer);
-  }
-}
-
-
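For context on what this patch set deletes: PerformScavenge ran a scavenge only when incremental marking was stopped and otherwise fell back to a full mark-compact collection. The standalone sketch below restates just that selection rule outside of V8; GcKind and choose_collector() are hypothetical names used purely for illustration.

// Standalone illustration (not V8 code) of the selection rule encoded by the
// removed Heap::PerformScavenge(): scavenge only while incremental marking is
// stopped, otherwise do a full mark-compact collection.
#include <iostream>

enum class GcKind { kScavenge, kMarkCompact };  // hypothetical

GcKind choose_collector(bool incremental_marking_stopped) {
  return incremental_marking_stopped ? GcKind::kScavenge : GcKind::kMarkCompact;
}

int main() {
  std::cout << (choose_collector(true) == GcKind::kScavenge) << "\n";   // prints 1
  std::cout << (choose_collector(false) == GcKind::kScavenge) << "\n";  // prints 0
  return 0;
}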
 void Heap::MoveElements(FixedArray* array,
                         int dst_index,
                         int src_index,
                         int len) {
   if (len == 0) return;

   ASSERT(array->map() != fixed_cow_array_map());
   Object** dst_objects = array->data_start() + dst_index;
   OS::MemMove(dst_objects,
               array->data_start() + src_index,
(...skipping 6852 matching lines...)
       static_cast<int>(object_sizes_last_time_[index]));
   CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT)
 #undef ADJUST_LAST_TIME_OBJECT_COUNT

   OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
   OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
   ClearObjectStats();
 }

 } } // namespace v8::internal
