| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 23 matching lines...) |
| 34 #include "stub-cache.h" | 34 #include "stub-cache.h" |
| 35 | 35 |
| 36 namespace v8 { | 36 namespace v8 { |
| 37 namespace internal { | 37 namespace internal { |
| 38 | 38 |
| 39 // ------------------------------------------------------------------------- | 39 // ------------------------------------------------------------------------- |
| 40 // MarkCompactCollector | 40 // MarkCompactCollector |
| 41 | 41 |
| 42 bool MarkCompactCollector::force_compaction_ = false; | 42 bool MarkCompactCollector::force_compaction_ = false; |
| 43 bool MarkCompactCollector::compacting_collection_ = false; | 43 bool MarkCompactCollector::compacting_collection_ = false; |
| | 44 bool MarkCompactCollector::compact_on_next_gc_ = false; |
| 44 | 45 |
| 45 int MarkCompactCollector::previous_marked_count_ = 0; | 46 int MarkCompactCollector::previous_marked_count_ = 0; |
| 46 GCTracer* MarkCompactCollector::tracer_ = NULL; | 47 GCTracer* MarkCompactCollector::tracer_ = NULL; |
| 47 | 48 |
| 48 | 49 |
| 49 #ifdef DEBUG | 50 #ifdef DEBUG |
| 50 MarkCompactCollector::CollectorState MarkCompactCollector::state_ = IDLE; | 51 MarkCompactCollector::CollectorState MarkCompactCollector::state_ = IDLE; |
| 51 | 52 |
| 52 // Counters used for debugging the marking phase of mark-compact or mark-sweep | 53 // Counters used for debugging the marking phase of mark-compact or mark-sweep |
| 53 // collection. | 54 // collection. |
| (...skipping 43 matching lines...) |
| 97 ASSERT(previous_marked_count_ == 0); | 98 ASSERT(previous_marked_count_ == 0); |
| 98 tracer_ = NULL; | 99 tracer_ = NULL; |
| 99 } | 100 } |
| 100 | 101 |
| 101 | 102 |
| 102 void MarkCompactCollector::Prepare(GCTracer* tracer) { | 103 void MarkCompactCollector::Prepare(GCTracer* tracer) { |
| 103 // Rather than passing the tracer around we stash it in a static member | 104 // Rather than passing the tracer around we stash it in a static member |
| 104 // variable. | 105 // variable. |
| 105 tracer_ = tracer; | 106 tracer_ = tracer; |
| 106 | 107 |
| 107 static const int kFragmentationLimit = 50; // Percent. | |
| 108 #ifdef DEBUG | 108 #ifdef DEBUG |
| 109 ASSERT(state_ == IDLE); | 109 ASSERT(state_ == IDLE); |
| 110 state_ = PREPARE_GC; | 110 state_ = PREPARE_GC; |
| 111 #endif | 111 #endif |
| 112 ASSERT(!FLAG_always_compact || !FLAG_never_compact); | 112 ASSERT(!FLAG_always_compact || !FLAG_never_compact); |
| 113 | 113 |
| 114 compacting_collection_ = FLAG_always_compact || force_compaction_; | 114 compacting_collection_ = |
| 115 | 115 FLAG_always_compact || force_compaction_ || compact_on_next_gc_; |
| 116 // We compact the old generation if it gets too fragmented (ie, we could | 116 compact_on_next_gc_ = false; |
| 117 // recover an expected amount of space by reclaiming the waste and free | |
| 118 // list blocks). We always compact when the flag --gc-global is true | |
| 119 // because objects do not get promoted out of new space on non-compacting | |
| 120 // GCs. | |
| 121 if (!compacting_collection_) { | |
| 122 int old_gen_recoverable = 0; | |
| 123 int old_gen_used = 0; | |
| 124 | |
| 125 OldSpaces spaces; | |
| 126 while (OldSpace* space = spaces.next()) { | |
| 127 old_gen_recoverable += space->Waste() + space->AvailableFree(); | |
| 128 old_gen_used += space->Size(); | |
| 129 } | |
| 130 int old_gen_fragmentation = | |
| 131 static_cast<int>((old_gen_recoverable * 100.0) / old_gen_used); | |
| 132 if (old_gen_fragmentation > kFragmentationLimit) { | |
| 133 compacting_collection_ = true; | |
| 134 } | |
| 135 } | |
| 136 | 117 |
| 137 if (FLAG_never_compact) compacting_collection_ = false; | 118 if (FLAG_never_compact) compacting_collection_ = false; |
| 138 if (FLAG_collect_maps) CreateBackPointers(); | 119 if (FLAG_collect_maps) CreateBackPointers(); |
| 139 | 120 |
| 140 #ifdef DEBUG | 121 #ifdef DEBUG |
| 141 if (compacting_collection_) { | 122 if (compacting_collection_) { |
| 142 // We will write bookkeeping information to the remembered set area | 123 // We will write bookkeeping information to the remembered set area |
| 143 // starting now. | 124 // starting now. |
| 144 Page::set_rset_state(Page::NOT_IN_USE); | 125 Page::set_rset_state(Page::NOT_IN_USE); |
| 145 } | 126 } |
| (...skipping 20 matching lines...) |
| 166 void MarkCompactCollector::Finish() { | 147 void MarkCompactCollector::Finish() { |
| 167 #ifdef DEBUG | 148 #ifdef DEBUG |
| 168 ASSERT(state_ == SWEEP_SPACES || state_ == REBUILD_RSETS); | 149 ASSERT(state_ == SWEEP_SPACES || state_ == REBUILD_RSETS); |
| 169 state_ = IDLE; | 150 state_ = IDLE; |
| 170 #endif | 151 #endif |
| 171 // The stub cache is not traversed during GC; clear the cache to | 152 // The stub cache is not traversed during GC; clear the cache to |
| 172 // force lazy re-initialization of it. This must be done after the | 153 // force lazy re-initialization of it. This must be done after the |
| 173 // GC, because it relies on the new address of certain old space | 154 // GC, because it relies on the new address of certain old space |
| 174 // objects (empty string, illegal builtin). | 155 // objects (empty string, illegal builtin). |
| 175 StubCache::Clear(); | 156 StubCache::Clear(); |
| | 157 |
| | 158 // If we've just compacted old space there's no reason to check the |
| | 159 // fragmentation limit. Just return. |
| | 160 if (HasCompacted()) return; |
| | 161 |
| | 162 // We compact the old generation on the next GC if it has gotten too |
| | 163 // fragmented (ie, we could recover an expected amount of space by |
| | 164 // reclaiming the waste and free list blocks). |
| | 165 static const int kFragmentationLimit = 15; // Percent. |
| | 166 static const int kFragmentationAllowed = 1 * MB; // Absolute. |
| | 167 int old_gen_recoverable = 0; |
| | 168 int old_gen_used = 0; |
| | 169 |
| | 170 OldSpaces spaces; |
| | 171 while (OldSpace* space = spaces.next()) { |
| | 172 old_gen_recoverable += space->Waste() + space->AvailableFree(); |
| | 173 old_gen_used += space->Size(); |
| | 174 } |
| | 175 |
| | 176 int old_gen_fragmentation = |
| | 177 static_cast<int>((old_gen_recoverable * 100.0) / old_gen_used); |
| | 178 if (old_gen_fragmentation > kFragmentationLimit && |
| | 179 old_gen_recoverable > kFragmentationAllowed) { |
| | 180 compact_on_next_gc_ = true; |
| | 181 } |
| 176 } | 182 } |
| 177 | 183 |
| 178 | 184 |
| 179 // ------------------------------------------------------------------------- | 185 // ------------------------------------------------------------------------- |
| 180 // Phase 1: tracing and marking live objects. | 186 // Phase 1: tracing and marking live objects. |
| 181 // before: all objects are in normal state. | 187 // before: all objects are in normal state. |
| 182 // after: a live object's map pointer is marked as '00'. | 188 // after: a live object's map pointer is marked as '00'. |
| 183 | 189 |
| 184 // Marking all live objects in the heap as part of mark-sweep or mark-compact | 190 // Marking all live objects in the heap as part of mark-sweep or mark-compact |
| 185 // collection. Before marking, all objects are in their normal state. After | 191 // collection. Before marking, all objects are in their normal state. After |
| (...skipping 1648 matching lines...) |
| 1834 | 1840 |
| 1835 void MarkCompactCollector::RebuildRSets() { | 1841 void MarkCompactCollector::RebuildRSets() { |
| 1836 #ifdef DEBUG | 1842 #ifdef DEBUG |
| 1837 ASSERT(state_ == RELOCATE_OBJECTS); | 1843 ASSERT(state_ == RELOCATE_OBJECTS); |
| 1838 state_ = REBUILD_RSETS; | 1844 state_ = REBUILD_RSETS; |
| 1839 #endif | 1845 #endif |
| 1840 Heap::RebuildRSets(); | 1846 Heap::RebuildRSets(); |
| 1841 } | 1847 } |
| 1842 | 1848 |
| 1843 } } // namespace v8::internal | 1849 } } // namespace v8::internal |
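
The diff above moves the fragmentation check out of `Prepare()` and into the end of `Finish()`: after a non-compacting collection, the collector measures how much old-generation space is recoverable and, if it exceeds both a 15% ratio and a 1 MB absolute floor, sets `compact_on_next_gc_` so that the *next* GC compacts. Below is a minimal standalone sketch of that heuristic, assuming a hypothetical `OldSpaceStats` struct and `ShouldCompactOnNextGC()` helper in place of V8's `OldSpaces` iterator; only the thresholds, the `recoverable = Waste() + AvailableFree()` sum, and the percentage formula come from the patch itself.

```cpp
// Sketch of the deferred-compaction heuristic added in this patch.
// OldSpaceStats and ShouldCompactOnNextGC() are illustrative stand-ins,
// not V8 API; the 15% / 1 MB thresholds and the recoverable/used ratio
// mirror the new MarkCompactCollector::Finish() logic.
#include <cstddef>
#include <cstdio>
#include <vector>

namespace {

constexpr size_t kMB = 1024 * 1024;

// Per-space numbers that Finish() reads via space->Waste(),
// space->AvailableFree() and space->Size().
struct OldSpaceStats {
  size_t waste;           // bytes wasted by allocation
  size_t available_free;  // bytes sitting on the free list
  size_t size;            // bytes currently used by the space
};

// Returns true if the old generation is fragmented enough that the next
// GC should be a compacting one: more than 15% of the used space is
// recoverable AND the absolute amount exceeds 1 MB.
bool ShouldCompactOnNextGC(const std::vector<OldSpaceStats>& spaces) {
  constexpr int kFragmentationLimit = 15;            // Percent.
  constexpr size_t kFragmentationAllowed = 1 * kMB;  // Absolute.

  size_t recoverable = 0;
  size_t used = 0;
  for (const OldSpaceStats& space : spaces) {
    recoverable += space.waste + space.available_free;
    used += space.size;
  }
  if (used == 0) return false;

  int fragmentation = static_cast<int>((recoverable * 100.0) / used);
  return fragmentation > kFragmentationLimit &&
         recoverable > kFragmentationAllowed;
}

}  // namespace

int main() {
  // Example: 8 MB used, 2 MB recoverable -> 25% fragmentation, which is
  // above both thresholds, so the next collection would compact.
  std::vector<OldSpaceStats> spaces = {{1 * kMB, 1 * kMB, 8 * kMB}};
  std::printf("compact on next GC: %s\n",
              ShouldCompactOnNextGC(spaces) ? "yes" : "no");
  return 0;
}
```

On the consuming side, the patched `Prepare()` simply folds the flag into the decision (`compacting_collection_ = FLAG_always_compact || force_compaction_ || compact_on_next_gc_;`) and then clears it, so the fragmentation scan runs once per GC cycle, after sweeping, when `Waste()` and `AvailableFree()` reflect the collection that just finished.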