OLD | NEW |
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
11 // with the distribution. | 11 // with the distribution. |
(...skipping 100 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
112 // the given page. | 112 // the given page. |
113 INLINE(static Address FirstMarkedObject(Page* page, | 113 INLINE(static Address FirstMarkedObject(Page* page, |
114 uint32_t cell_index, | 114 uint32_t cell_index, |
115 uint32_t cell)) { | 115 uint32_t cell)) { |
116 ASSERT(cell != 0); | 116 ASSERT(cell != 0); |
117 uint32_t bit = CompilerIntrinsics::CountTrailingZeros(cell); | 117 uint32_t bit = CompilerIntrinsics::CountTrailingZeros(cell); |
118 return page->MarkbitIndexToAddress( | 118 return page->MarkbitIndexToAddress( |
119 Page::MarkbitsBitmap::CellToIndex(cell_index) + bit); | 119 Page::MarkbitsBitmap::CellToIndex(cell_index) + bit); |
120 } | 120 } |
121 | 121 |
122 INLINE(static Address FirstLiveObject(Address start, | |
123 Address limit)) { | |
124 ASSERT(!Heap::InNewSpace(start)); | |
125 if (start >= limit) return start; | |
126 | |
127 Page* page = Page::FromAddress(start); | |
128 | |
129 // If start is above the linearity boundary then it | 
130 // coincides with the start of a live object. Just | 
131 // return it. | 
132 if (start >= page->linearity_boundary()) return start; | |
133 | |
134 Page::MarkbitsBitmap* bitmap = page->markbits(); | |
135 uint32_t markbit = page->AddressToMarkbitIndex(start); | |
136 | |
137 // If the start address is marked return it. | |
138 if (bitmap->Get(markbit)) return start; | |
139 | |
140 uint32_t* cells = bitmap->cells(); | |
141 uint32_t cell_index = Page::MarkbitsBitmap::IndexToCell(markbit); | |
142 | |
143 // Round limit towards start of the next cell. | |
144 uint32_t last_cell_index = | |
145 Page::MarkbitsBitmap::IndexToCell( | |
146 Page::MarkbitsBitmap::CellAlignIndex( | |
147 page->AddressToMarkbitIndex(limit))); | |
148 | |
149 ASSERT(cell_index < last_cell_index); | |
150 | |
151 while (cell_index < last_cell_index && cells[cell_index] == 0) cell_index++; | |
152 | |
153 if (cell_index == last_cell_index) return limit; | |
154 | |
155 return FirstMarkedObject(page, cell_index, cells[cell_index]); | |
156 } | |
157 | |
158 INLINE(static Address NextLiveObject(HeapObject* obj, | |
159 int size, | |
160 Address end)) { | |
161 ASSERT(!Heap::InNewSpace(obj)); | |
162 Page* page = Page::FromAddress(obj->address()); | |
163 Address watermark = page->linearity_boundary(); | |
164 Address next_addr = obj->address() + size; | |
165 | |
166 if (next_addr >= watermark) return next_addr; | |
167 | |
168 Page::MarkbitsBitmap* bitmap = page->markbits(); | |
169 uint32_t markbit = page->AddressToMarkbitIndex(next_addr); | |
170 | |
171 if (bitmap->Get(markbit)) return next_addr; | |
172 | |
173 uint32_t* cells = bitmap->cells(); | |
174 uint32_t cell_index = Page::MarkbitsBitmap::IndexToCell(markbit); | |
175 | |
176 ASSERT(IsMarked(obj)); | |
177 | |
178 uint32_t bit = Page::MarkbitsBitmap::IndexToBit(markbit); | |
179 uint32_t mask = (~1) << bit; | |
180 if ((cells[cell_index] & mask) != 0) { | |
181 // There are more marked objects in this cell. | |
182 return FirstMarkedObject(page, cell_index, cells[cell_index] & mask); | |
183 } | |
184 | |
185 Address limit = Min(watermark, end); | |
186 | |
187 // Round limit towards start of the next cell. | |
188 uint32_t last_cell_index = | |
189 Page::MarkbitsBitmap::IndexToCell( | |
190 Page::MarkbitsBitmap::CellAlignIndex( | |
191 page->AddressToMarkbitIndex(limit))); | |
192 | |
193 // We expect last_cell_index to be bigger than cell_index because | 
194 // we rounded limit towards the start of the next cell | 
195 // and limit is bigger than the address of the current object. | 
196 ASSERT(cell_index < last_cell_index); | |
197 | |
198 // We skip current cell because it contains no unvisited | |
199 // live objects. | |
200 do { | |
201 cell_index++; | |
202 } while (cell_index < last_cell_index && cells[cell_index] == 0); | |
203 | |
204 // If we reached last_cell return limit | |
205 // not the start of the last_cell because | |
206 // limit can be in the middle of the previous cell. | |
207 if (cell_index == last_cell_index) return limit; | |
208 | |
209 return FirstMarkedObject(page, cell_index, cells[cell_index]); | |
210 } | |
211 | |
212 static void TransferMark(Address old_start, Address new_start); | 122 static void TransferMark(Address old_start, Address new_start); |
213 | 123 |
214 static bool Setup(); | 124 static bool Setup(); |
215 | 125 |
216 static void TearDown(); | 126 static void TearDown(); |
217 | 127 |
218 private: | 128 private: |
219 class BitmapStorageDescriptor { | 129 class BitmapStorageDescriptor { |
220 public: | 130 public: |
221 INLINE(static int CellsCount(Address addr)) { | 131 INLINE(static int CellsCount(Address addr)) { |
(...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
288 typedef void (*EncodingFunction)(HeapObject* old_object, | 198 typedef void (*EncodingFunction)(HeapObject* old_object, |
289 int object_size, | 199 int object_size, |
290 Object* new_object, | 200 Object* new_object, |
291 int* offset); | 201 int* offset); |
292 | 202 |
293 // Type of functions to process non-live objects. | 203 // Type of functions to process non-live objects. |
294 typedef void (*ProcessNonLiveFunction)(HeapObject* object); | 204 typedef void (*ProcessNonLiveFunction)(HeapObject* object); |
295 | 205 |
296 // Set the global force_compaction flag, it must be called before Prepare | 206 // Set the global force_compaction and sweep_precisely flags; must be called before Prepare |
297 // to take effect. | 207 // to take effect. |
298 static void SetForceCompaction(bool value) { | 208 static void SetFlags(int flags) { |
299 force_compaction_ = value; | 209 force_compaction_ = ((flags & Heap::kForceCompactionMask) != 0); |
| 210 sweep_precisely_ = ((flags & Heap::kSweepPreciselyMask) != 0); |
300 } | 211 } |
301 | 212 |
302 | 213 |
303 static void Initialize(); | 214 static void Initialize(); |
304 | 215 |
305 // Prepares for GC by resetting relocation info in old and map spaces and | 216 // Prepares for GC by resetting relocation info in old and map spaces and |
306 // choosing spaces to compact. | 217 // choosing spaces to compact. |
307 static void Prepare(GCTracer* tracer); | 218 static void Prepare(GCTracer* tracer); |
308 | 219 |
309 // Performs a global garbage collection. | 220 // Performs a global garbage collection. |
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
353 RELOCATE_OBJECTS | 264 RELOCATE_OBJECTS |
354 }; | 265 }; |
355 | 266 |
356 // The current stage of the collector. | 267 // The current stage of the collector. |
357 static CollectorState state_; | 268 static CollectorState state_; |
358 #endif | 269 #endif |
359 | 270 |
360 // Global flag that forces a compaction. | 271 // Global flag that forces a compaction. |
361 static bool force_compaction_; | 272 static bool force_compaction_; |
362 | 273 |
| 274 // Global flag that forces sweeping to be precise, so we can traverse the |
| 275 // heap. |
| 276 static bool sweep_precisely_; |
| 277 |
363 // Global flag indicating whether spaces were compacted on the last GC. | 278 // Global flag indicating whether spaces were compacted on the last GC. |
364 static bool compacting_collection_; | 279 static bool compacting_collection_; |
365 | 280 |
366 // Global flag indicating whether spaces will be compacted on the next GC. | 281 // Global flag indicating whether spaces will be compacted on the next GC. |
367 static bool compact_on_next_gc_; | 282 static bool compact_on_next_gc_; |
368 | 283 |
369 // A pointer to the current stack-allocated GC tracer object during a full | 284 // A pointer to the current stack-allocated GC tracer object during a full |
370 // collection (NULL before and after). | 285 // collection (NULL before and after). |
371 static GCTracer* tracer_; | 286 static GCTracer* tracer_; |
372 | 287 |
(...skipping 169 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
542 | 457 |
543 friend class UnmarkObjectVisitor; | 458 friend class UnmarkObjectVisitor; |
544 static void UnmarkObject(HeapObject* obj); | 459 static void UnmarkObject(HeapObject* obj); |
545 #endif | 460 #endif |
546 }; | 461 }; |
547 | 462 |
548 | 463 |
549 } } // namespace v8::internal | 464 } } // namespace v8::internal |
550 | 465 |
551 #endif // V8_MARK_COMPACT_H_ | 466 #endif // V8_MARK_COMPACT_H_ |
OLD | NEW |