Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 80 INLINE(void MarkObjectByPointer(Object** p)) { | 80 INLINE(void MarkObjectByPointer(Object** p)) { |
| 81 Object* obj = *p; | 81 Object* obj = *p; |
| 82 // Since we can be sure that the object is not tagged as a failure we can | 82 // Since we can be sure that the object is not tagged as a failure we can |
| 83 // inline a slightly more efficient tag check here than IsHeapObject() would | 83 // inline a slightly more efficient tag check here than IsHeapObject() would |
| 84 // produce. | 84 // produce. |
| 85 if (obj->NonFailureIsHeapObject()) { | 85 if (obj->NonFailureIsHeapObject()) { |
| 86 HeapObject* heap_object = HeapObject::cast(obj); | 86 HeapObject* heap_object = HeapObject::cast(obj); |
| 87 MarkBit mark_bit = Marking::MarkBitFrom(heap_object); | 87 MarkBit mark_bit = Marking::MarkBitFrom(heap_object); |
| 88 if (mark_bit.data_only()) { | 88 if (mark_bit.data_only()) { |
| 89 incremental_marking_->MarkBlackOrKeepGrey(mark_bit); | 89 incremental_marking_->MarkBlackOrKeepGrey(mark_bit); |
| 90 } else { | 90 } else if (Marking::IsWhite(mark_bit)) { |
| 91 if (Marking::IsWhite(mark_bit)) { | 91 incremental_marking_->WhiteToGreyAndPush(heap_object, mark_bit); |
| 92 incremental_marking_->WhiteToGreyAndPush(heap_object, mark_bit); | |
| 93 } | |
| 94 } | 92 } |
| 95 } | 93 } |
| 96 } | 94 } |
| 97 | 95 |
| 98 Heap* heap_; | 96 Heap* heap_; |
| 99 IncrementalMarking* incremental_marking_; | 97 IncrementalMarking* incremental_marking_; |
| 100 }; | 98 }; |
| 101 | 99 |
| 102 | 100 |
| 103 class IncrementalMarkingRootMarkingVisitor : public ObjectVisitor { | 101 class IncrementalMarkingRootMarkingVisitor : public ObjectVisitor { |
| (...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 146 chunk->scan_on_scavenge()) { | 144 chunk->scan_on_scavenge()) { |
| 147 chunk->ClearFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING); | 145 chunk->ClearFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING); |
| 148 chunk->ClearFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING); | 146 chunk->ClearFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING); |
| 149 } else { | 147 } else { |
| 150 chunk->ClearFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING); | 148 chunk->ClearFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING); |
| 151 chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING); | 149 chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING); |
| 152 } | 150 } |
| 153 } | 151 } |
| 154 | 152 |
| 155 | 153 |
| 156 void IncrementalMarking::SetNewSpacePageFlags(MemoryChunk* chunk, | 154 void IncrementalMarking::SetNewSpacePageFlags(NewSpacePage* chunk, |
| 157 bool is_marking) { | 155 bool is_marking) { |
| 158 chunk->SetFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING); | 156 chunk->SetFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING); |
| 159 if (is_marking) { | 157 if (is_marking) { |
| 160 chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING); | 158 chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING); |
| 161 } else { | 159 } else { |
| 162 chunk->ClearFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING); | 160 chunk->ClearFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING); |
| 163 } | 161 } |
| 162 chunk->SetFlag(MemoryChunk::SCAN_ON_SCAVENGE); | |
|
Vyacheslav Egorov (Chromium)
2011/06/14 11:52:55
I am not sure this should be here.
Erik Corry
2011/06/14 12:23:17
I think it's correct. Scan-on-scavenge means that […comment truncated in extraction]
Lasse Reichstein
2011/06/14 14:13:03
Keeping it.
| |
| 164 } | 163 } |
| 165 | 164 |
| 166 | 165 |
| 167 void IncrementalMarking::DeactivateWriteBarrierForSpace(PagedSpace* space) { | 166 void IncrementalMarking::DeactivateWriteBarrierForSpace(PagedSpace* space) { |
| 168 PageIterator it(space); | 167 PageIterator it(space); |
| 169 while (it.has_next()) { | 168 while (it.has_next()) { |
| 170 Page* p = it.next(); | 169 Page* p = it.next(); |
| 171 SetOldSpacePageFlags(p, false); | 170 SetOldSpacePageFlags(p, false); |
| 172 } | 171 } |
| 173 } | 172 } |
| 174 | 173 |
| 175 | 174 |
| 175 void IncrementalMarking::DeactivateWriteBarrierForSpace(NewSpace* space) { | |
|
Erik Corry
2011/06/14 12:23:17
This should really be called 'DeactivateIncrementalWriteBarrier' […comment truncated in extraction]
Lasse Reichstein
2011/06/14 14:13:03
search-and-replaced.
| |
| 176 NewSpacePageIterator it(space->ToSpaceLow(), space->ToSpaceHigh()); | |
| 177 while (it.has_next()) { | |
| 178 NewSpacePage* p = it.next(); | |
| 179 SetNewSpacePageFlags(p, false); | |
| 180 } | |
| 181 } | |
| 182 | |
| 183 | |
| 176 void IncrementalMarking::DeactivateWriteBarrier() { | 184 void IncrementalMarking::DeactivateWriteBarrier() { |
| 177 DeactivateWriteBarrierForSpace(heap_->old_pointer_space()); | 185 DeactivateWriteBarrierForSpace(heap_->old_pointer_space()); |
| 178 DeactivateWriteBarrierForSpace(heap_->old_data_space()); | 186 DeactivateWriteBarrierForSpace(heap_->old_data_space()); |
| 179 DeactivateWriteBarrierForSpace(heap_->cell_space()); | 187 DeactivateWriteBarrierForSpace(heap_->cell_space()); |
| 180 DeactivateWriteBarrierForSpace(heap_->map_space()); | 188 DeactivateWriteBarrierForSpace(heap_->map_space()); |
| 181 DeactivateWriteBarrierForSpace(heap_->code_space()); | 189 DeactivateWriteBarrierForSpace(heap_->code_space()); |
| 182 | 190 DeactivateWriteBarrierForSpace(heap_->new_space()); |
| 183 SetNewSpacePageFlags(heap_->new_space()->ActivePage(), false); | |
| 184 | 191 |
| 185 LargePage* lop = heap_->lo_space()->first_page(); | 192 LargePage* lop = heap_->lo_space()->first_page(); |
| 186 while (lop->is_valid()) { | 193 while (lop->is_valid()) { |
| 187 SetOldSpacePageFlags(lop, false); | 194 SetOldSpacePageFlags(lop, false); |
| 188 lop = lop->next_page(); | 195 lop = lop->next_page(); |
| 189 } | 196 } |
| 190 } | 197 } |
| 191 | 198 |
| 192 | 199 |
| 193 void IncrementalMarking::ClearMarkbits(PagedSpace* space) { | 200 void IncrementalMarking::ClearMarkbits(PagedSpace* space) { |
| 194 PageIterator it(space); | 201 PageIterator it(space); |
| 195 while (it.has_next()) { | 202 while (it.has_next()) { |
| 196 Page* p = it.next(); | 203 Page* p = it.next(); |
| 197 p->markbits()->Clear(); | 204 p->markbits()->Clear(); |
| 198 SetOldSpacePageFlags(p, true); | 205 SetOldSpacePageFlags(p, true); |
| 199 } | 206 } |
| 200 } | 207 } |
| 201 | 208 |
| 202 | 209 |
| 210 void IncrementalMarking::ClearMarkbits(NewSpace* space) { | |
| 211 NewSpacePageIterator it(space->ToSpaceLow(), space->ToSpaceHigh()); | |
|
Vyacheslav Egorov (Chromium)
2011/06/14 11:52:55
I find iterator interface confusing as new space is […comment truncated in extraction]
Erik Corry
2011/06/14 12:23:17
Agreed.  It would make more sense if it just took […comment truncated in extraction]
Lasse Reichstein
2011/06/14 14:13:03
Actually that's why I want a page iterator to iterate […comment truncated in extraction]
Lasse Reichstein
2011/06/14 14:13:03
Changed to use NewSpacePageIterator(space), which
| |
| 212 while (it.has_next()) { | |
| 213 NewSpacePage* p = it.next(); | |
| 214 p->markbits()->Clear(); | |
| 215 SetNewSpacePageFlags(p, true); | |
| 216 } | |
| 217 } | |
| 218 | |
| 219 | |
| 203 void IncrementalMarking::ClearMarkbits() { | 220 void IncrementalMarking::ClearMarkbits() { |
| 204 // TODO(gc): Clear the mark bits in the sweeper. | 221 // TODO(gc): Clear the mark bits in the sweeper. |
| 205 ClearMarkbits(heap_->old_pointer_space()); | 222 ClearMarkbits(heap_->old_pointer_space()); |
| 206 ClearMarkbits(heap_->old_data_space()); | 223 ClearMarkbits(heap_->old_data_space()); |
| 207 ClearMarkbits(heap_->cell_space()); | 224 ClearMarkbits(heap_->cell_space()); |
| 208 ClearMarkbits(heap_->map_space()); | 225 ClearMarkbits(heap_->map_space()); |
| 209 ClearMarkbits(heap_->code_space()); | 226 ClearMarkbits(heap_->code_space()); |
| 210 heap_->new_space()->ActivePage()->markbits()->Clear(); | 227 ClearMarkbits(heap_->new_space()); |
| 211 | |
| 212 SetNewSpacePageFlags(heap_->new_space()->ActivePage(), true); | |
| 213 | 228 |
| 214 LargePage* lop = heap_->lo_space()->first_page(); | 229 LargePage* lop = heap_->lo_space()->first_page(); |
| 215 while (lop->is_valid()) { | 230 while (lop->is_valid()) { |
| 216 SetOldSpacePageFlags(lop, true); | 231 SetOldSpacePageFlags(lop, true); |
| 217 lop = lop->next_page(); | 232 lop = lop->next_page(); |
| 218 } | 233 } |
| 219 } | 234 } |
| 220 | 235 |
| 221 | 236 |
| 222 #ifdef DEBUG | 237 #ifdef DEBUG |
| 223 void IncrementalMarking::VerifyMarkbitsAreClean(PagedSpace* space) { | 238 void IncrementalMarking::VerifyMarkbitsAreClean(PagedSpace* space) { |
| 224 PageIterator it(space); | 239 PageIterator it(space); |
| 225 | 240 |
| 226 while (it.has_next()) { | 241 while (it.has_next()) { |
| 227 Page* p = it.next(); | 242 Page* p = it.next(); |
| 228 ASSERT(p->markbits()->IsClean()); | 243 ASSERT(p->markbits()->IsClean()); |
| 229 } | 244 } |
| 230 } | 245 } |
| 231 | 246 |
| 232 | 247 |
| 248 void IncrementalMarking::VerifyMarkbitsAreClean(NewSpace* space) { | |
| 249 NewSpacePageIterator it(space->ToSpaceLow(), space->ToSpaceHigh()); | |
| 250 | |
| 251 while (it.has_next()) { | |
| 252 NewSpacePage* p = it.next(); | |
| 253 ASSERT(p->markbits()->IsClean()); | |
| 254 } | |
| 255 } | |
| 256 | |
| 257 | |
| 233 void IncrementalMarking::VerifyMarkbitsAreClean() { | 258 void IncrementalMarking::VerifyMarkbitsAreClean() { |
| 234 VerifyMarkbitsAreClean(heap_->old_pointer_space()); | 259 VerifyMarkbitsAreClean(heap_->old_pointer_space()); |
| 235 VerifyMarkbitsAreClean(heap_->old_data_space()); | 260 VerifyMarkbitsAreClean(heap_->old_data_space()); |
| 236 VerifyMarkbitsAreClean(heap_->code_space()); | 261 VerifyMarkbitsAreClean(heap_->code_space()); |
| 237 VerifyMarkbitsAreClean(heap_->cell_space()); | 262 VerifyMarkbitsAreClean(heap_->cell_space()); |
| 238 VerifyMarkbitsAreClean(heap_->map_space()); | 263 VerifyMarkbitsAreClean(heap_->map_space()); |
| 239 ASSERT(heap_->new_space()->ActivePage()->markbits()->IsClean()); | 264 VerifyMarkbitsAreClean(heap_->new_space()); |
| 240 } | 265 } |
| 241 #endif | 266 #endif |
| 242 | 267 |
| 243 | 268 |
| 244 bool IncrementalMarking::WorthActivating() { | 269 bool IncrementalMarking::WorthActivating() { |
| 245 #ifndef DEBUG | 270 #ifndef DEBUG |
| 246 static const intptr_t kActivationThreshold = 8 * MB; | 271 static const intptr_t kActivationThreshold = 8 * MB; |
| 247 #else | 272 #else |
| 248 // TODO(gc) consider setting this to some low level so that some | 273 // TODO(gc) consider setting this to some low level so that some |
| 249 // debug tests run with incremental marking and some without. | 274 // debug tests run with incremental marking and some without. |
| (...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 341 | 366 |
| 342 // Ready to start incremental marking. | 367 // Ready to start incremental marking. |
| 343 if (FLAG_trace_incremental_marking) { | 368 if (FLAG_trace_incremental_marking) { |
| 344 PrintF("[IncrementalMarking] Running\n"); | 369 PrintF("[IncrementalMarking] Running\n"); |
| 345 } | 370 } |
| 346 } | 371 } |
| 347 | 372 |
| 348 | 373 |
| 349 void IncrementalMarking::PrepareForScavenge() { | 374 void IncrementalMarking::PrepareForScavenge() { |
| 350 if (!IsMarking()) return; | 375 if (!IsMarking()) return; |
| 351 heap_->new_space()->InactivePage()->markbits()->Clear(); | 376 NewSpacePageIterator it(heap_->new_space()->FromSpaceLow(), |
| 377 heap_->new_space()->FromSpaceHigh()); | |
| 378 while (it.has_next()) { | |
| 379 it.next()->markbits()->Clear(); | |
| 380 } | |
| 352 } | 381 } |
| 353 | 382 |
| 354 | 383 |
| 355 void IncrementalMarking::UpdateMarkingDequeAfterScavenge() { | 384 void IncrementalMarking::UpdateMarkingDequeAfterScavenge() { |
| 356 if (!IsMarking()) return; | 385 if (!IsMarking()) return; |
| 357 | 386 |
| 358 intptr_t current = marking_deque_.bottom(); | 387 intptr_t current = marking_deque_.bottom(); |
| 359 intptr_t mask = marking_deque_.mask(); | 388 intptr_t mask = marking_deque_.mask(); |
| 360 intptr_t limit = marking_deque_.top(); | 389 intptr_t limit = marking_deque_.top(); |
| 361 HeapObject** array = marking_deque_.array(); | 390 HeapObject** array = marking_deque_.array(); |
| (...skipping 165 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 527 } | 556 } |
| 528 | 557 |
| 529 if (FLAG_trace_incremental_marking || FLAG_trace_gc) { | 558 if (FLAG_trace_incremental_marking || FLAG_trace_gc) { |
| 530 double end = OS::TimeCurrentMillis(); | 559 double end = OS::TimeCurrentMillis(); |
| 531 steps_took_ += (end - start); | 560 steps_took_ += (end - start); |
| 532 } | 561 } |
| 533 } | 562 } |
| 534 | 563 |
| 535 | 564 |
| 536 } } // namespace v8::internal | 565 } } // namespace v8::internal |
| OLD | NEW |