Chromium Code Reviews

Diff: src/incremental-marking.cc (shown below as a unified diff: '-' lines removed, '+' lines added, unmarked lines unchanged)

Issue 6970004: Introduce lazy sweeping. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/gc
Patch Set: Created 9 years, 7 months ago
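
At a glance, the patch splits incremental collection into a sweeping phase and a marking phase: Start() only begins marking once the old spaces are fully swept; otherwise it enters the new SWEEPING state, and each Step() spends its allocation budget advancing the sweeper until marking can start. A rough, self-contained sketch of that control flow follows (the state names and transitions mirror the diff below, but every type and helper here is a simplified stand-in, not V8's real classes):

#include <cstdint>

class LazySweepSketch {
 public:
  enum State { STOPPED, SWEEPING, MARKING, COMPLETE };

  void Start() {
    if (SweepingComplete()) {
      StartMarking();     // in the patch: clear mark bits, mark roots
    } else {
      state_ = SWEEPING;  // defer marking until lazy sweeping catches up
    }
  }

  void Step(intptr_t allocated_bytes) {
    intptr_t budget = allocated_bytes * marking_factor_;
    if (state_ == SWEEPING) {
      // Spend the allocation budget finishing the previous cycle's sweeping...
      if (AdvanceSweeper(budget)) StartMarking();  // ...then begin marking.
    } else if (state_ == MARKING) {
      ProcessMarkingDeque(budget);                 // bounded grey-to-black work
      if (DequeEmpty()) state_ = COMPLETE;
    }
  }

 private:
  // Stand-ins for the heap, spaces and marking deque used by the real code.
  bool SweepingComplete() const { return swept_; }
  bool AdvanceSweeper(intptr_t) { swept_ = true; return swept_; }
  void StartMarking() { state_ = MARKING; }
  void ProcessMarkingDeque(intptr_t) {}
  bool DequeEmpty() const { return true; }

  State state_ = STOPPED;
  bool swept_ = false;
  intptr_t marking_factor_ = 1;
};

The real Step() additionally bounds marking work by bytes_to_process and ramps allocation_marking_factor_ up every kAllocationMarkingFactorSpeedupInterval steps, so a long-running cycle marks progressively faster.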
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 88 matching lines...)
class IncrementalMarkingRootMarkingVisitor : public ObjectVisitor {
 public:
  IncrementalMarkingRootMarkingVisitor(Heap* heap,
                                       IncrementalMarking* incremental_marking)
      : heap_(heap),
        incremental_marking_(incremental_marking) {
  }

  void VisitPointer(Object** p) {
    MarkObjectByPointer(p);
  }

  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
  }

 private:
  void MarkObjectByPointer(Object** p) {
    Object* obj = *p;
    if (!obj->IsHeapObject()) return;

    HeapObject* heap_object = HeapObject::cast(obj);
    MarkBit mark_bit = heap_->marking()->MarkBitFrom(heap_object);
    if (mark_bit.data_only()) {
      incremental_marking_->MarkBlackOrKeepGrey(mark_bit);
    } else {
      if (IncrementalMarking::IsWhite(mark_bit)) {
        incremental_marking_->WhiteToGreyAndPush(heap_object, mark_bit);
      }
    }
  }

  Heap* heap_;
  IncrementalMarking* incremental_marking_;
};

-static void ClearMarkbits(PagedSpace* space) {
+void IncrementalMarking::ClearMarkbits(PagedSpace* space) {
  PageIterator it(space);

  while (it.has_next()) {
    Page* p = it.next();
    p->markbits()->Clear();
  }
}


-static void ClearMarkbits() {
+void IncrementalMarking::ClearMarkbits() {
  // TODO(gc): Clear the mark bits in the sweeper.
-  // TODO(gc) ISOLATES MERGE
-  ClearMarkbits(HEAP->old_pointer_space());
-  ClearMarkbits(HEAP->old_data_space());
-  ClearMarkbits(HEAP->cell_space());
-  ClearMarkbits(HEAP->map_space());
-  ClearMarkbits(HEAP->code_space());
+  ClearMarkbits(heap_->old_pointer_space());
+  ClearMarkbits(heap_->old_data_space());
+  ClearMarkbits(heap_->cell_space());
+  ClearMarkbits(heap_->map_space());
+  ClearMarkbits(heap_->code_space());

    Erik Corry 2011/05/09 21:20:11: Who clears the mark bits in the large object space

}


#ifdef DEBUG
-static void VerifyMarkbitsAreClean(PagedSpace* space) {
+void IncrementalMarking::VerifyMarkbitsAreClean(PagedSpace* space) {
  PageIterator it(space);

  while (it.has_next()) {
    Page* p = it.next();
    ASSERT(p->markbits()->IsClean());
  }
}


-static void VerifyMarkbitsAreClean() {
-  // TODO(gc) ISOLATES MERGE
-  VerifyMarkbitsAreClean(HEAP->old_pointer_space());
-  VerifyMarkbitsAreClean(HEAP->old_data_space());
-  VerifyMarkbitsAreClean(HEAP->code_space());
-  VerifyMarkbitsAreClean(HEAP->cell_space());
-  VerifyMarkbitsAreClean(HEAP->map_space());
+void IncrementalMarking::VerifyMarkbitsAreClean() {
+  VerifyMarkbitsAreClean(heap_->old_pointer_space());
+  VerifyMarkbitsAreClean(heap_->old_data_space());
+  VerifyMarkbitsAreClean(heap_->code_space());
+  VerifyMarkbitsAreClean(heap_->cell_space());
+  VerifyMarkbitsAreClean(heap_->map_space());
}
#endif

bool IncrementalMarking::WorthActivating() {
#ifndef DEBUG
  static const intptr_t kActivationThreshold = 8 * MB;
#else
  // TODO(gc) consider setting this to some low level so that some
  // debug tests run with incremental marking and some without.
  static const intptr_t kActivationThreshold = 0;
#endif

  // TODO(gc) ISOLATES MERGE
(...skipping 15 matching lines...)
          CodeStub::RecordWrite) {
        Object* e = stubs->ValueAt(i);
        if (e->IsCode()) {
          RecordWriteStub::Patch(Code::cast(e), enable);
        }
      }
    }
  }
}


static VirtualMemory* marking_deque_memory = NULL;


static void EnsureMarkingDequeIsCommitted() {
  if (marking_deque_memory == NULL) {
    marking_deque_memory = new VirtualMemory(4*MB);
    marking_deque_memory->Commit(
        reinterpret_cast<Address>(marking_deque_memory->address()),
        marking_deque_memory->size(),
        false);  // Not executable.
  }
}

void IncrementalMarking::Start() {
  if (FLAG_trace_incremental_marking) {
    PrintF("[IncrementalMarking] Start\n");
  }
  ASSERT(FLAG_incremental_marking);
  ASSERT(state_ == STOPPED);
-  state_ = MARKING;

  ResetStepCounters();

+  if (heap_->old_pointer_space()->IsSweepingComplete() &&
+      heap_->old_data_space()->IsSweepingComplete()) {
+    StartMarking();
+  } else {
+    PrintF("[IncrementalMarking] Start sweeping.\n");

    antonm 2011/05/10 13:09:38: if (FLAG_trace_incremental_marking)?

+    state_ = SWEEPING;
+  }
+}
+
+
+void IncrementalMarking::StartMarking() {
+  if (FLAG_trace_incremental_marking) {
+    PrintF("[IncrementalMarking] Start marking\n");
+  }
+
+  state_ = MARKING;
+
  PatchIncrementalMarkingRecordWriteStubs(true);

  EnsureMarkingDequeIsCommitted();

  // Initialize marking stack.
  Address addr = static_cast<Address>(marking_deque_memory->address());
  marking_deque_.Initialize(addr,
                            addr + marking_deque_memory->size());

  // Clear markbits.
(...skipping 86 matching lines...)
    state_ = COMPLETE;
    if (FLAG_trace_incremental_marking) {
      double end = OS::TimeCurrentMillis();
      PrintF("[IncrementalMarking] Complete (hurry), spent %d ms.\n",
             static_cast<int>(end - start));
    }
  }
}


+void IncrementalMarking::Abort() {
+  if (IsStopped()) return;
+  heap_->new_space()->LowerInlineAllocationLimit(0);
+  IncrementalMarking::set_should_hurry(false);
+  ResetStepCounters();
+  if (IsMarking()) PatchIncrementalMarkingRecordWriteStubs(false);
+  heap_->isolate()->stack_guard()->Continue(GC_REQUEST);
+  state_ = STOPPED;
+}
+
+
void IncrementalMarking::Finalize() {
  Hurry();
  state_ = STOPPED;
  heap_->new_space()->LowerInlineAllocationLimit(0);
  IncrementalMarking::set_should_hurry(false);
  ResetStepCounters();
  PatchIncrementalMarkingRecordWriteStubs(false);
  ASSERT(marking_deque_.IsEmpty());
-  ISOLATE->stack_guard()->Continue(GC_REQUEST);
+  heap_->isolate()->stack_guard()->Continue(GC_REQUEST);
}


void IncrementalMarking::MarkingComplete() {
  state_ = COMPLETE;
  // We will set the stack guard to request a GC now. This will mean the rest
  // of the GC gets performed as soon as possible (we can't do a GC here in a
  // record-write context). If a few things get allocated between now and then
  // that shouldn't make us do a scavenge and keep being incremental, so we set
  // the should-hurry flag to indicate that there can't be much work left to do.
  set_should_hurry(true);
  if (FLAG_trace_incremental_marking) {
    PrintF("[IncrementalMarking] Complete (normal).\n");
  }
  // TODO(gc) ISOLATES
  ISOLATE->stack_guard()->RequestGC();
}

void IncrementalMarking::Step(intptr_t allocated_bytes) {
-  if (state_ == MARKING &&
-      heap_->gc_state() == Heap::NOT_IN_GC &&
-      FLAG_incremental_marking_steps) {
-    allocated_ += allocated_bytes;
-
-    if (allocated_ >= kAllocatedThreshold) {
-      double start = 0;
-
-      if (FLAG_trace_incremental_marking || FLAG_trace_gc) {
-        start = OS::TimeCurrentMillis();
-      }
-
-      intptr_t bytes_to_process = allocated_ * allocation_marking_factor_;
-      int count = 0;
-
-      Map* filler_map = heap_->one_pointer_filler_map();
-      IncrementalMarkingMarkingVisitor marking_visitor(heap_, this);
-      Marking* marking = heap_->marking();
-      while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
-        HeapObject* obj = marking_deque_.Pop();
-
-        // Explicitly skip one word fillers. Incremental markbit patterns are
-        // correct only for objects that occupy at least two words.
-        Map* map = obj->map();
-        if (map != filler_map) {
-          ASSERT(IsGrey(marking->MarkBitFrom(obj)));
-          int size = obj->SizeFromMap(map);
-          bytes_to_process -= size;
-          MarkBit map_mark_bit = marking->MarkBitFromOldSpace(map);
-          if (IsWhite(map_mark_bit)) WhiteToGreyAndPush(map, map_mark_bit);
-          // TODO(gc) switch to static visitor instead of normal visitor.
-          obj->IterateBody(map->instance_type(), size, &marking_visitor);
-          MarkBit obj_mark_bit = marking->MarkBitFrom(obj);
-          MarkBlack(obj_mark_bit);
-        }
-        count++;
-      }
-      allocated_ = 0;
-      if (marking_deque_.IsEmpty()) MarkingComplete();
-      if (FLAG_trace_incremental_marking || FLAG_trace_gc) {
-        double end = OS::TimeCurrentMillis();
-        steps_took_ += (end - start);
-      }
-
-      steps_count_++;
-
-      if ((steps_count_ % kAllocationMarkingFactorSpeedupInterval) == 0) {
-        allocation_marking_factor_ += kAllocationMarkingFactorSpeedup;
-        allocation_marking_factor_ *= 1.3;
-        if (FLAG_trace_gc) {
-          PrintF("Marking speed increased to %d\n", allocation_marking_factor_);
-        }
-      }
-    }
-  }
+  if (heap_->gc_state() != Heap::NOT_IN_GC) return;
+  if (!FLAG_incremental_marking_steps) return;
+
+  allocated_ += allocated_bytes;
+
+  if (allocated_ < kAllocatedThreshold) return;
+
+  intptr_t bytes_to_process = allocated_ * allocation_marking_factor_;
+
+  double start = 0;
+
+  if (FLAG_trace_incremental_marking || FLAG_trace_gc) {
+    start = OS::TimeCurrentMillis();
+  }
+
+  if (state_ == SWEEPING) {
+    if (heap_->old_pointer_space()->AdvanceSweeper(bytes_to_process) &&
+        heap_->old_data_space()->AdvanceSweeper(bytes_to_process)) {
+      StartMarking();
+    }
+  } else if (state_ == MARKING) {
+    Map* filler_map = heap_->one_pointer_filler_map();
+    IncrementalMarkingMarkingVisitor marking_visitor(heap_, this);
+    Marking* marking = heap_->marking();
+    while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
+      HeapObject* obj = marking_deque_.Pop();
+
+      // Explicitly skip one word fillers. Incremental markbit patterns are
+      // correct only for objects that occupy at least two words.
+      Map* map = obj->map();
+      if (map != filler_map) {
+        ASSERT(IsGrey(marking->MarkBitFrom(obj)));
+        int size = obj->SizeFromMap(map);
+        bytes_to_process -= size;
+        MarkBit map_mark_bit = marking->MarkBitFromOldSpace(map);
+        if (IsWhite(map_mark_bit)) WhiteToGreyAndPush(map, map_mark_bit);
+        // TODO(gc) switch to static visitor instead of normal visitor.
+        obj->IterateBody(map->instance_type(), size, &marking_visitor);
+        MarkBit obj_mark_bit = marking->MarkBitFrom(obj);
+        MarkBlack(obj_mark_bit);
+      }
+    }
+    if (marking_deque_.IsEmpty()) MarkingComplete();
+  }
+
+  allocated_ = 0;
+
+  steps_count_++;
+
+  if ((steps_count_ % kAllocationMarkingFactorSpeedupInterval) == 0) {
+    allocation_marking_factor_ += kAllocationMarkingFactorSpeedup;
+    allocation_marking_factor_ *= 1.3;
+    if (FLAG_trace_gc) {
+      PrintF("Marking speed increased to %d\n", allocation_marking_factor_);
+    }
+  }
+
+  if (FLAG_trace_incremental_marking || FLAG_trace_gc) {
+    double end = OS::TimeCurrentMillis();
+    steps_took_ += (end - start);
+  }
}


} } // namespace v8::internal
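
Regarding antonm's comment on Start() above: the "Start sweeping." message is the only trace output in that function not guarded by FLAG_trace_incremental_marking. A minimal sketch of the guarded form the comment seems to ask for (the flag name and message come from the diff; the free function and plain bool below are illustrative stand-ins, not the patch itself):

#include <cstdio>

// Stand-in for V8's --trace-incremental-marking flag; the real flag is defined
// through the FLAG machinery, not as a plain bool.
static bool FLAG_trace_incremental_marking = false;

// Guarded version of the trace output in the else-branch of Start(), as the
// review comment suggests.
static void TraceStartSweeping() {
  if (FLAG_trace_incremental_marking) {
    std::printf("[IncrementalMarking] Start sweeping.\n");
  }
}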