Chromium Code Reviews

Unified Diff: src/incremental-marking.cc

Issue 6880010: Merge (7265, 7271] from bleeding_edge to experimental/gc branch... (Closed)
Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: '' Created 9 years, 8 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 16 matching lines...)
 #include "v8.h"

 #include "incremental-marking.h"

 #include "code-stubs.h"

 namespace v8 {
 namespace internal {

-IncrementalMarking::State IncrementalMarking::state_ = STOPPED;
-MarkingStack IncrementalMarking::marking_stack_;
-
-double IncrementalMarking::steps_took_ = 0;
-int IncrementalMarking::steps_count_ = 0;
-bool IncrementalMarking::should_hurry_ = false;
-intptr_t IncrementalMarking::allocation_marking_factor_ = 0;
-
-static intptr_t allocated = 0;
+IncrementalMarking::IncrementalMarking(Heap* heap)
+    : heap_(heap),
+      state_(STOPPED),
+      steps_count_(0),
+      steps_took_(0),
+      should_hurry_(false),
+      allocation_marking_factor_(0),
+      allocated_(0) {
+}
+
+
+void IncrementalMarking::RecordWriteFromCode(HeapObject* obj,
+                                             Object* value,
+                                             Isolate* isolate) {
+  isolate->heap()->incremental_marking()->RecordWrite(obj, value);
+}
+
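Note: the static marker state above becomes per-instance state reached through the owning Heap, and RecordWriteFromCode gives generated code a C++ entry point into the incremental write barrier. The barrier exists because a store into an already-scanned (black) object could otherwise hide an unscanned (white) object from the marker. A minimal sketch of that invariant, using illustrative types rather than V8's:

    #include <vector>

    enum Color { WHITE, GREY, BLACK };
    struct Obj { Color color = WHITE; };

    // Simplified Dijkstra-style insertion barrier: when a black (fully
    // scanned) object is written to point at a white (unscanned) one,
    // re-grey the target and queue it so the marker visits it again.
    void RecordWrite(Obj* host, Obj* value, std::vector<Obj*>* marking_stack) {
      if (host->color == BLACK && value->color == WHITE) {
        value->color = GREY;
        marking_stack->push_back(value);
      }
    }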
 class IncrementalMarkingMarkingVisitor : public ObjectVisitor {
  public:
+  IncrementalMarkingMarkingVisitor(Heap* heap,
+                                   IncrementalMarking* incremental_marking)
+      : heap_(heap),
+        incremental_marking_(incremental_marking) {
+  }
+
   void VisitPointer(Object** p) {
     MarkObjectByPointer(p);
   }

   void VisitPointers(Object** start, Object** end) {
     for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
   }

  private:
   // Mark object pointed to by p.
-  INLINE(static void MarkObjectByPointer(Object** p)) {
+  INLINE(void MarkObjectByPointer(Object** p)) {
     Object* obj = *p;
     // Since we can be sure that the object is not tagged as a failure we can
     // inline a slightly more efficient tag check here than IsHeapObject() would
     // produce.
     if (obj->NonFailureIsHeapObject()) {
       HeapObject* heap_object = HeapObject::cast(obj);
-      MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
+      MarkBit mark_bit = heap_->marking()->MarkBitFrom(heap_object);
       if (mark_bit.data_only()) {
-        IncrementalMarking::MarkBlackOrKeepGrey(mark_bit);
+        incremental_marking_->MarkBlackOrKeepGrey(mark_bit);
       } else {
         if (IncrementalMarking::IsWhite(mark_bit)) {
-          IncrementalMarking::WhiteToGreyAndPush(heap_object, mark_bit);
+          incremental_marking_->WhiteToGreyAndPush(heap_object, mark_bit);
         }
       }
     }
   }
+
+  Heap* heap_;
+  IncrementalMarking* incremental_marking_;
 };


-static IncrementalMarkingMarkingVisitor marking_visitor;
-
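Note: both visitors implement the same tri-color step: data-only objects carry no outgoing pointers and can be blackened immediately, while pointer-bearing objects go white to grey and onto the marking stack until their fields are scanned. A toy version of that transition (illustrative types, not the V8 API):

    #include <vector>

    enum Color { WHITE, GREY, BLACK };
    struct Obj {
      Color color = WHITE;
      bool data_only = false;  // true if the object contains no pointers
    };

    void MarkObjectByPointer(Obj* obj, std::vector<Obj*>* marking_stack) {
      if (obj->data_only) {
        // Nothing inside to scan, so a white object can go straight to
        // black; a grey one is left grey (MarkBlackOrKeepGrey).
        if (obj->color == WHITE) obj->color = BLACK;
      } else if (obj->color == WHITE) {
        // Pointer-bearing objects wait on the marking stack until their
        // fields have been visited (WhiteToGreyAndPush).
        obj->color = GREY;
        marking_stack->push_back(obj);
      }
    }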
 class IncrementalMarkingRootMarkingVisitor : public ObjectVisitor {
  public:
+  IncrementalMarkingRootMarkingVisitor(Heap* heap,
+                                       IncrementalMarking* incremental_marking)
+      : heap_(heap),
+        incremental_marking_(incremental_marking) {
+  }
+
+
   void VisitPointer(Object** p) {
     MarkObjectByPointer(p);
   }

   void VisitPointers(Object** start, Object** end) {
     for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
   }

  private:
   void MarkObjectByPointer(Object** p) {
     Object* obj = *p;
     if (!obj->IsHeapObject()) return;

     HeapObject* heap_object = HeapObject::cast(obj);
-    MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
+    MarkBit mark_bit = heap_->marking()->MarkBitFrom(heap_object);
     if (mark_bit.data_only()) {
-      IncrementalMarking::MarkBlackOrKeepGrey(mark_bit);
+      incremental_marking_->MarkBlackOrKeepGrey(mark_bit);
     } else {
       if (IncrementalMarking::IsWhite(mark_bit)) {
-        IncrementalMarking::WhiteToGreyAndPush(heap_object, mark_bit);
+        incremental_marking_->WhiteToGreyAndPush(heap_object, mark_bit);
       }
     }
   }
+
+  Heap* heap_;
+  IncrementalMarking* incremental_marking_;
 };

 static void ClearMarkbits(PagedSpace* space) {
   PageIterator it(space);

   while (it.has_next()) {
     Page* p = it.next();
     p->markbits()->Clear();
   }
 }


 static void ClearMarkbits() {
   // TODO(gc): Clear the mark bits in the sweeper.
-  ClearMarkbits(Heap::old_pointer_space());
-  ClearMarkbits(Heap::old_data_space());
-  ClearMarkbits(Heap::cell_space());
-  ClearMarkbits(Heap::map_space());
-  ClearMarkbits(Heap::code_space());
+  // TODO(gc) ISOLATES MERGE
+  ClearMarkbits(HEAP->old_pointer_space());
+  ClearMarkbits(HEAP->old_data_space());
+  ClearMarkbits(HEAP->cell_space());
+  ClearMarkbits(HEAP->map_space());
+  ClearMarkbits(HEAP->code_space());
 }


 #ifdef DEBUG
 static void VerifyMarkbitsAreClean(PagedSpace* space) {
   PageIterator it(space);

   while (it.has_next()) {
     Page* p = it.next();
     ASSERT(p->markbits()->IsClean());
   }
 }

 static void VerifyMarkbitsAreClean() {
-  VerifyMarkbitsAreClean(Heap::old_pointer_space());
-  VerifyMarkbitsAreClean(Heap::old_data_space());
-  VerifyMarkbitsAreClean(Heap::code_space());
-  VerifyMarkbitsAreClean(Heap::cell_space());
-  VerifyMarkbitsAreClean(Heap::map_space());
+  // TODO(gc) ISOLATES MERGE
+  VerifyMarkbitsAreClean(HEAP->old_pointer_space());
+  VerifyMarkbitsAreClean(HEAP->old_data_space());
+  VerifyMarkbitsAreClean(HEAP->code_space());
+  VerifyMarkbitsAreClean(HEAP->cell_space());
+  VerifyMarkbitsAreClean(HEAP->map_space());
 }
 #endif

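Note: markbits live in per-page bitmaps, so clearing and verifying them is just a walk over the pages of each space. A minimal stand-in for such a bitmap, assuming a fixed size per page:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Stand-in for a per-page marking bitmap; the size is illustrative.
    struct Bitmap {
      static const size_t kSizeInBytes = 1 << 12;
      uint8_t cells[kSizeInBytes];

      void Clear() { std::memset(cells, 0, kSizeInBytes); }

      bool IsClean() const {
        for (size_t i = 0; i < kSizeInBytes; i++) {
          if (cells[i] != 0) return false;
        }
        return true;
      }
    };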
 bool IncrementalMarking::WorthActivating() {
 #ifndef DEBUG
   static const intptr_t kActivationThreshold = 8 * MB;
 #else
   // TODO(gc) consider setting this to some low level so that some
   // debug tests run with incremental marking and some without.
   static const intptr_t kActivationThreshold = 0;
 #endif

+  // TODO(gc) ISOLATES MERGE
   return FLAG_incremental_marking &&
-      Heap::PromotedSpaceSize() > kActivationThreshold;
+      heap_->PromotedSpaceSize() > kActivationThreshold;
 }


 static void PatchIncrementalMarkingRecordWriteStubs(bool enable) {
-  NumberDictionary* stubs = Heap::code_stubs();
+  NumberDictionary* stubs = HEAP->code_stubs();

   int capacity = stubs->Capacity();
   for (int i = 0; i < capacity; i++) {
     Object* k = stubs->KeyAt(i);
     if (stubs->IsKey(k)) {
       uint32_t key = NumberToUint32(k);

       if (CodeStub::MajorKeyFromKey(key) ==
           CodeStub::RecordWrite) {
         Object* e = stubs->ValueAt(i);
(...skipping 31 matching lines...)
   PatchIncrementalMarkingRecordWriteStubs(true);

   EnsureMarkingStackIsCommitted();

   // Initialize marking stack.
   Address addr = static_cast<Address>(marking_stack_memory->address());
   marking_stack_.Initialize(addr,
                             addr + marking_stack_memory->size());

   // Clear markbits.
-  Address new_space_low = Heap::new_space()->ToSpaceLow();
-  Address new_space_high = Heap::new_space()->ToSpaceHigh();
-  Marking::ClearRange(new_space_low,
-                      static_cast<int>(new_space_high - new_space_low));
+  Address new_space_low = heap_->new_space()->ToSpaceLow();
+  Address new_space_high = heap_->new_space()->ToSpaceHigh();
+  heap_->marking()->ClearRange(
+      new_space_low, static_cast<int>(new_space_high - new_space_low));

   ClearMarkbits();

 #ifdef DEBUG
   VerifyMarkbitsAreClean();
 #endif

-  Heap::new_space()->LowerInlineAllocationLimit(kAllocatedThreshold);
+  heap_->new_space()->LowerInlineAllocationLimit(kAllocatedThreshold);

   // Mark strong roots grey.
-  IncrementalMarkingRootMarkingVisitor visitor;
-  Heap::IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
+  IncrementalMarkingRootMarkingVisitor visitor(heap_, this);
+  heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);

   // Ready to start incremental marking.
   if (FLAG_trace_incremental_marking) {
     PrintF("[IncrementalMarking] Running\n");
   }
 }

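Note: LowerInlineAllocationLimit is what drives the step cadence. The new-space limit is lowered artificially so the inline allocation fast path fails roughly every kAllocatedThreshold bytes, and the resulting slow-path call reports the bytes allocated since the last step to Step(). A toy bump allocator sketching that mechanism (assumed names and sizes, not V8's):

    #include <cstddef>
    #include <cstdio>

    struct ToyNewSpace {
      static const size_t kCapacity = 1 << 20;
      static const size_t kStepThreshold = 1 << 10;  // stands in for kAllocatedThreshold
      char buffer[kCapacity];
      size_t top = 0;
      size_t limit = kStepThreshold;  // artificially lowered limit
      size_t since_last_step = 0;

      void* Allocate(size_t size) {
        if (top + size > kCapacity) return nullptr;  // a real heap would GC here
        if (top + size > limit) {
          // Slow path: give the marker a step, then re-arm the limit.
          Step(since_last_step + size);
          since_last_step = 0;
          limit = top + size + kStepThreshold;
          if (limit > kCapacity) limit = kCapacity;
        } else {
          since_last_step += size;
        }
        void* result = buffer + top;
        top += size;
        return result;
      }

      void Step(size_t allocated_bytes) {
        std::printf("incremental marking step after %zu bytes\n", allocated_bytes);
      }
    };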
 void IncrementalMarking::PrepareForScavenge() {
   if (IsStopped()) return;

-  Address new_space_low = Heap::new_space()->FromSpaceLow();
-  Address new_space_high = Heap::new_space()->FromSpaceHigh();
-  Marking::ClearRange(new_space_low,
-                      static_cast<int>(new_space_high - new_space_low));
+  Address new_space_low = heap_->new_space()->FromSpaceLow();
+  Address new_space_high = heap_->new_space()->FromSpaceHigh();
+  heap_->marking()->ClearRange(
+      new_space_low, static_cast<int>(new_space_high - new_space_low));
 }

 void IncrementalMarking::UpdateMarkingStackAfterScavenge() {
   if (IsStopped()) return;

   HeapObject** current = marking_stack_.low();
   HeapObject** top = marking_stack_.top();
   HeapObject** new_top = current;

   while (current < top) {
     HeapObject* obj = *current++;
-    if (Heap::InNewSpace(obj)) {
+    if (heap_->InNewSpace(obj)) {
       MapWord map_word = obj->map_word();
       if (map_word.IsForwardingAddress()) {
         HeapObject* dest = map_word.ToForwardingAddress();
-        WhiteToGrey(dest, Marking::MarkBitFrom(dest));
+        WhiteToGrey(dest, heap_->marking()->MarkBitFrom(dest));
         *new_top++ = dest;
         ASSERT(Color(obj) == Color(dest));
       }
     } else {
       *new_top++ = obj;
     }
   }

   marking_stack_.set_top(new_top);
 }

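Note: a scavenge moves new-space survivors, so grey entries recorded on the marking stack can become stale pointers. The pass above rewrites moved entries to their forwarding addresses, drops entries whose objects died, and compacts the stack in place. A toy version of the same fix-up (illustrative types):

    #include <cstddef>
    #include <vector>

    struct Obj {
      Obj* forwarding = nullptr;  // set by the scavenger if the object moved
      bool in_new_space = false;
    };

    void UpdateMarkingStackAfterScavenge(std::vector<Obj*>* stack) {
      size_t new_top = 0;
      for (size_t i = 0; i < stack->size(); i++) {
        Obj* obj = (*stack)[i];
        if (obj->in_new_space) {
          // Survivors carry a forwarding pointer; dead objects are dropped.
          if (obj->forwarding != nullptr) (*stack)[new_top++] = obj->forwarding;
        } else {
          (*stack)[new_top++] = obj;  // old-space entries stay as they are
        }
      }
      stack->resize(new_top);
    }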
 void IncrementalMarking::Hurry() {
   if (state() == MARKING) {
     double start = 0.0;
     if (FLAG_trace_incremental_marking) {
       PrintF("[IncrementalMarking] Hurry\n");
       start = OS::TimeCurrentMillis();
     }
     // TODO(gc) hurry can mark objects it encounters black as mutator
     // was stopped.
-    Map* filler_map = Heap::one_pointer_filler_map();
+    Map* filler_map = heap_->one_pointer_filler_map();
+    IncrementalMarkingMarkingVisitor marking_visitor(heap_, this);
     while (!marking_stack_.is_empty()) {
       HeapObject* obj = marking_stack_.Pop();

       // Explicitly skip one word fillers. Incremental markbit patterns are
       // correct only for objects that occupy at least two words.
       if (obj->map() != filler_map) {
         obj->Iterate(&marking_visitor);
-        MarkBit mark_bit = Marking::MarkBitFrom(obj);
+        MarkBit mark_bit = heap_->marking()->MarkBitFrom(obj);
         MarkBlack(mark_bit);
       }
     }
     state_ = COMPLETE;
     if (FLAG_trace_incremental_marking) {
       double end = OS::TimeCurrentMillis();
       PrintF("[IncrementalMarking] Complete (hurry), spent %d ms.\n",
              static_cast<int>(end - start));
     }
   }
(...skipping 14 matching lines...)
   state_ = COMPLETE;
   // We will set the stack guard to request a GC now. This will mean the rest
   // of the GC gets performed as soon as possible (we can't do a GC here in a
   // record-write context). If a few things get allocated between now and then
   // that shouldn't make us do a scavenge and keep being incremental, so we set
   // the should-hurry flag to indicate that there can't be much work left to do.
   set_should_hurry(true);
   if (FLAG_trace_incremental_marking) {
     PrintF("[IncrementalMarking] Complete (normal).\n");
   }
-  StackGuard::RequestGC();
+  // TODO(gc) ISOLATES
+  ISOLATE->stack_guard()->RequestGC();
 }

 void IncrementalMarking::Step(intptr_t allocated_bytes) {
   if (state_ == MARKING &&
-      Heap::gc_state() == Heap::NOT_IN_GC &&
+      heap_->gc_state() == Heap::NOT_IN_GC &&
       FLAG_incremental_marking_steps) {
-    allocated += allocated_bytes;
+    allocated_ += allocated_bytes;

-    if (allocated >= kAllocatedThreshold) {
+    if (allocated_ >= kAllocatedThreshold) {
       double start = 0;

       if (FLAG_trace_incremental_marking || FLAG_trace_gc) {
         start = OS::TimeCurrentMillis();
       }

-      intptr_t bytes_to_process = allocated * allocation_marking_factor_;
+      intptr_t bytes_to_process = allocated_ * allocation_marking_factor_;
       int count = 0;

-      Map* filler_map = Heap::one_pointer_filler_map();
+      Map* filler_map = heap_->one_pointer_filler_map();
+      IncrementalMarkingMarkingVisitor marking_visitor(heap_, this);
+      Marking* marking = heap_->marking();
       while (!marking_stack_.is_empty() && bytes_to_process > 0) {
         HeapObject* obj = marking_stack_.Pop();

         // Explicitly skip one word fillers. Incremental markbit patterns are
         // correct only for objects that occupy at least two words.
         Map* map = obj->map();
         if (map != filler_map) {
-          ASSERT(IsGrey(Marking::MarkBitFrom(obj)));
+          ASSERT(IsGrey(marking->MarkBitFrom(obj)));
           int size = obj->SizeFromMap(map);
           bytes_to_process -= size;
-          MarkBit map_mark_bit = Marking::MarkBitFromOldSpace(map);
+          MarkBit map_mark_bit = marking->MarkBitFromOldSpace(map);
           if (IsWhite(map_mark_bit)) WhiteToGreyAndPush(map, map_mark_bit);
           // TODO(gc) switch to static visitor instead of normal visitor.
           obj->IterateBody(map->instance_type(), size, &marking_visitor);
-          MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
+          MarkBit obj_mark_bit = marking->MarkBitFrom(obj);
           MarkBlack(obj_mark_bit);
         }
         count++;
       }
-      allocated = 0;
+      allocated_ = 0;
       if (marking_stack_.is_empty()) MarkingComplete();
       if (FLAG_trace_incremental_marking || FLAG_trace_gc) {
         double end = OS::TimeCurrentMillis();
         steps_took_ += (end - start);
       }

       steps_count_++;

       if ((steps_count_ % kAllocationMarkingFactorSpeedupInterval) == 0) {
         allocation_marking_factor_ += kAllocationMarkingFactorSpeedup;
       }
     }
   }
 }


 } }  // namespace v8::internal
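Note: the pacing arithmetic in Step() makes marking self-throttling: each step processes allocated_ * allocation_marking_factor_ bytes of grey objects, and every kAllocationMarkingFactorSpeedupInterval steps the factor is raised by kAllocationMarkingFactorSpeedup, so marking throughput keeps pulling ahead of allocation and the cycle terminates. A back-of-the-envelope model with assumed constants (the real values are defined in the header, not shown here):

    #include <cstdio>

    int main() {
      const long kAllocatedThreshold = 8 * 1024;  // assumed bytes between steps
      const int kSpeedupInterval = 8;             // assumed steps between bumps
      const long kSpeedup = 2;                    // assumed factor increment

      long factor = 1;
      long marked = 0;
      for (int step = 1; step <= 32; step++) {
        marked += kAllocatedThreshold * factor;   // bytes_to_process this step
        if (step % kSpeedupInterval == 0) factor += kSpeedup;
      }
      std::printf("%ld bytes of marking across 32 steps, final factor %ld\n",
                  marked, factor);
      return 0;
    }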
