OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
9 #include "src/compilation-cache.h" | 9 #include "src/compilation-cache.h" |
10 #include "src/cpu-profiler.h" | 10 #include "src/cpu-profiler.h" |
11 #include "src/deoptimizer.h" | 11 #include "src/deoptimizer.h" |
12 #include "src/execution.h" | 12 #include "src/execution.h" |
13 #include "src/gdb-jit.h" | 13 #include "src/gdb-jit.h" |
14 #include "src/global-handles.h" | 14 #include "src/global-handles.h" |
| 15 #include "src/heap/incremental-marking.h" |
| 16 #include "src/heap/mark-compact.h" |
| 17 #include "src/heap/spaces-inl.h" |
| 18 #include "src/heap/sweeper-thread.h" |
15 #include "src/heap-profiler.h" | 19 #include "src/heap-profiler.h" |
16 #include "src/ic-inl.h" | 20 #include "src/ic-inl.h" |
17 #include "src/incremental-marking.h" | |
18 #include "src/mark-compact.h" | |
19 #include "src/objects-visiting.h" | 21 #include "src/objects-visiting.h" |
20 #include "src/objects-visiting-inl.h" | 22 #include "src/objects-visiting-inl.h" |
21 #include "src/spaces-inl.h" | |
22 #include "src/stub-cache.h" | 23 #include "src/stub-cache.h" |
23 #include "src/sweeper-thread.h" | |
24 | 24 |
25 namespace v8 { | 25 namespace v8 { |
26 namespace internal { | 26 namespace internal { |
27 | 27 |
28 | 28 |
29 const char* Marking::kWhiteBitPattern = "00"; | 29 const char* Marking::kWhiteBitPattern = "00"; |
30 const char* Marking::kBlackBitPattern = "10"; | 30 const char* Marking::kBlackBitPattern = "10"; |
31 const char* Marking::kGreyBitPattern = "11"; | 31 const char* Marking::kGreyBitPattern = "11"; |
32 const char* Marking::kImpossibleBitPattern = "01"; | 32 const char* Marking::kImpossibleBitPattern = "01"; |
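These four patterns are the two mark bits of the tri-color marking abstraction: white objects are unreached, grey objects are marked but their bodies are not yet visited, black objects are marked with bodies visited, and "01" can never occur. A minimal sketch of that decoding, with hypothetical names that are not part of the V8 API:

    enum TriColor { WHITE_OBJECT, GREY_OBJECT, BLACK_OBJECT, IMPOSSIBLE_COLOR };

    // first_bit is the mark bit itself, second_bit its successor, matching the
    // mark_bit / mark_bit.Next() pairs this file clears when resetting marks.
    static TriColor ColorFromBits(bool first_bit, bool second_bit) {
      if (!first_bit) return second_bit ? IMPOSSIBLE_COLOR : WHITE_OBJECT;  // "01" / "00"
      return second_bit ? GREY_OBJECT : BLACK_OBJECT;                      // "11" / "10"
    }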
33 | 33 |
34 | 34 |
35 // ------------------------------------------------------------------------- | 35 // ------------------------------------------------------------------------- |
36 // MarkCompactCollector | 36 // MarkCompactCollector |
37 | 37 |
38 MarkCompactCollector::MarkCompactCollector(Heap* heap) : // NOLINT | 38 MarkCompactCollector::MarkCompactCollector(Heap* heap) |
| 39 : // NOLINT |
39 #ifdef DEBUG | 40 #ifdef DEBUG |
40 state_(IDLE), | 41 state_(IDLE), |
41 #endif | 42 #endif |
42 sweep_precisely_(false), | 43 sweep_precisely_(false), |
43 reduce_memory_footprint_(false), | 44 reduce_memory_footprint_(false), |
44 abort_incremental_marking_(false), | 45 abort_incremental_marking_(false), |
45 marking_parity_(ODD_MARKING_PARITY), | 46 marking_parity_(ODD_MARKING_PARITY), |
46 compacting_(false), | 47 compacting_(false), |
47 was_marked_incrementally_(false), | 48 was_marked_incrementally_(false), |
48 sweeping_in_progress_(false), | 49 sweeping_in_progress_(false), |
49 pending_sweeper_jobs_semaphore_(0), | 50 pending_sweeper_jobs_semaphore_(0), |
50 sequential_sweeping_(false), | 51 sequential_sweeping_(false), |
51 migration_slots_buffer_(NULL), | 52 migration_slots_buffer_(NULL), |
52 heap_(heap), | 53 heap_(heap), |
53 code_flusher_(NULL), | 54 code_flusher_(NULL), |
54 have_code_to_deoptimize_(false) { } | 55 have_code_to_deoptimize_(false) { |
| 56 } |
55 | 57 |
56 #ifdef VERIFY_HEAP | 58 #ifdef VERIFY_HEAP |
57 class VerifyMarkingVisitor: public ObjectVisitor { | 59 class VerifyMarkingVisitor : public ObjectVisitor { |
58 public: | 60 public: |
59 explicit VerifyMarkingVisitor(Heap* heap) : heap_(heap) {} | 61 explicit VerifyMarkingVisitor(Heap* heap) : heap_(heap) {} |
60 | 62 |
61 void VisitPointers(Object** start, Object** end) { | 63 void VisitPointers(Object** start, Object** end) { |
62 for (Object** current = start; current < end; current++) { | 64 for (Object** current = start; current < end; current++) { |
63 if ((*current)->IsHeapObject()) { | 65 if ((*current)->IsHeapObject()) { |
64 HeapObject* object = HeapObject::cast(*current); | 66 HeapObject* object = HeapObject::cast(*current); |
65 CHECK(heap_->mark_compact_collector()->IsMarked(object)); | 67 CHECK(heap_->mark_compact_collector()->IsMarked(object)); |
66 } | 68 } |
67 } | 69 } |
(...skipping 18 matching lines...) |
86 private: | 88 private: |
87 Heap* heap_; | 89 Heap* heap_; |
88 }; | 90 }; |
89 | 91 |
90 | 92 |
91 static void VerifyMarking(Heap* heap, Address bottom, Address top) { | 93 static void VerifyMarking(Heap* heap, Address bottom, Address top) { |
92 VerifyMarkingVisitor visitor(heap); | 94 VerifyMarkingVisitor visitor(heap); |
93 HeapObject* object; | 95 HeapObject* object; |
94 Address next_object_must_be_here_or_later = bottom; | 96 Address next_object_must_be_here_or_later = bottom; |
95 | 97 |
96 for (Address current = bottom; | 98 for (Address current = bottom; current < top; current += kPointerSize) { |
97 current < top; | |
98 current += kPointerSize) { | |
99 object = HeapObject::FromAddress(current); | 99 object = HeapObject::FromAddress(current); |
100 if (MarkCompactCollector::IsMarked(object)) { | 100 if (MarkCompactCollector::IsMarked(object)) { |
101 CHECK(current >= next_object_must_be_here_or_later); | 101 CHECK(current >= next_object_must_be_here_or_later); |
102 object->Iterate(&visitor); | 102 object->Iterate(&visitor); |
103 next_object_must_be_here_or_later = current + object->Size(); | 103 next_object_must_be_here_or_later = current + object->Size(); |
104 } | 104 } |
105 } | 105 } |
106 } | 106 } |
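The loop above checks that marked objects never overlap: once a marked object is seen at current, the next marked object may start no earlier than current + object->Size(). The same invariant over a flat list of (address, size) pairs, as a sketch with invented names and data:

    #include <stdint.h>

    struct MarkedObject { uintptr_t address; uintptr_t size; };

    // Mirrors next_object_must_be_here_or_later above: returns true if no
    // marked object starts inside its predecessor.
    static bool MarkedObjectsDisjoint(const MarkedObject* objects, int count) {
      uintptr_t next_allowed = 0;
      for (int i = 0; i < count; i++) {
        if (objects[i].address < next_allowed) return false;  // overlap
        next_allowed = objects[i].address + objects[i].size;
      }
      return true;
    }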
107 | 107 |
108 | 108 |
(...skipping 38 matching lines...) |
147 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 147 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
148 if (MarkCompactCollector::IsMarked(obj)) { | 148 if (MarkCompactCollector::IsMarked(obj)) { |
149 obj->Iterate(&visitor); | 149 obj->Iterate(&visitor); |
150 } | 150 } |
151 } | 151 } |
152 | 152 |
153 heap->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG); | 153 heap->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG); |
154 } | 154 } |
155 | 155 |
156 | 156 |
157 class VerifyEvacuationVisitor: public ObjectVisitor { | 157 class VerifyEvacuationVisitor : public ObjectVisitor { |
158 public: | 158 public: |
159 void VisitPointers(Object** start, Object** end) { | 159 void VisitPointers(Object** start, Object** end) { |
160 for (Object** current = start; current < end; current++) { | 160 for (Object** current = start; current < end; current++) { |
161 if ((*current)->IsHeapObject()) { | 161 if ((*current)->IsHeapObject()) { |
162 HeapObject* object = HeapObject::cast(*current); | 162 HeapObject* object = HeapObject::cast(*current); |
163 CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(object)); | 163 CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(object)); |
164 } | 164 } |
165 } | 165 } |
166 } | 166 } |
167 }; | 167 }; |
168 | 168 |
169 | 169 |
170 static void VerifyEvacuation(Page* page) { | 170 static void VerifyEvacuation(Page* page) { |
171 VerifyEvacuationVisitor visitor; | 171 VerifyEvacuationVisitor visitor; |
172 HeapObjectIterator iterator(page, NULL); | 172 HeapObjectIterator iterator(page, NULL); |
173 for (HeapObject* heap_object = iterator.Next(); heap_object != NULL; | 173 for (HeapObject* heap_object = iterator.Next(); heap_object != NULL; |
174 heap_object = iterator.Next()) { | 174 heap_object = iterator.Next()) { |
175 // We skip free space objects. | 175 // We skip free space objects. |
176 if (!heap_object->IsFiller()) { | 176 if (!heap_object->IsFiller()) { |
177 heap_object->Iterate(&visitor); | 177 heap_object->Iterate(&visitor); |
178 } | 178 } |
179 } | 179 } |
180 } | 180 } |
181 | 181 |
182 | 182 |
183 static void VerifyEvacuation(NewSpace* space) { | 183 static void VerifyEvacuation(NewSpace* space) { |
184 NewSpacePageIterator it(space->bottom(), space->top()); | 184 NewSpacePageIterator it(space->bottom(), space->top()); |
(...skipping 38 matching lines...) |
223 VerifyEvacuation(heap, heap->map_space()); | 223 VerifyEvacuation(heap, heap->map_space()); |
224 VerifyEvacuation(heap->new_space()); | 224 VerifyEvacuation(heap->new_space()); |
225 | 225 |
226 VerifyEvacuationVisitor visitor; | 226 VerifyEvacuationVisitor visitor; |
227 heap->IterateStrongRoots(&visitor, VISIT_ALL); | 227 heap->IterateStrongRoots(&visitor, VISIT_ALL); |
228 } | 228 } |
229 #endif // VERIFY_HEAP | 229 #endif // VERIFY_HEAP |
230 | 230 |
231 | 231 |
232 #ifdef DEBUG | 232 #ifdef DEBUG |
233 class VerifyNativeContextSeparationVisitor: public ObjectVisitor { | 233 class VerifyNativeContextSeparationVisitor : public ObjectVisitor { |
234 public: | 234 public: |
235 VerifyNativeContextSeparationVisitor() : current_native_context_(NULL) {} | 235 VerifyNativeContextSeparationVisitor() : current_native_context_(NULL) {} |
236 | 236 |
237 void VisitPointers(Object** start, Object** end) { | 237 void VisitPointers(Object** start, Object** end) { |
238 for (Object** current = start; current < end; current++) { | 238 for (Object** current = start; current < end; current++) { |
239 if ((*current)->IsHeapObject()) { | 239 if ((*current)->IsHeapObject()) { |
240 HeapObject* object = HeapObject::cast(*current); | 240 HeapObject* object = HeapObject::cast(*current); |
241 if (object->IsString()) continue; | 241 if (object->IsString()) continue; |
242 switch (object->map()->instance_type()) { | 242 switch (object->map()->instance_type()) { |
243 case JS_FUNCTION_TYPE: | 243 case JS_FUNCTION_TYPE: |
(...skipping 18 matching lines...) |
262 break; | 262 break; |
263 case FIXED_ARRAY_TYPE: | 263 case FIXED_ARRAY_TYPE: |
264 if (object->IsContext()) { | 264 if (object->IsContext()) { |
265 CheckContext(object); | 265 CheckContext(object); |
266 } else { | 266 } else { |
267 FixedArray* array = FixedArray::cast(object); | 267 FixedArray* array = FixedArray::cast(object); |
268 int length = array->length(); | 268 int length = array->length(); |
269 // Set array length to zero to prevent cycles while iterating | 269 // Set array length to zero to prevent cycles while iterating |
270 // over array bodies; this is easier than intrusive marking. | 270 // over array bodies; this is easier than intrusive marking. |
271 array->set_length(0); | 271 array->set_length(0); |
272 array->IterateBody( | 272 array->IterateBody(FIXED_ARRAY_TYPE, FixedArray::SizeFor(length), |
273 FIXED_ARRAY_TYPE, FixedArray::SizeFor(length), this); | 273 this); |
274 array->set_length(length); | 274 array->set_length(length); |
275 } | 275 } |
276 break; | 276 break; |
277 case CELL_TYPE: | 277 case CELL_TYPE: |
278 case JS_PROXY_TYPE: | 278 case JS_PROXY_TYPE: |
279 case JS_VALUE_TYPE: | 279 case JS_VALUE_TYPE: |
280 case TYPE_FEEDBACK_INFO_TYPE: | 280 case TYPE_FEEDBACK_INFO_TYPE: |
281 object->Iterate(this); | 281 object->Iterate(this); |
282 break; | 282 break; |
283 case DECLARED_ACCESSOR_INFO_TYPE: | 283 case DECLARED_ACCESSOR_INFO_TYPE: |
(...skipping 41 matching lines...) |
325 } | 325 } |
326 #endif | 326 #endif |
327 | 327 |
328 | 328 |
329 void MarkCompactCollector::SetUp() { | 329 void MarkCompactCollector::SetUp() { |
330 free_list_old_data_space_.Reset(new FreeList(heap_->old_data_space())); | 330 free_list_old_data_space_.Reset(new FreeList(heap_->old_data_space())); |
331 free_list_old_pointer_space_.Reset(new FreeList(heap_->old_pointer_space())); | 331 free_list_old_pointer_space_.Reset(new FreeList(heap_->old_pointer_space())); |
332 } | 332 } |
333 | 333 |
334 | 334 |
335 void MarkCompactCollector::TearDown() { | 335 void MarkCompactCollector::TearDown() { AbortCompaction(); } |
336 AbortCompaction(); | |
337 } | |
338 | 336 |
339 | 337 |
340 void MarkCompactCollector::AddEvacuationCandidate(Page* p) { | 338 void MarkCompactCollector::AddEvacuationCandidate(Page* p) { |
341 p->MarkEvacuationCandidate(); | 339 p->MarkEvacuationCandidate(); |
342 evacuation_candidates_.Add(p); | 340 evacuation_candidates_.Add(p); |
343 } | 341 } |
344 | 342 |
345 | 343 |
346 static void TraceFragmentation(PagedSpace* space) { | 344 static void TraceFragmentation(PagedSpace* space) { |
347 int number_of_pages = space->CountTotalPages(); | 345 int number_of_pages = space->CountTotalPages(); |
348 intptr_t reserved = (number_of_pages * space->AreaSize()); | 346 intptr_t reserved = (number_of_pages * space->AreaSize()); |
349 intptr_t free = reserved - space->SizeOfObjects(); | 347 intptr_t free = reserved - space->SizeOfObjects(); |
350 PrintF("[%s]: %d pages, %d (%.1f%%) free\n", | 348 PrintF("[%s]: %d pages, %d (%.1f%%) free\n", |
351 AllocationSpaceName(space->identity()), | 349 AllocationSpaceName(space->identity()), number_of_pages, |
352 number_of_pages, | 350 static_cast<int>(free), static_cast<double>(free) * 100 / reserved); |
353 static_cast<int>(free), | |
354 static_cast<double>(free) * 100 / reserved); | |
355 } | 351 } |
356 | 352 |
357 | 353 |
358 bool MarkCompactCollector::StartCompaction(CompactionMode mode) { | 354 bool MarkCompactCollector::StartCompaction(CompactionMode mode) { |
359 if (!compacting_) { | 355 if (!compacting_) { |
360 DCHECK(evacuation_candidates_.length() == 0); | 356 DCHECK(evacuation_candidates_.length() == 0); |
361 | 357 |
362 #ifdef ENABLE_GDB_JIT_INTERFACE | 358 #ifdef ENABLE_GDB_JIT_INTERFACE |
363 // If the GDBJIT interface is active, disable compaction. | 359 // If the GDBJIT interface is active, disable compaction. |
364 if (FLAG_gdbjit) return false; | 360 if (FLAG_gdbjit) return false; |
365 #endif | 361 #endif |
366 | 362 |
367 CollectEvacuationCandidates(heap()->old_pointer_space()); | 363 CollectEvacuationCandidates(heap()->old_pointer_space()); |
368 CollectEvacuationCandidates(heap()->old_data_space()); | 364 CollectEvacuationCandidates(heap()->old_data_space()); |
369 | 365 |
370 if (FLAG_compact_code_space && | 366 if (FLAG_compact_code_space && (mode == NON_INCREMENTAL_COMPACTION || |
371 (mode == NON_INCREMENTAL_COMPACTION || | 367 FLAG_incremental_code_compaction)) { |
372 FLAG_incremental_code_compaction)) { | |
373 CollectEvacuationCandidates(heap()->code_space()); | 368 CollectEvacuationCandidates(heap()->code_space()); |
374 } else if (FLAG_trace_fragmentation) { | 369 } else if (FLAG_trace_fragmentation) { |
375 TraceFragmentation(heap()->code_space()); | 370 TraceFragmentation(heap()->code_space()); |
376 } | 371 } |
377 | 372 |
378 if (FLAG_trace_fragmentation) { | 373 if (FLAG_trace_fragmentation) { |
379 TraceFragmentation(heap()->map_space()); | 374 TraceFragmentation(heap()->map_space()); |
380 TraceFragmentation(heap()->cell_space()); | 375 TraceFragmentation(heap()->cell_space()); |
381 TraceFragmentation(heap()->property_cell_space()); | 376 TraceFragmentation(heap()->property_cell_space()); |
382 } | 377 } |
(...skipping 91 matching lines...) |
474 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 469 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
475 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 470 MarkBit mark_bit = Marking::MarkBitFrom(obj); |
476 CHECK(Marking::IsWhite(mark_bit)); | 471 CHECK(Marking::IsWhite(mark_bit)); |
477 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); | 472 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); |
478 } | 473 } |
479 } | 474 } |
480 | 475 |
481 | 476 |
482 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { | 477 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { |
483 HeapObjectIterator code_iterator(heap()->code_space()); | 478 HeapObjectIterator code_iterator(heap()->code_space()); |
484 for (HeapObject* obj = code_iterator.Next(); | 479 for (HeapObject* obj = code_iterator.Next(); obj != NULL; |
485 obj != NULL; | |
486 obj = code_iterator.Next()) { | 480 obj = code_iterator.Next()) { |
487 Code* code = Code::cast(obj); | 481 Code* code = Code::cast(obj); |
488 if (!code->is_optimized_code() && !code->is_weak_stub()) continue; | 482 if (!code->is_optimized_code() && !code->is_weak_stub()) continue; |
489 if (WillBeDeoptimized(code)) continue; | 483 if (WillBeDeoptimized(code)) continue; |
490 code->VerifyEmbeddedObjectsDependency(); | 484 code->VerifyEmbeddedObjectsDependency(); |
491 } | 485 } |
492 } | 486 } |
493 | 487 |
494 | 488 |
495 void MarkCompactCollector::VerifyOmittedMapChecks() { | 489 void MarkCompactCollector::VerifyOmittedMapChecks() { |
496 HeapObjectIterator iterator(heap()->map_space()); | 490 HeapObjectIterator iterator(heap()->map_space()); |
497 for (HeapObject* obj = iterator.Next(); | 491 for (HeapObject* obj = iterator.Next(); obj != NULL; obj = iterator.Next()) { |
498 obj != NULL; | |
499 obj = iterator.Next()) { | |
500 Map* map = Map::cast(obj); | 492 Map* map = Map::cast(obj); |
501 map->VerifyOmittedMapChecks(); | 493 map->VerifyOmittedMapChecks(); |
502 } | 494 } |
503 } | 495 } |
504 #endif // VERIFY_HEAP | 496 #endif // VERIFY_HEAP |
505 | 497 |
506 | 498 |
507 static void ClearMarkbitsInPagedSpace(PagedSpace* space) { | 499 static void ClearMarkbitsInPagedSpace(PagedSpace* space) { |
508 PageIterator it(space); | 500 PageIterator it(space); |
509 | 501 |
(...skipping 27 matching lines...) |
537 mark_bit.Clear(); | 529 mark_bit.Clear(); |
538 mark_bit.Next().Clear(); | 530 mark_bit.Next().Clear(); |
539 Page::FromAddress(obj->address())->ResetProgressBar(); | 531 Page::FromAddress(obj->address())->ResetProgressBar(); |
540 Page::FromAddress(obj->address())->ResetLiveBytes(); | 532 Page::FromAddress(obj->address())->ResetLiveBytes(); |
541 } | 533 } |
542 } | 534 } |
543 | 535 |
544 | 536 |
545 class MarkCompactCollector::SweeperTask : public v8::Task { | 537 class MarkCompactCollector::SweeperTask : public v8::Task { |
546 public: | 538 public: |
547 SweeperTask(Heap* heap, PagedSpace* space) | 539 SweeperTask(Heap* heap, PagedSpace* space) : heap_(heap), space_(space) {} |
548 : heap_(heap), space_(space) {} | |
549 | 540 |
550 virtual ~SweeperTask() {} | 541 virtual ~SweeperTask() {} |
551 | 542 |
552 private: | 543 private: |
553 // v8::Task overrides. | 544 // v8::Task overrides. |
554 virtual void Run() V8_OVERRIDE { | 545 virtual void Run() V8_OVERRIDE { |
555 heap_->mark_compact_collector()->SweepInParallel(space_, 0); | 546 heap_->mark_compact_collector()->SweepInParallel(space_, 0); |
556 heap_->mark_compact_collector()->pending_sweeper_jobs_semaphore_.Signal(); | 547 heap_->mark_compact_collector()->pending_sweeper_jobs_semaphore_.Signal(); |
557 } | 548 } |
558 | 549 |
(...skipping 134 matching lines...) |
693 | 684 |
694 #ifdef DEBUG | 685 #ifdef DEBUG |
695 ObjectColor new_color = Color(new_mark_bit); | 686 ObjectColor new_color = Color(new_mark_bit); |
696 DCHECK(new_color == old_color); | 687 DCHECK(new_color == old_color); |
697 #endif | 688 #endif |
698 } | 689 } |
699 | 690 |
700 | 691 |
701 const char* AllocationSpaceName(AllocationSpace space) { | 692 const char* AllocationSpaceName(AllocationSpace space) { |
702 switch (space) { | 693 switch (space) { |
703 case NEW_SPACE: return "NEW_SPACE"; | 694 case NEW_SPACE: |
704 case OLD_POINTER_SPACE: return "OLD_POINTER_SPACE"; | 695 return "NEW_SPACE"; |
705 case OLD_DATA_SPACE: return "OLD_DATA_SPACE"; | 696 case OLD_POINTER_SPACE: |
706 case CODE_SPACE: return "CODE_SPACE"; | 697 return "OLD_POINTER_SPACE"; |
707 case MAP_SPACE: return "MAP_SPACE"; | 698 case OLD_DATA_SPACE: |
708 case CELL_SPACE: return "CELL_SPACE"; | 699 return "OLD_DATA_SPACE"; |
| 700 case CODE_SPACE: |
| 701 return "CODE_SPACE"; |
| 702 case MAP_SPACE: |
| 703 return "MAP_SPACE"; |
| 704 case CELL_SPACE: |
| 705 return "CELL_SPACE"; |
709 case PROPERTY_CELL_SPACE: | 706 case PROPERTY_CELL_SPACE: |
710 return "PROPERTY_CELL_SPACE"; | 707 return "PROPERTY_CELL_SPACE"; |
711 case LO_SPACE: return "LO_SPACE"; | 708 case LO_SPACE: |
| 709 return "LO_SPACE"; |
712 default: | 710 default: |
713 UNREACHABLE(); | 711 UNREACHABLE(); |
714 } | 712 } |
715 | 713 |
716 return NULL; | 714 return NULL; |
717 } | 715 } |
718 | 716 |
719 | 717 |
720 // Returns zero for pages that have so little fragmentation that it is not | 718 // Returns zero for pages that have so little fragmentation that it is not |
721 // worth defragmenting them. Otherwise returns a positive integer that gives an | 719 // worth defragmenting them. Otherwise returns a positive integer that gives an |
722 // estimate of fragmentation on an arbitrary scale. | 720 // estimate of fragmentation on an arbitrary scale. |
723 static int FreeListFragmentation(PagedSpace* space, Page* p) { | 721 static int FreeListFragmentation(PagedSpace* space, Page* p) { |
724 // If the page was not swept then there are no free list items on it. | 722 // If the page was not swept then there are no free list items on it. |
725 if (!p->WasSwept()) { | 723 if (!p->WasSwept()) { |
726 if (FLAG_trace_fragmentation) { | 724 if (FLAG_trace_fragmentation) { |
727 PrintF("%p [%s]: %d bytes live (unswept)\n", | 725 PrintF("%p [%s]: %d bytes live (unswept)\n", reinterpret_cast<void*>(p), |
728 reinterpret_cast<void*>(p), | 726 AllocationSpaceName(space->identity()), p->LiveBytes()); |
729 AllocationSpaceName(space->identity()), | |
730 p->LiveBytes()); | |
731 } | 727 } |
732 return 0; | 728 return 0; |
733 } | 729 } |
734 | 730 |
735 PagedSpace::SizeStats sizes; | 731 PagedSpace::SizeStats sizes; |
736 space->ObtainFreeListStatistics(p, &sizes); | 732 space->ObtainFreeListStatistics(p, &sizes); |
737 | 733 |
738 intptr_t ratio; | 734 intptr_t ratio; |
739 intptr_t ratio_threshold; | 735 intptr_t ratio_threshold; |
740 intptr_t area_size = space->AreaSize(); | 736 intptr_t area_size = space->AreaSize(); |
741 if (space->identity() == CODE_SPACE) { | 737 if (space->identity() == CODE_SPACE) { |
742 ratio = (sizes.medium_size_ * 10 + sizes.large_size_ * 2) * 100 / | 738 ratio = (sizes.medium_size_ * 10 + sizes.large_size_ * 2) * 100 / area_size; |
743 area_size; | |
744 ratio_threshold = 10; | 739 ratio_threshold = 10; |
745 } else { | 740 } else { |
746 ratio = (sizes.small_size_ * 5 + sizes.medium_size_) * 100 / | 741 ratio = (sizes.small_size_ * 5 + sizes.medium_size_) * 100 / area_size; |
747 area_size; | |
748 ratio_threshold = 15; | 742 ratio_threshold = 15; |
749 } | 743 } |
750 | 744 |
751 if (FLAG_trace_fragmentation) { | 745 if (FLAG_trace_fragmentation) { |
752 PrintF("%p [%s]: %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %s\n", | 746 PrintF("%p [%s]: %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %s\n", |
753 reinterpret_cast<void*>(p), | 747 reinterpret_cast<void*>(p), AllocationSpaceName(space->identity()), |
754 AllocationSpaceName(space->identity()), | |
755 static_cast<int>(sizes.small_size_), | 748 static_cast<int>(sizes.small_size_), |
756 static_cast<double>(sizes.small_size_ * 100) / | 749 static_cast<double>(sizes.small_size_ * 100) / area_size, |
757 area_size, | |
758 static_cast<int>(sizes.medium_size_), | 750 static_cast<int>(sizes.medium_size_), |
759 static_cast<double>(sizes.medium_size_ * 100) / | 751 static_cast<double>(sizes.medium_size_ * 100) / area_size, |
760 area_size, | |
761 static_cast<int>(sizes.large_size_), | 752 static_cast<int>(sizes.large_size_), |
762 static_cast<double>(sizes.large_size_ * 100) / | 753 static_cast<double>(sizes.large_size_ * 100) / area_size, |
763 area_size, | |
764 static_cast<int>(sizes.huge_size_), | 754 static_cast<int>(sizes.huge_size_), |
765 static_cast<double>(sizes.huge_size_ * 100) / | 755 static_cast<double>(sizes.huge_size_ * 100) / area_size, |
766 area_size, | |
767 (ratio > ratio_threshold) ? "[fragmented]" : ""); | 756 (ratio > ratio_threshold) ? "[fragmented]" : ""); |
768 } | 757 } |
769 | 758 |
770 if (FLAG_always_compact && sizes.Total() != area_size) { | 759 if (FLAG_always_compact && sizes.Total() != area_size) { |
771 return 1; | 760 return 1; |
772 } | 761 } |
773 | 762 |
774 if (ratio <= ratio_threshold) return 0; // Not fragmented. | 763 if (ratio <= ratio_threshold) return 0; // Not fragmented. |
775 | 764 |
776 return static_cast<int>(ratio - ratio_threshold); | 765 return static_cast<int>(ratio - ratio_threshold); |
777 } | 766 } |
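A worked example of the ratio above, using invented free-list statistics rather than real heap data:

    #include <stdint.h>

    // Non-code page with a 1 MB area, 100 KB of small free-list items and
    // 50 KB of medium items (invented numbers).
    static int ExampleFragmentation() {
      intptr_t area_size = 1024 * 1024;
      intptr_t small_size = 100 * 1024;
      intptr_t medium_size = 50 * 1024;
      intptr_t ratio = (small_size * 5 + medium_size) * 100 / area_size;  // == 53
      intptr_t ratio_threshold = 15;  // the non-code threshold used above
      return static_cast<int>(ratio - ratio_threshold);  // == 38, "[fragmented]"
    }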
778 | 767 |
779 | 768 |
780 void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) { | 769 void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) { |
781 DCHECK(space->identity() == OLD_POINTER_SPACE || | 770 DCHECK(space->identity() == OLD_POINTER_SPACE || |
782 space->identity() == OLD_DATA_SPACE || | 771 space->identity() == OLD_DATA_SPACE || |
783 space->identity() == CODE_SPACE); | 772 space->identity() == CODE_SPACE); |
784 | 773 |
785 static const int kMaxMaxEvacuationCandidates = 1000; | 774 static const int kMaxMaxEvacuationCandidates = 1000; |
786 int number_of_pages = space->CountTotalPages(); | 775 int number_of_pages = space->CountTotalPages(); |
787 int max_evacuation_candidates = | 776 int max_evacuation_candidates = |
788 static_cast<int>(std::sqrt(number_of_pages / 2.0) + 1); | 777 static_cast<int>(std::sqrt(number_of_pages / 2.0) + 1); |
789 | 778 |
790 if (FLAG_stress_compaction || FLAG_always_compact) { | 779 if (FLAG_stress_compaction || FLAG_always_compact) { |
791 max_evacuation_candidates = kMaxMaxEvacuationCandidates; | 780 max_evacuation_candidates = kMaxMaxEvacuationCandidates; |
792 } | 781 } |
793 | 782 |
794 class Candidate { | 783 class Candidate { |
795 public: | 784 public: |
796 Candidate() : fragmentation_(0), page_(NULL) { } | 785 Candidate() : fragmentation_(0), page_(NULL) {} |
797 Candidate(int f, Page* p) : fragmentation_(f), page_(p) { } | 786 Candidate(int f, Page* p) : fragmentation_(f), page_(p) {} |
798 | 787 |
799 int fragmentation() { return fragmentation_; } | 788 int fragmentation() { return fragmentation_; } |
800 Page* page() { return page_; } | 789 Page* page() { return page_; } |
801 | 790 |
802 private: | 791 private: |
803 int fragmentation_; | 792 int fragmentation_; |
804 Page* page_; | 793 Page* page_; |
805 }; | 794 }; |
806 | 795 |
807 enum CompactionMode { | 796 enum CompactionMode { COMPACT_FREE_LISTS, REDUCE_MEMORY_FOOTPRINT }; |
808 COMPACT_FREE_LISTS, | |
809 REDUCE_MEMORY_FOOTPRINT | |
810 }; | |
811 | 797 |
812 CompactionMode mode = COMPACT_FREE_LISTS; | 798 CompactionMode mode = COMPACT_FREE_LISTS; |
813 | 799 |
814 intptr_t reserved = number_of_pages * space->AreaSize(); | 800 intptr_t reserved = number_of_pages * space->AreaSize(); |
815 intptr_t over_reserved = reserved - space->SizeOfObjects(); | 801 intptr_t over_reserved = reserved - space->SizeOfObjects(); |
816 static const intptr_t kFreenessThreshold = 50; | 802 static const intptr_t kFreenessThreshold = 50; |
817 | 803 |
818 if (reduce_memory_footprint_ && over_reserved >= space->AreaSize()) { | 804 if (reduce_memory_footprint_ && over_reserved >= space->AreaSize()) { |
819 // If reduction of memory footprint was requested, we are aggressive | 805 // If reduction of memory footprint was requested, we are aggressive |
820 // about choosing pages to free. We expect that half-empty pages | 806 // about choosing pages to free. We expect that half-empty pages |
821 // are easier to compact, so we slightly bump the limit. | 807 // are easier to compact, so we slightly bump the limit. |
822 mode = REDUCE_MEMORY_FOOTPRINT; | 808 mode = REDUCE_MEMORY_FOOTPRINT; |
823 max_evacuation_candidates += 2; | 809 max_evacuation_candidates += 2; |
824 } | 810 } |
825 | 811 |
826 | 812 |
827 if (over_reserved > reserved / 3 && over_reserved >= 2 * space->AreaSize()) { | 813 if (over_reserved > reserved / 3 && over_reserved >= 2 * space->AreaSize()) { |
828 // If over-usage is very high (more than a third of the space), we | 814 // If over-usage is very high (more than a third of the space), we |
829 // try to free all mostly empty pages. We expect that almost empty | 815 // try to free all mostly empty pages. We expect that almost empty |
830 // pages are even easier to compact so bump the limit even more. | 816 // pages are even easier to compact so bump the limit even more. |
831 mode = REDUCE_MEMORY_FOOTPRINT; | 817 mode = REDUCE_MEMORY_FOOTPRINT; |
832 max_evacuation_candidates *= 2; | 818 max_evacuation_candidates *= 2; |
833 } | 819 } |
834 | 820 |
835 if (FLAG_trace_fragmentation && mode == REDUCE_MEMORY_FOOTPRINT) { | 821 if (FLAG_trace_fragmentation && mode == REDUCE_MEMORY_FOOTPRINT) { |
836 PrintF("Estimated over reserved memory: %.1f / %.1f MB (threshold %d), " | 822 PrintF( |
837 "evacuation candidate limit: %d\n", | 823 "Estimated over reserved memory: %.1f / %.1f MB (threshold %d), " |
838 static_cast<double>(over_reserved) / MB, | 824 "evacuation candidate limit: %d\n", |
839 static_cast<double>(reserved) / MB, | 825 static_cast<double>(over_reserved) / MB, |
840 static_cast<int>(kFreenessThreshold), | 826 static_cast<double>(reserved) / MB, |
841 max_evacuation_candidates); | 827 static_cast<int>(kFreenessThreshold), max_evacuation_candidates); |
842 } | 828 } |
843 | 829 |
844 intptr_t estimated_release = 0; | 830 intptr_t estimated_release = 0; |
845 | 831 |
846 Candidate candidates[kMaxMaxEvacuationCandidates]; | 832 Candidate candidates[kMaxMaxEvacuationCandidates]; |
847 | 833 |
848 max_evacuation_candidates = | 834 max_evacuation_candidates = |
849 Min(kMaxMaxEvacuationCandidates, max_evacuation_candidates); | 835 Min(kMaxMaxEvacuationCandidates, max_evacuation_candidates); |
850 | 836 |
851 int count = 0; | 837 int count = 0; |
(...skipping 30 matching lines...) |
882 int free_pct = static_cast<int>(free_bytes * 100) / p->area_size(); | 868 int free_pct = static_cast<int>(free_bytes * 100) / p->area_size(); |
883 | 869 |
884 if (free_pct >= kFreenessThreshold) { | 870 if (free_pct >= kFreenessThreshold) { |
885 estimated_release += free_bytes; | 871 estimated_release += free_bytes; |
886 fragmentation = free_pct; | 872 fragmentation = free_pct; |
887 } else { | 873 } else { |
888 fragmentation = 0; | 874 fragmentation = 0; |
889 } | 875 } |
890 | 876 |
891 if (FLAG_trace_fragmentation) { | 877 if (FLAG_trace_fragmentation) { |
892 PrintF("%p [%s]: %d (%.2f%%) free %s\n", | 878 PrintF("%p [%s]: %d (%.2f%%) free %s\n", reinterpret_cast<void*>(p), |
893 reinterpret_cast<void*>(p), | |
894 AllocationSpaceName(space->identity()), | 879 AllocationSpaceName(space->identity()), |
895 static_cast<int>(free_bytes), | 880 static_cast<int>(free_bytes), |
896 static_cast<double>(free_bytes * 100) / p->area_size(), | 881 static_cast<double>(free_bytes * 100) / p->area_size(), |
897 (fragmentation > 0) ? "[fragmented]" : ""); | 882 (fragmentation > 0) ? "[fragmented]" : ""); |
898 } | 883 } |
899 } else { | 884 } else { |
900 fragmentation = FreeListFragmentation(space, p); | 885 fragmentation = FreeListFragmentation(space, p); |
901 } | 886 } |
902 | 887 |
903 if (fragmentation != 0) { | 888 if (fragmentation != 0) { |
(...skipping 14 matching lines...) |
918 } | 903 } |
919 } | 904 } |
920 } | 905 } |
921 } | 906 } |
922 | 907 |
923 for (int i = 0; i < count; i++) { | 908 for (int i = 0; i < count; i++) { |
924 AddEvacuationCandidate(candidates[i].page()); | 909 AddEvacuationCandidate(candidates[i].page()); |
925 } | 910 } |
926 | 911 |
927 if (count > 0 && FLAG_trace_fragmentation) { | 912 if (count > 0 && FLAG_trace_fragmentation) { |
928 PrintF("Collected %d evacuation candidates for space %s\n", | 913 PrintF("Collected %d evacuation candidates for space %s\n", count, |
929 count, | |
930 AllocationSpaceName(space->identity())); | 914 AllocationSpaceName(space->identity())); |
931 } | 915 } |
932 } | 916 } |
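To make the candidate-limit heuristics above concrete, a sketch with invented numbers, assuming reduce_memory_footprint_ was not requested:

    #include <cmath>
    #include <stdint.h>

    // 128 pages with 1 MB areas and 80 MB of live objects (invented numbers).
    static int ExampleCandidateLimit() {
      int number_of_pages = 128;
      intptr_t area_size = 1024 * 1024;
      int max_candidates =
          static_cast<int>(std::sqrt(number_of_pages / 2.0) + 1);  // sqrt(64) + 1 == 9
      intptr_t reserved = number_of_pages * area_size;             // 128 MB
      intptr_t over_reserved = reserved - 80 * area_size;          // 48 MB
      if (over_reserved > reserved / 3 && over_reserved >= 2 * area_size) {
        max_candidates *= 2;  // REDUCE_MEMORY_FOOTPRINT path: limit becomes 18
      }
      return max_candidates;
    }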
933 | 917 |
934 | 918 |
935 void MarkCompactCollector::AbortCompaction() { | 919 void MarkCompactCollector::AbortCompaction() { |
936 if (compacting_) { | 920 if (compacting_) { |
937 int npages = evacuation_candidates_.length(); | 921 int npages = evacuation_candidates_.length(); |
938 for (int i = 0; i < npages; i++) { | 922 for (int i = 0; i < npages; i++) { |
939 Page* p = evacuation_candidates_[i]; | 923 Page* p = evacuation_candidates_[i]; |
(...skipping 32 matching lines...) |
972 was_marked_incrementally_ = false; | 956 was_marked_incrementally_ = false; |
973 } | 957 } |
974 | 958 |
975 // Don't start compaction if we are in the middle of an incremental | 959 // Don't start compaction if we are in the middle of an incremental |
976 // marking cycle. We did not collect any slots. | 960 // marking cycle. We did not collect any slots. |
977 if (!FLAG_never_compact && !was_marked_incrementally_) { | 961 if (!FLAG_never_compact && !was_marked_incrementally_) { |
978 StartCompaction(NON_INCREMENTAL_COMPACTION); | 962 StartCompaction(NON_INCREMENTAL_COMPACTION); |
979 } | 963 } |
980 | 964 |
981 PagedSpaces spaces(heap()); | 965 PagedSpaces spaces(heap()); |
982 for (PagedSpace* space = spaces.next(); | 966 for (PagedSpace* space = spaces.next(); space != NULL; |
983 space != NULL; | |
984 space = spaces.next()) { | 967 space = spaces.next()) { |
985 space->PrepareForMarkCompact(); | 968 space->PrepareForMarkCompact(); |
986 } | 969 } |
987 | 970 |
988 #ifdef VERIFY_HEAP | 971 #ifdef VERIFY_HEAP |
989 if (!was_marked_incrementally_ && FLAG_verify_heap) { | 972 if (!was_marked_incrementally_ && FLAG_verify_heap) { |
990 VerifyMarkbitsAreClean(); | 973 VerifyMarkbitsAreClean(); |
991 } | 974 } |
992 #endif | 975 #endif |
993 } | 976 } |
(...skipping 70 matching lines...) |
1064 shared->set_code(lazy_compile); | 1047 shared->set_code(lazy_compile); |
1065 candidate->set_code(lazy_compile); | 1048 candidate->set_code(lazy_compile); |
1066 } else { | 1049 } else { |
1067 candidate->set_code(code); | 1050 candidate->set_code(code); |
1068 } | 1051 } |
1069 | 1052 |
1070 // We are in the middle of a GC cycle, so the write barrier in the code | 1053 // We are in the middle of a GC cycle, so the write barrier in the code |
1071 // setter did not record the slot update and we have to do that manually. | 1054 // setter did not record the slot update and we have to do that manually. |
1072 Address slot = candidate->address() + JSFunction::kCodeEntryOffset; | 1055 Address slot = candidate->address() + JSFunction::kCodeEntryOffset; |
1073 Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot)); | 1056 Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot)); |
1074 isolate_->heap()->mark_compact_collector()-> | 1057 isolate_->heap()->mark_compact_collector()->RecordCodeEntrySlot(slot, |
1075 RecordCodeEntrySlot(slot, target); | 1058 target); |
1076 | 1059 |
1077 Object** shared_code_slot = | 1060 Object** shared_code_slot = |
1078 HeapObject::RawField(shared, SharedFunctionInfo::kCodeOffset); | 1061 HeapObject::RawField(shared, SharedFunctionInfo::kCodeOffset); |
1079 isolate_->heap()->mark_compact_collector()-> | 1062 isolate_->heap()->mark_compact_collector()->RecordSlot( |
1080 RecordSlot(shared_code_slot, shared_code_slot, *shared_code_slot); | 1063 shared_code_slot, shared_code_slot, *shared_code_slot); |
1081 | 1064 |
1082 candidate = next_candidate; | 1065 candidate = next_candidate; |
1083 } | 1066 } |
1084 | 1067 |
1085 jsfunction_candidates_head_ = NULL; | 1068 jsfunction_candidates_head_ = NULL; |
1086 } | 1069 } |
1087 | 1070 |
1088 | 1071 |
1089 void CodeFlusher::ProcessSharedFunctionInfoCandidates() { | 1072 void CodeFlusher::ProcessSharedFunctionInfoCandidates() { |
1090 Code* lazy_compile = | 1073 Code* lazy_compile = |
(...skipping 11 matching lines...) |
1102 if (FLAG_trace_code_flushing && candidate->is_compiled()) { | 1085 if (FLAG_trace_code_flushing && candidate->is_compiled()) { |
1103 PrintF("[code-flushing clears: "); | 1086 PrintF("[code-flushing clears: "); |
1104 candidate->ShortPrint(); | 1087 candidate->ShortPrint(); |
1105 PrintF(" - age: %d]\n", code->GetAge()); | 1088 PrintF(" - age: %d]\n", code->GetAge()); |
1106 } | 1089 } |
1107 candidate->set_code(lazy_compile); | 1090 candidate->set_code(lazy_compile); |
1108 } | 1091 } |
1109 | 1092 |
1110 Object** code_slot = | 1093 Object** code_slot = |
1111 HeapObject::RawField(candidate, SharedFunctionInfo::kCodeOffset); | 1094 HeapObject::RawField(candidate, SharedFunctionInfo::kCodeOffset); |
1112 isolate_->heap()->mark_compact_collector()-> | 1095 isolate_->heap()->mark_compact_collector()->RecordSlot(code_slot, code_slot, |
1113 RecordSlot(code_slot, code_slot, *code_slot); | 1096 *code_slot); |
1114 | 1097 |
1115 candidate = next_candidate; | 1098 candidate = next_candidate; |
1116 } | 1099 } |
1117 | 1100 |
1118 shared_function_info_candidates_head_ = NULL; | 1101 shared_function_info_candidates_head_ = NULL; |
1119 } | 1102 } |
1120 | 1103 |
1121 | 1104 |
1122 void CodeFlusher::ProcessOptimizedCodeMaps() { | 1105 void CodeFlusher::ProcessOptimizedCodeMaps() { |
1123 STATIC_ASSERT(SharedFunctionInfo::kEntryLength == 4); | 1106 STATIC_ASSERT(SharedFunctionInfo::kEntryLength == 4); |
1124 | 1107 |
1125 SharedFunctionInfo* holder = optimized_code_map_holder_head_; | 1108 SharedFunctionInfo* holder = optimized_code_map_holder_head_; |
1126 SharedFunctionInfo* next_holder; | 1109 SharedFunctionInfo* next_holder; |
1127 | 1110 |
1128 while (holder != NULL) { | 1111 while (holder != NULL) { |
1129 next_holder = GetNextCodeMap(holder); | 1112 next_holder = GetNextCodeMap(holder); |
1130 ClearNextCodeMap(holder); | 1113 ClearNextCodeMap(holder); |
1131 | 1114 |
1132 FixedArray* code_map = FixedArray::cast(holder->optimized_code_map()); | 1115 FixedArray* code_map = FixedArray::cast(holder->optimized_code_map()); |
1133 int new_length = SharedFunctionInfo::kEntriesStart; | 1116 int new_length = SharedFunctionInfo::kEntriesStart; |
1134 int old_length = code_map->length(); | 1117 int old_length = code_map->length(); |
1135 for (int i = SharedFunctionInfo::kEntriesStart; | 1118 for (int i = SharedFunctionInfo::kEntriesStart; i < old_length; |
1136 i < old_length; | |
1137 i += SharedFunctionInfo::kEntryLength) { | 1119 i += SharedFunctionInfo::kEntryLength) { |
1138 Code* code = | 1120 Code* code = |
1139 Code::cast(code_map->get(i + SharedFunctionInfo::kCachedCodeOffset)); | 1121 Code::cast(code_map->get(i + SharedFunctionInfo::kCachedCodeOffset)); |
1140 if (!Marking::MarkBitFrom(code).Get()) continue; | 1122 if (!Marking::MarkBitFrom(code).Get()) continue; |
1141 | 1123 |
1142 // Move every slot in the entry. | 1124 // Move every slot in the entry. |
1143 for (int j = 0; j < SharedFunctionInfo::kEntryLength; j++) { | 1125 for (int j = 0; j < SharedFunctionInfo::kEntryLength; j++) { |
1144 int dst_index = new_length++; | 1126 int dst_index = new_length++; |
1145 Object** slot = code_map->RawFieldOfElementAt(dst_index); | 1127 Object** slot = code_map->RawFieldOfElementAt(dst_index); |
1146 Object* object = code_map->get(i + j); | 1128 Object* object = code_map->get(i + j); |
1147 code_map->set(dst_index, object); | 1129 code_map->set(dst_index, object); |
1148 if (j == SharedFunctionInfo::kOsrAstIdOffset) { | 1130 if (j == SharedFunctionInfo::kOsrAstIdOffset) { |
1149 DCHECK(object->IsSmi()); | 1131 DCHECK(object->IsSmi()); |
1150 } else { | 1132 } else { |
1151 DCHECK(Marking::IsBlack( | 1133 DCHECK( |
1152 Marking::MarkBitFrom(HeapObject::cast(*slot)))); | 1134 Marking::IsBlack(Marking::MarkBitFrom(HeapObject::cast(*slot)))); |
1153 isolate_->heap()->mark_compact_collector()-> | 1135 isolate_->heap()->mark_compact_collector()->RecordSlot(slot, slot, |
1154 RecordSlot(slot, slot, *slot); | 1136 *slot); |
1155 } | 1137 } |
1156 } | 1138 } |
1157 } | 1139 } |
1158 | 1140 |
1159 // Trim the optimized code map if entries have been removed. | 1141 // Trim the optimized code map if entries have been removed. |
1160 if (new_length < old_length) { | 1142 if (new_length < old_length) { |
1161 holder->TrimOptimizedCodeMap(old_length - new_length); | 1143 holder->TrimOptimizedCodeMap(old_length - new_length); |
1162 } | 1144 } |
1163 | 1145 |
1164 holder = next_holder; | 1146 holder = next_holder; |
(...skipping 67 matching lines...) |
1232 break; | 1214 break; |
1233 } | 1215 } |
1234 | 1216 |
1235 candidate = next_candidate; | 1217 candidate = next_candidate; |
1236 } | 1218 } |
1237 } | 1219 } |
1238 } | 1220 } |
1239 | 1221 |
1240 | 1222 |
1241 void CodeFlusher::EvictOptimizedCodeMap(SharedFunctionInfo* code_map_holder) { | 1223 void CodeFlusher::EvictOptimizedCodeMap(SharedFunctionInfo* code_map_holder) { |
1242 DCHECK(!FixedArray::cast(code_map_holder->optimized_code_map())-> | 1224 DCHECK(!FixedArray::cast(code_map_holder->optimized_code_map()) |
1243 get(SharedFunctionInfo::kNextMapIndex)->IsUndefined()); | 1225 ->get(SharedFunctionInfo::kNextMapIndex) |
| 1226 ->IsUndefined()); |
1244 | 1227 |
1245 // Make sure previous flushing decisions are revisited. | 1228 // Make sure previous flushing decisions are revisited. |
1246 isolate_->heap()->incremental_marking()->RecordWrites(code_map_holder); | 1229 isolate_->heap()->incremental_marking()->RecordWrites(code_map_holder); |
1247 | 1230 |
1248 if (FLAG_trace_code_flushing) { | 1231 if (FLAG_trace_code_flushing) { |
1249 PrintF("[code-flushing abandons code-map: "); | 1232 PrintF("[code-flushing abandons code-map: "); |
1250 code_map_holder->ShortPrint(); | 1233 code_map_holder->ShortPrint(); |
1251 PrintF("]\n"); | 1234 PrintF("]\n"); |
1252 } | 1235 } |
1253 | 1236 |
(...skipping 111 matching lines...) |
1365 if (!heap->InNewSpace(object) && heap->InNewSpace(first)) return object; | 1348 if (!heap->InNewSpace(object) && heap->InNewSpace(first)) return object; |
1366 | 1349 |
1367 *p = first; | 1350 *p = first; |
1368 return HeapObject::cast(first); | 1351 return HeapObject::cast(first); |
1369 } | 1352 } |
1370 | 1353 |
1371 | 1354 |
1372 class MarkCompactMarkingVisitor | 1355 class MarkCompactMarkingVisitor |
1373 : public StaticMarkingVisitor<MarkCompactMarkingVisitor> { | 1356 : public StaticMarkingVisitor<MarkCompactMarkingVisitor> { |
1374 public: | 1357 public: |
1375 static void ObjectStatsVisitBase(StaticVisitorBase::VisitorId id, | 1358 static void ObjectStatsVisitBase(StaticVisitorBase::VisitorId id, Map* map, |
1376 Map* map, HeapObject* obj); | 1359 HeapObject* obj); |
1377 | 1360 |
1378 static void ObjectStatsCountFixedArray( | 1361 static void ObjectStatsCountFixedArray( |
1379 FixedArrayBase* fixed_array, | 1362 FixedArrayBase* fixed_array, FixedArraySubInstanceType fast_type, |
1380 FixedArraySubInstanceType fast_type, | |
1381 FixedArraySubInstanceType dictionary_type); | 1363 FixedArraySubInstanceType dictionary_type); |
1382 | 1364 |
1383 template<MarkCompactMarkingVisitor::VisitorId id> | 1365 template <MarkCompactMarkingVisitor::VisitorId id> |
1384 class ObjectStatsTracker { | 1366 class ObjectStatsTracker { |
1385 public: | 1367 public: |
1386 static inline void Visit(Map* map, HeapObject* obj); | 1368 static inline void Visit(Map* map, HeapObject* obj); |
1387 }; | 1369 }; |
1388 | 1370 |
1389 static void Initialize(); | 1371 static void Initialize(); |
1390 | 1372 |
1391 INLINE(static void VisitPointer(Heap* heap, Object** p)) { | 1373 INLINE(static void VisitPointer(Heap* heap, Object** p)) { |
1392 MarkObjectByPointer(heap->mark_compact_collector(), p, p); | 1374 MarkObjectByPointer(heap->mark_compact_collector(), p, p); |
1393 } | 1375 } |
(...skipping 23 matching lines...) |
1417 MarkBit mark_bit = Marking::MarkBitFrom(object); | 1399 MarkBit mark_bit = Marking::MarkBitFrom(object); |
1418 if (!mark_bit.Get()) { | 1400 if (!mark_bit.Get()) { |
1419 heap->mark_compact_collector()->SetMark(object, mark_bit); | 1401 heap->mark_compact_collector()->SetMark(object, mark_bit); |
1420 return true; | 1402 return true; |
1421 } | 1403 } |
1422 return false; | 1404 return false; |
1423 } | 1405 } |
1424 | 1406 |
1425 // Mark object pointed to by p. | 1407 // Mark object pointed to by p. |
1426 INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector, | 1408 INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector, |
1427 Object** anchor_slot, | 1409 Object** anchor_slot, Object** p)) { |
1428 Object** p)) { | |
1429 if (!(*p)->IsHeapObject()) return; | 1410 if (!(*p)->IsHeapObject()) return; |
1430 HeapObject* object = ShortCircuitConsString(p); | 1411 HeapObject* object = ShortCircuitConsString(p); |
1431 collector->RecordSlot(anchor_slot, p, object); | 1412 collector->RecordSlot(anchor_slot, p, object); |
1432 MarkBit mark = Marking::MarkBitFrom(object); | 1413 MarkBit mark = Marking::MarkBitFrom(object); |
1433 collector->MarkObject(object, mark); | 1414 collector->MarkObject(object, mark); |
1434 } | 1415 } |
1435 | 1416 |
1436 | 1417 |
1437 // Visit an unmarked object. | 1418 // Visit an unmarked object. |
1438 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector, | 1419 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector, |
1439 HeapObject* obj)) { | 1420 HeapObject* obj)) { |
1440 #ifdef DEBUG | 1421 #ifdef DEBUG |
1441 DCHECK(collector->heap()->Contains(obj)); | 1422 DCHECK(collector->heap()->Contains(obj)); |
1442 DCHECK(!collector->heap()->mark_compact_collector()->IsMarked(obj)); | 1423 DCHECK(!collector->heap()->mark_compact_collector()->IsMarked(obj)); |
1443 #endif | 1424 #endif |
1444 Map* map = obj->map(); | 1425 Map* map = obj->map(); |
1445 Heap* heap = obj->GetHeap(); | 1426 Heap* heap = obj->GetHeap(); |
1446 MarkBit mark = Marking::MarkBitFrom(obj); | 1427 MarkBit mark = Marking::MarkBitFrom(obj); |
1447 heap->mark_compact_collector()->SetMark(obj, mark); | 1428 heap->mark_compact_collector()->SetMark(obj, mark); |
1448 // Mark the map pointer and the body. | 1429 // Mark the map pointer and the body. |
1449 MarkBit map_mark = Marking::MarkBitFrom(map); | 1430 MarkBit map_mark = Marking::MarkBitFrom(map); |
1450 heap->mark_compact_collector()->MarkObject(map, map_mark); | 1431 heap->mark_compact_collector()->MarkObject(map, map_mark); |
1451 IterateBody(map, obj); | 1432 IterateBody(map, obj); |
1452 } | 1433 } |
1453 | 1434 |
1454 // Visit all unmarked objects pointed to by [start, end). | 1435 // Visit all unmarked objects pointed to by [start, end). |
1455 // Returns false if the operation fails (lack of stack space). | 1436 // Returns false if the operation fails (lack of stack space). |
1456 INLINE(static bool VisitUnmarkedObjects(Heap* heap, | 1437 INLINE(static bool VisitUnmarkedObjects(Heap* heap, Object** start, |
1457 Object** start, | |
1458 Object** end)) { | 1438 Object** end)) { |
1459 // Return false if we are close to the stack limit. | 1439 // Return false if we are close to the stack limit. |
1460 StackLimitCheck check(heap->isolate()); | 1440 StackLimitCheck check(heap->isolate()); |
1461 if (check.HasOverflowed()) return false; | 1441 if (check.HasOverflowed()) return false; |
1462 | 1442 |
1463 MarkCompactCollector* collector = heap->mark_compact_collector(); | 1443 MarkCompactCollector* collector = heap->mark_compact_collector(); |
1464 // Visit the unmarked objects. | 1444 // Visit the unmarked objects. |
1465 for (Object** p = start; p < end; p++) { | 1445 for (Object** p = start; p < end; p++) { |
1466 Object* o = *p; | 1446 Object* o = *p; |
1467 if (!o->IsHeapObject()) continue; | 1447 if (!o->IsHeapObject()) continue; |
1468 collector->RecordSlot(start, p, o); | 1448 collector->RecordSlot(start, p, o); |
1469 HeapObject* obj = HeapObject::cast(o); | 1449 HeapObject* obj = HeapObject::cast(o); |
1470 MarkBit mark = Marking::MarkBitFrom(obj); | 1450 MarkBit mark = Marking::MarkBitFrom(obj); |
1471 if (mark.Get()) continue; | 1451 if (mark.Get()) continue; |
1472 VisitUnmarkedObject(collector, obj); | 1452 VisitUnmarkedObject(collector, obj); |
1473 } | 1453 } |
1474 return true; | 1454 return true; |
1475 } | 1455 } |
1476 | 1456 |
1477 private: | 1457 private: |
1478 template<int id> | 1458 template <int id> |
1479 static inline void TrackObjectStatsAndVisit(Map* map, HeapObject* obj); | 1459 static inline void TrackObjectStatsAndVisit(Map* map, HeapObject* obj); |
1480 | 1460 |
1481 // Code flushing support. | 1461 // Code flushing support. |
1482 | 1462 |
1483 static const int kRegExpCodeThreshold = 5; | 1463 static const int kRegExpCodeThreshold = 5; |
1484 | 1464 |
1485 static void UpdateRegExpCodeAgeAndFlush(Heap* heap, | 1465 static void UpdateRegExpCodeAgeAndFlush(Heap* heap, JSRegExp* re, |
1486 JSRegExp* re, | |
1487 bool is_ascii) { | 1466 bool is_ascii) { |
1488 // Make sure that the fixed array is in fact initialized on the RegExp. | 1467 // Make sure that the fixed array is in fact initialized on the RegExp. |
1489 // We could potentially trigger a GC when initializing the RegExp. | 1468 // We could potentially trigger a GC when initializing the RegExp. |
1490 if (HeapObject::cast(re->data())->map()->instance_type() != | 1469 if (HeapObject::cast(re->data())->map()->instance_type() != |
1491 FIXED_ARRAY_TYPE) return; | 1470 FIXED_ARRAY_TYPE) |
| 1471 return; |
1492 | 1472 |
1493 // Make sure this is a RegExp that actually contains code. | 1473 // Make sure this is a RegExp that actually contains code. |
1494 if (re->TypeTag() != JSRegExp::IRREGEXP) return; | 1474 if (re->TypeTag() != JSRegExp::IRREGEXP) return; |
1495 | 1475 |
1496 Object* code = re->DataAt(JSRegExp::code_index(is_ascii)); | 1476 Object* code = re->DataAt(JSRegExp::code_index(is_ascii)); |
1497 if (!code->IsSmi() && | 1477 if (!code->IsSmi() && |
1498 HeapObject::cast(code)->map()->instance_type() == CODE_TYPE) { | 1478 HeapObject::cast(code)->map()->instance_type() == CODE_TYPE) { |
1499 // Save a copy that can be reinstated if we need the code again. | 1479 // Save a copy that can be reinstated if we need the code again. |
1500 re->SetDataAt(JSRegExp::saved_code_index(is_ascii), code); | 1480 re->SetDataAt(JSRegExp::saved_code_index(is_ascii), code); |
1501 | 1481 |
1502 // Saving a copy might create a pointer into a compaction candidate | 1482 // Saving a copy might create a pointer into a compaction candidate |
1503 // that was not observed by the marker. This might happen if the JSRegExp data | 1483 // that was not observed by the marker. This might happen if the JSRegExp data |
1504 // was marked through the compilation cache before the marker reached the JSRegExp | 1484 // was marked through the compilation cache before the marker reached the JSRegExp |
1505 // object. | 1485 // object. |
1506 FixedArray* data = FixedArray::cast(re->data()); | 1486 FixedArray* data = FixedArray::cast(re->data()); |
1507 Object** slot = data->data_start() + JSRegExp::saved_code_index(is_ascii); | 1487 Object** slot = data->data_start() + JSRegExp::saved_code_index(is_ascii); |
1508 heap->mark_compact_collector()-> | 1488 heap->mark_compact_collector()->RecordSlot(slot, slot, code); |
1509 RecordSlot(slot, slot, code); | |
1510 | 1489 |
1511 // Set a number in the 0-255 range to guarantee no smi overflow. | 1490 // Set a number in the 0-255 range to guarantee no smi overflow. |
1512 re->SetDataAt(JSRegExp::code_index(is_ascii), | 1491 re->SetDataAt(JSRegExp::code_index(is_ascii), |
1513 Smi::FromInt(heap->sweep_generation() & 0xff)); | 1492 Smi::FromInt(heap->sweep_generation() & 0xff)); |
1514 } else if (code->IsSmi()) { | 1493 } else if (code->IsSmi()) { |
1515 int value = Smi::cast(code)->value(); | 1494 int value = Smi::cast(code)->value(); |
1516 // The regexp has not been compiled yet or there was a compilation error. | 1495 // The regexp has not been compiled yet or there was a compilation error. |
1517 if (value == JSRegExp::kUninitializedValue || | 1496 if (value == JSRegExp::kUninitializedValue || |
1518 value == JSRegExp::kCompilationErrorValue) { | 1497 value == JSRegExp::kCompilationErrorValue) { |
1519 return; | 1498 return; |
(...skipping 28 matching lines...) |
1548 UpdateRegExpCodeAgeAndFlush(heap, re, false); | 1527 UpdateRegExpCodeAgeAndFlush(heap, re, false); |
1549 // Visit the fields of the RegExp, including the updated FixedArray. | 1528 // Visit the fields of the RegExp, including the updated FixedArray. |
1550 VisitJSRegExp(map, object); | 1529 VisitJSRegExp(map, object); |
1551 } | 1530 } |
1552 | 1531 |
1553 static VisitorDispatchTable<Callback> non_count_table_; | 1532 static VisitorDispatchTable<Callback> non_count_table_; |
1554 }; | 1533 }; |
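The regexp flushing above replaces compiled code with a small smi age: masking sweep_generation() with 0xff keeps the stored value in the 0-255 range, so it fits in a smi even with 31-bit smis. A sketch of that masking, with an invented generation counter standing in for heap->sweep_generation():

    #include <stdint.h>

    // Always returns a value in [0, 255], safe to store as Smi::FromInt(...).
    static int RegExpCodeAgeTag(uint32_t sweep_generation) {
      return static_cast<int>(sweep_generation & 0xff);
    }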
1555 | 1534 |
1556 | 1535 |
1557 void MarkCompactMarkingVisitor::ObjectStatsCountFixedArray( | 1536 void MarkCompactMarkingVisitor::ObjectStatsCountFixedArray( |
1558 FixedArrayBase* fixed_array, | 1537 FixedArrayBase* fixed_array, FixedArraySubInstanceType fast_type, |
1559 FixedArraySubInstanceType fast_type, | |
1560 FixedArraySubInstanceType dictionary_type) { | 1538 FixedArraySubInstanceType dictionary_type) { |
1561 Heap* heap = fixed_array->map()->GetHeap(); | 1539 Heap* heap = fixed_array->map()->GetHeap(); |
1562 if (fixed_array->map() != heap->fixed_cow_array_map() && | 1540 if (fixed_array->map() != heap->fixed_cow_array_map() && |
1563 fixed_array->map() != heap->fixed_double_array_map() && | 1541 fixed_array->map() != heap->fixed_double_array_map() && |
1564 fixed_array != heap->empty_fixed_array()) { | 1542 fixed_array != heap->empty_fixed_array()) { |
1565 if (fixed_array->IsDictionary()) { | 1543 if (fixed_array->IsDictionary()) { |
1566 heap->RecordFixedArraySubTypeStats(dictionary_type, | 1544 heap->RecordFixedArraySubTypeStats(dictionary_type, fixed_array->Size()); |
1567 fixed_array->Size()); | |
1568 } else { | 1545 } else { |
1569 heap->RecordFixedArraySubTypeStats(fast_type, | 1546 heap->RecordFixedArraySubTypeStats(fast_type, fixed_array->Size()); |
1570 fixed_array->Size()); | |
1571 } | 1547 } |
1572 } | 1548 } |
1573 } | 1549 } |
1574 | 1550 |
1575 | 1551 |
1576 void MarkCompactMarkingVisitor::ObjectStatsVisitBase( | 1552 void MarkCompactMarkingVisitor::ObjectStatsVisitBase( |
1577 MarkCompactMarkingVisitor::VisitorId id, Map* map, HeapObject* obj) { | 1553 MarkCompactMarkingVisitor::VisitorId id, Map* map, HeapObject* obj) { |
1578 Heap* heap = map->GetHeap(); | 1554 Heap* heap = map->GetHeap(); |
1579 int object_size = obj->Size(); | 1555 int object_size = obj->Size(); |
1580 heap->RecordObjectStats(map->instance_type(), object_size); | 1556 heap->RecordObjectStats(map->instance_type(), object_size); |
1581 non_count_table_.GetVisitorById(id)(map, obj); | 1557 non_count_table_.GetVisitorById(id)(map, obj); |
1582 if (obj->IsJSObject()) { | 1558 if (obj->IsJSObject()) { |
1583 JSObject* object = JSObject::cast(obj); | 1559 JSObject* object = JSObject::cast(obj); |
1584 ObjectStatsCountFixedArray(object->elements(), | 1560 ObjectStatsCountFixedArray(object->elements(), DICTIONARY_ELEMENTS_SUB_TYPE, |
1585 DICTIONARY_ELEMENTS_SUB_TYPE, | |
1586 FAST_ELEMENTS_SUB_TYPE); | 1561 FAST_ELEMENTS_SUB_TYPE); |
1587 ObjectStatsCountFixedArray(object->properties(), | 1562 ObjectStatsCountFixedArray(object->properties(), |
1588 DICTIONARY_PROPERTIES_SUB_TYPE, | 1563 DICTIONARY_PROPERTIES_SUB_TYPE, |
1589 FAST_PROPERTIES_SUB_TYPE); | 1564 FAST_PROPERTIES_SUB_TYPE); |
1590 } | 1565 } |
1591 } | 1566 } |
1592 | 1567 |
1593 | 1568 |
1594 template<MarkCompactMarkingVisitor::VisitorId id> | 1569 template <MarkCompactMarkingVisitor::VisitorId id> |
1595 void MarkCompactMarkingVisitor::ObjectStatsTracker<id>::Visit( | 1570 void MarkCompactMarkingVisitor::ObjectStatsTracker<id>::Visit(Map* map, |
1596 Map* map, HeapObject* obj) { | 1571 HeapObject* obj) { |
1597 ObjectStatsVisitBase(id, map, obj); | 1572 ObjectStatsVisitBase(id, map, obj); |
1598 } | 1573 } |
1599 | 1574 |
1600 | 1575 |
1601 template<> | 1576 template <> |
1602 class MarkCompactMarkingVisitor::ObjectStatsTracker< | 1577 class MarkCompactMarkingVisitor::ObjectStatsTracker< |
1603 MarkCompactMarkingVisitor::kVisitMap> { | 1578 MarkCompactMarkingVisitor::kVisitMap> { |
1604 public: | 1579 public: |
1605 static inline void Visit(Map* map, HeapObject* obj) { | 1580 static inline void Visit(Map* map, HeapObject* obj) { |
1606 Heap* heap = map->GetHeap(); | 1581 Heap* heap = map->GetHeap(); |
1607 Map* map_obj = Map::cast(obj); | 1582 Map* map_obj = Map::cast(obj); |
1608 DCHECK(map->instance_type() == MAP_TYPE); | 1583 DCHECK(map->instance_type() == MAP_TYPE); |
1609 DescriptorArray* array = map_obj->instance_descriptors(); | 1584 DescriptorArray* array = map_obj->instance_descriptors(); |
1610 if (map_obj->owns_descriptors() && | 1585 if (map_obj->owns_descriptors() && |
1611 array != heap->empty_descriptor_array()) { | 1586 array != heap->empty_descriptor_array()) { |
(...skipping 14 matching lines...) |
1626 heap->RecordFixedArraySubTypeStats( | 1601 heap->RecordFixedArraySubTypeStats( |
1627 MAP_CODE_CACHE_SUB_TYPE, | 1602 MAP_CODE_CACHE_SUB_TYPE, |
1628 FixedArray::cast(cache->normal_type_cache())->Size()); | 1603 FixedArray::cast(cache->normal_type_cache())->Size()); |
1629 } | 1604 } |
1630 } | 1605 } |
1631 ObjectStatsVisitBase(kVisitMap, map, obj); | 1606 ObjectStatsVisitBase(kVisitMap, map, obj); |
1632 } | 1607 } |
1633 }; | 1608 }; |
1634 | 1609 |
1635 | 1610 |
1636 template<> | 1611 template <> |
1637 class MarkCompactMarkingVisitor::ObjectStatsTracker< | 1612 class MarkCompactMarkingVisitor::ObjectStatsTracker< |
1638 MarkCompactMarkingVisitor::kVisitCode> { | 1613 MarkCompactMarkingVisitor::kVisitCode> { |
1639 public: | 1614 public: |
1640 static inline void Visit(Map* map, HeapObject* obj) { | 1615 static inline void Visit(Map* map, HeapObject* obj) { |
1641 Heap* heap = map->GetHeap(); | 1616 Heap* heap = map->GetHeap(); |
1642 int object_size = obj->Size(); | 1617 int object_size = obj->Size(); |
1643 DCHECK(map->instance_type() == CODE_TYPE); | 1618 DCHECK(map->instance_type() == CODE_TYPE); |
1644 Code* code_obj = Code::cast(obj); | 1619 Code* code_obj = Code::cast(obj); |
1645 heap->RecordCodeSubTypeStats(code_obj->kind(), code_obj->GetRawAge(), | 1620 heap->RecordCodeSubTypeStats(code_obj->kind(), code_obj->GetRawAge(), |
1646 object_size); | 1621 object_size); |
1647 ObjectStatsVisitBase(kVisitCode, map, obj); | 1622 ObjectStatsVisitBase(kVisitCode, map, obj); |
1648 } | 1623 } |
1649 }; | 1624 }; |
1650 | 1625 |
1651 | 1626 |
1652 template<> | 1627 template <> |
1653 class MarkCompactMarkingVisitor::ObjectStatsTracker< | 1628 class MarkCompactMarkingVisitor::ObjectStatsTracker< |
1654 MarkCompactMarkingVisitor::kVisitSharedFunctionInfo> { | 1629 MarkCompactMarkingVisitor::kVisitSharedFunctionInfo> { |
1655 public: | 1630 public: |
1656 static inline void Visit(Map* map, HeapObject* obj) { | 1631 static inline void Visit(Map* map, HeapObject* obj) { |
1657 Heap* heap = map->GetHeap(); | 1632 Heap* heap = map->GetHeap(); |
1658 SharedFunctionInfo* sfi = SharedFunctionInfo::cast(obj); | 1633 SharedFunctionInfo* sfi = SharedFunctionInfo::cast(obj); |
1659 if (sfi->scope_info() != heap->empty_fixed_array()) { | 1634 if (sfi->scope_info() != heap->empty_fixed_array()) { |
1660 heap->RecordFixedArraySubTypeStats( | 1635 heap->RecordFixedArraySubTypeStats( |
1661 SCOPE_INFO_SUB_TYPE, | 1636 SCOPE_INFO_SUB_TYPE, FixedArray::cast(sfi->scope_info())->Size()); |
1662 FixedArray::cast(sfi->scope_info())->Size()); | |
1663 } | 1637 } |
1664 ObjectStatsVisitBase(kVisitSharedFunctionInfo, map, obj); | 1638 ObjectStatsVisitBase(kVisitSharedFunctionInfo, map, obj); |
1665 } | 1639 } |
1666 }; | 1640 }; |
1667 | 1641 |
1668 | 1642 |
1669 template<> | 1643 template <> |
1670 class MarkCompactMarkingVisitor::ObjectStatsTracker< | 1644 class MarkCompactMarkingVisitor::ObjectStatsTracker< |
1671 MarkCompactMarkingVisitor::kVisitFixedArray> { | 1645 MarkCompactMarkingVisitor::kVisitFixedArray> { |
1672 public: | 1646 public: |
1673 static inline void Visit(Map* map, HeapObject* obj) { | 1647 static inline void Visit(Map* map, HeapObject* obj) { |
1674 Heap* heap = map->GetHeap(); | 1648 Heap* heap = map->GetHeap(); |
1675 FixedArray* fixed_array = FixedArray::cast(obj); | 1649 FixedArray* fixed_array = FixedArray::cast(obj); |
1676 if (fixed_array == heap->string_table()) { | 1650 if (fixed_array == heap->string_table()) { |
1677 heap->RecordFixedArraySubTypeStats( | 1651 heap->RecordFixedArraySubTypeStats(STRING_TABLE_SUB_TYPE, |
1678 STRING_TABLE_SUB_TYPE, | 1652 fixed_array->Size()); |
1679 fixed_array->Size()); | |
1680 } | 1653 } |
1681 ObjectStatsVisitBase(kVisitFixedArray, map, obj); | 1654 ObjectStatsVisitBase(kVisitFixedArray, map, obj); |
1682 } | 1655 } |
1683 }; | 1656 }; |
1684 | 1657 |
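The trackers above rely on a plain C++ idiom: a generic class template whose static Visit just forwards, plus full specializations that add extra accounting for particular visitor ids before calling through. A minimal self-contained sketch of that idiom, with hypothetical names (Tracker, VisitBase):

#include <cstdio>

enum VisitorId { kVisitGeneric, kVisitMap };

// Shared tail every tracker ends with (stands in for ObjectStatsVisitBase).
static void VisitBase(VisitorId id) { std::printf("base visit %d\n", id); }

// Generic tracker: no extra work, just call through.
template <VisitorId id>
struct Tracker {
  static void Visit() { VisitBase(id); }
};

// Full specialization: record id-specific stats first, then call through.
template <>
struct Tracker<kVisitMap> {
  static void Visit() {
    std::printf("extra map accounting\n");
    VisitBase(kVisitMap);
  }
};

int main() {
  Tracker<kVisitGeneric>::Visit();  // prints "base visit 0"
  Tracker<kVisitMap>::Visit();      // extra line, then "base visit 1"
  return 0;
}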
1685 | 1658 |
1686 void MarkCompactMarkingVisitor::Initialize() { | 1659 void MarkCompactMarkingVisitor::Initialize() { |
1687 StaticMarkingVisitor<MarkCompactMarkingVisitor>::Initialize(); | 1660 StaticMarkingVisitor<MarkCompactMarkingVisitor>::Initialize(); |
1688 | 1661 |
1689 table_.Register(kVisitJSRegExp, | 1662 table_.Register(kVisitJSRegExp, &VisitRegExpAndFlushCode); |
1690 &VisitRegExpAndFlushCode); | |
1691 | 1663 |
1692 if (FLAG_track_gc_object_stats) { | 1664 if (FLAG_track_gc_object_stats) { |
1693 // Copy the visitor table to make call-through possible. | 1665 // Copy the visitor table to make call-through possible. |
1694 non_count_table_.CopyFrom(&table_); | 1666 non_count_table_.CopyFrom(&table_); |
1695 #define VISITOR_ID_COUNT_FUNCTION(id) \ | 1667 #define VISITOR_ID_COUNT_FUNCTION(id) \ |
1696 table_.Register(kVisit##id, ObjectStatsTracker<kVisit##id>::Visit); | 1668 table_.Register(kVisit##id, ObjectStatsTracker<kVisit##id>::Visit); |
1697 VISITOR_ID_LIST(VISITOR_ID_COUNT_FUNCTION) | 1669 VISITOR_ID_LIST(VISITOR_ID_COUNT_FUNCTION) |
1698 #undef VISITOR_ID_COUNT_FUNCTION | 1670 #undef VISITOR_ID_COUNT_FUNCTION |
1699 } | 1671 } |
1700 } | 1672 } |
1701 | 1673 |
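The copy-then-reregister step in Initialize is what keeps the stats mode non-destructive: the original callbacks stay reachable through non_count_table_, and each registered wrapper records its statistic and then calls through. A minimal sketch of that pattern, with hypothetical names:

#include <cstdio>

typedef void (*Callback)(int size);

static Callback table[2];            // active dispatch table
static Callback non_count_table[2];  // saved originals for call-through

static void VisitA(int size) { std::printf("A: %d bytes\n", size); }
static void VisitB(int size) { std::printf("B: %d bytes\n", size); }

// Counting wrapper: record the statistic, then invoke the saved original.
template <int id>
static void CountingVisit(int size) {
  std::printf("stats: id=%d size=%d\n", id, size);
  non_count_table[id](size);
}

int main() {
  table[0] = VisitA;
  table[1] = VisitB;
  // Mirrors non_count_table_.CopyFrom(&table_) followed by the
  // VISITOR_ID_COUNT_FUNCTION re-registration.
  non_count_table[0] = table[0];
  non_count_table[1] = table[1];
  table[0] = CountingVisit<0>;
  table[1] = CountingVisit<1>;

  table[0](32);  // "stats: id=0 size=32" then "A: 32 bytes"
  return 0;
}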
1702 | 1674 |
1703 VisitorDispatchTable<MarkCompactMarkingVisitor::Callback> | 1675 VisitorDispatchTable<MarkCompactMarkingVisitor::Callback> |
1704 MarkCompactMarkingVisitor::non_count_table_; | 1676 MarkCompactMarkingVisitor::non_count_table_; |
1705 | 1677 |
1706 | 1678 |
(...skipping 86 matching lines...) |
1793 heap()->isolate()->handle_scope_implementer()->Iterate(&visitor); | 1765 heap()->isolate()->handle_scope_implementer()->Iterate(&visitor); |
1794 | 1766 |
1795 ProcessMarkingDeque(); | 1767 ProcessMarkingDeque(); |
1796 } | 1768 } |
1797 | 1769 |
1798 | 1770 |
1799 // Visitor class for marking heap roots. | 1771 // Visitor class for marking heap roots. |
1800 class RootMarkingVisitor : public ObjectVisitor { | 1772 class RootMarkingVisitor : public ObjectVisitor { |
1801 public: | 1773 public: |
1802 explicit RootMarkingVisitor(Heap* heap) | 1774 explicit RootMarkingVisitor(Heap* heap) |
1803 : collector_(heap->mark_compact_collector()) { } | 1775 : collector_(heap->mark_compact_collector()) {} |
1804 | 1776 |
1805 void VisitPointer(Object** p) { | 1777 void VisitPointer(Object** p) { MarkObjectByPointer(p); } |
1806 MarkObjectByPointer(p); | |
1807 } | |
1808 | 1778 |
1809 void VisitPointers(Object** start, Object** end) { | 1779 void VisitPointers(Object** start, Object** end) { |
1810 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); | 1780 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); |
1811 } | 1781 } |
1812 | 1782 |
1813 // Skip the weak next code link in a code object, which is visited in | 1783 // Skip the weak next code link in a code object, which is visited in |
1814 // ProcessTopOptimizedFrame. | 1784 // ProcessTopOptimizedFrame. |
1815 void VisitNextCodeLink(Object** p) { } | 1785 void VisitNextCodeLink(Object** p) {} |
1816 | 1786 |
1817 private: | 1787 private: |
1818 void MarkObjectByPointer(Object** p) { | 1788 void MarkObjectByPointer(Object** p) { |
1819 if (!(*p)->IsHeapObject()) return; | 1789 if (!(*p)->IsHeapObject()) return; |
1820 | 1790 |
1821 // Replace flat cons strings in place. | 1791 // Replace flat cons strings in place. |
1822 HeapObject* object = ShortCircuitConsString(p); | 1792 HeapObject* object = ShortCircuitConsString(p); |
1823 MarkBit mark_bit = Marking::MarkBitFrom(object); | 1793 MarkBit mark_bit = Marking::MarkBitFrom(object); |
1824 if (mark_bit.Get()) return; | 1794 if (mark_bit.Get()) return; |
1825 | 1795 |
1826 Map* map = object->map(); | 1796 Map* map = object->map(); |
1827 // Mark the object. | 1797 // Mark the object. |
1828 collector_->SetMark(object, mark_bit); | 1798 collector_->SetMark(object, mark_bit); |
1829 | 1799 |
1830 // Mark the map pointer and body, and push them on the marking stack. | 1800 // Mark the map pointer and body, and push them on the marking stack. |
1831 MarkBit map_mark = Marking::MarkBitFrom(map); | 1801 MarkBit map_mark = Marking::MarkBitFrom(map); |
1832 collector_->MarkObject(map, map_mark); | 1802 collector_->MarkObject(map, map_mark); |
1833 MarkCompactMarkingVisitor::IterateBody(map, object); | 1803 MarkCompactMarkingVisitor::IterateBody(map, object); |
1834 | 1804 |
1835 // Mark all the objects reachable from the map and body. May leave | 1805 // Mark all the objects reachable from the map and body. May leave |
1836 // overflowed objects in the heap. | 1806 // overflowed objects in the heap. |
1837 collector_->EmptyMarkingDeque(); | 1807 collector_->EmptyMarkingDeque(); |
1838 } | 1808 } |
1839 | 1809 |
1840 MarkCompactCollector* collector_; | 1810 MarkCompactCollector* collector_; |
1841 }; | 1811 }; |
1842 | 1812 |
1843 | 1813 |
1844 // Helper class for pruning the string table. | 1814 // Helper class for pruning the string table. |
1845 template<bool finalize_external_strings> | 1815 template <bool finalize_external_strings> |
1846 class StringTableCleaner : public ObjectVisitor { | 1816 class StringTableCleaner : public ObjectVisitor { |
1847 public: | 1817 public: |
1848 explicit StringTableCleaner(Heap* heap) | 1818 explicit StringTableCleaner(Heap* heap) : heap_(heap), pointers_removed_(0) {} |
1849 : heap_(heap), pointers_removed_(0) { } | |
1850 | 1819 |
1851 virtual void VisitPointers(Object** start, Object** end) { | 1820 virtual void VisitPointers(Object** start, Object** end) { |
1852 // Visit all HeapObject pointers in [start, end). | 1821 // Visit all HeapObject pointers in [start, end). |
1853 for (Object** p = start; p < end; p++) { | 1822 for (Object** p = start; p < end; p++) { |
1854 Object* o = *p; | 1823 Object* o = *p; |
1855 if (o->IsHeapObject() && | 1824 if (o->IsHeapObject() && |
1856 !Marking::MarkBitFrom(HeapObject::cast(o)).Get()) { | 1825 !Marking::MarkBitFrom(HeapObject::cast(o)).Get()) { |
1857 if (finalize_external_strings) { | 1826 if (finalize_external_strings) { |
1858 DCHECK(o->IsExternalString()); | 1827 DCHECK(o->IsExternalString()); |
1859 heap_->FinalizeExternalString(String::cast(*p)); | 1828 heap_->FinalizeExternalString(String::cast(*p)); |
(...skipping 39 matching lines...) |
1899 } else { | 1868 } else { |
1900 return NULL; | 1869 return NULL; |
1901 } | 1870 } |
1902 } | 1871 } |
1903 }; | 1872 }; |
1904 | 1873 |
1905 | 1874 |
1906 // Fill the marking stack with overflowed objects returned by the given | 1875 // Fill the marking stack with overflowed objects returned by the given |
1907 // iterator. Stop when the marking stack is filled or the end of the space | 1876 // iterator. Stop when the marking stack is filled or the end of the space |
1908 // is reached, whichever comes first. | 1877 // is reached, whichever comes first. |
1909 template<class T> | 1878 template <class T> |
1910 static void DiscoverGreyObjectsWithIterator(Heap* heap, | 1879 static void DiscoverGreyObjectsWithIterator(Heap* heap, |
1911 MarkingDeque* marking_deque, | 1880 MarkingDeque* marking_deque, |
1912 T* it) { | 1881 T* it) { |
1913 // The caller should ensure that the marking stack is initially not full, | 1882 // The caller should ensure that the marking stack is initially not full, |
1914 // so that we don't waste effort pointlessly scanning for objects. | 1883 // so that we don't waste effort pointlessly scanning for objects. |
1915 DCHECK(!marking_deque->IsFull()); | 1884 DCHECK(!marking_deque->IsFull()); |
1916 | 1885 |
1917 Map* filler_map = heap->one_pointer_filler_map(); | 1886 Map* filler_map = heap->one_pointer_filler_map(); |
1918 for (HeapObject* object = it->Next(); | 1887 for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) { |
1919 object != NULL; | |
1920 object = it->Next()) { | |
1921 MarkBit markbit = Marking::MarkBitFrom(object); | 1888 MarkBit markbit = Marking::MarkBitFrom(object); |
1922 if ((object->map() != filler_map) && Marking::IsGrey(markbit)) { | 1889 if ((object->map() != filler_map) && Marking::IsGrey(markbit)) { |
1923 Marking::GreyToBlack(markbit); | 1890 Marking::GreyToBlack(markbit); |
1924 MemoryChunk::IncrementLiveBytesFromGC(object->address(), object->Size()); | 1891 MemoryChunk::IncrementLiveBytesFromGC(object->address(), object->Size()); |
1925 marking_deque->PushBlack(object); | 1892 marking_deque->PushBlack(object); |
1926 if (marking_deque->IsFull()) return; | 1893 if (marking_deque->IsFull()) return; |
1927 } | 1894 } |
1928 } | 1895 } |
1929 } | 1896 } |
1930 | 1897 |
(...skipping 11 matching lines...) |
1942 | 1909 |
1943 for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) { | 1910 for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) { |
1944 Address cell_base = it.CurrentCellBase(); | 1911 Address cell_base = it.CurrentCellBase(); |
1945 MarkBit::CellType* cell = it.CurrentCell(); | 1912 MarkBit::CellType* cell = it.CurrentCell(); |
1946 | 1913 |
1947 const MarkBit::CellType current_cell = *cell; | 1914 const MarkBit::CellType current_cell = *cell; |
1948 if (current_cell == 0) continue; | 1915 if (current_cell == 0) continue; |
1949 | 1916 |
1950 MarkBit::CellType grey_objects; | 1917 MarkBit::CellType grey_objects; |
1951 if (it.HasNext()) { | 1918 if (it.HasNext()) { |
1952 const MarkBit::CellType next_cell = *(cell+1); | 1919 const MarkBit::CellType next_cell = *(cell + 1); |
1953 grey_objects = current_cell & | 1920 grey_objects = current_cell & ((current_cell >> 1) | |
1954 ((current_cell >> 1) | (next_cell << (Bitmap::kBitsPerCell - 1))); | 1921 (next_cell << (Bitmap::kBitsPerCell - 1))); |
1955 } else { | 1922 } else { |
1956 grey_objects = current_cell & (current_cell >> 1); | 1923 grey_objects = current_cell & (current_cell >> 1); |
1957 } | 1924 } |
1958 | 1925 |
1959 int offset = 0; | 1926 int offset = 0; |
1960 while (grey_objects != 0) { | 1927 while (grey_objects != 0) { |
1961 int trailing_zeros = CompilerIntrinsics::CountTrailingZeros(grey_objects); | 1928 int trailing_zeros = CompilerIntrinsics::CountTrailingZeros(grey_objects); |
1962 grey_objects >>= trailing_zeros; | 1929 grey_objects >>= trailing_zeros; |
1963 offset += trailing_zeros; | 1930 offset += trailing_zeros; |
1964 MarkBit markbit(cell, 1 << offset, false); | 1931 MarkBit markbit(cell, 1 << offset, false); |
1965 DCHECK(Marking::IsGrey(markbit)); | 1932 DCHECK(Marking::IsGrey(markbit)); |
1966 Marking::GreyToBlack(markbit); | 1933 Marking::GreyToBlack(markbit); |
1967 Address addr = cell_base + offset * kPointerSize; | 1934 Address addr = cell_base + offset * kPointerSize; |
1968 HeapObject* object = HeapObject::FromAddress(addr); | 1935 HeapObject* object = HeapObject::FromAddress(addr); |
1969 MemoryChunk::IncrementLiveBytesFromGC(object->address(), object->Size()); | 1936 MemoryChunk::IncrementLiveBytesFromGC(object->address(), object->Size()); |
1970 marking_deque->PushBlack(object); | 1937 marking_deque->PushBlack(object); |
1971 if (marking_deque->IsFull()) return; | 1938 if (marking_deque->IsFull()) return; |
1972 offset += 2; | 1939 offset += 2; |
1973 grey_objects >>= 2; | 1940 grey_objects >>= 2; |
1974 } | 1941 } |
1975 | 1942 |
1976 grey_objects >>= (Bitmap::kBitsPerCell - 1); | 1943 grey_objects >>= (Bitmap::kBitsPerCell - 1); |
1977 } | 1944 } |
1978 } | 1945 } |
1979 | 1946 |
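The bit expression in DiscoverGreyObjectsOnPage follows directly from the two-bit encoding asserted in this file (white=00, black=10, grey=11): bit i survives only if bit i+1 is also set, with the next cell supplying the partner bit at the cell boundary. A small standalone check of that arithmetic (not V8 code):

#include <cassert>
#include <cstdint>

int main() {
  const int kBitsPerCell = 32;
  // One black object at bit 0 (only the mark bit set) and one grey object
  // at bit 4 (mark bit and the following bit both set).
  uint32_t current_cell = (1u << 0) | (1u << 4) | (1u << 5);
  uint32_t next_cell = 0;  // would supply the partner bit for bit 31

  // Keep bit i only when bits i and i+1 are both set, i.e. the object is grey.
  uint32_t grey_objects =
      current_cell &
      ((current_cell >> 1) | (next_cell << (kBitsPerCell - 1)));

  assert(grey_objects == (1u << 4));  // only the grey object survives
  return 0;
}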
1980 | 1947 |
1981 int MarkCompactCollector::DiscoverAndEvacuateBlackObjectsOnPage( | 1948 int MarkCompactCollector::DiscoverAndEvacuateBlackObjectsOnPage( |
1982 NewSpace* new_space, | 1949 NewSpace* new_space, NewSpacePage* p) { |
1983 NewSpacePage* p) { | |
1984 DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0); | 1950 DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0); |
1985 DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0); | 1951 DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0); |
1986 DCHECK(strcmp(Marking::kGreyBitPattern, "11") == 0); | 1952 DCHECK(strcmp(Marking::kGreyBitPattern, "11") == 0); |
1987 DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0); | 1953 DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0); |
1988 | 1954 |
1989 MarkBit::CellType* cells = p->markbits()->cells(); | 1955 MarkBit::CellType* cells = p->markbits()->cells(); |
1990 int survivors_size = 0; | 1956 int survivors_size = 0; |
1991 | 1957 |
1992 for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) { | 1958 for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) { |
1993 Address cell_base = it.CurrentCellBase(); | 1959 Address cell_base = it.CurrentCellBase(); |
(...skipping 30 matching lines...) |
2024 // Shouldn't happen. We are sweeping linearly, and to-space | 1990 // Shouldn't happen. We are sweeping linearly, and to-space |
2025 // has the same number of pages as from-space, so there is | 1991 // has the same number of pages as from-space, so there is |
2026 // always room. | 1992 // always room. |
2027 UNREACHABLE(); | 1993 UNREACHABLE(); |
2028 } | 1994 } |
2029 allocation = new_space->AllocateRaw(size); | 1995 allocation = new_space->AllocateRaw(size); |
2030 DCHECK(!allocation.IsRetry()); | 1996 DCHECK(!allocation.IsRetry()); |
2031 } | 1997 } |
2032 Object* target = allocation.ToObjectChecked(); | 1998 Object* target = allocation.ToObjectChecked(); |
2033 | 1999 |
2034 MigrateObject(HeapObject::cast(target), | 2000 MigrateObject(HeapObject::cast(target), object, size, NEW_SPACE); |
2035 object, | |
2036 size, | |
2037 NEW_SPACE); | |
2038 heap()->IncrementSemiSpaceCopiedObjectSize(size); | 2001 heap()->IncrementSemiSpaceCopiedObjectSize(size); |
2039 } | 2002 } |
2040 *cells = 0; | 2003 *cells = 0; |
2041 } | 2004 } |
2042 return survivors_size; | 2005 return survivors_size; |
2043 } | 2006 } |
2044 | 2007 |
2045 | 2008 |
2046 static void DiscoverGreyObjectsInSpace(Heap* heap, | 2009 static void DiscoverGreyObjectsInSpace(Heap* heap, MarkingDeque* marking_deque, |
2047 MarkingDeque* marking_deque, | |
2048 PagedSpace* space) { | 2010 PagedSpace* space) { |
2049 if (space->swept_precisely()) { | 2011 if (space->swept_precisely()) { |
2050 HeapObjectIterator it(space); | 2012 HeapObjectIterator it(space); |
2051 DiscoverGreyObjectsWithIterator(heap, marking_deque, &it); | 2013 DiscoverGreyObjectsWithIterator(heap, marking_deque, &it); |
2052 } else { | 2014 } else { |
2053 PageIterator it(space); | 2015 PageIterator it(space); |
2054 while (it.has_next()) { | 2016 while (it.has_next()) { |
2055 Page* p = it.next(); | 2017 Page* p = it.next(); |
2056 DiscoverGreyObjectsOnPage(marking_deque, p); | 2018 DiscoverGreyObjectsOnPage(marking_deque, p); |
2057 if (marking_deque->IsFull()) return; | 2019 if (marking_deque->IsFull()) return; |
(...skipping 137 matching lines...) |
2195 // push them on the marking stack. Stop early if the marking stack fills | 2157 // push them on the marking stack. Stop early if the marking stack fills |
2196 // before sweeping completes. If sweeping completes, there are no remaining | 2158 // before sweeping completes. If sweeping completes, there are no remaining |
2197 // overflowed objects in the heap, so the overflow flag on the marking stack | 2159 // overflowed objects in the heap, so the overflow flag on the marking stack |
2198 // is cleared. | 2160 // is cleared. |
2199 void MarkCompactCollector::RefillMarkingDeque() { | 2161 void MarkCompactCollector::RefillMarkingDeque() { |
2200 DCHECK(marking_deque_.overflowed()); | 2162 DCHECK(marking_deque_.overflowed()); |
2201 | 2163 |
2202 DiscoverGreyObjectsInNewSpace(heap(), &marking_deque_); | 2164 DiscoverGreyObjectsInNewSpace(heap(), &marking_deque_); |
2203 if (marking_deque_.IsFull()) return; | 2165 if (marking_deque_.IsFull()) return; |
2204 | 2166 |
2205 DiscoverGreyObjectsInSpace(heap(), | 2167 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, |
2206 &marking_deque_, | |
2207 heap()->old_pointer_space()); | 2168 heap()->old_pointer_space()); |
2208 if (marking_deque_.IsFull()) return; | 2169 if (marking_deque_.IsFull()) return; |
2209 | 2170 |
2210 DiscoverGreyObjectsInSpace(heap(), | 2171 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->old_data_space()); |
2211 &marking_deque_, | |
2212 heap()->old_data_space()); | |
2213 if (marking_deque_.IsFull()) return; | 2172 if (marking_deque_.IsFull()) return; |
2214 | 2173 |
2215 DiscoverGreyObjectsInSpace(heap(), | 2174 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->code_space()); |
2216 &marking_deque_, | |
2217 heap()->code_space()); | |
2218 if (marking_deque_.IsFull()) return; | 2175 if (marking_deque_.IsFull()) return; |
2219 | 2176 |
2220 DiscoverGreyObjectsInSpace(heap(), | 2177 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->map_space()); |
2221 &marking_deque_, | |
2222 heap()->map_space()); | |
2223 if (marking_deque_.IsFull()) return; | 2178 if (marking_deque_.IsFull()) return; |
2224 | 2179 |
2225 DiscoverGreyObjectsInSpace(heap(), | 2180 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->cell_space()); |
2226 &marking_deque_, | |
2227 heap()->cell_space()); | |
2228 if (marking_deque_.IsFull()) return; | 2181 if (marking_deque_.IsFull()) return; |
2229 | 2182 |
2230 DiscoverGreyObjectsInSpace(heap(), | 2183 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, |
2231 &marking_deque_, | |
2232 heap()->property_cell_space()); | 2184 heap()->property_cell_space()); |
2233 if (marking_deque_.IsFull()) return; | 2185 if (marking_deque_.IsFull()) return; |
2234 | 2186 |
2235 LargeObjectIterator lo_it(heap()->lo_space()); | 2187 LargeObjectIterator lo_it(heap()->lo_space()); |
2236 DiscoverGreyObjectsWithIterator(heap(), | 2188 DiscoverGreyObjectsWithIterator(heap(), &marking_deque_, &lo_it); |
2237 &marking_deque_, | |
2238 &lo_it); | |
2239 if (marking_deque_.IsFull()) return; | 2189 if (marking_deque_.IsFull()) return; |
2240 | 2190 |
2241 marking_deque_.ClearOverflowed(); | 2191 marking_deque_.ClearOverflowed(); |
2242 } | 2192 } |
2243 | 2193 |
2244 | 2194 |
2245 // Mark all objects reachable (transitively) from objects on the marking | 2195 // Mark all objects reachable (transitively) from objects on the marking |
2246 // stack. Before: the marking stack contains zero or more heap object | 2196 // stack. Before: the marking stack contains zero or more heap object |
2247 // pointers. After: the marking stack is empty and there are no overflowed | 2197 // pointers. After: the marking stack is empty and there are no overflowed |
2248 // objects in the heap. | 2198 // objects in the heap. |
(...skipping 73 matching lines...) |
2322 DCHECK(state_ == PREPARE_GC); | 2272 DCHECK(state_ == PREPARE_GC); |
2323 state_ = MARK_LIVE_OBJECTS; | 2273 state_ = MARK_LIVE_OBJECTS; |
2324 #endif | 2274 #endif |
2325 // The to space contains live objects; a page in from space is used as a | 2275 // The to space contains live objects; a page in from space is used as a |
2326 // marking stack. | 2276 // marking stack. |
2327 Address marking_deque_start = heap()->new_space()->FromSpacePageLow(); | 2277 Address marking_deque_start = heap()->new_space()->FromSpacePageLow(); |
2328 Address marking_deque_end = heap()->new_space()->FromSpacePageHigh(); | 2278 Address marking_deque_end = heap()->new_space()->FromSpacePageHigh(); |
2329 if (FLAG_force_marking_deque_overflows) { | 2279 if (FLAG_force_marking_deque_overflows) { |
2330 marking_deque_end = marking_deque_start + 64 * kPointerSize; | 2280 marking_deque_end = marking_deque_start + 64 * kPointerSize; |
2331 } | 2281 } |
2332 marking_deque_.Initialize(marking_deque_start, | 2282 marking_deque_.Initialize(marking_deque_start, marking_deque_end); |
2333 marking_deque_end); | |
2334 DCHECK(!marking_deque_.overflowed()); | 2283 DCHECK(!marking_deque_.overflowed()); |
2335 | 2284 |
2336 if (incremental_marking_overflowed) { | 2285 if (incremental_marking_overflowed) { |
2337 // There are overflowed objects left in the heap after incremental marking. | 2286 // There are overflowed objects left in the heap after incremental marking. |
2338 marking_deque_.SetOverflowed(); | 2287 marking_deque_.SetOverflowed(); |
2339 } | 2288 } |
2340 | 2289 |
2341 PrepareForCodeFlushing(); | 2290 PrepareForCodeFlushing(); |
2342 | 2291 |
2343 if (was_marked_incrementally_) { | 2292 if (was_marked_incrementally_) { |
2344 // There is no write barrier on cells so we have to scan them now at the end | 2293 // There is no write barrier on cells so we have to scan them now at the end |
2345 // of the incremental marking. | 2294 // of the incremental marking. |
2346 { | 2295 { |
2347 HeapObjectIterator cell_iterator(heap()->cell_space()); | 2296 HeapObjectIterator cell_iterator(heap()->cell_space()); |
2348 HeapObject* cell; | 2297 HeapObject* cell; |
2349 while ((cell = cell_iterator.Next()) != NULL) { | 2298 while ((cell = cell_iterator.Next()) != NULL) { |
2350 DCHECK(cell->IsCell()); | 2299 DCHECK(cell->IsCell()); |
2351 if (IsMarked(cell)) { | 2300 if (IsMarked(cell)) { |
2352 int offset = Cell::kValueOffset; | 2301 int offset = Cell::kValueOffset; |
2353 MarkCompactMarkingVisitor::VisitPointer( | 2302 MarkCompactMarkingVisitor::VisitPointer( |
2354 heap(), | 2303 heap(), reinterpret_cast<Object**>(cell->address() + offset)); |
2355 reinterpret_cast<Object**>(cell->address() + offset)); | |
2356 } | 2304 } |
2357 } | 2305 } |
2358 } | 2306 } |
2359 { | 2307 { |
2360 HeapObjectIterator js_global_property_cell_iterator( | 2308 HeapObjectIterator js_global_property_cell_iterator( |
2361 heap()->property_cell_space()); | 2309 heap()->property_cell_space()); |
2362 HeapObject* cell; | 2310 HeapObject* cell; |
2363 while ((cell = js_global_property_cell_iterator.Next()) != NULL) { | 2311 while ((cell = js_global_property_cell_iterator.Next()) != NULL) { |
2364 DCHECK(cell->IsPropertyCell()); | 2312 DCHECK(cell->IsPropertyCell()); |
2365 if (IsMarked(cell)) { | 2313 if (IsMarked(cell)) { |
(...skipping 91 matching lines...) |
2457 HeapObject* raw_map_cache = | 2405 HeapObject* raw_map_cache = |
2458 HeapObject::cast(context->get(Context::MAP_CACHE_INDEX)); | 2406 HeapObject::cast(context->get(Context::MAP_CACHE_INDEX)); |
2459 // A map cache may be reachable from the stack. In this case | 2407 // A map cache may be reachable from the stack. In this case |
2460 // it's already transitively marked and it's too late to clean | 2408 // it's already transitively marked and it's too late to clean |
2461 // up its parts. | 2409 // up its parts. |
2462 if (!IsMarked(raw_map_cache) && | 2410 if (!IsMarked(raw_map_cache) && |
2463 raw_map_cache != heap()->undefined_value()) { | 2411 raw_map_cache != heap()->undefined_value()) { |
2464 MapCache* map_cache = reinterpret_cast<MapCache*>(raw_map_cache); | 2412 MapCache* map_cache = reinterpret_cast<MapCache*>(raw_map_cache); |
2465 int existing_elements = map_cache->NumberOfElements(); | 2413 int existing_elements = map_cache->NumberOfElements(); |
2466 int used_elements = 0; | 2414 int used_elements = 0; |
2467 for (int i = MapCache::kElementsStartIndex; | 2415 for (int i = MapCache::kElementsStartIndex; i < map_cache->length(); |
2468 i < map_cache->length(); | |
2469 i += MapCache::kEntrySize) { | 2416 i += MapCache::kEntrySize) { |
2470 Object* raw_key = map_cache->get(i); | 2417 Object* raw_key = map_cache->get(i); |
2471 if (raw_key == heap()->undefined_value() || | 2418 if (raw_key == heap()->undefined_value() || |
2472 raw_key == heap()->the_hole_value()) continue; | 2419 raw_key == heap()->the_hole_value()) |
| 2420 continue; |
2473 STATIC_ASSERT(MapCache::kEntrySize == 2); | 2421 STATIC_ASSERT(MapCache::kEntrySize == 2); |
2474 Object* raw_map = map_cache->get(i + 1); | 2422 Object* raw_map = map_cache->get(i + 1); |
2475 if (raw_map->IsHeapObject() && IsMarked(raw_map)) { | 2423 if (raw_map->IsHeapObject() && IsMarked(raw_map)) { |
2476 ++used_elements; | 2424 ++used_elements; |
2477 } else { | 2425 } else { |
2478 // Delete useless entries with unmarked maps. | 2426 // Delete useless entries with unmarked maps. |
2479 DCHECK(raw_map->IsMap()); | 2427 DCHECK(raw_map->IsMap()); |
2480 map_cache->set_the_hole(i); | 2428 map_cache->set_the_hole(i); |
2481 map_cache->set_the_hole(i + 1); | 2429 map_cache->set_the_hole(i + 1); |
2482 } | 2430 } |
(...skipping 15 matching lines...) |
2498 } | 2446 } |
2499 ProcessMarkingDeque(); | 2447 ProcessMarkingDeque(); |
2500 } | 2448 } |
2501 | 2449 |
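The map-cache walk above operates on a flat array of (key, map) pairs at stride kEntrySize; dead entries are overwritten with the hole sentinel rather than compacted, so indices stay stable. A minimal sketch of that layout with hypothetical values (the hole is modelled as -1):

#include <cassert>
#include <vector>

int main() {
  const int kEntrySize = 2;
  const int kHole = -1;
  // Flat layout: key, map, key, map, ...
  std::vector<int> cache = {10, 100, 20, 200, 30, 300};
  auto map_is_live = [](int m) { return m != 200; };  // pretend 200 died

  int used_elements = 0;
  for (size_t i = 0; i < cache.size(); i += kEntrySize) {
    if (cache[i] == kHole) continue;  // skip already-deleted entries
    if (map_is_live(cache[i + 1])) {
      ++used_elements;
    } else {
      cache[i] = kHole;      // map_cache->set_the_hole(i)
      cache[i + 1] = kHole;  // map_cache->set_the_hole(i + 1)
    }
  }
  assert(used_elements == 2);
  assert(cache[2] == kHole && cache[3] == kHole);
  return 0;
}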
2502 | 2450 |
2503 void MarkCompactCollector::ClearNonLiveReferences() { | 2451 void MarkCompactCollector::ClearNonLiveReferences() { |
2504 // Iterate over the map space, setting map transitions that go from | 2452 // Iterate over the map space, setting map transitions that go from |
2505 // a marked map to an unmarked map to null transitions. This action | 2453 // a marked map to an unmarked map to null transitions. This action |
2506 // is carried out only on maps of JSObjects and related subtypes. | 2454 // is carried out only on maps of JSObjects and related subtypes. |
2507 HeapObjectIterator map_iterator(heap()->map_space()); | 2455 HeapObjectIterator map_iterator(heap()->map_space()); |
2508 for (HeapObject* obj = map_iterator.Next(); | 2456 for (HeapObject* obj = map_iterator.Next(); obj != NULL; |
2509 obj != NULL; | |
2510 obj = map_iterator.Next()) { | 2457 obj = map_iterator.Next()) { |
2511 Map* map = Map::cast(obj); | 2458 Map* map = Map::cast(obj); |
2512 | 2459 |
2513 if (!map->CanTransition()) continue; | 2460 if (!map->CanTransition()) continue; |
2514 | 2461 |
2515 MarkBit map_mark = Marking::MarkBitFrom(map); | 2462 MarkBit map_mark = Marking::MarkBitFrom(map); |
2516 ClearNonLivePrototypeTransitions(map); | 2463 ClearNonLivePrototypeTransitions(map); |
2517 ClearNonLiveMapTransitions(map, map_mark); | 2464 ClearNonLiveMapTransitions(map, map_mark); |
2518 | 2465 |
2519 if (map_mark.Get()) { | 2466 if (map_mark.Get()) { |
2520 ClearNonLiveDependentCode(map->dependent_code()); | 2467 ClearNonLiveDependentCode(map->dependent_code()); |
2521 } else { | 2468 } else { |
2522 ClearDependentCode(map->dependent_code()); | 2469 ClearDependentCode(map->dependent_code()); |
2523 map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array())); | 2470 map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array())); |
2524 } | 2471 } |
2525 } | 2472 } |
2526 | 2473 |
2527 // Iterate over property cell space, removing dependent code that is not | 2474 // Iterate over property cell space, removing dependent code that is not |
2528 // otherwise kept alive by strong references. | 2475 // otherwise kept alive by strong references. |
2529 HeapObjectIterator cell_iterator(heap_->property_cell_space()); | 2476 HeapObjectIterator cell_iterator(heap_->property_cell_space()); |
2530 for (HeapObject* cell = cell_iterator.Next(); | 2477 for (HeapObject* cell = cell_iterator.Next(); cell != NULL; |
2531 cell != NULL; | |
2532 cell = cell_iterator.Next()) { | 2478 cell = cell_iterator.Next()) { |
2533 if (IsMarked(cell)) { | 2479 if (IsMarked(cell)) { |
2534 ClearNonLiveDependentCode(PropertyCell::cast(cell)->dependent_code()); | 2480 ClearNonLiveDependentCode(PropertyCell::cast(cell)->dependent_code()); |
2535 } | 2481 } |
2536 } | 2482 } |
2537 | 2483 |
2538 // Iterate over allocation sites, removing dependent code that is not | 2484 // Iterate over allocation sites, removing dependent code that is not |
2539 // otherwise kept alive by strong references. | 2485 // otherwise kept alive by strong references. |
2540 Object* undefined = heap()->undefined_value(); | 2486 Object* undefined = heap()->undefined_value(); |
2541 for (Object* site = heap()->allocation_sites_list(); | 2487 for (Object* site = heap()->allocation_sites_list(); site != undefined; |
2542 site != undefined; | |
2543 site = AllocationSite::cast(site)->weak_next()) { | 2488 site = AllocationSite::cast(site)->weak_next()) { |
2544 if (IsMarked(site)) { | 2489 if (IsMarked(site)) { |
2545 ClearNonLiveDependentCode(AllocationSite::cast(site)->dependent_code()); | 2490 ClearNonLiveDependentCode(AllocationSite::cast(site)->dependent_code()); |
2546 } | 2491 } |
2547 } | 2492 } |
2548 | 2493 |
2549 if (heap_->weak_object_to_code_table()->IsHashTable()) { | 2494 if (heap_->weak_object_to_code_table()->IsHashTable()) { |
2550 WeakHashTable* table = | 2495 WeakHashTable* table = |
2551 WeakHashTable::cast(heap_->weak_object_to_code_table()); | 2496 WeakHashTable::cast(heap_->weak_object_to_code_table()); |
2552 uint32_t capacity = table->Capacity(); | 2497 uint32_t capacity = table->Capacity(); |
(...skipping 41 matching lines...) |
2594 const int map_offset = header + Map::kProtoTransitionMapOffset; | 2539 const int map_offset = header + Map::kProtoTransitionMapOffset; |
2595 const int step = Map::kProtoTransitionElementsPerEntry; | 2540 const int step = Map::kProtoTransitionElementsPerEntry; |
2596 for (int i = 0; i < number_of_transitions; i++) { | 2541 for (int i = 0; i < number_of_transitions; i++) { |
2597 Object* prototype = prototype_transitions->get(proto_offset + i * step); | 2542 Object* prototype = prototype_transitions->get(proto_offset + i * step); |
2598 Object* cached_map = prototype_transitions->get(map_offset + i * step); | 2543 Object* cached_map = prototype_transitions->get(map_offset + i * step); |
2599 if (IsMarked(prototype) && IsMarked(cached_map)) { | 2544 if (IsMarked(prototype) && IsMarked(cached_map)) { |
2600 DCHECK(!prototype->IsUndefined()); | 2545 DCHECK(!prototype->IsUndefined()); |
2601 int proto_index = proto_offset + new_number_of_transitions * step; | 2546 int proto_index = proto_offset + new_number_of_transitions * step; |
2602 int map_index = map_offset + new_number_of_transitions * step; | 2547 int map_index = map_offset + new_number_of_transitions * step; |
2603 if (new_number_of_transitions != i) { | 2548 if (new_number_of_transitions != i) { |
2604 prototype_transitions->set( | 2549 prototype_transitions->set(proto_index, prototype, |
2605 proto_index, | 2550 UPDATE_WRITE_BARRIER); |
2606 prototype, | 2551 prototype_transitions->set(map_index, cached_map, SKIP_WRITE_BARRIER); |
2607 UPDATE_WRITE_BARRIER); | |
2608 prototype_transitions->set( | |
2609 map_index, | |
2610 cached_map, | |
2611 SKIP_WRITE_BARRIER); | |
2612 } | 2552 } |
2613 Object** slot = prototype_transitions->RawFieldOfElementAt(proto_index); | 2553 Object** slot = prototype_transitions->RawFieldOfElementAt(proto_index); |
2614 RecordSlot(slot, slot, prototype); | 2554 RecordSlot(slot, slot, prototype); |
2615 new_number_of_transitions++; | 2555 new_number_of_transitions++; |
2616 } | 2556 } |
2617 } | 2557 } |
2618 | 2558 |
2619 if (new_number_of_transitions != number_of_transitions) { | 2559 if (new_number_of_transitions != number_of_transitions) { |
2620 map->SetNumberOfProtoTransitions(new_number_of_transitions); | 2560 map->SetNumberOfProtoTransitions(new_number_of_transitions); |
2621 } | 2561 } |
2622 | 2562 |
2623 // Fill slots that became free with undefined value. | 2563 // Fill slots that became free with undefined value. |
2624 for (int i = new_number_of_transitions * step; | 2564 for (int i = new_number_of_transitions * step; |
2625 i < number_of_transitions * step; | 2565 i < number_of_transitions * step; i++) { |
2626 i++) { | |
2627 prototype_transitions->set_undefined(header + i); | 2566 prototype_transitions->set_undefined(header + i); |
2628 } | 2567 } |
2629 } | 2568 } |
2630 | 2569 |
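ClearNonLivePrototypeTransitions compacts the (prototype, cached map) pairs in place: live pairs slide down over dead ones, and the freed tail is filled with undefined so no stale pointers remain. A minimal standalone model of that loop (not V8 code):

#include <cassert>
#include <vector>

int main() {
  const int kUndefined = 0;
  const int step = 2;  // elements per (prototype, map) entry
  // Interleaved pairs; a 0 prototype marks a dead pair in this model.
  std::vector<int> transitions = {1, 11, 0, 0, 3, 33};
  int number_of_transitions = static_cast<int>(transitions.size()) / step;

  int new_number_of_transitions = 0;
  for (int i = 0; i < number_of_transitions; i++) {
    if (transitions[i * step] != kUndefined) {  // pair is live
      if (new_number_of_transitions != i) {     // slide it down
        transitions[new_number_of_transitions * step] = transitions[i * step];
        transitions[new_number_of_transitions * step + 1] =
            transitions[i * step + 1];
      }
      new_number_of_transitions++;
    }
  }
  // Fill slots that became free with undefined.
  for (int i = new_number_of_transitions * step;
       i < number_of_transitions * step; i++) {
    transitions[i] = kUndefined;
  }
  assert(new_number_of_transitions == 2);
  assert(transitions[2] == 3 && transitions[4] == kUndefined);
  return 0;
}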
2631 | 2570 |
2632 void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map, | 2571 void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map, |
2633 MarkBit map_mark) { | 2572 MarkBit map_mark) { |
2634 Object* potential_parent = map->GetBackPointer(); | 2573 Object* potential_parent = map->GetBackPointer(); |
2635 if (!potential_parent->IsMap()) return; | 2574 if (!potential_parent->IsMap()) return; |
2636 Map* parent = Map::cast(potential_parent); | 2575 Map* parent = Map::cast(potential_parent); |
(...skipping 16 matching lines...) |
2653 if (IsMarked(code)) { | 2592 if (IsMarked(code)) { |
2654 DCHECK(code->is_weak_stub()); | 2593 DCHECK(code->is_weak_stub()); |
2655 IC::InvalidateMaps(code); | 2594 IC::InvalidateMaps(code); |
2656 } | 2595 } |
2657 current = code->next_code_link(); | 2596 current = code->next_code_link(); |
2658 code->set_next_code_link(undefined); | 2597 code->set_next_code_link(undefined); |
2659 } | 2598 } |
2660 } | 2599 } |
2661 | 2600 |
2662 | 2601 |
2663 void MarkCompactCollector::ClearDependentCode( | 2602 void MarkCompactCollector::ClearDependentCode(DependentCode* entries) { |
2664 DependentCode* entries) { | |
2665 DisallowHeapAllocation no_allocation; | 2603 DisallowHeapAllocation no_allocation; |
2666 DependentCode::GroupStartIndexes starts(entries); | 2604 DependentCode::GroupStartIndexes starts(entries); |
2667 int number_of_entries = starts.number_of_entries(); | 2605 int number_of_entries = starts.number_of_entries(); |
2668 if (number_of_entries == 0) return; | 2606 if (number_of_entries == 0) return; |
2669 int g = DependentCode::kWeakICGroup; | 2607 int g = DependentCode::kWeakICGroup; |
2670 if (starts.at(g) != starts.at(g + 1)) { | 2608 if (starts.at(g) != starts.at(g + 1)) { |
2671 int i = starts.at(g); | 2609 int i = starts.at(g); |
2672 DCHECK(i + 1 == starts.at(g + 1)); | 2610 DCHECK(i + 1 == starts.at(g + 1)); |
2673 Object* head = entries->object_at(i); | 2611 Object* head = entries->object_at(i); |
2674 ClearDependentICList(head); | 2612 ClearDependentICList(head); |
(...skipping 84 matching lines...) |
2759 if (weak_collection->table()->IsHashTable()) { | 2697 if (weak_collection->table()->IsHashTable()) { |
2760 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); | 2698 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); |
2761 Object** anchor = reinterpret_cast<Object**>(table->address()); | 2699 Object** anchor = reinterpret_cast<Object**>(table->address()); |
2762 for (int i = 0; i < table->Capacity(); i++) { | 2700 for (int i = 0; i < table->Capacity(); i++) { |
2763 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { | 2701 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { |
2764 Object** key_slot = | 2702 Object** key_slot = |
2765 table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i)); | 2703 table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i)); |
2766 RecordSlot(anchor, key_slot, *key_slot); | 2704 RecordSlot(anchor, key_slot, *key_slot); |
2767 Object** value_slot = | 2705 Object** value_slot = |
2768 table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i)); | 2706 table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i)); |
2769 MarkCompactMarkingVisitor::MarkObjectByPointer( | 2707 MarkCompactMarkingVisitor::MarkObjectByPointer(this, anchor, |
2770 this, anchor, value_slot); | 2708 value_slot); |
2771 } | 2709 } |
2772 } | 2710 } |
2773 } | 2711 } |
2774 weak_collection_obj = weak_collection->next(); | 2712 weak_collection_obj = weak_collection->next(); |
2775 } | 2713 } |
2776 } | 2714 } |
2777 | 2715 |
2778 | 2716 |
2779 void MarkCompactCollector::ClearWeakCollections() { | 2717 void MarkCompactCollector::ClearWeakCollections() { |
2780 GCTracer::Scope gc_scope(heap()->tracer(), | 2718 GCTracer::Scope gc_scope(heap()->tracer(), |
(...skipping 16 matching lines...) |
2797 weak_collection->set_next(heap()->undefined_value()); | 2735 weak_collection->set_next(heap()->undefined_value()); |
2798 } | 2736 } |
2799 heap()->set_encountered_weak_collections(Smi::FromInt(0)); | 2737 heap()->set_encountered_weak_collections(Smi::FromInt(0)); |
2800 } | 2738 } |
2801 | 2739 |
2802 | 2740 |
2803 void MarkCompactCollector::RecordMigratedSlot(Object* value, Address slot) { | 2741 void MarkCompactCollector::RecordMigratedSlot(Object* value, Address slot) { |
2804 if (heap_->InNewSpace(value)) { | 2742 if (heap_->InNewSpace(value)) { |
2805 heap_->store_buffer()->Mark(slot); | 2743 heap_->store_buffer()->Mark(slot); |
2806 } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) { | 2744 } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) { |
2807 SlotsBuffer::AddTo(&slots_buffer_allocator_, | 2745 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, |
2808 &migration_slots_buffer_, | |
2809 reinterpret_cast<Object**>(slot), | 2746 reinterpret_cast<Object**>(slot), |
2810 SlotsBuffer::IGNORE_OVERFLOW); | 2747 SlotsBuffer::IGNORE_OVERFLOW); |
2811 } | 2748 } |
2812 } | 2749 } |
2813 | 2750 |
2814 | 2751 |
2815 | |
2816 // We scavenge new space simultaneously with sweeping. This is done in two | 2752 // We scavenge new space simultaneously with sweeping. This is done in two |
2817 // passes. | 2753 // passes. |
2818 // | 2754 // |
2819 // The first pass migrates all alive objects from one semispace to another or | 2755 // The first pass migrates all alive objects from one semispace to another or |
2820 // promotes them to old space. The forwarding address is written directly | 2756 // promotes them to old space. The forwarding address is written directly |
2821 // into the first word of the object without any encoding. If the object is | 2757 // into the first word of the object without any encoding. If the object is |
2822 // dead we write NULL as the forwarding address. | 2758 // dead we write NULL as the forwarding address. |
2823 // | 2759 // |
2824 // The second pass updates pointers to new space in all spaces. It is possible | 2760 // The second pass updates pointers to new space in all spaces. It is possible |
2825 // to encounter pointers to dead new space objects during traversal of pointers | 2761 // to encounter pointers to dead new space objects during traversal of pointers |
2826 // to new space. We should clear them to avoid encountering them during the | 2762 // to new space. We should clear them to avoid encountering them during the |
2827 // pointer iteration. This is an issue if the store buffer overflows and we | 2763 // pointer iteration. This is an issue if the store buffer overflows and we |
2828 // have to scan the entire old space, including dead objects, looking for | 2764 // have to scan the entire old space, including dead objects, looking for |
2829 // pointers to new space. | 2765 // pointers to new space. |
2830 void MarkCompactCollector::MigrateObject(HeapObject* dst, | 2766 void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src, |
2831 HeapObject* src, | 2767 int size, AllocationSpace dest) { |
2832 int size, | |
2833 AllocationSpace dest) { | |
2834 Address dst_addr = dst->address(); | 2768 Address dst_addr = dst->address(); |
2835 Address src_addr = src->address(); | 2769 Address src_addr = src->address(); |
2836 DCHECK(heap()->AllowedToBeMigrated(src, dest)); | 2770 DCHECK(heap()->AllowedToBeMigrated(src, dest)); |
2837 DCHECK(dest != LO_SPACE && size <= Page::kMaxRegularHeapObjectSize); | 2771 DCHECK(dest != LO_SPACE && size <= Page::kMaxRegularHeapObjectSize); |
2838 if (dest == OLD_POINTER_SPACE) { | 2772 if (dest == OLD_POINTER_SPACE) { |
2839 Address src_slot = src_addr; | 2773 Address src_slot = src_addr; |
2840 Address dst_slot = dst_addr; | 2774 Address dst_slot = dst_addr; |
2841 DCHECK(IsAligned(size, kPointerSize)); | 2775 DCHECK(IsAligned(size, kPointerSize)); |
2842 | 2776 |
2843 for (int remaining = size / kPointerSize; remaining > 0; remaining--) { | 2777 for (int remaining = size / kPointerSize; remaining > 0; remaining--) { |
(...skipping 10 matching lines...) |
2854 | 2788 |
2855 src_slot += kPointerSize; | 2789 src_slot += kPointerSize; |
2856 dst_slot += kPointerSize; | 2790 dst_slot += kPointerSize; |
2857 } | 2791 } |
2858 | 2792 |
2859 if (compacting_ && dst->IsJSFunction()) { | 2793 if (compacting_ && dst->IsJSFunction()) { |
2860 Address code_entry_slot = dst_addr + JSFunction::kCodeEntryOffset; | 2794 Address code_entry_slot = dst_addr + JSFunction::kCodeEntryOffset; |
2861 Address code_entry = Memory::Address_at(code_entry_slot); | 2795 Address code_entry = Memory::Address_at(code_entry_slot); |
2862 | 2796 |
2863 if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) { | 2797 if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) { |
2864 SlotsBuffer::AddTo(&slots_buffer_allocator_, | 2798 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, |
2865 &migration_slots_buffer_, | 2799 SlotsBuffer::CODE_ENTRY_SLOT, code_entry_slot, |
2866 SlotsBuffer::CODE_ENTRY_SLOT, | |
2867 code_entry_slot, | |
2868 SlotsBuffer::IGNORE_OVERFLOW); | 2800 SlotsBuffer::IGNORE_OVERFLOW); |
2869 } | 2801 } |
2870 } else if (dst->IsConstantPoolArray()) { | 2802 } else if (dst->IsConstantPoolArray()) { |
2871 ConstantPoolArray* array = ConstantPoolArray::cast(dst); | 2803 ConstantPoolArray* array = ConstantPoolArray::cast(dst); |
2872 ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR); | 2804 ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR); |
2873 while (!code_iter.is_finished()) { | 2805 while (!code_iter.is_finished()) { |
2874 Address code_entry_slot = | 2806 Address code_entry_slot = |
2875 dst_addr + array->OffsetOfElementAt(code_iter.next_index()); | 2807 dst_addr + array->OffsetOfElementAt(code_iter.next_index()); |
2876 Address code_entry = Memory::Address_at(code_entry_slot); | 2808 Address code_entry = Memory::Address_at(code_entry_slot); |
2877 | 2809 |
2878 if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) { | 2810 if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) { |
2879 SlotsBuffer::AddTo(&slots_buffer_allocator_, | 2811 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, |
2880 &migration_slots_buffer_, | 2812 SlotsBuffer::CODE_ENTRY_SLOT, code_entry_slot, |
2881 SlotsBuffer::CODE_ENTRY_SLOT, | |
2882 code_entry_slot, | |
2883 SlotsBuffer::IGNORE_OVERFLOW); | 2813 SlotsBuffer::IGNORE_OVERFLOW); |
2884 } | 2814 } |
2885 } | 2815 } |
2886 ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR); | 2816 ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR); |
2887 while (!heap_iter.is_finished()) { | 2817 while (!heap_iter.is_finished()) { |
2888 Address heap_slot = | 2818 Address heap_slot = |
2889 dst_addr + array->OffsetOfElementAt(heap_iter.next_index()); | 2819 dst_addr + array->OffsetOfElementAt(heap_iter.next_index()); |
2890 Object* value = Memory::Object_at(heap_slot); | 2820 Object* value = Memory::Object_at(heap_slot); |
2891 RecordMigratedSlot(value, heap_slot); | 2821 RecordMigratedSlot(value, heap_slot); |
2892 } | 2822 } |
2893 } | 2823 } |
2894 } else if (dest == CODE_SPACE) { | 2824 } else if (dest == CODE_SPACE) { |
2895 PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr)); | 2825 PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr)); |
2896 heap()->MoveBlock(dst_addr, src_addr, size); | 2826 heap()->MoveBlock(dst_addr, src_addr, size); |
2897 SlotsBuffer::AddTo(&slots_buffer_allocator_, | 2827 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, |
2898 &migration_slots_buffer_, | 2828 SlotsBuffer::RELOCATED_CODE_OBJECT, dst_addr, |
2899 SlotsBuffer::RELOCATED_CODE_OBJECT, | |
2900 dst_addr, | |
2901 SlotsBuffer::IGNORE_OVERFLOW); | 2829 SlotsBuffer::IGNORE_OVERFLOW); |
2902 Code::cast(dst)->Relocate(dst_addr - src_addr); | 2830 Code::cast(dst)->Relocate(dst_addr - src_addr); |
2903 } else { | 2831 } else { |
2904 DCHECK(dest == OLD_DATA_SPACE || dest == NEW_SPACE); | 2832 DCHECK(dest == OLD_DATA_SPACE || dest == NEW_SPACE); |
2905 heap()->MoveBlock(dst_addr, src_addr, size); | 2833 heap()->MoveBlock(dst_addr, src_addr, size); |
2906 } | 2834 } |
2907 heap()->OnMoveEvent(dst, src, size); | 2835 heap()->OnMoveEvent(dst, src, size); |
2908 Memory::Address_at(src_addr) = dst_addr; | 2836 Memory::Address_at(src_addr) = dst_addr; |
2909 } | 2837 } |
2910 | 2838 |
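The migration comment above describes an unencoded forwarding scheme: once the object has been copied, Memory::Address_at(src_addr) = dst_addr stores the new location in the old copy's first word, and the second pass reloads it (NULL marking a dead object). A minimal sketch of that mechanism, with raw buffers standing in for heap objects:

#include <cassert>
#include <cstring>

int main() {
  alignas(void*) unsigned char from[32];  // old copy (from-space)
  alignas(void*) unsigned char to[32];    // new copy (to-space or old space)

  std::memset(from, 0xAB, sizeof(from));
  std::memcpy(to, from, sizeof(to));  // heap()->MoveBlock(dst, src, size)

  // Memory::Address_at(src_addr) = dst_addr: the first word now forwards.
  void* forwarding = to;
  std::memcpy(from, &forwarding, sizeof(forwarding));

  // Second pass: a stale pointer into from-space is updated by reading the
  // forwarding word; NULL here would mean the object died.
  void* updated;
  std::memcpy(&updated, from, sizeof(updated));
  assert(updated == static_cast<void*>(to));
  return 0;
}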
2911 | 2839 |
2912 // Visitor for updating pointers from live objects in old spaces to new space. | 2840 // Visitor for updating pointers from live objects in old spaces to new space. |
2913 // It does not expect to encounter pointers to dead objects. | 2841 // It does not expect to encounter pointers to dead objects. |
2914 class PointersUpdatingVisitor: public ObjectVisitor { | 2842 class PointersUpdatingVisitor : public ObjectVisitor { |
2915 public: | 2843 public: |
2916 explicit PointersUpdatingVisitor(Heap* heap) : heap_(heap) { } | 2844 explicit PointersUpdatingVisitor(Heap* heap) : heap_(heap) {} |
2917 | 2845 |
2918 void VisitPointer(Object** p) { | 2846 void VisitPointer(Object** p) { UpdatePointer(p); } |
2919 UpdatePointer(p); | |
2920 } | |
2921 | 2847 |
2922 void VisitPointers(Object** start, Object** end) { | 2848 void VisitPointers(Object** start, Object** end) { |
2923 for (Object** p = start; p < end; p++) UpdatePointer(p); | 2849 for (Object** p = start; p < end; p++) UpdatePointer(p); |
2924 } | 2850 } |
2925 | 2851 |
2926 void VisitEmbeddedPointer(RelocInfo* rinfo) { | 2852 void VisitEmbeddedPointer(RelocInfo* rinfo) { |
2927 DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); | 2853 DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); |
2928 Object* target = rinfo->target_object(); | 2854 Object* target = rinfo->target_object(); |
2929 Object* old_target = target; | 2855 Object* old_target = target; |
2930 VisitPointer(&target); | 2856 VisitPointer(&target); |
(...skipping 46 matching lines...) |
2977 DCHECK(heap->InFromSpace(heap_obj) || | 2903 DCHECK(heap->InFromSpace(heap_obj) || |
2978 MarkCompactCollector::IsOnEvacuationCandidate(heap_obj)); | 2904 MarkCompactCollector::IsOnEvacuationCandidate(heap_obj)); |
2979 HeapObject* target = map_word.ToForwardingAddress(); | 2905 HeapObject* target = map_word.ToForwardingAddress(); |
2980 *slot = target; | 2906 *slot = target; |
2981 DCHECK(!heap->InFromSpace(target) && | 2907 DCHECK(!heap->InFromSpace(target) && |
2982 !MarkCompactCollector::IsOnEvacuationCandidate(target)); | 2908 !MarkCompactCollector::IsOnEvacuationCandidate(target)); |
2983 } | 2909 } |
2984 } | 2910 } |
2985 | 2911 |
2986 private: | 2912 private: |
2987 inline void UpdatePointer(Object** p) { | 2913 inline void UpdatePointer(Object** p) { UpdateSlot(heap_, p); } |
2988 UpdateSlot(heap_, p); | |
2989 } | |
2990 | 2914 |
2991 Heap* heap_; | 2915 Heap* heap_; |
2992 }; | 2916 }; |
2993 | 2917 |
2994 | 2918 |
2995 static void UpdatePointer(HeapObject** address, HeapObject* object) { | 2919 static void UpdatePointer(HeapObject** address, HeapObject* object) { |
2996 Address new_addr = Memory::Address_at(object->address()); | 2920 Address new_addr = Memory::Address_at(object->address()); |
2997 | 2921 |
2998 // The new space sweep will overwrite the map word of dead objects | 2922 // The new space sweep will overwrite the map word of dead objects |
2999 // with NULL. In this case we do not need to transfer this entry to | 2923 // with NULL. In this case we do not need to transfer this entry to |
(...skipping 36 matching lines...) |
3036 int object_size) { | 2960 int object_size) { |
3037 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); | 2961 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); |
3038 | 2962 |
3039 OldSpace* target_space = heap()->TargetSpace(object); | 2963 OldSpace* target_space = heap()->TargetSpace(object); |
3040 | 2964 |
3041 DCHECK(target_space == heap()->old_pointer_space() || | 2965 DCHECK(target_space == heap()->old_pointer_space() || |
3042 target_space == heap()->old_data_space()); | 2966 target_space == heap()->old_data_space()); |
3043 HeapObject* target; | 2967 HeapObject* target; |
3044 AllocationResult allocation = target_space->AllocateRaw(object_size); | 2968 AllocationResult allocation = target_space->AllocateRaw(object_size); |
3045 if (allocation.To(&target)) { | 2969 if (allocation.To(&target)) { |
3046 MigrateObject(target, | 2970 MigrateObject(target, object, object_size, target_space->identity()); |
3047 object, | |
3048 object_size, | |
3049 target_space->identity()); | |
3050 heap()->IncrementPromotedObjectsSize(object_size); | 2971 heap()->IncrementPromotedObjectsSize(object_size); |
3051 return true; | 2972 return true; |
3052 } | 2973 } |
3053 | 2974 |
3054 return false; | 2975 return false; |
3055 } | 2976 } |
3056 | 2977 |
3057 | 2978 |
3058 void MarkCompactCollector::EvacuateNewSpace() { | 2979 void MarkCompactCollector::EvacuateNewSpace() { |
3059 // There are soft limits in the allocation code, designed to trigger a mark | 2980 // There are soft limits in the allocation code, designed to trigger a mark |
(...skipping 129 matching lines...) |
3189 MapWord map_word = heap_object->map_word(); | 3110 MapWord map_word = heap_object->map_word(); |
3190 if (map_word.IsForwardingAddress()) { | 3111 if (map_word.IsForwardingAddress()) { |
3191 return map_word.ToForwardingAddress(); | 3112 return map_word.ToForwardingAddress(); |
3192 } | 3113 } |
3193 } | 3114 } |
3194 return object; | 3115 return object; |
3195 } | 3116 } |
3196 }; | 3117 }; |
3197 | 3118 |
3198 | 3119 |
3199 static inline void UpdateSlot(Isolate* isolate, | 3120 static inline void UpdateSlot(Isolate* isolate, ObjectVisitor* v, |
3200 ObjectVisitor* v, | 3121 SlotsBuffer::SlotType slot_type, Address addr) { |
3201 SlotsBuffer::SlotType slot_type, | |
3202 Address addr) { | |
3203 switch (slot_type) { | 3122 switch (slot_type) { |
3204 case SlotsBuffer::CODE_TARGET_SLOT: { | 3123 case SlotsBuffer::CODE_TARGET_SLOT: { |
3205 RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, NULL); | 3124 RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, NULL); |
3206 rinfo.Visit(isolate, v); | 3125 rinfo.Visit(isolate, v); |
3207 break; | 3126 break; |
3208 } | 3127 } |
3209 case SlotsBuffer::CODE_ENTRY_SLOT: { | 3128 case SlotsBuffer::CODE_ENTRY_SLOT: { |
3210 v->VisitCodeEntry(addr); | 3129 v->VisitCodeEntry(addr); |
3211 break; | 3130 break; |
3212 } | 3131 } |
(...skipping 17 matching lines...) |
3230 rinfo.Visit(isolate, v); | 3149 rinfo.Visit(isolate, v); |
3231 break; | 3150 break; |
3232 } | 3151 } |
3233 default: | 3152 default: |
3234 UNREACHABLE(); | 3153 UNREACHABLE(); |
3235 break; | 3154 break; |
3236 } | 3155 } |
3237 } | 3156 } |
3238 | 3157 |
3239 | 3158 |
3240 enum SweepingMode { | 3159 enum SweepingMode { SWEEP_ONLY, SWEEP_AND_VISIT_LIVE_OBJECTS }; |
3241 SWEEP_ONLY, | |
3242 SWEEP_AND_VISIT_LIVE_OBJECTS | |
3243 }; | |
3244 | 3160 |
3245 | 3161 |
3246 enum SkipListRebuildingMode { | 3162 enum SkipListRebuildingMode { REBUILD_SKIP_LIST, IGNORE_SKIP_LIST }; |
3247 REBUILD_SKIP_LIST, | |
3248 IGNORE_SKIP_LIST | |
3249 }; | |
3250 | 3163 |
3251 | 3164 |
3252 enum FreeSpaceTreatmentMode { | 3165 enum FreeSpaceTreatmentMode { IGNORE_FREE_SPACE, ZAP_FREE_SPACE }; |
3253 IGNORE_FREE_SPACE, | |
3254 ZAP_FREE_SPACE | |
3255 }; | |
3256 | 3166 |
3257 | 3167 |
3258 template<MarkCompactCollector::SweepingParallelism mode> | 3168 template <MarkCompactCollector::SweepingParallelism mode> |
3259 static intptr_t Free(PagedSpace* space, | 3169 static intptr_t Free(PagedSpace* space, FreeList* free_list, Address start, |
3260 FreeList* free_list, | |
3261 Address start, | |
3262 int size) { | 3170 int size) { |
3263 if (mode == MarkCompactCollector::SWEEP_ON_MAIN_THREAD) { | 3171 if (mode == MarkCompactCollector::SWEEP_ON_MAIN_THREAD) { |
3264 DCHECK(free_list == NULL); | 3172 DCHECK(free_list == NULL); |
3265 return space->Free(start, size); | 3173 return space->Free(start, size); |
3266 } else { | 3174 } else { |
3267 // TODO(hpayer): account for wasted bytes in concurrent sweeping too. | 3175 // TODO(hpayer): account for wasted bytes in concurrent sweeping too. |
3268 return size - free_list->Free(start, size); | 3176 return size - free_list->Free(start, size); |
3269 } | 3177 } |
3270 } | 3178 } |
3271 | 3179 |
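Free() above branches on its template parameter, so the sweeping mode is fixed at compile time and each instantiation keeps only one path: the main-thread path returns memory to the space directly, while the concurrent path feeds a private free list. A minimal sketch of that dispatch idiom, with hypothetical names:

#include <cassert>

enum SweepMode { MAIN_THREAD, CONCURRENT };

// Mode is a compile-time constant, so the branch below folds away in each
// instantiation; the concurrent path banks bytes in a per-sweeper pool.
template <SweepMode mode>
static int Release(int* pool_bytes, int size) {
  if (mode == MAIN_THREAD) {
    return size;  // space->Free(start, size) analogue
  } else {
    *pool_bytes += size;  // free_list->Free(start, size) analogue
    return 0;
  }
}

int main() {
  int pool = 0;
  assert(Release<MAIN_THREAD>(&pool, 64) == 64 && pool == 0);
  assert(Release<CONCURRENT>(&pool, 64) == 0 && pool == 64);
  return 0;
}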
3272 | 3180 |
3273 // Sweep a space precisely. After this has been done the space can | 3181 // Sweep a space precisely. After this has been done the space can |
3274 // be iterated precisely, hitting only the live objects. Code space | 3182 // be iterated precisely, hitting only the live objects. Code space |
3275 // is always swept precisely because we want to be able to iterate | 3183 // is always swept precisely because we want to be able to iterate |
3276 // over it. Map space is swept precisely because it is not compacted. | 3184 // over it. Map space is swept precisely because it is not compacted. |
3277 // Slots in live objects pointing into evacuation candidates are updated | 3185 // Slots in live objects pointing into evacuation candidates are updated |
3278 // if requested. | 3186 // if requested. |
3279 // Returns the size of the biggest contiguous freed memory chunk in bytes. | 3187 // Returns the size of the biggest contiguous freed memory chunk in bytes. |
3280 template<SweepingMode sweeping_mode, | 3188 template <SweepingMode sweeping_mode, |
3281 MarkCompactCollector::SweepingParallelism parallelism, | 3189 MarkCompactCollector::SweepingParallelism parallelism, |
3282 SkipListRebuildingMode skip_list_mode, | 3190 SkipListRebuildingMode skip_list_mode, |
3283 FreeSpaceTreatmentMode free_space_mode> | 3191 FreeSpaceTreatmentMode free_space_mode> |
3284 static int SweepPrecisely(PagedSpace* space, | 3192 static int SweepPrecisely(PagedSpace* space, FreeList* free_list, Page* p, |
3285 FreeList* free_list, | 3193 ObjectVisitor* v) { |
3286 Page* p, | |
3287 ObjectVisitor* v) { | |
3288 DCHECK(!p->IsEvacuationCandidate() && !p->WasSwept()); | 3194 DCHECK(!p->IsEvacuationCandidate() && !p->WasSwept()); |
3289 DCHECK_EQ(skip_list_mode == REBUILD_SKIP_LIST, | 3195 DCHECK_EQ(skip_list_mode == REBUILD_SKIP_LIST, |
3290 space->identity() == CODE_SPACE); | 3196 space->identity() == CODE_SPACE); |
3291 DCHECK((p->skip_list() == NULL) || (skip_list_mode == REBUILD_SKIP_LIST)); | 3197 DCHECK((p->skip_list() == NULL) || (skip_list_mode == REBUILD_SKIP_LIST)); |
3292 DCHECK(parallelism == MarkCompactCollector::SWEEP_ON_MAIN_THREAD || | 3198 DCHECK(parallelism == MarkCompactCollector::SWEEP_ON_MAIN_THREAD || |
3293 sweeping_mode == SWEEP_ONLY); | 3199 sweeping_mode == SWEEP_ONLY); |
3294 | 3200 |
3295 Address free_start = p->area_start(); | 3201 Address free_start = p->area_start(); |
3296 DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0); | 3202 DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0); |
3297 int offsets[16]; | 3203 int offsets[16]; |
3298 | 3204 |
3299 SkipList* skip_list = p->skip_list(); | 3205 SkipList* skip_list = p->skip_list(); |
3300 int curr_region = -1; | 3206 int curr_region = -1; |
3301 if ((skip_list_mode == REBUILD_SKIP_LIST) && skip_list) { | 3207 if ((skip_list_mode == REBUILD_SKIP_LIST) && skip_list) { |
3302 skip_list->Clear(); | 3208 skip_list->Clear(); |
3303 } | 3209 } |
3304 | 3210 |
3305 intptr_t freed_bytes = 0; | 3211 intptr_t freed_bytes = 0; |
3306 intptr_t max_freed_bytes = 0; | 3212 intptr_t max_freed_bytes = 0; |
3307 | 3213 |
3308 for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) { | 3214 for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) { |
3309 Address cell_base = it.CurrentCellBase(); | 3215 Address cell_base = it.CurrentCellBase(); |
3310 MarkBit::CellType* cell = it.CurrentCell(); | 3216 MarkBit::CellType* cell = it.CurrentCell(); |
3311 int live_objects = MarkWordToObjectStarts(*cell, offsets); | 3217 int live_objects = MarkWordToObjectStarts(*cell, offsets); |
3312 int live_index = 0; | 3218 int live_index = 0; |
3313 for ( ; live_objects != 0; live_objects--) { | 3219 for (; live_objects != 0; live_objects--) { |
3314 Address free_end = cell_base + offsets[live_index++] * kPointerSize; | 3220 Address free_end = cell_base + offsets[live_index++] * kPointerSize; |
3315 if (free_end != free_start) { | 3221 if (free_end != free_start) { |
3316 int size = static_cast<int>(free_end - free_start); | 3222 int size = static_cast<int>(free_end - free_start); |
3317 if (free_space_mode == ZAP_FREE_SPACE) { | 3223 if (free_space_mode == ZAP_FREE_SPACE) { |
3318 memset(free_start, 0xcc, size); | 3224 memset(free_start, 0xcc, size); |
3319 } | 3225 } |
3320 freed_bytes = Free<parallelism>(space, free_list, free_start, size); | 3226 freed_bytes = Free<parallelism>(space, free_list, free_start, size); |
3321 max_freed_bytes = Max(freed_bytes, max_freed_bytes); | 3227 max_freed_bytes = Max(freed_bytes, max_freed_bytes); |
3322 #ifdef ENABLE_GDB_JIT_INTERFACE | 3228 #ifdef ENABLE_GDB_JIT_INTERFACE |
3323 if (FLAG_gdbjit && space->identity() == CODE_SPACE) { | 3229 if (FLAG_gdbjit && space->identity() == CODE_SPACE) { |
3324 GDBJITInterface::RemoveCodeRange(free_start, free_end); | 3230 GDBJITInterface::RemoveCodeRange(free_start, free_end); |
3325 } | 3231 } |
3326 #endif | 3232 #endif |
3327 } | 3233 } |
3328 HeapObject* live_object = HeapObject::FromAddress(free_end); | 3234 HeapObject* live_object = HeapObject::FromAddress(free_end); |
3329 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(live_object))); | 3235 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(live_object))); |
3330 Map* map = live_object->map(); | 3236 Map* map = live_object->map(); |
3331 int size = live_object->SizeFromMap(map); | 3237 int size = live_object->SizeFromMap(map); |
3332 if (sweeping_mode == SWEEP_AND_VISIT_LIVE_OBJECTS) { | 3238 if (sweeping_mode == SWEEP_AND_VISIT_LIVE_OBJECTS) { |
3333 live_object->IterateBody(map->instance_type(), size, v); | 3239 live_object->IterateBody(map->instance_type(), size, v); |
3334 } | 3240 } |
3335 if ((skip_list_mode == REBUILD_SKIP_LIST) && skip_list != NULL) { | 3241 if ((skip_list_mode == REBUILD_SKIP_LIST) && skip_list != NULL) { |
3336 int new_region_start = | 3242 int new_region_start = SkipList::RegionNumber(free_end); |
3337 SkipList::RegionNumber(free_end); | |
3338 int new_region_end = | 3243 int new_region_end = |
3339 SkipList::RegionNumber(free_end + size - kPointerSize); | 3244 SkipList::RegionNumber(free_end + size - kPointerSize); |
3340 if (new_region_start != curr_region || | 3245 if (new_region_start != curr_region || new_region_end != curr_region) { |
3341 new_region_end != curr_region) { | |
3342 skip_list->AddObject(free_end, size); | 3246 skip_list->AddObject(free_end, size); |
3343 curr_region = new_region_end; | 3247 curr_region = new_region_end; |
3344 } | 3248 } |
3345 } | 3249 } |
3346 free_start = free_end + size; | 3250 free_start = free_end + size; |
3347 } | 3251 } |
3348 // Clear marking bits for current cell. | 3252 // Clear marking bits for current cell. |
3349 *cell = 0; | 3253 *cell = 0; |
3350 } | 3254 } |
3351 if (free_start != p->area_end()) { | 3255 if (free_start != p->area_end()) { |
(...skipping 18 matching lines...) |
3370 } else { | 3274 } else { |
3371 p->MarkSweptPrecisely(); | 3275 p->MarkSweptPrecisely(); |
3372 } | 3276 } |
3373 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes)); | 3277 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes)); |
3374 } | 3278 } |
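Stripped of the skip-list and GDBJIT details, the sweep above is: walk the live-object starts recorded in the mark bitmap, free every gap between the end of one live object and the start of the next, and remember the largest gap. A toy sketch of that pattern, assuming integer "addresses" and precomputed (start, size) pairs in place of mark-bit cells:

#include <cstdio>
#include <utility>
#include <vector>

// Report each free gap between consecutive live objects and return the
// largest one, mirroring the free_start/free_end walk in SweepPrecisely.
int SweepGaps(int area_start, int area_end,
              const std::vector<std::pair<int, int>>& live) {
  int free_start = area_start;
  int max_freed = 0;
  for (const auto& obj : live) {
    if (obj.first != free_start) {
      int freed = obj.first - free_start;  // gap before this live object
      if (freed > max_freed) max_freed = freed;
      std::printf("free [%d, %d)\n", free_start, obj.first);
    }
    free_start = obj.first + obj.second;  // skip over the live object
  }
  if (free_start != area_end) {  // trailing free space up to area_end
    if (area_end - free_start > max_freed) max_freed = area_end - free_start;
    std::printf("free [%d, %d)\n", free_start, area_end);
  }
  return max_freed;
}

int main() {
  // Live objects at 0 (size 2) and 5 (size 1) in an area of 8 units.
  std::printf("max=%d\n", SweepGaps(0, 8, {{0, 2}, {5, 1}}));
}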
3375 | 3279 |
3376 | 3280 |
3377 static bool SetMarkBitsUnderInvalidatedCode(Code* code, bool value) { | 3281 static bool SetMarkBitsUnderInvalidatedCode(Code* code, bool value) { |
3378 Page* p = Page::FromAddress(code->address()); | 3282 Page* p = Page::FromAddress(code->address()); |
3379 | 3283 |
3380 if (p->IsEvacuationCandidate() || | 3284 if (p->IsEvacuationCandidate() || p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { |
3381 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { | |
3382 return false; | 3285 return false; |
3383 } | 3286 } |
3384 | 3287 |
3385 Address code_start = code->address(); | 3288 Address code_start = code->address(); |
3386 Address code_end = code_start + code->Size(); | 3289 Address code_end = code_start + code->Size(); |
3387 | 3290 |
3388 uint32_t start_index = MemoryChunk::FastAddressToMarkbitIndex(code_start); | 3291 uint32_t start_index = MemoryChunk::FastAddressToMarkbitIndex(code_start); |
3389 uint32_t end_index = | 3292 uint32_t end_index = |
3390 MemoryChunk::FastAddressToMarkbitIndex(code_end - kPointerSize); | 3293 MemoryChunk::FastAddressToMarkbitIndex(code_end - kPointerSize); |
3391 | 3294 |
(...skipping 12 matching lines...) |
3404 if (start_cell == end_cell) { | 3307 if (start_cell == end_cell) { |
3405 *start_cell |= start_mask & end_mask; | 3308 *start_cell |= start_mask & end_mask; |
3406 } else { | 3309 } else { |
3407 *start_cell |= start_mask; | 3310 *start_cell |= start_mask; |
3408 for (MarkBit::CellType* cell = start_cell + 1; cell < end_cell; cell++) { | 3311 for (MarkBit::CellType* cell = start_cell + 1; cell < end_cell; cell++) { |
3409 *cell = ~0; | 3312 *cell = ~0; |
3410 } | 3313 } |
3411 *end_cell |= end_mask; | 3314 *end_cell |= end_mask; |
3412 } | 3315 } |
3413 } else { | 3316 } else { |
3414 for (MarkBit::CellType* cell = start_cell ; cell <= end_cell; cell++) { | 3317 for (MarkBit::CellType* cell = start_cell; cell <= end_cell; cell++) { |
3415 *cell = 0; | 3318 *cell = 0; |
3416 } | 3319 } |
3417 } | 3320 } |
3418 | 3321 |
3419 return true; | 3322 return true; |
3420 } | 3323 } |
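The cell arithmetic above is a standard bit-range fill: a partial mask for the first 32-bit cell, whole-cell writes for everything in between, and a partial mask for the last cell. A self-contained sketch of the same pattern over a plain word array (this helper is hypothetical, not V8's MarkBit API):

#include <cstdint>
#include <cstdio>

// Set all bits in [start_index, end_index] across an array of 32-bit cells.
void SetBitRange(uint32_t* cells, uint32_t start_index, uint32_t end_index) {
  uint32_t* start_cell = cells + (start_index >> 5);
  uint32_t* end_cell = cells + (end_index >> 5);
  uint32_t start_mask = ~((1u << (start_index & 31)) - 1);  // bits >= start
  uint32_t end_mask = ((end_index & 31) == 31)
                          ? ~0u
                          : ((1u << ((end_index & 31) + 1)) - 1);  // <= end
  if (start_cell == end_cell) {
    *start_cell |= start_mask & end_mask;
  } else {
    *start_cell |= start_mask;
    for (uint32_t* cell = start_cell + 1; cell < end_cell; cell++) *cell = ~0u;
    *end_cell |= end_mask;
  }
}

int main() {
  uint32_t cells[3] = {0, 0, 0};
  SetBitRange(cells, 30, 65);  // spans all three cells
  std::printf("%08x %08x %08x\n", cells[0], cells[1], cells[2]);
  // prints: c0000000 ffffffff 00000003
}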
3421 | 3324 |
3422 | 3325 |
3423 static bool IsOnInvalidatedCodeObject(Address addr) { | 3326 static bool IsOnInvalidatedCodeObject(Address addr) { |
3424 // We did not record any slots in large objects thus | 3327 // We did not record any slots in large objects thus |
(...skipping 68 matching lines...) |
3493 } | 3396 } |
3494 } | 3397 } |
3495 invalidated_code_.Rewind(0); | 3398 invalidated_code_.Rewind(0); |
3496 } | 3399 } |
3497 | 3400 |
3498 | 3401 |
3499 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { | 3402 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { |
3500 Heap::RelocationLock relocation_lock(heap()); | 3403 Heap::RelocationLock relocation_lock(heap()); |
3501 | 3404 |
3502 bool code_slots_filtering_required; | 3405 bool code_slots_filtering_required; |
3503 { GCTracer::Scope gc_scope(heap()->tracer(), | 3406 { |
| 3407 GCTracer::Scope gc_scope(heap()->tracer(), |
3504 GCTracer::Scope::MC_SWEEP_NEWSPACE); | 3408 GCTracer::Scope::MC_SWEEP_NEWSPACE); |
3505 code_slots_filtering_required = MarkInvalidatedCode(); | 3409 code_slots_filtering_required = MarkInvalidatedCode(); |
3506 EvacuateNewSpace(); | 3410 EvacuateNewSpace(); |
3507 } | 3411 } |
3508 | 3412 |
3509 { GCTracer::Scope gc_scope(heap()->tracer(), | 3413 { |
| 3414 GCTracer::Scope gc_scope(heap()->tracer(), |
3510 GCTracer::Scope::MC_EVACUATE_PAGES); | 3415 GCTracer::Scope::MC_EVACUATE_PAGES); |
3511 EvacuatePages(); | 3416 EvacuatePages(); |
3512 } | 3417 } |
3513 | 3418 |
3514 // Second pass: find pointers to new space and update them. | 3419 // Second pass: find pointers to new space and update them. |
3515 PointersUpdatingVisitor updating_visitor(heap()); | 3420 PointersUpdatingVisitor updating_visitor(heap()); |
3516 | 3421 |
3517 { GCTracer::Scope gc_scope(heap()->tracer(), | 3422 { |
| 3423 GCTracer::Scope gc_scope(heap()->tracer(), |
3518 GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS); | 3424 GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS); |
3519 // Update pointers in to space. | 3425 // Update pointers in to space. |
3520 SemiSpaceIterator to_it(heap()->new_space()->bottom(), | 3426 SemiSpaceIterator to_it(heap()->new_space()->bottom(), |
3521 heap()->new_space()->top()); | 3427 heap()->new_space()->top()); |
3522 for (HeapObject* object = to_it.Next(); | 3428 for (HeapObject* object = to_it.Next(); object != NULL; |
3523 object != NULL; | |
3524 object = to_it.Next()) { | 3429 object = to_it.Next()) { |
3525 Map* map = object->map(); | 3430 Map* map = object->map(); |
3526 object->IterateBody(map->instance_type(), | 3431 object->IterateBody(map->instance_type(), object->SizeFromMap(map), |
3527 object->SizeFromMap(map), | |
3528 &updating_visitor); | 3432 &updating_visitor); |
3529 } | 3433 } |
3530 } | 3434 } |
3531 | 3435 |
3532 { GCTracer::Scope gc_scope(heap()->tracer(), | 3436 { |
| 3437 GCTracer::Scope gc_scope(heap()->tracer(), |
3533 GCTracer::Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS); | 3438 GCTracer::Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS); |
3534 // Update roots. | 3439 // Update roots. |
3535 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE); | 3440 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE); |
3536 } | 3441 } |
3537 | 3442 |
3538 { GCTracer::Scope gc_scope(heap()->tracer(), | 3443 { |
| 3444 GCTracer::Scope gc_scope(heap()->tracer(), |
3539 GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS); | 3445 GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS); |
3540 StoreBufferRebuildScope scope(heap_, | 3446 StoreBufferRebuildScope scope(heap_, heap_->store_buffer(), |
3541 heap_->store_buffer(), | |
3542 &Heap::ScavengeStoreBufferCallback); | 3447 &Heap::ScavengeStoreBufferCallback); |
3543 heap_->store_buffer()->IteratePointersToNewSpaceAndClearMaps( | 3448 heap_->store_buffer()->IteratePointersToNewSpaceAndClearMaps( |
3544 &UpdatePointer); | 3449 &UpdatePointer); |
3545 } | 3450 } |
3546 | 3451 |
3547 { GCTracer::Scope gc_scope(heap()->tracer(), | 3452 { |
| 3453 GCTracer::Scope gc_scope(heap()->tracer(), |
3548 GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED); | 3454 GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED); |
3549 SlotsBuffer::UpdateSlotsRecordedIn(heap_, | 3455 SlotsBuffer::UpdateSlotsRecordedIn(heap_, migration_slots_buffer_, |
3550 migration_slots_buffer_, | |
3551 code_slots_filtering_required); | 3456 code_slots_filtering_required); |
3552 if (FLAG_trace_fragmentation) { | 3457 if (FLAG_trace_fragmentation) { |
3553 PrintF(" migration slots buffer: %d\n", | 3458 PrintF(" migration slots buffer: %d\n", |
3554 SlotsBuffer::SizeOfChain(migration_slots_buffer_)); | 3459 SlotsBuffer::SizeOfChain(migration_slots_buffer_)); |
3555 } | 3460 } |
3556 | 3461 |
3557 if (compacting_ && was_marked_incrementally_) { | 3462 if (compacting_ && was_marked_incrementally_) { |
3558 // It's difficult to filter out slots recorded for large objects. | 3463 // It's difficult to filter out slots recorded for large objects. |
3559 LargeObjectIterator it(heap_->lo_space()); | 3464 LargeObjectIterator it(heap_->lo_space()); |
3560 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 3465 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
3561 // LargeObjectSpace is not swept yet, thus we have to skip | 3466 // LargeObjectSpace is not swept yet, thus we have to skip |
3562 // dead objects explicitly. | 3467 // dead objects explicitly. |
3563 if (!IsMarked(obj)) continue; | 3468 if (!IsMarked(obj)) continue; |
3564 | 3469 |
3565 Page* p = Page::FromAddress(obj->address()); | 3470 Page* p = Page::FromAddress(obj->address()); |
3566 if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { | 3471 if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { |
3567 obj->Iterate(&updating_visitor); | 3472 obj->Iterate(&updating_visitor); |
3568 p->ClearFlag(Page::RESCAN_ON_EVACUATION); | 3473 p->ClearFlag(Page::RESCAN_ON_EVACUATION); |
3569 } | 3474 } |
3570 } | 3475 } |
3571 } | 3476 } |
3572 } | 3477 } |
3573 | 3478 |
3574 int npages = evacuation_candidates_.length(); | 3479 int npages = evacuation_candidates_.length(); |
3575 { GCTracer::Scope gc_scope( | 3480 { |
3576 heap()->tracer(), GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED); | 3481 GCTracer::Scope gc_scope( |
| 3482 heap()->tracer(), |
| 3483 GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED); |
3577 for (int i = 0; i < npages; i++) { | 3484 for (int i = 0; i < npages; i++) { |
3578 Page* p = evacuation_candidates_[i]; | 3485 Page* p = evacuation_candidates_[i]; |
3579 DCHECK(p->IsEvacuationCandidate() || | 3486 DCHECK(p->IsEvacuationCandidate() || |
3580 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); | 3487 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); |
3581 | 3488 |
3582 if (p->IsEvacuationCandidate()) { | 3489 if (p->IsEvacuationCandidate()) { |
3583 SlotsBuffer::UpdateSlotsRecordedIn(heap_, | 3490 SlotsBuffer::UpdateSlotsRecordedIn(heap_, p->slots_buffer(), |
3584 p->slots_buffer(), | |
3585 code_slots_filtering_required); | 3491 code_slots_filtering_required); |
3586 if (FLAG_trace_fragmentation) { | 3492 if (FLAG_trace_fragmentation) { |
3587 PrintF(" page %p slots buffer: %d\n", | 3493 PrintF(" page %p slots buffer: %d\n", reinterpret_cast<void*>(p), |
3588 reinterpret_cast<void*>(p), | |
3589 SlotsBuffer::SizeOfChain(p->slots_buffer())); | 3494 SlotsBuffer::SizeOfChain(p->slots_buffer())); |
3590 } | 3495 } |
3591 | 3496 |
3592 // Important: skip list should be cleared only after roots were updated | 3497 // Important: skip list should be cleared only after roots were updated |
3593 // because root iteration traverses the stack and might have to find | 3498 // because root iteration traverses the stack and might have to find |
3594 // code objects from a non-updated pc pointing into an evacuation candidate. | 3499 // code objects from a non-updated pc pointing into an evacuation candidate. |
3595 SkipList* list = p->skip_list(); | 3500 SkipList* list = p->skip_list(); |
3596 if (list != NULL) list->Clear(); | 3501 if (list != NULL) list->Clear(); |
3597 } else { | 3502 } else { |
3598 if (FLAG_gc_verbose) { | 3503 if (FLAG_gc_verbose) { |
3599 PrintF("Sweeping 0x%" V8PRIxPTR " during evacuation.\n", | 3504 PrintF("Sweeping 0x%" V8PRIxPTR " during evacuation.\n", |
3600 reinterpret_cast<intptr_t>(p)); | 3505 reinterpret_cast<intptr_t>(p)); |
3601 } | 3506 } |
3602 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); | 3507 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); |
3603 p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION); | 3508 p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION); |
3604 | 3509 |
3605 switch (space->identity()) { | 3510 switch (space->identity()) { |
3606 case OLD_DATA_SPACE: | 3511 case OLD_DATA_SPACE: |
3607 SweepConservatively<SWEEP_ON_MAIN_THREAD>(space, NULL, p); | 3512 SweepConservatively<SWEEP_ON_MAIN_THREAD>(space, NULL, p); |
3608 break; | 3513 break; |
3609 case OLD_POINTER_SPACE: | 3514 case OLD_POINTER_SPACE: |
3610 SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, | 3515 SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, |
3611 SWEEP_ON_MAIN_THREAD, | 3516 IGNORE_SKIP_LIST, IGNORE_FREE_SPACE>( |
3612 IGNORE_SKIP_LIST, | |
3613 IGNORE_FREE_SPACE>( | |
3614 space, NULL, p, &updating_visitor); | 3517 space, NULL, p, &updating_visitor); |
3615 break; | 3518 break; |
3616 case CODE_SPACE: | 3519 case CODE_SPACE: |
3617 if (FLAG_zap_code_space) { | 3520 if (FLAG_zap_code_space) { |
3618 SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, | 3521 SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, |
3619 SWEEP_ON_MAIN_THREAD, | 3522 REBUILD_SKIP_LIST, ZAP_FREE_SPACE>( |
3620 REBUILD_SKIP_LIST, | |
3621 ZAP_FREE_SPACE>( | |
3622 space, NULL, p, &updating_visitor); | 3523 space, NULL, p, &updating_visitor); |
3623 } else { | 3524 } else { |
3624 SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, | 3525 SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, |
3625 SWEEP_ON_MAIN_THREAD, | 3526 REBUILD_SKIP_LIST, IGNORE_FREE_SPACE>( |
3626 REBUILD_SKIP_LIST, | |
3627 IGNORE_FREE_SPACE>( | |
3628 space, NULL, p, &updating_visitor); | 3527 space, NULL, p, &updating_visitor); |
3629 } | 3528 } |
3630 break; | 3529 break; |
3631 default: | 3530 default: |
3632 UNREACHABLE(); | 3531 UNREACHABLE(); |
3633 break; | 3532 break; |
3634 } | 3533 } |
3635 } | 3534 } |
3636 } | 3535 } |
3637 } | 3536 } |
3638 | 3537 |
3639 GCTracer::Scope gc_scope(heap()->tracer(), | 3538 GCTracer::Scope gc_scope(heap()->tracer(), |
3640 GCTracer::Scope::MC_UPDATE_MISC_POINTERS); | 3539 GCTracer::Scope::MC_UPDATE_MISC_POINTERS); |
3641 | 3540 |
3642 // Update pointers from cells. | 3541 // Update pointers from cells. |
3643 HeapObjectIterator cell_iterator(heap_->cell_space()); | 3542 HeapObjectIterator cell_iterator(heap_->cell_space()); |
3644 for (HeapObject* cell = cell_iterator.Next(); | 3543 for (HeapObject* cell = cell_iterator.Next(); cell != NULL; |
3645 cell != NULL; | |
3646 cell = cell_iterator.Next()) { | 3544 cell = cell_iterator.Next()) { |
3647 if (cell->IsCell()) { | 3545 if (cell->IsCell()) { |
3648 Cell::BodyDescriptor::IterateBody(cell, &updating_visitor); | 3546 Cell::BodyDescriptor::IterateBody(cell, &updating_visitor); |
3649 } | 3547 } |
3650 } | 3548 } |
3651 | 3549 |
3652 HeapObjectIterator js_global_property_cell_iterator( | 3550 HeapObjectIterator js_global_property_cell_iterator( |
3653 heap_->property_cell_space()); | 3551 heap_->property_cell_space()); |
3654 for (HeapObject* cell = js_global_property_cell_iterator.Next(); | 3552 for (HeapObject* cell = js_global_property_cell_iterator.Next(); cell != NULL; |
3655 cell != NULL; | |
3656 cell = js_global_property_cell_iterator.Next()) { | 3553 cell = js_global_property_cell_iterator.Next()) { |
3657 if (cell->IsPropertyCell()) { | 3554 if (cell->IsPropertyCell()) { |
3658 PropertyCell::BodyDescriptor::IterateBody(cell, &updating_visitor); | 3555 PropertyCell::BodyDescriptor::IterateBody(cell, &updating_visitor); |
3659 } | 3556 } |
3660 } | 3557 } |
3661 | 3558 |
3662 heap_->string_table()->Iterate(&updating_visitor); | 3559 heap_->string_table()->Iterate(&updating_visitor); |
3663 updating_visitor.VisitPointer(heap_->weak_object_to_code_table_address()); | 3560 updating_visitor.VisitPointer(heap_->weak_object_to_code_table_address()); |
3664 if (heap_->weak_object_to_code_table()->IsHashTable()) { | 3561 if (heap_->weak_object_to_code_table()->IsHashTable()) { |
3665 WeakHashTable* table = | 3562 WeakHashTable* table = |
(...skipping 60 matching lines...) |
3726 // Mark-bit to object start offset table. | 3623 // Mark-bit to object start offset table. |
3727 // | 3624 // |
3728 // The line is indexed by the mark bits in a byte. The first number on | 3625 // The line is indexed by the mark bits in a byte. The first number on |
3729 // the line describes the number of live object starts for the line and the | 3626 // the line describes the number of live object starts for the line and the |
3730 // other numbers on the line describe the offsets (in words) of the object | 3627 // other numbers on the line describe the offsets (in words) of the object |
3731 // starts. | 3628 // starts. |
3732 // | 3629 // |
3733 // Since objects are at least 2 words long we don't have entries for two | 3630 // Since objects are at least 2 words long we don't have entries for two |
3734 // consecutive 1 bits. All entries after 170 have at least 2 consecutive bits. | 3631 // consecutive 1 bits. All entries after 170 have at least 2 consecutive bits. |
3735 char kStartTable[kStartTableLines * kStartTableEntriesPerLine] = { | 3632 char kStartTable[kStartTableLines * kStartTableEntriesPerLine] = { |
3736 0, _, _, _, _, // 0 | 3633 0, _, _, |
3737 1, 0, _, _, _, // 1 | 3634 _, _, // 0 |
3738 1, 1, _, _, _, // 2 | 3635 1, 0, _, |
3739 X, _, _, _, _, // 3 | 3636 _, _, // 1 |
3740 1, 2, _, _, _, // 4 | 3637 1, 1, _, |
3741 2, 0, 2, _, _, // 5 | 3638 _, _, // 2 |
3742 X, _, _, _, _, // 6 | 3639 X, _, _, |
3743 X, _, _, _, _, // 7 | 3640 _, _, // 3 |
3744 1, 3, _, _, _, // 8 | 3641 1, 2, _, |
3745 2, 0, 3, _, _, // 9 | 3642 _, _, // 4 |
3746 2, 1, 3, _, _, // 10 | 3643 2, 0, 2, |
3747 X, _, _, _, _, // 11 | 3644 _, _, // 5 |
3748 X, _, _, _, _, // 12 | 3645 X, _, _, |
3749 X, _, _, _, _, // 13 | 3646 _, _, // 6 |
3750 X, _, _, _, _, // 14 | 3647 X, _, _, |
3751 X, _, _, _, _, // 15 | 3648 _, _, // 7 |
3752 1, 4, _, _, _, // 16 | 3649 1, 3, _, |
3753 2, 0, 4, _, _, // 17 | 3650 _, _, // 8 |
3754 2, 1, 4, _, _, // 18 | 3651 2, 0, 3, |
3755 X, _, _, _, _, // 19 | 3652 _, _, // 9 |
3756 2, 2, 4, _, _, // 20 | 3653 2, 1, 3, |
3757 3, 0, 2, 4, _, // 21 | 3654 _, _, // 10 |
3758 X, _, _, _, _, // 22 | 3655 X, _, _, |
3759 X, _, _, _, _, // 23 | 3656 _, _, // 11 |
3760 X, _, _, _, _, // 24 | 3657 X, _, _, |
3761 X, _, _, _, _, // 25 | 3658 _, _, // 12 |
3762 X, _, _, _, _, // 26 | 3659 X, _, _, |
3763 X, _, _, _, _, // 27 | 3660 _, _, // 13 |
3764 X, _, _, _, _, // 28 | 3661 X, _, _, |
3765 X, _, _, _, _, // 29 | 3662 _, _, // 14 |
3766 X, _, _, _, _, // 30 | 3663 X, _, _, |
3767 X, _, _, _, _, // 31 | 3664 _, _, // 15 |
3768 1, 5, _, _, _, // 32 | 3665 1, 4, _, |
3769 2, 0, 5, _, _, // 33 | 3666 _, _, // 16 |
3770 2, 1, 5, _, _, // 34 | 3667 2, 0, 4, |
3771 X, _, _, _, _, // 35 | 3668 _, _, // 17 |
3772 2, 2, 5, _, _, // 36 | 3669 2, 1, 4, |
3773 3, 0, 2, 5, _, // 37 | 3670 _, _, // 18 |
3774 X, _, _, _, _, // 38 | 3671 X, _, _, |
3775 X, _, _, _, _, // 39 | 3672 _, _, // 19 |
3776 2, 3, 5, _, _, // 40 | 3673 2, 2, 4, |
3777 3, 0, 3, 5, _, // 41 | 3674 _, _, // 20 |
3778 3, 1, 3, 5, _, // 42 | 3675 3, 0, 2, |
3779 X, _, _, _, _, // 43 | 3676 4, _, // 21 |
3780 X, _, _, _, _, // 44 | 3677 X, _, _, |
3781 X, _, _, _, _, // 45 | 3678 _, _, // 22 |
3782 X, _, _, _, _, // 46 | 3679 X, _, _, |
3783 X, _, _, _, _, // 47 | 3680 _, _, // 23 |
3784 X, _, _, _, _, // 48 | 3681 X, _, _, |
3785 X, _, _, _, _, // 49 | 3682 _, _, // 24 |
3786 X, _, _, _, _, // 50 | 3683 X, _, _, |
3787 X, _, _, _, _, // 51 | 3684 _, _, // 25 |
3788 X, _, _, _, _, // 52 | 3685 X, _, _, |
3789 X, _, _, _, _, // 53 | 3686 _, _, // 26 |
3790 X, _, _, _, _, // 54 | 3687 X, _, _, |
3791 X, _, _, _, _, // 55 | 3688 _, _, // 27 |
3792 X, _, _, _, _, // 56 | 3689 X, _, _, |
3793 X, _, _, _, _, // 57 | 3690 _, _, // 28 |
3794 X, _, _, _, _, // 58 | 3691 X, _, _, |
3795 X, _, _, _, _, // 59 | 3692 _, _, // 29 |
3796 X, _, _, _, _, // 60 | 3693 X, _, _, |
3797 X, _, _, _, _, // 61 | 3694 _, _, // 30 |
3798 X, _, _, _, _, // 62 | 3695 X, _, _, |
3799 X, _, _, _, _, // 63 | 3696 _, _, // 31 |
3800 1, 6, _, _, _, // 64 | 3697 1, 5, _, |
3801 2, 0, 6, _, _, // 65 | 3698 _, _, // 32 |
3802 2, 1, 6, _, _, // 66 | 3699 2, 0, 5, |
3803 X, _, _, _, _, // 67 | 3700 _, _, // 33 |
3804 2, 2, 6, _, _, // 68 | 3701 2, 1, 5, |
3805 3, 0, 2, 6, _, // 69 | 3702 _, _, // 34 |
3806 X, _, _, _, _, // 70 | 3703 X, _, _, |
3807 X, _, _, _, _, // 71 | 3704 _, _, // 35 |
3808 2, 3, 6, _, _, // 72 | 3705 2, 2, 5, |
3809 3, 0, 3, 6, _, // 73 | 3706 _, _, // 36 |
3810 3, 1, 3, 6, _, // 74 | 3707 3, 0, 2, |
3811 X, _, _, _, _, // 75 | 3708 5, _, // 37 |
3812 X, _, _, _, _, // 76 | 3709 X, _, _, |
3813 X, _, _, _, _, // 77 | 3710 _, _, // 38 |
3814 X, _, _, _, _, // 78 | 3711 X, _, _, |
3815 X, _, _, _, _, // 79 | 3712 _, _, // 39 |
3816 2, 4, 6, _, _, // 80 | 3713 2, 3, 5, |
3817 3, 0, 4, 6, _, // 81 | 3714 _, _, // 40 |
3818 3, 1, 4, 6, _, // 82 | 3715 3, 0, 3, |
3819 X, _, _, _, _, // 83 | 3716 5, _, // 41 |
3820 3, 2, 4, 6, _, // 84 | 3717 3, 1, 3, |
3821 4, 0, 2, 4, 6, // 85 | 3718 5, _, // 42 |
3822 X, _, _, _, _, // 86 | 3719 X, _, _, |
3823 X, _, _, _, _, // 87 | 3720 _, _, // 43 |
3824 X, _, _, _, _, // 88 | 3721 X, _, _, |
3825 X, _, _, _, _, // 89 | 3722 _, _, // 44 |
3826 X, _, _, _, _, // 90 | 3723 X, _, _, |
3827 X, _, _, _, _, // 91 | 3724 _, _, // 45 |
3828 X, _, _, _, _, // 92 | 3725 X, _, _, |
3829 X, _, _, _, _, // 93 | 3726 _, _, // 46 |
3830 X, _, _, _, _, // 94 | 3727 X, _, _, |
3831 X, _, _, _, _, // 95 | 3728 _, _, // 47 |
3832 X, _, _, _, _, // 96 | 3729 X, _, _, |
3833 X, _, _, _, _, // 97 | 3730 _, _, // 48 |
3834 X, _, _, _, _, // 98 | 3731 X, _, _, |
3835 X, _, _, _, _, // 99 | 3732 _, _, // 49 |
3836 X, _, _, _, _, // 100 | 3733 X, _, _, |
3837 X, _, _, _, _, // 101 | 3734 _, _, // 50 |
3838 X, _, _, _, _, // 102 | 3735 X, _, _, |
3839 X, _, _, _, _, // 103 | 3736 _, _, // 51 |
3840 X, _, _, _, _, // 104 | 3737 X, _, _, |
3841 X, _, _, _, _, // 105 | 3738 _, _, // 52 |
3842 X, _, _, _, _, // 106 | 3739 X, _, _, |
3843 X, _, _, _, _, // 107 | 3740 _, _, // 53 |
3844 X, _, _, _, _, // 108 | 3741 X, _, _, |
3845 X, _, _, _, _, // 109 | 3742 _, _, // 54 |
3846 X, _, _, _, _, // 110 | 3743 X, _, _, |
3847 X, _, _, _, _, // 111 | 3744 _, _, // 55 |
3848 X, _, _, _, _, // 112 | 3745 X, _, _, |
3849 X, _, _, _, _, // 113 | 3746 _, _, // 56 |
3850 X, _, _, _, _, // 114 | 3747 X, _, _, |
3851 X, _, _, _, _, // 115 | 3748 _, _, // 57 |
3852 X, _, _, _, _, // 116 | 3749 X, _, _, |
3853 X, _, _, _, _, // 117 | 3750 _, _, // 58 |
3854 X, _, _, _, _, // 118 | 3751 X, _, _, |
3855 X, _, _, _, _, // 119 | 3752 _, _, // 59 |
3856 X, _, _, _, _, // 120 | 3753 X, _, _, |
3857 X, _, _, _, _, // 121 | 3754 _, _, // 60 |
3858 X, _, _, _, _, // 122 | 3755 X, _, _, |
3859 X, _, _, _, _, // 123 | 3756 _, _, // 61 |
3860 X, _, _, _, _, // 124 | 3757 X, _, _, |
3861 X, _, _, _, _, // 125 | 3758 _, _, // 62 |
3862 X, _, _, _, _, // 126 | 3759 X, _, _, |
3863 X, _, _, _, _, // 127 | 3760 _, _, // 63 |
3864 1, 7, _, _, _, // 128 | 3761 1, 6, _, |
3865 2, 0, 7, _, _, // 129 | 3762 _, _, // 64 |
3866 2, 1, 7, _, _, // 130 | 3763 2, 0, 6, |
3867 X, _, _, _, _, // 131 | 3764 _, _, // 65 |
3868 2, 2, 7, _, _, // 132 | 3765 2, 1, 6, |
3869 3, 0, 2, 7, _, // 133 | 3766 _, _, // 66 |
3870 X, _, _, _, _, // 134 | 3767 X, _, _, |
3871 X, _, _, _, _, // 135 | 3768 _, _, // 67 |
3872 2, 3, 7, _, _, // 136 | 3769 2, 2, 6, |
3873 3, 0, 3, 7, _, // 137 | 3770 _, _, // 68 |
3874 3, 1, 3, 7, _, // 138 | 3771 3, 0, 2, |
3875 X, _, _, _, _, // 139 | 3772 6, _, // 69 |
3876 X, _, _, _, _, // 140 | 3773 X, _, _, |
3877 X, _, _, _, _, // 141 | 3774 _, _, // 70 |
3878 X, _, _, _, _, // 142 | 3775 X, _, _, |
3879 X, _, _, _, _, // 143 | 3776 _, _, // 71 |
3880 2, 4, 7, _, _, // 144 | 3777 2, 3, 6, |
3881 3, 0, 4, 7, _, // 145 | 3778 _, _, // 72 |
3882 3, 1, 4, 7, _, // 146 | 3779 3, 0, 3, |
3883 X, _, _, _, _, // 147 | 3780 6, _, // 73 |
3884 3, 2, 4, 7, _, // 148 | 3781 3, 1, 3, |
3885 4, 0, 2, 4, 7, // 149 | 3782 6, _, // 74 |
3886 X, _, _, _, _, // 150 | 3783 X, _, _, |
3887 X, _, _, _, _, // 151 | 3784 _, _, // 75 |
3888 X, _, _, _, _, // 152 | 3785 X, _, _, |
3889 X, _, _, _, _, // 153 | 3786 _, _, // 76 |
3890 X, _, _, _, _, // 154 | 3787 X, _, _, |
3891 X, _, _, _, _, // 155 | 3788 _, _, // 77 |
3892 X, _, _, _, _, // 156 | 3789 X, _, _, |
3893 X, _, _, _, _, // 157 | 3790 _, _, // 78 |
3894 X, _, _, _, _, // 158 | 3791 X, _, _, |
3895 X, _, _, _, _, // 159 | 3792 _, _, // 79 |
3896 2, 5, 7, _, _, // 160 | 3793 2, 4, 6, |
3897 3, 0, 5, 7, _, // 161 | 3794 _, _, // 80 |
3898 3, 1, 5, 7, _, // 162 | 3795 3, 0, 4, |
3899 X, _, _, _, _, // 163 | 3796 6, _, // 81 |
3900 3, 2, 5, 7, _, // 164 | 3797 3, 1, 4, |
3901 4, 0, 2, 5, 7, // 165 | 3798 6, _, // 82 |
3902 X, _, _, _, _, // 166 | 3799 X, _, _, |
3903 X, _, _, _, _, // 167 | 3800 _, _, // 83 |
3904 3, 3, 5, 7, _, // 168 | 3801 3, 2, 4, |
3905 4, 0, 3, 5, 7, // 169 | 3802 6, _, // 84 |
3906 4, 1, 3, 5, 7 // 170 | 3803 4, 0, 2, |
| 3804 4, 6, // 85 |
| 3805 X, _, _, |
| 3806 _, _, // 86 |
| 3807 X, _, _, |
| 3808 _, _, // 87 |
| 3809 X, _, _, |
| 3810 _, _, // 88 |
| 3811 X, _, _, |
| 3812 _, _, // 89 |
| 3813 X, _, _, |
| 3814 _, _, // 90 |
| 3815 X, _, _, |
| 3816 _, _, // 91 |
| 3817 X, _, _, |
| 3818 _, _, // 92 |
| 3819 X, _, _, |
| 3820 _, _, // 93 |
| 3821 X, _, _, |
| 3822 _, _, // 94 |
| 3823 X, _, _, |
| 3824 _, _, // 95 |
| 3825 X, _, _, |
| 3826 _, _, // 96 |
| 3827 X, _, _, |
| 3828 _, _, // 97 |
| 3829 X, _, _, |
| 3830 _, _, // 98 |
| 3831 X, _, _, |
| 3832 _, _, // 99 |
| 3833 X, _, _, |
| 3834 _, _, // 100 |
| 3835 X, _, _, |
| 3836 _, _, // 101 |
| 3837 X, _, _, |
| 3838 _, _, // 102 |
| 3839 X, _, _, |
| 3840 _, _, // 103 |
| 3841 X, _, _, |
| 3842 _, _, // 104 |
| 3843 X, _, _, |
| 3844 _, _, // 105 |
| 3845 X, _, _, |
| 3846 _, _, // 106 |
| 3847 X, _, _, |
| 3848 _, _, // 107 |
| 3849 X, _, _, |
| 3850 _, _, // 108 |
| 3851 X, _, _, |
| 3852 _, _, // 109 |
| 3853 X, _, _, |
| 3854 _, _, // 110 |
| 3855 X, _, _, |
| 3856 _, _, // 111 |
| 3857 X, _, _, |
| 3858 _, _, // 112 |
| 3859 X, _, _, |
| 3860 _, _, // 113 |
| 3861 X, _, _, |
| 3862 _, _, // 114 |
| 3863 X, _, _, |
| 3864 _, _, // 115 |
| 3865 X, _, _, |
| 3866 _, _, // 116 |
| 3867 X, _, _, |
| 3868 _, _, // 117 |
| 3869 X, _, _, |
| 3870 _, _, // 118 |
| 3871 X, _, _, |
| 3872 _, _, // 119 |
| 3873 X, _, _, |
| 3874 _, _, // 120 |
| 3875 X, _, _, |
| 3876 _, _, // 121 |
| 3877 X, _, _, |
| 3878 _, _, // 122 |
| 3879 X, _, _, |
| 3880 _, _, // 123 |
| 3881 X, _, _, |
| 3882 _, _, // 124 |
| 3883 X, _, _, |
| 3884 _, _, // 125 |
| 3885 X, _, _, |
| 3886 _, _, // 126 |
| 3887 X, _, _, |
| 3888 _, _, // 127 |
| 3889 1, 7, _, |
| 3890 _, _, // 128 |
| 3891 2, 0, 7, |
| 3892 _, _, // 129 |
| 3893 2, 1, 7, |
| 3894 _, _, // 130 |
| 3895 X, _, _, |
| 3896 _, _, // 131 |
| 3897 2, 2, 7, |
| 3898 _, _, // 132 |
| 3899 3, 0, 2, |
| 3900 7, _, // 133 |
| 3901 X, _, _, |
| 3902 _, _, // 134 |
| 3903 X, _, _, |
| 3904 _, _, // 135 |
| 3905 2, 3, 7, |
| 3906 _, _, // 136 |
| 3907 3, 0, 3, |
| 3908 7, _, // 137 |
| 3909 3, 1, 3, |
| 3910 7, _, // 138 |
| 3911 X, _, _, |
| 3912 _, _, // 139 |
| 3913 X, _, _, |
| 3914 _, _, // 140 |
| 3915 X, _, _, |
| 3916 _, _, // 141 |
| 3917 X, _, _, |
| 3918 _, _, // 142 |
| 3919 X, _, _, |
| 3920 _, _, // 143 |
| 3921 2, 4, 7, |
| 3922 _, _, // 144 |
| 3923 3, 0, 4, |
| 3924 7, _, // 145 |
| 3925 3, 1, 4, |
| 3926 7, _, // 146 |
| 3927 X, _, _, |
| 3928 _, _, // 147 |
| 3929 3, 2, 4, |
| 3930 7, _, // 148 |
| 3931 4, 0, 2, |
| 3932 4, 7, // 149 |
| 3933 X, _, _, |
| 3934 _, _, // 150 |
| 3935 X, _, _, |
| 3936 _, _, // 151 |
| 3937 X, _, _, |
| 3938 _, _, // 152 |
| 3939 X, _, _, |
| 3940 _, _, // 153 |
| 3941 X, _, _, |
| 3942 _, _, // 154 |
| 3943 X, _, _, |
| 3944 _, _, // 155 |
| 3945 X, _, _, |
| 3946 _, _, // 156 |
| 3947 X, _, _, |
| 3948 _, _, // 157 |
| 3949 X, _, _, |
| 3950 _, _, // 158 |
| 3951 X, _, _, |
| 3952 _, _, // 159 |
| 3953 2, 5, 7, |
| 3954 _, _, // 160 |
| 3955 3, 0, 5, |
| 3956 7, _, // 161 |
| 3957 3, 1, 5, |
| 3958 7, _, // 162 |
| 3959 X, _, _, |
| 3960 _, _, // 163 |
| 3961 3, 2, 5, |
| 3962 7, _, // 164 |
| 3963 4, 0, 2, |
| 3964 5, 7, // 165 |
| 3965 X, _, _, |
| 3966 _, _, // 166 |
| 3967 X, _, _, |
| 3968 _, _, // 167 |
| 3969 3, 3, 5, |
| 3970 7, _, // 168 |
| 3971 4, 0, 3, |
| 3972 5, 7, // 169 |
| 3973 4, 1, 3, |
| 3974 5, 7 // 170 |
3907 }; | 3975 }; |
3908 #undef _ | 3976 #undef _ |
3909 #undef X | 3977 #undef X |
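A worked read of the table: for a mark-bit byte with value 5 (binary 101), the line tagged // 5 holds 2, 0, 2, i.e. two live objects start in this byte's range, at word offsets 0 and 2. Lines marked X can never be hit, because objects are at least two words long and so no valid byte has two adjacent 1 bits. A small runnable sketch of the lookup, using a hypothetical six-line mini table in the same five-entries-per-line layout:

#include <cstdint>
#include <cstdio>

// Entry 0 of each line is the count of live-object starts; entries 1..count
// are their word offsets; -1 plays the role of the _ and X fillers.
static const signed char kMiniTable[6 * 5] = {
    0,  -1, -1, -1, -1,  // 0: no object starts
    1,  0,  -1, -1, -1,  // 1: one object at offset 0
    1,  1,  -1, -1, -1,  // 2: one object at offset 1
    -1, -1, -1, -1, -1,  // 3: impossible (adjacent mark bits)
    1,  2,  -1, -1, -1,  // 4: one object at offset 2
    2,  0,  2,  -1, -1,  // 5: objects at offsets 0 and 2
};

int ObjectStartsInByte(uint8_t mark_bits, int* offsets_out) {
  const signed char* line = kMiniTable + mark_bits * 5;
  int count = line[0];
  for (int i = 0; i < count; i++) offsets_out[i] = line[1 + i];
  return count;
}

int main() {
  int offsets[4];
  int n = ObjectStartsInByte(5, offsets);
  std::printf("%d starts:", n);
  for (int i = 0; i < n; i++) std::printf(" %d", offsets[i]);
  std::printf("\n");  // prints: 2 starts: 0 2
}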
3910 | 3978 |
3911 | 3979 |
3912 // Takes a word of mark bits. Returns the number of objects that start in the | 3980 // Takes a word of mark bits. Returns the number of objects that start in the |
3913 // range. Puts the word offsets of the object starts in the supplied array. | 3981 // range. Puts the word offsets of the object starts in the supplied array. |
3914 static inline int MarkWordToObjectStarts(uint32_t mark_bits, int* starts) { | 3982 static inline int MarkWordToObjectStarts(uint32_t mark_bits, int* starts) { |
3915 int objects = 0; | 3983 int objects = 0; |
3916 int offset = 0; | 3984 int offset = 0; |
(...skipping 70 matching lines...) |
3987 DCHECK((first_set_bit & cell) == first_set_bit); | 4055 DCHECK((first_set_bit & cell) == first_set_bit); |
3988 int live_objects = MarkWordToObjectStarts(first_set_bit, offsets); | 4056 int live_objects = MarkWordToObjectStarts(first_set_bit, offsets); |
3989 DCHECK(live_objects == 1); | 4057 DCHECK(live_objects == 1); |
3990 USE(live_objects); | 4058 USE(live_objects); |
3991 return block_address + offsets[0] * kPointerSize; | 4059 return block_address + offsets[0] * kPointerSize; |
3992 } | 4060 } |
3993 | 4061 |
3994 | 4062 |
3995 // Force instantiation of templatized SweepConservatively method for | 4063 // Force instantiation of templatized SweepConservatively method for |
3996 // SWEEP_ON_MAIN_THREAD mode. | 4064 // SWEEP_ON_MAIN_THREAD mode. |
3997 template int MarkCompactCollector:: | 4065 template int MarkCompactCollector::SweepConservatively< |
3998 SweepConservatively<MarkCompactCollector::SWEEP_ON_MAIN_THREAD>( | 4066 MarkCompactCollector::SWEEP_ON_MAIN_THREAD>(PagedSpace*, FreeList*, Page*); |
3999 PagedSpace*, FreeList*, Page*); | |
4000 | 4067 |
4001 | 4068 |
4002 // Force instantiation of templatized SweepConservatively method for | 4069 // Force instantiation of templatized SweepConservatively method for |
4003 // SWEEP_IN_PARALLEL mode. | 4070 // SWEEP_IN_PARALLEL mode. |
4004 template int MarkCompactCollector:: | 4071 template int MarkCompactCollector::SweepConservatively< |
4005 SweepConservatively<MarkCompactCollector::SWEEP_IN_PARALLEL>( | 4072 MarkCompactCollector::SWEEP_IN_PARALLEL>(PagedSpace*, FreeList*, Page*); |
4006 PagedSpace*, FreeList*, Page*); | |
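The two template int declarations above are explicit instantiations: SweepConservatively's definition lives in this .cc file rather than a header, so the specializations that other translation units link against must be emitted here. A minimal single-file sketch of the idiom (names are illustrative):

#include <cstdio>

template <int kMode>
int Process(int x) { return x * kMode; }

// Explicit instantiation: emit these two specializations in this translation
// unit even though nothing here calls them, so that callers elsewhere can
// link without ever seeing the template definition.
template int Process<1>(int);
template int Process<2>(int);

int main() { std::printf("%d %d\n", Process<1>(21), Process<2>(21)); }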
4007 | 4073 |
4008 | 4074 |
4009 // Sweeps a space conservatively. After this has been done the larger free | 4075 // Sweeps a space conservatively. After this has been done the larger free |
4010 // spaces have been put on the free list and the smaller ones have been | 4076 // spaces have been put on the free list and the smaller ones have been |
4011 // ignored and left untouched. A free space is always either ignored or put | 4077 // ignored and left untouched. A free space is always either ignored or put |
4012 // on the free list, never split up into two parts. This is important | 4078 // on the free list, never split up into two parts. This is important |
4013 // because it means that any FreeSpace maps left actually describe a region of | 4079 // because it means that any FreeSpace maps left actually describe a region of |
4014 // memory that can be ignored when scanning. Dead objects other than free | 4080 // memory that can be ignored when scanning. Dead objects other than free |
4015 // spaces will not contain the free space map. | 4081 // spaces will not contain the free space map. |
4016 template<MarkCompactCollector::SweepingParallelism mode> | 4082 template <MarkCompactCollector::SweepingParallelism mode> |
4017 int MarkCompactCollector::SweepConservatively(PagedSpace* space, | 4083 int MarkCompactCollector::SweepConservatively(PagedSpace* space, |
4018 FreeList* free_list, | 4084 FreeList* free_list, Page* p) { |
4019 Page* p) { | |
4020 DCHECK(!p->IsEvacuationCandidate() && !p->WasSwept()); | 4085 DCHECK(!p->IsEvacuationCandidate() && !p->WasSwept()); |
4021 DCHECK((mode == MarkCompactCollector::SWEEP_IN_PARALLEL && | 4086 DCHECK( |
4022 free_list != NULL) || | 4087 (mode == MarkCompactCollector::SWEEP_IN_PARALLEL && free_list != NULL) || |
4023 (mode == MarkCompactCollector::SWEEP_ON_MAIN_THREAD && | 4088 (mode == MarkCompactCollector::SWEEP_ON_MAIN_THREAD && |
4024 free_list == NULL)); | 4089 free_list == NULL)); |
4025 | 4090 |
4026 intptr_t freed_bytes = 0; | 4091 intptr_t freed_bytes = 0; |
4027 intptr_t max_freed_bytes = 0; | 4092 intptr_t max_freed_bytes = 0; |
4028 size_t size = 0; | 4093 size_t size = 0; |
4029 | 4094 |
4030 // Skip over all the dead objects at the start of the page and mark them free. | 4095 // Skip over all the dead objects at the start of the page and mark them free. |
4031 Address cell_base = 0; | 4096 Address cell_base = 0; |
4032 MarkBit::CellType* cell = NULL; | 4097 MarkBit::CellType* cell = NULL; |
4033 MarkBitCellIterator it(p); | 4098 MarkBitCellIterator it(p); |
4034 for (; !it.Done(); it.Advance()) { | 4099 for (; !it.Done(); it.Advance()) { |
4035 cell_base = it.CurrentCellBase(); | 4100 cell_base = it.CurrentCellBase(); |
4036 cell = it.CurrentCell(); | 4101 cell = it.CurrentCell(); |
4037 if (*cell != 0) break; | 4102 if (*cell != 0) break; |
4038 } | 4103 } |
4039 | 4104 |
4040 if (it.Done()) { | 4105 if (it.Done()) { |
4041 size = p->area_end() - p->area_start(); | 4106 size = p->area_end() - p->area_start(); |
4042 freed_bytes = Free<mode>(space, free_list, p->area_start(), | 4107 freed_bytes = |
4043 static_cast<int>(size)); | 4108 Free<mode>(space, free_list, p->area_start(), static_cast<int>(size)); |
4044 max_freed_bytes = Max(freed_bytes, max_freed_bytes); | 4109 max_freed_bytes = Max(freed_bytes, max_freed_bytes); |
4045 DCHECK_EQ(0, p->LiveBytes()); | 4110 DCHECK_EQ(0, p->LiveBytes()); |
4046 if (mode == MarkCompactCollector::SWEEP_IN_PARALLEL) { | 4111 if (mode == MarkCompactCollector::SWEEP_IN_PARALLEL) { |
4047 // When concurrent sweeping is active, the page will be marked after | 4112 // When concurrent sweeping is active, the page will be marked after |
4048 // sweeping by the main thread. | 4113 // sweeping by the main thread. |
4049 p->set_parallel_sweeping(MemoryChunk::SWEEPING_FINALIZE); | 4114 p->set_parallel_sweeping(MemoryChunk::SWEEPING_FINALIZE); |
4050 } else { | 4115 } else { |
4051 p->MarkSweptConservatively(); | 4116 p->MarkSweptConservatively(); |
4052 } | 4117 } |
4053 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes)); | 4118 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes)); |
4054 } | 4119 } |
4055 | 4120 |
4056 // Grow the size of the start-of-page free space a little to get up to the | 4121 // Grow the size of the start-of-page free space a little to get up to the |
4057 // first live object. | 4122 // first live object. |
4058 Address free_end = StartOfLiveObject(cell_base, *cell); | 4123 Address free_end = StartOfLiveObject(cell_base, *cell); |
4059 // Free the first free space. | 4124 // Free the first free space. |
4060 size = free_end - p->area_start(); | 4125 size = free_end - p->area_start(); |
4061 freed_bytes = Free<mode>(space, free_list, p->area_start(), | 4126 freed_bytes = |
4062 static_cast<int>(size)); | 4127 Free<mode>(space, free_list, p->area_start(), static_cast<int>(size)); |
4063 max_freed_bytes = Max(freed_bytes, max_freed_bytes); | 4128 max_freed_bytes = Max(freed_bytes, max_freed_bytes); |
4064 | 4129 |
4065 // The start of the current free area is represented in undigested form by | 4130 // The start of the current free area is represented in undigested form by |
4066 // the address of the last 32-word section that contained a live object and | 4131 // the address of the last 32-word section that contained a live object and |
4067 // the marking bitmap for that cell, which describes where the live object | 4132 // the marking bitmap for that cell, which describes where the live object |
4068 // started. Unless we find a large free space in the bitmap we will not | 4133 // started. Unless we find a large free space in the bitmap we will not |
4069 // digest this pair into a real address. We start the iteration here at the | 4134 // digest this pair into a real address. We start the iteration here at the |
4070 // first word in the marking bit map that indicates a live object. | 4135 // first word in the marking bit map that indicates a live object. |
4071 Address free_start = cell_base; | 4136 Address free_start = cell_base; |
4072 MarkBit::CellType free_start_cell = *cell; | 4137 MarkBit::CellType free_start_cell = *cell; |
(...skipping 154 matching lines...) |
4227 space->set_end_of_unswept_pages(p); | 4292 space->set_end_of_unswept_pages(p); |
4228 break; | 4293 break; |
4229 } | 4294 } |
4230 case CONCURRENT_PRECISE: | 4295 case CONCURRENT_PRECISE: |
4231 case PARALLEL_PRECISE: | 4296 case PARALLEL_PRECISE: |
4232 if (!parallel_sweeping_active) { | 4297 if (!parallel_sweeping_active) { |
4233 if (FLAG_gc_verbose) { | 4298 if (FLAG_gc_verbose) { |
4234 PrintF("Sweeping 0x%" V8PRIxPTR " precisely.\n", | 4299 PrintF("Sweeping 0x%" V8PRIxPTR " precisely.\n", |
4235 reinterpret_cast<intptr_t>(p)); | 4300 reinterpret_cast<intptr_t>(p)); |
4236 } | 4301 } |
4237 SweepPrecisely<SWEEP_ONLY, | 4302 SweepPrecisely<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST, |
4238 SWEEP_ON_MAIN_THREAD, | |
4239 IGNORE_SKIP_LIST, | |
4240 IGNORE_FREE_SPACE>(space, NULL, p, NULL); | 4303 IGNORE_FREE_SPACE>(space, NULL, p, NULL); |
4241 pages_swept++; | 4304 pages_swept++; |
4242 parallel_sweeping_active = true; | 4305 parallel_sweeping_active = true; |
4243 } else { | 4306 } else { |
4244 if (FLAG_gc_verbose) { | 4307 if (FLAG_gc_verbose) { |
4245 PrintF("Sweeping 0x%" V8PRIxPTR " conservatively in parallel.\n", | 4308 PrintF("Sweeping 0x%" V8PRIxPTR " conservatively in parallel.\n", |
4246 reinterpret_cast<intptr_t>(p)); | 4309 reinterpret_cast<intptr_t>(p)); |
4247 } | 4310 } |
4248 p->set_parallel_sweeping(MemoryChunk::SWEEPING_PENDING); | 4311 p->set_parallel_sweeping(MemoryChunk::SWEEPING_PENDING); |
4249 space->IncreaseUnsweptFreeBytes(p); | 4312 space->IncreaseUnsweptFreeBytes(p); |
4250 } | 4313 } |
4251 space->set_end_of_unswept_pages(p); | 4314 space->set_end_of_unswept_pages(p); |
4252 break; | 4315 break; |
4253 case PRECISE: { | 4316 case PRECISE: { |
4254 if (FLAG_gc_verbose) { | 4317 if (FLAG_gc_verbose) { |
4255 PrintF("Sweeping 0x%" V8PRIxPTR " precisely.\n", | 4318 PrintF("Sweeping 0x%" V8PRIxPTR " precisely.\n", |
4256 reinterpret_cast<intptr_t>(p)); | 4319 reinterpret_cast<intptr_t>(p)); |
4257 } | 4320 } |
4258 if (space->identity() == CODE_SPACE && FLAG_zap_code_space) { | 4321 if (space->identity() == CODE_SPACE && FLAG_zap_code_space) { |
4259 SweepPrecisely<SWEEP_ONLY, | 4322 SweepPrecisely<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST, |
4260 SWEEP_ON_MAIN_THREAD, | |
4261 REBUILD_SKIP_LIST, | |
4262 ZAP_FREE_SPACE>(space, NULL, p, NULL); | 4323 ZAP_FREE_SPACE>(space, NULL, p, NULL); |
4263 } else if (space->identity() == CODE_SPACE) { | 4324 } else if (space->identity() == CODE_SPACE) { |
4264 SweepPrecisely<SWEEP_ONLY, | 4325 SweepPrecisely<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST, |
4265 SWEEP_ON_MAIN_THREAD, | |
4266 REBUILD_SKIP_LIST, | |
4267 IGNORE_FREE_SPACE>(space, NULL, p, NULL); | 4326 IGNORE_FREE_SPACE>(space, NULL, p, NULL); |
4268 } else { | 4327 } else { |
4269 SweepPrecisely<SWEEP_ONLY, | 4328 SweepPrecisely<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST, |
4270 SWEEP_ON_MAIN_THREAD, | |
4271 IGNORE_SKIP_LIST, | |
4272 IGNORE_FREE_SPACE>(space, NULL, p, NULL); | 4329 IGNORE_FREE_SPACE>(space, NULL, p, NULL); |
4273 } | 4330 } |
4274 pages_swept++; | 4331 pages_swept++; |
4275 break; | 4332 break; |
4276 } | 4333 } |
4277 default: { | 4334 default: { UNREACHABLE(); } |
4278 UNREACHABLE(); | |
4279 } | |
4280 } | 4335 } |
4281 } | 4336 } |
4282 | 4337 |
4283 if (FLAG_gc_verbose) { | 4338 if (FLAG_gc_verbose) { |
4284 PrintF("SweepSpace: %s (%d pages swept)\n", | 4339 PrintF("SweepSpace: %s (%d pages swept)\n", |
4285 AllocationSpaceName(space->identity()), | 4340 AllocationSpaceName(space->identity()), pages_swept); |
4286 pages_swept); | |
4287 } | 4341 } |
4288 | 4342 |
4289 // Give pages that are queued to be freed back to the OS. | 4343 // Give pages that are queued to be freed back to the OS. |
4290 heap()->FreeQueuedChunks(); | 4344 heap()->FreeQueuedChunks(); |
4291 } | 4345 } |
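The sweeping phases below (MC_SWEEP_OLDSPACE, MC_SWEEP_CODE, MC_SWEEP_CELL, MC_SWEEP_MAP), like the evacuation phases earlier, are each wrapped in a GCTracer::Scope, an RAII guard that charges the wall time of its block to a named phase counter. A minimal sketch of that pattern with a hypothetical Tracer:

#include <chrono>
#include <cstdio>

// RAII timing scope: construction starts the clock, destruction charges the
// elapsed time to the selected phase.
struct Tracer {
  double phase_ms[2] = {0, 0};
  struct Scope {
    Tracer* tracer;
    int phase;
    std::chrono::steady_clock::time_point start;
    Scope(Tracer* t, int p)
        : tracer(t), phase(p), start(std::chrono::steady_clock::now()) {}
    ~Scope() {
      std::chrono::duration<double, std::milli> elapsed =
          std::chrono::steady_clock::now() - start;
      tracer->phase_ms[phase] += elapsed.count();
    }
  };
};

int main() {
  Tracer tracer;
  {
    Tracer::Scope scope(&tracer, 0);  // everything in this block is phase 0
    for (volatile int i = 0; i < 1000000; i++) {
    }
  }
  std::printf("phase 0: %.3f ms\n", tracer.phase_ms[0]);
}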
4292 | 4346 |
4293 | 4347 |
4294 static bool ShouldStartSweeperThreads(MarkCompactCollector::SweeperType type) { | 4348 static bool ShouldStartSweeperThreads(MarkCompactCollector::SweeperType type) { |
4295 return type == MarkCompactCollector::PARALLEL_CONSERVATIVE || | 4349 return type == MarkCompactCollector::PARALLEL_CONSERVATIVE || |
4296 type == MarkCompactCollector::CONCURRENT_CONSERVATIVE || | 4350 type == MarkCompactCollector::CONCURRENT_CONSERVATIVE || |
(...skipping 30 matching lines...) |
4327 } | 4381 } |
4328 if (sweep_precisely_) how_to_sweep = PRECISE; | 4382 if (sweep_precisely_) how_to_sweep = PRECISE; |
4329 | 4383 |
4330 MoveEvacuationCandidatesToEndOfPagesList(); | 4384 MoveEvacuationCandidatesToEndOfPagesList(); |
4331 | 4385 |
4332 // Noncompacting collections simply sweep the spaces to clear the mark | 4386 // Noncompacting collections simply sweep the spaces to clear the mark |
4333 // bits and free the non-live blocks (for old and map spaces). We sweep | 4387 // bits and free the non-live blocks (for old and map spaces). We sweep |
4334 // the map space last because freeing non-live maps overwrites them and | 4388 // the map space last because freeing non-live maps overwrites them and |
4335 // the other spaces rely on possibly non-live maps to get the sizes for | 4389 // the other spaces rely on possibly non-live maps to get the sizes for |
4336 // non-live objects. | 4390 // non-live objects. |
4337 { GCTracer::Scope sweep_scope(heap()->tracer(), | 4391 { |
| 4392 GCTracer::Scope sweep_scope(heap()->tracer(), |
4338 GCTracer::Scope::MC_SWEEP_OLDSPACE); | 4393 GCTracer::Scope::MC_SWEEP_OLDSPACE); |
4339 { SequentialSweepingScope scope(this); | 4394 { |
| 4395 SequentialSweepingScope scope(this); |
4340 SweepSpace(heap()->old_pointer_space(), how_to_sweep); | 4396 SweepSpace(heap()->old_pointer_space(), how_to_sweep); |
4341 SweepSpace(heap()->old_data_space(), how_to_sweep); | 4397 SweepSpace(heap()->old_data_space(), how_to_sweep); |
4342 } | 4398 } |
4343 | 4399 |
4344 if (ShouldStartSweeperThreads(how_to_sweep)) { | 4400 if (ShouldStartSweeperThreads(how_to_sweep)) { |
4345 StartSweeperThreads(); | 4401 StartSweeperThreads(); |
4346 } | 4402 } |
4347 | 4403 |
4348 if (ShouldWaitForSweeperThreads(how_to_sweep)) { | 4404 if (ShouldWaitForSweeperThreads(how_to_sweep)) { |
4349 EnsureSweepingCompleted(); | 4405 EnsureSweepingCompleted(); |
4350 } | 4406 } |
4351 } | 4407 } |
4352 RemoveDeadInvalidatedCode(); | 4408 RemoveDeadInvalidatedCode(); |
4353 | 4409 |
4354 { GCTracer::Scope sweep_scope(heap()->tracer(), | 4410 { |
| 4411 GCTracer::Scope sweep_scope(heap()->tracer(), |
4355 GCTracer::Scope::MC_SWEEP_CODE); | 4412 GCTracer::Scope::MC_SWEEP_CODE); |
4356 SweepSpace(heap()->code_space(), PRECISE); | 4413 SweepSpace(heap()->code_space(), PRECISE); |
4357 } | 4414 } |
4358 | 4415 |
4359 { GCTracer::Scope sweep_scope(heap()->tracer(), | 4416 { |
| 4417 GCTracer::Scope sweep_scope(heap()->tracer(), |
4360 GCTracer::Scope::MC_SWEEP_CELL); | 4418 GCTracer::Scope::MC_SWEEP_CELL); |
4361 SweepSpace(heap()->cell_space(), PRECISE); | 4419 SweepSpace(heap()->cell_space(), PRECISE); |
4362 SweepSpace(heap()->property_cell_space(), PRECISE); | 4420 SweepSpace(heap()->property_cell_space(), PRECISE); |
4363 } | 4421 } |
4364 | 4422 |
4365 EvacuateNewSpaceAndCandidates(); | 4423 EvacuateNewSpaceAndCandidates(); |
4366 | 4424 |
4367 // ClearNonLiveTransitions depends on precise sweeping of map space to | 4425 // ClearNonLiveTransitions depends on precise sweeping of map space to |
4368 // detect whether unmarked map became dead in this collection or in one | 4426 // detect whether unmarked map became dead in this collection or in one |
4369 // of the previous ones. | 4427 // of the previous ones. |
4370 { GCTracer::Scope sweep_scope(heap()->tracer(), | 4428 { |
| 4429 GCTracer::Scope sweep_scope(heap()->tracer(), |
4371 GCTracer::Scope::MC_SWEEP_MAP); | 4430 GCTracer::Scope::MC_SWEEP_MAP); |
4372 SweepSpace(heap()->map_space(), PRECISE); | 4431 SweepSpace(heap()->map_space(), PRECISE); |
4373 } | 4432 } |
4374 | 4433 |
4375 // Deallocate unmarked objects and clear marked bits for marked objects. | 4434 // Deallocate unmarked objects and clear marked bits for marked objects. |
4376 heap_->lo_space()->FreeUnmarkedObjects(); | 4435 heap_->lo_space()->FreeUnmarkedObjects(); |
4377 | 4436 |
4378 // Deallocate evacuated candidate pages. | 4437 // Deallocate evacuated candidate pages. |
4379 ReleaseEvacuationCandidates(); | 4438 ReleaseEvacuationCandidates(); |
4380 | 4439 |
(...skipping 53 matching lines...) |
4434 // Our profiling tools do not expect intersections between | 4493 // Our profiling tools do not expect intersections between |
4435 // code objects. We should either reenable it or change our tools. | 4494 // code objects. We should either reenable it or change our tools. |
4436 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj, | 4495 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj, |
4437 Isolate* isolate) { | 4496 Isolate* isolate) { |
4438 if (obj->IsCode()) { | 4497 if (obj->IsCode()) { |
4439 PROFILE(isolate, CodeDeleteEvent(obj->address())); | 4498 PROFILE(isolate, CodeDeleteEvent(obj->address())); |
4440 } | 4499 } |
4441 } | 4500 } |
4442 | 4501 |
4443 | 4502 |
4444 Isolate* MarkCompactCollector::isolate() const { | 4503 Isolate* MarkCompactCollector::isolate() const { return heap_->isolate(); } |
4445 return heap_->isolate(); | |
4446 } | |
4447 | 4504 |
4448 | 4505 |
4449 void MarkCompactCollector::Initialize() { | 4506 void MarkCompactCollector::Initialize() { |
4450 MarkCompactMarkingVisitor::Initialize(); | 4507 MarkCompactMarkingVisitor::Initialize(); |
4451 IncrementalMarking::Initialize(); | 4508 IncrementalMarking::Initialize(); |
4452 } | 4509 } |
4453 | 4510 |
4454 | 4511 |
4455 bool SlotsBuffer::IsTypedSlot(ObjectSlot slot) { | 4512 bool SlotsBuffer::IsTypedSlot(ObjectSlot slot) { |
4456 return reinterpret_cast<uintptr_t>(slot) < NUMBER_OF_SLOT_TYPES; | 4513 return reinterpret_cast<uintptr_t>(slot) < NUMBER_OF_SLOT_TYPES; |
4457 } | 4514 } |
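IsTypedSlot relies on a tagging trick: a SlotsBuffer entry is either a real slot address or a small SlotType value stored as a "pointer", announcing that the next entry is a typed address. The comparison works because real heap addresses are never smaller than NUMBER_OF_SLOT_TYPES. A standalone sketch of the scheme, with illustrative names:

#include <cstdint>
#include <cstdio>

enum SlotType : uintptr_t { CODE_TARGET = 0, CODE_ENTRY = 1, NUMBER_OF_TYPES };

using Entry = void*;  // holds either a real address or a tiny type tag

bool IsTypeTag(Entry e) {
  // Small values cannot be valid addresses, so they must be tags.
  return reinterpret_cast<uintptr_t>(e) < NUMBER_OF_TYPES;
}

int main() {
  int object = 42;
  Entry buffer[3] = {
      &object,                              // plain untyped slot
      reinterpret_cast<Entry>(CODE_ENTRY),  // type tag...
      &object,                              // ...paired with its address
  };
  for (int i = 0; i < 3; i++) {
    if (IsTypeTag(buffer[i])) {
      std::printf("entry %d: tag, next entry is the typed address\n", i);
      i++;  // consume the paired address, as SlotsBuffer::UpdateSlots does
    } else {
      std::printf("entry %d: plain slot\n", i);
    }
  }
}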
4458 | 4515 |
4459 | 4516 |
4460 bool SlotsBuffer::AddTo(SlotsBufferAllocator* allocator, | 4517 bool SlotsBuffer::AddTo(SlotsBufferAllocator* allocator, |
4461 SlotsBuffer** buffer_address, | 4518 SlotsBuffer** buffer_address, SlotType type, |
4462 SlotType type, | 4519 Address addr, AdditionMode mode) { |
4463 Address addr, | |
4464 AdditionMode mode) { | |
4465 SlotsBuffer* buffer = *buffer_address; | 4520 SlotsBuffer* buffer = *buffer_address; |
4466 if (buffer == NULL || !buffer->HasSpaceForTypedSlot()) { | 4521 if (buffer == NULL || !buffer->HasSpaceForTypedSlot()) { |
4467 if (mode == FAIL_ON_OVERFLOW && ChainLengthThresholdReached(buffer)) { | 4522 if (mode == FAIL_ON_OVERFLOW && ChainLengthThresholdReached(buffer)) { |
4468 allocator->DeallocateChain(buffer_address); | 4523 allocator->DeallocateChain(buffer_address); |
4469 return false; | 4524 return false; |
4470 } | 4525 } |
4471 buffer = allocator->AllocateBuffer(buffer); | 4526 buffer = allocator->AllocateBuffer(buffer); |
4472 *buffer_address = buffer; | 4527 *buffer_address = buffer; |
4473 } | 4528 } |
4474 DCHECK(buffer->HasSpaceForTypedSlot()); | 4529 DCHECK(buffer->HasSpaceForTypedSlot()); |
(...skipping 22 matching lines...) |
4497 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target)); | 4552 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target)); |
4498 RelocInfo::Mode rmode = rinfo->rmode(); | 4553 RelocInfo::Mode rmode = rinfo->rmode(); |
4499 if (target_page->IsEvacuationCandidate() && | 4554 if (target_page->IsEvacuationCandidate() && |
4500 (rinfo->host() == NULL || | 4555 (rinfo->host() == NULL || |
4501 !ShouldSkipEvacuationSlotRecording(rinfo->host()))) { | 4556 !ShouldSkipEvacuationSlotRecording(rinfo->host()))) { |
4502 bool success; | 4557 bool success; |
4503 if (RelocInfo::IsEmbeddedObject(rmode) && rinfo->IsInConstantPool()) { | 4558 if (RelocInfo::IsEmbeddedObject(rmode) && rinfo->IsInConstantPool()) { |
4504 // This doesn't need to be typed since it is just a normal heap pointer. | 4559 // This doesn't need to be typed since it is just a normal heap pointer. |
4505 Object** target_pointer = | 4560 Object** target_pointer = |
4506 reinterpret_cast<Object**>(rinfo->constant_pool_entry_address()); | 4561 reinterpret_cast<Object**>(rinfo->constant_pool_entry_address()); |
4507 success = SlotsBuffer::AddTo(&slots_buffer_allocator_, | 4562 success = SlotsBuffer::AddTo( |
4508 target_page->slots_buffer_address(), | 4563 &slots_buffer_allocator_, target_page->slots_buffer_address(), |
4509 target_pointer, | 4564 target_pointer, SlotsBuffer::FAIL_ON_OVERFLOW); |
4510 SlotsBuffer::FAIL_ON_OVERFLOW); | |
4511 } else if (RelocInfo::IsCodeTarget(rmode) && rinfo->IsInConstantPool()) { | 4565 } else if (RelocInfo::IsCodeTarget(rmode) && rinfo->IsInConstantPool()) { |
4512 success = SlotsBuffer::AddTo(&slots_buffer_allocator_, | 4566 success = SlotsBuffer::AddTo( |
4513 target_page->slots_buffer_address(), | 4567 &slots_buffer_allocator_, target_page->slots_buffer_address(), |
4514 SlotsBuffer::CODE_ENTRY_SLOT, | 4568 SlotsBuffer::CODE_ENTRY_SLOT, rinfo->constant_pool_entry_address(), |
4515 rinfo->constant_pool_entry_address(), | 4569 SlotsBuffer::FAIL_ON_OVERFLOW); |
4516 SlotsBuffer::FAIL_ON_OVERFLOW); | |
4517 } else { | 4570 } else { |
4518 success = SlotsBuffer::AddTo(&slots_buffer_allocator_, | 4571 success = SlotsBuffer::AddTo( |
4519 target_page->slots_buffer_address(), | 4572 &slots_buffer_allocator_, target_page->slots_buffer_address(), |
4520 SlotTypeForRMode(rmode), | 4573 SlotTypeForRMode(rmode), rinfo->pc(), SlotsBuffer::FAIL_ON_OVERFLOW); |
4521 rinfo->pc(), | |
4522 SlotsBuffer::FAIL_ON_OVERFLOW); | |
4523 } | 4574 } |
4524 if (!success) { | 4575 if (!success) { |
4525 EvictEvacuationCandidate(target_page); | 4576 EvictEvacuationCandidate(target_page); |
4526 } | 4577 } |
4527 } | 4578 } |
4528 } | 4579 } |
4529 | 4580 |
4530 | 4581 |
4531 void MarkCompactCollector::RecordCodeEntrySlot(Address slot, Code* target) { | 4582 void MarkCompactCollector::RecordCodeEntrySlot(Address slot, Code* target) { |
4532 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target)); | 4583 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target)); |
4533 if (target_page->IsEvacuationCandidate() && | 4584 if (target_page->IsEvacuationCandidate() && |
4534 !ShouldSkipEvacuationSlotRecording(reinterpret_cast<Object**>(slot))) { | 4585 !ShouldSkipEvacuationSlotRecording(reinterpret_cast<Object**>(slot))) { |
4535 if (!SlotsBuffer::AddTo(&slots_buffer_allocator_, | 4586 if (!SlotsBuffer::AddTo(&slots_buffer_allocator_, |
4536 target_page->slots_buffer_address(), | 4587 target_page->slots_buffer_address(), |
4537 SlotsBuffer::CODE_ENTRY_SLOT, | 4588 SlotsBuffer::CODE_ENTRY_SLOT, slot, |
4538 slot, | |
4539 SlotsBuffer::FAIL_ON_OVERFLOW)) { | 4589 SlotsBuffer::FAIL_ON_OVERFLOW)) { |
4540 EvictEvacuationCandidate(target_page); | 4590 EvictEvacuationCandidate(target_page); |
4541 } | 4591 } |
4542 } | 4592 } |
4543 } | 4593 } |
4544 | 4594 |
4545 | 4595 |
4546 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { | 4596 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { |
4547 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT); | 4597 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT); |
4548 if (is_compacting()) { | 4598 if (is_compacting()) { |
4549 Code* host = isolate()->inner_pointer_to_code_cache()-> | 4599 Code* host = |
4550 GcSafeFindCodeForInnerPointer(pc); | 4600 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer( |
| 4601 pc); |
4551 MarkBit mark_bit = Marking::MarkBitFrom(host); | 4602 MarkBit mark_bit = Marking::MarkBitFrom(host); |
4552 if (Marking::IsBlack(mark_bit)) { | 4603 if (Marking::IsBlack(mark_bit)) { |
4553 RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host); | 4604 RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host); |
4554 RecordRelocSlot(&rinfo, target); | 4605 RecordRelocSlot(&rinfo, target); |
4555 } | 4606 } |
4556 } | 4607 } |
4557 } | 4608 } |
4558 | 4609 |
4559 | 4610 |
4560 static inline SlotsBuffer::SlotType DecodeSlotType( | 4611 static inline SlotsBuffer::SlotType DecodeSlotType( |
4561 SlotsBuffer::ObjectSlot slot) { | 4612 SlotsBuffer::ObjectSlot slot) { |
4562 return static_cast<SlotsBuffer::SlotType>(reinterpret_cast<intptr_t>(slot)); | 4613 return static_cast<SlotsBuffer::SlotType>(reinterpret_cast<intptr_t>(slot)); |
4563 } | 4614 } |
4564 | 4615 |
4565 | 4616 |
4566 void SlotsBuffer::UpdateSlots(Heap* heap) { | 4617 void SlotsBuffer::UpdateSlots(Heap* heap) { |
4567 PointersUpdatingVisitor v(heap); | 4618 PointersUpdatingVisitor v(heap); |
4568 | 4619 |
4569 for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) { | 4620 for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) { |
4570 ObjectSlot slot = slots_[slot_idx]; | 4621 ObjectSlot slot = slots_[slot_idx]; |
4571 if (!IsTypedSlot(slot)) { | 4622 if (!IsTypedSlot(slot)) { |
4572 PointersUpdatingVisitor::UpdateSlot(heap, slot); | 4623 PointersUpdatingVisitor::UpdateSlot(heap, slot); |
4573 } else { | 4624 } else { |
4574 ++slot_idx; | 4625 ++slot_idx; |
4575 DCHECK(slot_idx < idx_); | 4626 DCHECK(slot_idx < idx_); |
4576 UpdateSlot(heap->isolate(), | 4627 UpdateSlot(heap->isolate(), &v, DecodeSlotType(slot), |
4577 &v, | |
4578 DecodeSlotType(slot), | |
4579 reinterpret_cast<Address>(slots_[slot_idx])); | 4628 reinterpret_cast<Address>(slots_[slot_idx])); |
4580 } | 4629 } |
4581 } | 4630 } |
4582 } | 4631 } |
4583 | 4632 |
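UpdateSlots above relies on the buffer's pair encoding: an untyped slot occupies one entry, while a typed slot occupies two consecutive entries, the slot type reinterpreted as an ObjectSlot followed by the raw address. That is why the loop advances slot_idx an extra step for typed slots and why DecodeSlotType can recover the tag with a plain cast. Below is a self-contained sketch of that encoding using stand-in types; Address, ObjectSlot, and the *Sketch helper names are assumptions, not V8 declarations.

#include <cassert>
#include <cstdint>
#include <vector>

// Stand-ins for the real V8 types; everything here is illustrative.
typedef uint8_t* Address;
typedef void** ObjectSlot;
enum SlotType { EMBEDDED_OBJECT_SLOT, CODE_TARGET_SLOT, CODE_ENTRY_SLOT,
                NUMBER_OF_SLOT_TYPES };

// Tags are small integers, so they can never collide with a real slot
// address; this is what lets a typed-slot check tell the two entry
// kinds apart.
static bool IsTypedSlotSketch(ObjectSlot slot) {
  return reinterpret_cast<intptr_t>(slot) < NUMBER_OF_SLOT_TYPES;
}

// A typed slot is appended as two consecutive entries: the tag, then the
// address. Untyped slots would be appended as a single ObjectSlot entry.
static void AddTypedSlotSketch(std::vector<ObjectSlot>* slots, SlotType type,
                               Address addr) {
  slots->push_back(reinterpret_cast<ObjectSlot>(static_cast<intptr_t>(type)));
  slots->push_back(reinterpret_cast<ObjectSlot>(addr));
}

int main() {
  std::vector<ObjectSlot> slots;
  Address pc = reinterpret_cast<Address>(0x10000);
  AddTypedSlotSketch(&slots, CODE_TARGET_SLOT, pc);
  assert(IsTypedSlotSketch(slots[0]));                // entry 0 is the tag...
  assert(reinterpret_cast<Address>(slots[1]) == pc);  // ...entry 1 the address
  return 0;
}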
4584 | 4633 |
4585 void SlotsBuffer::UpdateSlotsWithFilter(Heap* heap) { | 4634 void SlotsBuffer::UpdateSlotsWithFilter(Heap* heap) { |
4586 PointersUpdatingVisitor v(heap); | 4635 PointersUpdatingVisitor v(heap); |
4587 | 4636 |
4588 for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) { | 4637 for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) { |
4589 ObjectSlot slot = slots_[slot_idx]; | 4638 ObjectSlot slot = slots_[slot_idx]; |
4590 if (!IsTypedSlot(slot)) { | 4639 if (!IsTypedSlot(slot)) { |
4591 if (!IsOnInvalidatedCodeObject(reinterpret_cast<Address>(slot))) { | 4640 if (!IsOnInvalidatedCodeObject(reinterpret_cast<Address>(slot))) { |
4592 PointersUpdatingVisitor::UpdateSlot(heap, slot); | 4641 PointersUpdatingVisitor::UpdateSlot(heap, slot); |
4593 } | 4642 } |
4594 } else { | 4643 } else { |
4595 ++slot_idx; | 4644 ++slot_idx; |
4596 DCHECK(slot_idx < idx_); | 4645 DCHECK(slot_idx < idx_); |
4597 Address pc = reinterpret_cast<Address>(slots_[slot_idx]); | 4646 Address pc = reinterpret_cast<Address>(slots_[slot_idx]); |
4598 if (!IsOnInvalidatedCodeObject(pc)) { | 4647 if (!IsOnInvalidatedCodeObject(pc)) { |
4599 UpdateSlot(heap->isolate(), | 4648 UpdateSlot(heap->isolate(), &v, DecodeSlotType(slot), |
4600 &v, | |
4601 DecodeSlotType(slot), | |
4602 reinterpret_cast<Address>(slots_[slot_idx])); | 4649 reinterpret_cast<Address>(slots_[slot_idx])); |
4603 } | 4650 } |
4604 } | 4651 } |
4605 } | 4652 } |
4606 } | 4653 } |
4607 | 4654 |
4608 | 4655 |
4609 SlotsBuffer* SlotsBufferAllocator::AllocateBuffer(SlotsBuffer* next_buffer) { | 4656 SlotsBuffer* SlotsBufferAllocator::AllocateBuffer(SlotsBuffer* next_buffer) { |
4610 return new SlotsBuffer(next_buffer); | 4657 return new SlotsBuffer(next_buffer); |
4611 } | 4658 } |
4612 | 4659 |
4613 | 4660 |
4614 void SlotsBufferAllocator::DeallocateBuffer(SlotsBuffer* buffer) { | 4661 void SlotsBufferAllocator::DeallocateBuffer(SlotsBuffer* buffer) { |
4615 delete buffer; | 4662 delete buffer; |
4616 } | 4663 } |
4617 | 4664 |
4618 | 4665 |
4619 void SlotsBufferAllocator::DeallocateChain(SlotsBuffer** buffer_address) { | 4666 void SlotsBufferAllocator::DeallocateChain(SlotsBuffer** buffer_address) { |
4620 SlotsBuffer* buffer = *buffer_address; | 4667 SlotsBuffer* buffer = *buffer_address; |
4621 while (buffer != NULL) { | 4668 while (buffer != NULL) { |
4622 SlotsBuffer* next_buffer = buffer->next(); | 4669 SlotsBuffer* next_buffer = buffer->next(); |
4623 DeallocateBuffer(buffer); | 4670 DeallocateBuffer(buffer); |
4624 buffer = next_buffer; | 4671 buffer = next_buffer; |
4625 } | 4672 } |
4626 *buffer_address = NULL; | 4673 *buffer_address = NULL; |
4627 } | 4674 } |
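A short usage sketch of the allocator above, under the assumption, visible in AllocateBuffer, that buffers form a singly linked chain through their next pointer with the newest buffer at the head:

// Hypothetical usage; SlotsBuffer/SlotsBufferAllocator as declared above.
SlotsBufferAllocator allocator;
SlotsBuffer* head = NULL;
head = allocator.AllocateBuffer(head);  // chain: [B1]
head = allocator.AllocateBuffer(head);  // chain: [B2] -> [B1]
allocator.DeallocateChain(&head);       // frees B2 then B1; head == NULL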
4628 | 4675 } |
4629 | 4676 } // namespace v8::internal |
4630 } } // namespace v8::internal | |