OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_MARK_COMPACT_H_ | 5 #ifndef V8_HEAP_MARK_COMPACT_H_ |
6 #define V8_MARK_COMPACT_H_ | 6 #define V8_HEAP_MARK_COMPACT_H_ |
7 | 7 |
8 #include "src/compiler-intrinsics.h" | 8 #include "src/compiler-intrinsics.h" |
9 #include "src/spaces.h" | 9 #include "src/heap/spaces.h" |
10 | 10 |
11 namespace v8 { | 11 namespace v8 { |
12 namespace internal { | 12 namespace internal { |
13 | 13 |
14 // Callback function, returns whether an object is alive. The heap size | 14 // Callback function, returns whether an object is alive. The heap size |
15 // of the object is returned in size. It optionally updates the offset | 15 // of the object is returned in size. It optionally updates the offset |
16 // to the first live object in the page (only used for old and map objects). | 16 // to the first live object in the page (only used for old and map objects). |
17 typedef bool (*IsAliveFunction)(HeapObject* obj, int* size, int* offset); | 17 typedef bool (*IsAliveFunction)(HeapObject* obj, int* size, int* offset); |
18 | 18 |
19 // Forward declarations. | 19 // Forward declarations. |
20 class CodeFlusher; | 20 class CodeFlusher; |
21 class MarkCompactCollector; | 21 class MarkCompactCollector; |
22 class MarkingVisitor; | 22 class MarkingVisitor; |
23 class RootMarkingVisitor; | 23 class RootMarkingVisitor; |
24 | 24 |
25 | 25 |
26 class Marking { | 26 class Marking { |
27 public: | 27 public: |
28 explicit Marking(Heap* heap) | 28 explicit Marking(Heap* heap) : heap_(heap) {} |
29 : heap_(heap) { | |
30 } | |
31 | 29 |
32 INLINE(static MarkBit MarkBitFrom(Address addr)); | 30 INLINE(static MarkBit MarkBitFrom(Address addr)); |
33 | 31 |
34 INLINE(static MarkBit MarkBitFrom(HeapObject* obj)) { | 32 INLINE(static MarkBit MarkBitFrom(HeapObject* obj)) { |
35 return MarkBitFrom(reinterpret_cast<Address>(obj)); | 33 return MarkBitFrom(reinterpret_cast<Address>(obj)); |
36 } | 34 } |
37 | 35 |
38 // Impossible markbits: 01 | 36 // Impossible markbits: 01 |
39 static const char* kImpossibleBitPattern; | 37 static const char* kImpossibleBitPattern; |
40 INLINE(static bool IsImpossible(MarkBit mark_bit)) { | 38 INLINE(static bool IsImpossible(MarkBit mark_bit)) { |
41 return !mark_bit.Get() && mark_bit.Next().Get(); | 39 return !mark_bit.Get() && mark_bit.Next().Get(); |
42 } | 40 } |
43 | 41 |
44 // Black markbits: 10 - this is required by the sweeper. | 42 // Black markbits: 10 - this is required by the sweeper. |
45 static const char* kBlackBitPattern; | 43 static const char* kBlackBitPattern; |
46 INLINE(static bool IsBlack(MarkBit mark_bit)) { | 44 INLINE(static bool IsBlack(MarkBit mark_bit)) { |
47 return mark_bit.Get() && !mark_bit.Next().Get(); | 45 return mark_bit.Get() && !mark_bit.Next().Get(); |
48 } | 46 } |
49 | 47 |
50 // White markbits: 00 - this is required by the mark bit clearer. | 48 // White markbits: 00 - this is required by the mark bit clearer. |
51 static const char* kWhiteBitPattern; | 49 static const char* kWhiteBitPattern; |
52 INLINE(static bool IsWhite(MarkBit mark_bit)) { | 50 INLINE(static bool IsWhite(MarkBit mark_bit)) { return !mark_bit.Get(); } |
53 return !mark_bit.Get(); | |
54 } | |
55 | 51 |
56 // Grey markbits: 11 | 52 // Grey markbits: 11 |
57 static const char* kGreyBitPattern; | 53 static const char* kGreyBitPattern; |
58 INLINE(static bool IsGrey(MarkBit mark_bit)) { | 54 INLINE(static bool IsGrey(MarkBit mark_bit)) { |
59 return mark_bit.Get() && mark_bit.Next().Get(); | 55 return mark_bit.Get() && mark_bit.Next().Get(); |
60 } | 56 } |
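The four patterns above encode an object's color in a pair of adjacent mark bits (the bit itself and its Next() neighbor). A minimal standalone sketch of the encoding, with a hypothetical DecodeColor helper that is not part of this CL:

    // Two-bit color encoding: first bit = marked, second bit = Next().
    enum class Color { kWhite, kBlack, kGrey, kImpossible };

    Color DecodeColor(bool bit, bool next) {
      if (!bit && !next) return Color::kWhite;  // 00: unvisited
      if (bit && !next) return Color::kBlack;   // 10: marked and scanned
      if (bit && next) return Color::kGrey;     // 11: marked, not yet scanned
      return Color::kImpossible;                // 01: never produced
    }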
61 | 57 |
62 INLINE(static void MarkBlack(MarkBit mark_bit)) { | 58 INLINE(static void MarkBlack(MarkBit mark_bit)) { |
63 mark_bit.Set(); | 59 mark_bit.Set(); |
64 mark_bit.Next().Clear(); | 60 mark_bit.Next().Clear(); |
65 } | 61 } |
66 | 62 |
67 INLINE(static void BlackToGrey(MarkBit markbit)) { | 63 INLINE(static void BlackToGrey(MarkBit markbit)) { markbit.Next().Set(); } |
68 markbit.Next().Set(); | |
69 } | |
70 | 64 |
71 INLINE(static void WhiteToGrey(MarkBit markbit)) { | 65 INLINE(static void WhiteToGrey(MarkBit markbit)) { |
72 markbit.Set(); | 66 markbit.Set(); |
73 markbit.Next().Set(); | 67 markbit.Next().Set(); |
74 } | 68 } |
75 | 69 |
76 INLINE(static void GreyToBlack(MarkBit markbit)) { | 70 INLINE(static void GreyToBlack(MarkBit markbit)) { markbit.Next().Clear(); } |
77 markbit.Next().Clear(); | |
78 } | |
79 | 71 |
80 INLINE(static void BlackToGrey(HeapObject* obj)) { | 72 INLINE(static void BlackToGrey(HeapObject* obj)) { |
81 BlackToGrey(MarkBitFrom(obj)); | 73 BlackToGrey(MarkBitFrom(obj)); |
82 } | 74 } |
83 | 75 |
84 INLINE(static void AnyToGrey(MarkBit markbit)) { | 76 INLINE(static void AnyToGrey(MarkBit markbit)) { |
85 markbit.Set(); | 77 markbit.Set(); |
86 markbit.Next().Set(); | 78 markbit.Next().Set(); |
87 } | 79 } |
88 | 80 |
89 void TransferMark(Address old_start, Address new_start); | 81 void TransferMark(Address old_start, Address new_start); |
90 | 82 |
91 #ifdef DEBUG | 83 #ifdef DEBUG |
92 enum ObjectColor { | 84 enum ObjectColor { |
93 BLACK_OBJECT, | 85 BLACK_OBJECT, |
94 WHITE_OBJECT, | 86 WHITE_OBJECT, |
95 GREY_OBJECT, | 87 GREY_OBJECT, |
96 IMPOSSIBLE_COLOR | 88 IMPOSSIBLE_COLOR |
97 }; | 89 }; |
98 | 90 |
99 static const char* ColorName(ObjectColor color) { | 91 static const char* ColorName(ObjectColor color) { |
100 switch (color) { | 92 switch (color) { |
101 case BLACK_OBJECT: return "black"; | 93 case BLACK_OBJECT: |
102 case WHITE_OBJECT: return "white"; | 94 return "black"; |
103 case GREY_OBJECT: return "grey"; | 95 case WHITE_OBJECT: |
104 case IMPOSSIBLE_COLOR: return "impossible"; | 96 return "white"; |
| 97 case GREY_OBJECT: |
| 98 return "grey"; |
| 99 case IMPOSSIBLE_COLOR: |
| 100 return "impossible"; |
105 } | 101 } |
106 return "error"; | 102 return "error"; |
107 } | 103 } |
108 | 104 |
109 static ObjectColor Color(HeapObject* obj) { | 105 static ObjectColor Color(HeapObject* obj) { |
110 return Color(Marking::MarkBitFrom(obj)); | 106 return Color(Marking::MarkBitFrom(obj)); |
111 } | 107 } |
112 | 108 |
113 static ObjectColor Color(MarkBit mark_bit) { | 109 static ObjectColor Color(MarkBit mark_bit) { |
114 if (IsBlack(mark_bit)) return BLACK_OBJECT; | 110 if (IsBlack(mark_bit)) return BLACK_OBJECT; |
115 if (IsWhite(mark_bit)) return WHITE_OBJECT; | 111 if (IsWhite(mark_bit)) return WHITE_OBJECT; |
116 if (IsGrey(mark_bit)) return GREY_OBJECT; | 112 if (IsGrey(mark_bit)) return GREY_OBJECT; |
117 UNREACHABLE(); | 113 UNREACHABLE(); |
118 return IMPOSSIBLE_COLOR; | 114 return IMPOSSIBLE_COLOR; |
119 } | 115 } |
120 #endif | 116 #endif |
121 | 117 |
122 // Returns true if the transferred color is black. | 118 // Returns true if the transferred color is black. |
123 INLINE(static bool TransferColor(HeapObject* from, | 119 INLINE(static bool TransferColor(HeapObject* from, HeapObject* to)) { |
124 HeapObject* to)) { | |
125 MarkBit from_mark_bit = MarkBitFrom(from); | 120 MarkBit from_mark_bit = MarkBitFrom(from); |
126 MarkBit to_mark_bit = MarkBitFrom(to); | 121 MarkBit to_mark_bit = MarkBitFrom(to); |
127 bool is_black = false; | 122 bool is_black = false; |
128 if (from_mark_bit.Get()) { | 123 if (from_mark_bit.Get()) { |
129 to_mark_bit.Set(); | 124 to_mark_bit.Set(); |
130 is_black = true; // Looks black so far. | 125 is_black = true; // Looks black so far. |
131 } | 126 } |
132 if (from_mark_bit.Next().Get()) { | 127 if (from_mark_bit.Next().Get()) { |
133 to_mark_bit.Next().Set(); | 128 to_mark_bit.Next().Set(); |
134 is_black = false; // Was actually gray. | 129 is_black = false; // Was actually gray. |
135 } | 130 } |
136 return is_black; | 131 return is_black; |
137 } | 132 } |
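The transfer above copies both bits and reports black only when the first bit is set and the second is not: a grey source first sets is_black and then clears it again. As a truth table (an illustration, not code in the CL):

    // from-color   bits copied to `to`   return value
    // white (00)   00                    false
    // black (10)   10                    true
    // grey  (11)   11                    false (Next() bit overrides)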
138 | 133 |
139 private: | 134 private: |
140 Heap* heap_; | 135 Heap* heap_; |
141 }; | 136 }; |
142 | 137 |
143 // ---------------------------------------------------------------------------- | 138 // ---------------------------------------------------------------------------- |
144 // Marking deque for tracing live objects. | 139 // Marking deque for tracing live objects. |
145 class MarkingDeque { | 140 class MarkingDeque { |
146 public: | 141 public: |
147 MarkingDeque() | 142 MarkingDeque() |
148 : array_(NULL), top_(0), bottom_(0), mask_(0), overflowed_(false) { } | 143 : array_(NULL), top_(0), bottom_(0), mask_(0), overflowed_(false) {} |
149 | 144 |
150 void Initialize(Address low, Address high) { | 145 void Initialize(Address low, Address high) { |
151 HeapObject** obj_low = reinterpret_cast<HeapObject**>(low); | 146 HeapObject** obj_low = reinterpret_cast<HeapObject**>(low); |
152 HeapObject** obj_high = reinterpret_cast<HeapObject**>(high); | 147 HeapObject** obj_high = reinterpret_cast<HeapObject**>(high); |
153 array_ = obj_low; | 148 array_ = obj_low; |
154 mask_ = RoundDownToPowerOf2(static_cast<int>(obj_high - obj_low)) - 1; | 149 mask_ = RoundDownToPowerOf2(static_cast<int>(obj_high - obj_low)) - 1; |
155 top_ = bottom_ = 0; | 150 top_ = bottom_ = 0; |
156 overflowed_ = false; | 151 overflowed_ = false; |
157 } | 152 } |
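The mask_ computed in Initialize rounds the buffer region down to a power-of-two number of slots, so index arithmetic can wrap with a bitwise AND instead of a modulo (RoundDownToPowerOf2(n) - 1 yields an all-ones mask). A minimal sketch of that ring-buffer idiom under those assumptions; RingBuffer and kMaxCapacity are invented names, and overflow checks are omitted for brevity:

    #include <cassert>
    #include <cstdint>

    class RingBuffer {
     public:
      // capacity must be a power of two, mirroring RoundDownToPowerOf2 above.
      explicit RingBuffer(int capacity)
          : mask_(capacity - 1), top_(0), bottom_(0) {
        assert(capacity > 0 && (capacity & mask_) == 0);  // power of two
        assert(capacity <= kMaxCapacity);
      }

      void Push(intptr_t value) {
        slots_[top_] = value;
        top_ = (top_ + 1) & mask_;  // wraps past the end back to 0
      }

      intptr_t Pop() {
        intptr_t value = slots_[bottom_];
        bottom_ = (bottom_ + 1) & mask_;
        return value;
      }

      bool IsEmpty() const { return top_ == bottom_; }

     private:
      static const int kMaxCapacity = 1024;
      intptr_t slots_[kMaxCapacity];
      int mask_;
      int top_;
      int bottom_;
    };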
158 | 153 |
(...skipping 95 matching lines...) |

254 public: | 249 public: |
255 typedef Object** ObjectSlot; | 250 typedef Object** ObjectSlot; |
256 | 251 |
257 explicit SlotsBuffer(SlotsBuffer* next_buffer) | 252 explicit SlotsBuffer(SlotsBuffer* next_buffer) |
258 : idx_(0), chain_length_(1), next_(next_buffer) { | 253 : idx_(0), chain_length_(1), next_(next_buffer) { |
259 if (next_ != NULL) { | 254 if (next_ != NULL) { |
260 chain_length_ = next_->chain_length_ + 1; | 255 chain_length_ = next_->chain_length_ + 1; |
261 } | 256 } |
262 } | 257 } |
263 | 258 |
264 ~SlotsBuffer() { | 259 ~SlotsBuffer() {} |
265 } | |
266 | 260 |
267 void Add(ObjectSlot slot) { | 261 void Add(ObjectSlot slot) { |
268 DCHECK(0 <= idx_ && idx_ < kNumberOfElements); | 262 DCHECK(0 <= idx_ && idx_ < kNumberOfElements); |
269 slots_[idx_++] = slot; | 263 slots_[idx_++] = slot; |
270 } | 264 } |
271 | 265 |
272 enum SlotType { | 266 enum SlotType { |
273 EMBEDDED_OBJECT_SLOT, | 267 EMBEDDED_OBJECT_SLOT, |
274 RELOCATED_CODE_OBJECT, | 268 RELOCATED_CODE_OBJECT, |
275 CODE_TARGET_SLOT, | 269 CODE_TARGET_SLOT, |
(...skipping 28 matching lines...) |
304 void UpdateSlotsWithFilter(Heap* heap); | 298 void UpdateSlotsWithFilter(Heap* heap); |
305 | 299 |
306 SlotsBuffer* next() { return next_; } | 300 SlotsBuffer* next() { return next_; } |
307 | 301 |
308 static int SizeOfChain(SlotsBuffer* buffer) { | 302 static int SizeOfChain(SlotsBuffer* buffer) { |
309 if (buffer == NULL) return 0; | 303 if (buffer == NULL) return 0; |
310 return static_cast<int>(buffer->idx_ + | 304 return static_cast<int>(buffer->idx_ + |
311 (buffer->chain_length_ - 1) * kNumberOfElements); | 305 (buffer->chain_length_ - 1) * kNumberOfElements); |
312 } | 306 } |
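The size computation works because every buffer behind the head of the chain is full; only the head's idx_ is partial. A worked example with hypothetical numbers:

    // Head buffer holds idx_ == 100 slots; two full buffers follow, so
    // chain_length_ == 3 and kNumberOfElements == 1021:
    //   SizeOfChain = 100 + (3 - 1) * 1021 = 2142 recorded slots.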
313 | 307 |
314 inline bool IsFull() { | 308 inline bool IsFull() { return idx_ == kNumberOfElements; } |
315 return idx_ == kNumberOfElements; | |
316 } | |
317 | 309 |
318 inline bool HasSpaceForTypedSlot() { | 310 inline bool HasSpaceForTypedSlot() { return idx_ < kNumberOfElements - 1; } |
319 return idx_ < kNumberOfElements - 1; | |
320 } | |
321 | 311 |
322 static void UpdateSlotsRecordedIn(Heap* heap, | 312 static void UpdateSlotsRecordedIn(Heap* heap, SlotsBuffer* buffer, |
323 SlotsBuffer* buffer, | |
324 bool code_slots_filtering_required) { | 313 bool code_slots_filtering_required) { |
325 while (buffer != NULL) { | 314 while (buffer != NULL) { |
326 if (code_slots_filtering_required) { | 315 if (code_slots_filtering_required) { |
327 buffer->UpdateSlotsWithFilter(heap); | 316 buffer->UpdateSlotsWithFilter(heap); |
328 } else { | 317 } else { |
329 buffer->UpdateSlots(heap); | 318 buffer->UpdateSlots(heap); |
330 } | 319 } |
331 buffer = buffer->next(); | 320 buffer = buffer->next(); |
332 } | 321 } |
333 } | 322 } |
334 | 323 |
335 enum AdditionMode { | 324 enum AdditionMode { FAIL_ON_OVERFLOW, IGNORE_OVERFLOW }; |
336 FAIL_ON_OVERFLOW, | |
337 IGNORE_OVERFLOW | |
338 }; | |
339 | 325 |
340 static bool ChainLengthThresholdReached(SlotsBuffer* buffer) { | 326 static bool ChainLengthThresholdReached(SlotsBuffer* buffer) { |
341 return buffer != NULL && buffer->chain_length_ >= kChainLengthThreshold; | 327 return buffer != NULL && buffer->chain_length_ >= kChainLengthThreshold; |
342 } | 328 } |
343 | 329 |
344 INLINE(static bool AddTo(SlotsBufferAllocator* allocator, | 330 INLINE(static bool AddTo(SlotsBufferAllocator* allocator, |
345 SlotsBuffer** buffer_address, | 331 SlotsBuffer** buffer_address, ObjectSlot slot, |
346 ObjectSlot slot, | |
347 AdditionMode mode)) { | 332 AdditionMode mode)) { |
348 SlotsBuffer* buffer = *buffer_address; | 333 SlotsBuffer* buffer = *buffer_address; |
349 if (buffer == NULL || buffer->IsFull()) { | 334 if (buffer == NULL || buffer->IsFull()) { |
350 if (mode == FAIL_ON_OVERFLOW && ChainLengthThresholdReached(buffer)) { | 335 if (mode == FAIL_ON_OVERFLOW && ChainLengthThresholdReached(buffer)) { |
351 allocator->DeallocateChain(buffer_address); | 336 allocator->DeallocateChain(buffer_address); |
352 return false; | 337 return false; |
353 } | 338 } |
354 buffer = allocator->AllocateBuffer(buffer); | 339 buffer = allocator->AllocateBuffer(buffer); |
355 *buffer_address = buffer; | 340 *buffer_address = buffer; |
356 } | 341 } |
357 buffer->Add(slot); | 342 buffer->Add(slot); |
358 return true; | 343 return true; |
359 } | 344 } |
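When AddTo is called with FAIL_ON_OVERFLOW and the chain has already reached kChainLengthThreshold, it deallocates the whole chain and returns false, so the caller needs a coarser fallback. A hedged sketch of that calling pattern, modeled on the RESCAN_ON_EVACUATION fallback used by EvictEvacuationCandidate further down; RecordSlotOrRescan is an invented name:

    // Record `slot`, or fall back to rescanning the whole page after
    // evacuation if fine-grained slot recording has become too expensive.
    void RecordSlotOrRescan(SlotsBufferAllocator* allocator, Page* page,
                            SlotsBuffer** buffer_address,
                            SlotsBuffer::ObjectSlot slot) {
      if (!SlotsBuffer::AddTo(allocator, buffer_address, slot,
                              SlotsBuffer::FAIL_ON_OVERFLOW)) {
        // AddTo has already deallocated the overlong chain.
        page->SetFlag(Page::RESCAN_ON_EVACUATION);
      }
    }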
360 | 345 |
361 static bool IsTypedSlot(ObjectSlot slot); | 346 static bool IsTypedSlot(ObjectSlot slot); |
362 | 347 |
363 static bool AddTo(SlotsBufferAllocator* allocator, | 348 static bool AddTo(SlotsBufferAllocator* allocator, |
364 SlotsBuffer** buffer_address, | 349 SlotsBuffer** buffer_address, SlotType type, Address addr, |
365 SlotType type, | |
366 Address addr, | |
367 AdditionMode mode); | 350 AdditionMode mode); |
368 | 351 |
369 static const int kNumberOfElements = 1021; | 352 static const int kNumberOfElements = 1021; |
370 | 353 |
371 private: | 354 private: |
372 static const int kChainLengthThreshold = 15; | 355 static const int kChainLengthThreshold = 15; |
373 | 356 |
374 intptr_t idx_; | 357 intptr_t idx_; |
375 intptr_t chain_length_; | 358 intptr_t chain_length_; |
376 SlotsBuffer* next_; | 359 SlotsBuffer* next_; |
(...skipping 148 matching lines...) |
525 | 508 |
526 void AddEvacuationCandidate(Page* p); | 509 void AddEvacuationCandidate(Page* p); |
527 | 510 |
528 // Prepares for GC by resetting relocation info in old and map spaces and | 511 // Prepares for GC by resetting relocation info in old and map spaces and |
529 // choosing spaces to compact. | 512 // choosing spaces to compact. |
530 void Prepare(); | 513 void Prepare(); |
531 | 514 |
532 // Performs a global garbage collection. | 515 // Performs a global garbage collection. |
533 void CollectGarbage(); | 516 void CollectGarbage(); |
534 | 517 |
535 enum CompactionMode { | 518 enum CompactionMode { INCREMENTAL_COMPACTION, NON_INCREMENTAL_COMPACTION }; |
536 INCREMENTAL_COMPACTION, | |
537 NON_INCREMENTAL_COMPACTION | |
538 }; | |
539 | 519 |
540 bool StartCompaction(CompactionMode mode); | 520 bool StartCompaction(CompactionMode mode); |
541 | 521 |
542 void AbortCompaction(); | 522 void AbortCompaction(); |
543 | 523 |
544 #ifdef DEBUG | 524 #ifdef DEBUG |
545 // Checks whether performing mark-compact collection. | 525 // Checks whether performing mark-compact collection. |
546 bool in_use() { return state_ > PREPARE_GC; } | 526 bool in_use() { return state_ > PREPARE_GC; } |
547 bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; } | 527 bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; } |
548 #endif | 528 #endif |
(...skipping 16 matching lines...) |
565 void EnableCodeFlushing(bool enable); | 545 void EnableCodeFlushing(bool enable); |
566 | 546 |
567 enum SweeperType { | 547 enum SweeperType { |
568 PARALLEL_CONSERVATIVE, | 548 PARALLEL_CONSERVATIVE, |
569 CONCURRENT_CONSERVATIVE, | 549 CONCURRENT_CONSERVATIVE, |
570 PARALLEL_PRECISE, | 550 PARALLEL_PRECISE, |
571 CONCURRENT_PRECISE, | 551 CONCURRENT_PRECISE, |
572 PRECISE | 552 PRECISE |
573 }; | 553 }; |
574 | 554 |
575 enum SweepingParallelism { | 555 enum SweepingParallelism { SWEEP_ON_MAIN_THREAD, SWEEP_IN_PARALLEL }; |
576 SWEEP_ON_MAIN_THREAD, | |
577 SWEEP_IN_PARALLEL | |
578 }; | |
579 | 556 |
580 #ifdef VERIFY_HEAP | 557 #ifdef VERIFY_HEAP |
581 void VerifyMarkbitsAreClean(); | 558 void VerifyMarkbitsAreClean(); |
582 static void VerifyMarkbitsAreClean(PagedSpace* space); | 559 static void VerifyMarkbitsAreClean(PagedSpace* space); |
583 static void VerifyMarkbitsAreClean(NewSpace* space); | 560 static void VerifyMarkbitsAreClean(NewSpace* space); |
584 void VerifyWeakEmbeddedObjectsInCode(); | 561 void VerifyWeakEmbeddedObjectsInCode(); |
585 void VerifyOmittedMapChecks(); | 562 void VerifyOmittedMapChecks(); |
586 #endif | 563 #endif |
587 | 564 |
588 // Sweep a single page from the given space conservatively. | 565 // Sweep a single page from the given space conservatively. |
589 // Returns the size of the biggest continuous freed memory chunk in bytes. | 566 // Returns the size of the biggest continuous freed memory chunk in bytes. |
590 template<SweepingParallelism type> | 567 template <SweepingParallelism type> |
591 static int SweepConservatively(PagedSpace* space, | 568 static int SweepConservatively(PagedSpace* space, FreeList* free_list, |
592 FreeList* free_list, | 569 Page* p); |
593 Page* p); | |
594 | 570 |
595 INLINE(static bool ShouldSkipEvacuationSlotRecording(Object** anchor)) { | 571 INLINE(static bool ShouldSkipEvacuationSlotRecording(Object** anchor)) { |
596 return Page::FromAddress(reinterpret_cast<Address>(anchor))-> | 572 return Page::FromAddress(reinterpret_cast<Address>(anchor)) |
597 ShouldSkipEvacuationSlotRecording(); | 573 ->ShouldSkipEvacuationSlotRecording(); |
598 } | 574 } |
599 | 575 |
600 INLINE(static bool ShouldSkipEvacuationSlotRecording(Object* host)) { | 576 INLINE(static bool ShouldSkipEvacuationSlotRecording(Object* host)) { |
601 return Page::FromAddress(reinterpret_cast<Address>(host))-> | 577 return Page::FromAddress(reinterpret_cast<Address>(host)) |
602 ShouldSkipEvacuationSlotRecording(); | 578 ->ShouldSkipEvacuationSlotRecording(); |
603 } | 579 } |
604 | 580 |
605 INLINE(static bool IsOnEvacuationCandidate(Object* obj)) { | 581 INLINE(static bool IsOnEvacuationCandidate(Object* obj)) { |
606 return Page::FromAddress(reinterpret_cast<Address>(obj))-> | 582 return Page::FromAddress(reinterpret_cast<Address>(obj)) |
607 IsEvacuationCandidate(); | 583 ->IsEvacuationCandidate(); |
608 } | 584 } |
609 | 585 |
610 INLINE(void EvictEvacuationCandidate(Page* page)) { | 586 INLINE(void EvictEvacuationCandidate(Page* page)) { |
611 if (FLAG_trace_fragmentation) { | 587 if (FLAG_trace_fragmentation) { |
612 PrintF("Page %p is too popular. Disabling evacuation.\n", | 588 PrintF("Page %p is too popular. Disabling evacuation.\n", |
613 reinterpret_cast<void*>(page)); | 589 reinterpret_cast<void*>(page)); |
614 } | 590 } |
615 | 591 |
616 // TODO(gc) If all evacuation candidates are too popular we | 592 // TODO(gc) If all evacuation candidates are too popular we |
617 // should stop slots recording entirely. | 593 // should stop slots recording entirely. |
618 page->ClearEvacuationCandidate(); | 594 page->ClearEvacuationCandidate(); |
619 | 595 |
620 // We were not collecting slots on this page that point | 596 // We were not collecting slots on this page that point |
621 // to other evacuation candidates thus we have to | 597 // to other evacuation candidates thus we have to |
622 // rescan the page after evacuation to discover and update all | 598 // rescan the page after evacuation to discover and update all |
623 // pointers to evacuated objects. | 599 // pointers to evacuated objects. |
624 if (page->owner()->identity() == OLD_DATA_SPACE) { | 600 if (page->owner()->identity() == OLD_DATA_SPACE) { |
625 evacuation_candidates_.RemoveElement(page); | 601 evacuation_candidates_.RemoveElement(page); |
626 } else { | 602 } else { |
627 page->SetFlag(Page::RESCAN_ON_EVACUATION); | 603 page->SetFlag(Page::RESCAN_ON_EVACUATION); |
628 } | 604 } |
629 } | 605 } |
630 | 606 |
631 void RecordRelocSlot(RelocInfo* rinfo, Object* target); | 607 void RecordRelocSlot(RelocInfo* rinfo, Object* target); |
632 void RecordCodeEntrySlot(Address slot, Code* target); | 608 void RecordCodeEntrySlot(Address slot, Code* target); |
633 void RecordCodeTargetPatch(Address pc, Code* target); | 609 void RecordCodeTargetPatch(Address pc, Code* target); |
634 | 610 |
635 INLINE(void RecordSlot(Object** anchor_slot, | 611 INLINE(void RecordSlot( |
636 Object** slot, | 612 Object** anchor_slot, Object** slot, Object* object, |
637 Object* object, | 613 SlotsBuffer::AdditionMode mode = SlotsBuffer::FAIL_ON_OVERFLOW)); |
638 SlotsBuffer::AdditionMode mode = | |
639 SlotsBuffer::FAIL_ON_OVERFLOW)); | |
640 | 614 |
641 void MigrateObject(HeapObject* dst, | 615 void MigrateObject(HeapObject* dst, HeapObject* src, int size, |
642 HeapObject* src, | |
643 int size, | |
644 AllocationSpace to_old_space); | 616 AllocationSpace to_old_space); |
645 | 617 |
646 bool TryPromoteObject(HeapObject* object, int object_size); | 618 bool TryPromoteObject(HeapObject* object, int object_size); |
647 | 619 |
648 void InvalidateCode(Code* code); | 620 void InvalidateCode(Code* code); |
649 | 621 |
650 void ClearMarkbits(); | 622 void ClearMarkbits(); |
651 | 623 |
652 bool abort_incremental_marking() const { return abort_incremental_marking_; } | 624 bool abort_incremental_marking() const { return abort_incremental_marking_; } |
653 | 625 |
(...skipping 23 matching lines...) |
677 | 649 |
678 bool AreSweeperThreadsActivated(); | 650 bool AreSweeperThreadsActivated(); |
679 | 651 |
680 // Checks if sweeping is in progress right now on any space. | 652 // Checks if sweeping is in progress right now on any space. |
681 bool sweeping_in_progress() { return sweeping_in_progress_; } | 653 bool sweeping_in_progress() { return sweeping_in_progress_; } |
682 | 654 |
683 void set_sequential_sweeping(bool sequential_sweeping) { | 655 void set_sequential_sweeping(bool sequential_sweeping) { |
684 sequential_sweeping_ = sequential_sweeping; | 656 sequential_sweeping_ = sequential_sweeping; |
685 } | 657 } |
686 | 658 |
687 bool sequential_sweeping() const { | 659 bool sequential_sweeping() const { return sequential_sweeping_; } |
688 return sequential_sweeping_; | |
689 } | |
690 | 660 |
691 // Mark the global table which maps weak objects to dependent code without | 661 // Mark the global table which maps weak objects to dependent code without |
692 // marking its contents. | 662 // marking its contents. |
693 void MarkWeakObjectToCodeTable(); | 663 void MarkWeakObjectToCodeTable(); |
694 | 664 |
695 // Special case for processing weak references in a full collection. We need | 665 // Special case for processing weak references in a full collection. We need |
696 // to artificially keep AllocationSites alive for a time. | 666 // to artificially keep AllocationSites alive for a time. |
697 void MarkAllocationSite(AllocationSite* site); | 667 void MarkAllocationSite(AllocationSite* site); |
698 | 668 |
699 private: | 669 private: |
(...skipping 168 matching lines...) |
868 // their space's free list. Active eden semispace is compacted by | 838 // their space's free list. Active eden semispace is compacted by |
869 // evacuation. | 839 // evacuation. |
870 // | 840 // |
871 | 841 |
872 // If we are not compacting the heap, we simply sweep the spaces except | 842 // If we are not compacting the heap, we simply sweep the spaces except |
873 // for the large object space, clearing mark bits and adding unmarked | 843 // for the large object space, clearing mark bits and adding unmarked |
874 // regions to each space's free list. | 844 // regions to each space's free list. |
875 void SweepSpaces(); | 845 void SweepSpaces(); |
876 | 846 |
877 int DiscoverAndEvacuateBlackObjectsOnPage(NewSpace* new_space, | 847 int DiscoverAndEvacuateBlackObjectsOnPage(NewSpace* new_space, |
878 NewSpacePage* p); | 848 NewSpacePage* p); |
879 | 849 |
880 void EvacuateNewSpace(); | 850 void EvacuateNewSpace(); |
881 | 851 |
882 void EvacuateLiveObjectsFromPage(Page* p); | 852 void EvacuateLiveObjectsFromPage(Page* p); |
883 | 853 |
884 void EvacuatePages(); | 854 void EvacuatePages(); |
885 | 855 |
886 void EvacuateNewSpaceAndCandidates(); | 856 void EvacuateNewSpaceAndCandidates(); |
887 | 857 |
888 void ReleaseEvacuationCandidates(); | 858 void ReleaseEvacuationCandidates(); |
(...skipping 31 matching lines...) |
920 | 890 |
921 SmartPointer<FreeList> free_list_old_data_space_; | 891 SmartPointer<FreeList> free_list_old_data_space_; |
922 SmartPointer<FreeList> free_list_old_pointer_space_; | 892 SmartPointer<FreeList> free_list_old_pointer_space_; |
923 | 893 |
924 friend class Heap; | 894 friend class Heap; |
925 }; | 895 }; |
926 | 896 |
927 | 897 |
928 class MarkBitCellIterator BASE_EMBEDDED { | 898 class MarkBitCellIterator BASE_EMBEDDED { |
929 public: | 899 public: |
930 explicit MarkBitCellIterator(MemoryChunk* chunk) | 900 explicit MarkBitCellIterator(MemoryChunk* chunk) : chunk_(chunk) { |
931 : chunk_(chunk) { | 901 last_cell_index_ = Bitmap::IndexToCell(Bitmap::CellAlignIndex( |
932 last_cell_index_ = Bitmap::IndexToCell( | 902 chunk_->AddressToMarkbitIndex(chunk_->area_end()))); |
933 Bitmap::CellAlignIndex( | |
934 chunk_->AddressToMarkbitIndex(chunk_->area_end()))); | |
935 cell_base_ = chunk_->area_start(); | 903 cell_base_ = chunk_->area_start(); |
936 cell_index_ = Bitmap::IndexToCell( | 904 cell_index_ = Bitmap::IndexToCell( |
937 Bitmap::CellAlignIndex( | 905 Bitmap::CellAlignIndex(chunk_->AddressToMarkbitIndex(cell_base_))); |
938 chunk_->AddressToMarkbitIndex(cell_base_))); | |
939 cells_ = chunk_->markbits()->cells(); | 906 cells_ = chunk_->markbits()->cells(); |
940 } | 907 } |
941 | 908 |
942 inline bool Done() { return cell_index_ == last_cell_index_; } | 909 inline bool Done() { return cell_index_ == last_cell_index_; } |
943 | 910 |
944 inline bool HasNext() { return cell_index_ < last_cell_index_ - 1; } | 911 inline bool HasNext() { return cell_index_ < last_cell_index_ - 1; } |
945 | 912 |
946 inline MarkBit::CellType* CurrentCell() { | 913 inline MarkBit::CellType* CurrentCell() { |
947 DCHECK(cell_index_ == Bitmap::IndexToCell(Bitmap::CellAlignIndex( | 914 DCHECK(cell_index_ == Bitmap::IndexToCell(Bitmap::CellAlignIndex( |
948 chunk_->AddressToMarkbitIndex(cell_base_)))); | 915 chunk_->AddressToMarkbitIndex(cell_base_)))); |
949 return &cells_[cell_index_]; | 916 return &cells_[cell_index_]; |
950 } | 917 } |
951 | 918 |
952 inline Address CurrentCellBase() { | 919 inline Address CurrentCellBase() { |
953 DCHECK(cell_index_ == Bitmap::IndexToCell(Bitmap::CellAlignIndex( | 920 DCHECK(cell_index_ == Bitmap::IndexToCell(Bitmap::CellAlignIndex( |
954 chunk_->AddressToMarkbitIndex(cell_base_)))); | 921 chunk_->AddressToMarkbitIndex(cell_base_)))); |
955 return cell_base_; | 922 return cell_base_; |
956 } | 923 } |
957 | 924 |
958 inline void Advance() { | 925 inline void Advance() { |
959 cell_index_++; | 926 cell_index_++; |
960 cell_base_ += 32 * kPointerSize; | 927 cell_base_ += 32 * kPointerSize; |
961 } | 928 } |
962 | 929 |
963 private: | 930 private: |
964 MemoryChunk* chunk_; | 931 MemoryChunk* chunk_; |
965 MarkBit::CellType* cells_; | 932 MarkBit::CellType* cells_; |
966 unsigned int last_cell_index_; | 933 unsigned int last_cell_index_; |
967 unsigned int cell_index_; | 934 unsigned int cell_index_; |
968 Address cell_base_; | 935 Address cell_base_; |
969 }; | 936 }; |
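Each bitmap cell holds 32 mark bits, one per pointer-sized word, which is why Advance() steps cell_base_ by 32 * kPointerSize. A hypothetical walk over a chunk's bitmap using this iterator (portable bit counting, no V8 popcount helper assumed):

    void CountMarkedWords(MemoryChunk* chunk) {
      int marked = 0;
      for (MarkBitCellIterator it(chunk); !it.Done(); it.Advance()) {
        MarkBit::CellType cell = *it.CurrentCell();
        while (cell != 0) {  // count set bits one at a time
          marked += static_cast<int>(cell & 1);
          cell >>= 1;
        }
      }
      PrintF("%d marked words\n", marked);
    }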
970 | 937 |
971 | 938 |
972 class SequentialSweepingScope BASE_EMBEDDED { | 939 class SequentialSweepingScope BASE_EMBEDDED { |
973 public: | 940 public: |
974 explicit SequentialSweepingScope(MarkCompactCollector *collector) : | 941 explicit SequentialSweepingScope(MarkCompactCollector* collector) |
975 collector_(collector) { | 942 : collector_(collector) { |
976 collector_->set_sequential_sweeping(true); | 943 collector_->set_sequential_sweeping(true); |
977 } | 944 } |
978 | 945 |
979 ~SequentialSweepingScope() { | 946 ~SequentialSweepingScope() { collector_->set_sequential_sweeping(false); } |
980 collector_->set_sequential_sweeping(false); | |
981 } | |
982 | 947 |
983 private: | 948 private: |
984 MarkCompactCollector* collector_; | 949 MarkCompactCollector* collector_; |
985 }; | 950 }; |
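The scope is the usual RAII guard: sequential sweeping is switched on for the lifetime of the object and switched off again on every exit path. A hypothetical call site:

    void DoWorkWithoutParallelSweepers(MarkCompactCollector* collector) {
      SequentialSweepingScope scope(collector);
      // ... work that must not overlap with parallel sweeping ...
    }  // destructor calls set_sequential_sweeping(false)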
986 | 951 |
987 | 952 |
988 const char* AllocationSpaceName(AllocationSpace space); | 953 const char* AllocationSpaceName(AllocationSpace space); |
| 954 } |
| 955 } // namespace v8::internal |
989 | 956 |
990 } } // namespace v8::internal | 957 #endif // V8_HEAP_MARK_COMPACT_H_ |
991 | |
992 #endif // V8_MARK_COMPACT_H_ | |