Index: src/mark-compact.h
diff --git a/src/mark-compact.h b/src/mark-compact.h
index aafe95d263515e30b8e13be85b45e85ee35a8b03..0e2db00a6fab07a40086c588097c3aec0495b215 100644
--- a/src/mark-compact.h
+++ b/src/mark-compact.h
@@ -267,29 +267,104 @@ class MarkingDeque {
 };
+class SlotsBufferAllocator {
+ public:
+  SlotsBuffer* AllocateBuffer(SlotsBuffer* next_buffer);
+  void DeallocateBuffer(SlotsBuffer* buffer);
+
+  void DeallocateChain(SlotsBuffer** buffer_address);
+};
+
+
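[Note: AllocateBuffer and DeallocateChain are implemented in mark-compact.cc, which is not part of this diff. A minimal sketch of the intended behavior, inferred from Initialize(), next() and kSizeWords below; the malloc-based allocation is an assumption, not the actual implementation:

  // Sketch only (needs <cstdlib>): each buffer is a raw block of
  // kSizeWords machine words, reinterpreted as a SlotsBuffer header
  // followed by its inline slot array.
  SlotsBuffer* SlotsBufferAllocator::AllocateBuffer(SlotsBuffer* next_buffer) {
    void* memory = malloc(SlotsBuffer::kSizeWords * kPointerSize);
    SlotsBuffer* buffer = reinterpret_cast<SlotsBuffer*>(memory);
    buffer->Initialize(next_buffer);  // link in front of the existing chain
    return buffer;
  }

  void SlotsBufferAllocator::DeallocateBuffer(SlotsBuffer* buffer) {
    free(buffer);
  }

  void SlotsBufferAllocator::DeallocateChain(SlotsBuffer** buffer_address) {
    // Free every buffer reachable through next_, then clear the head pointer.
    SlotsBuffer* buffer = *buffer_address;
    while (buffer != NULL) {
      SlotsBuffer* next_buffer = buffer->next();
      DeallocateBuffer(buffer);
      buffer = next_buffer;
    }
    *buffer_address = NULL;
  }]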
 class SlotsBuffer {
  public:
   typedef Object** ObjectSlot;
-  SlotsBuffer();
-  ~SlotsBuffer();
+  SlotsBuffer() { UNREACHABLE(); }

[Erik Corry, 2011/07/08 13:02:38: I think you should use DISALLOW_IMPLICIT_CONSTRUCT…]

+  ~SlotsBuffer() { UNREACHABLE(); }
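[Note: the UNREACHABLE() bodies signal that SlotsBuffer objects are never constructed or destroyed directly; SlotsBufferAllocator carves them out of raw allocations and calls Initialize() instead. That is presumably also the point of the reviewer's suggestion above: a DISALLOW_IMPLICIT_CONSTRUCT… macro would hide the constructors at compile time rather than trap at runtime.]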
-  void Clear();
-  void Add(ObjectSlot slot);
-  void Update();
-  void Report();
+  void Initialize(SlotsBuffer* next_buffer) {
+    idx_ = 0;
+    next_ = next_buffer;
+    if (next_ != NULL) {
+      chain_length_ = next_->chain_length_ + 1;
+    } else {
+      chain_length_ = 1;
+    }
+  }
+
+  void Add(ObjectSlot slot) {
+    ASSERT(0 <= idx_ && idx_ < kNumberOfElements);
+    AsArray()[idx_++] = slot;
+    ASSERT(reinterpret_cast<Address>(AsArray() + idx_) <=
+           reinterpret_cast<Address>(this) + kSizeWords * kPointerSize);
+  }
+
+  void UpdateSlots();
+
+  SlotsBuffer* next() { return next_; }
+
+  static int SizeOfChain(SlotsBuffer* buffer) {
+    if (buffer == NULL) return 0;
+    return buffer->idx_ + (buffer->chain_length_ - 1) * kNumberOfElements;
+  }
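[Worked example: with kSizeWords = 1024 and kHeaderSizeWords = 3, each buffer holds kNumberOfElements = 1021 slots. Only the head buffer can be partially filled, because a new head is linked in as soon as the old one fills up; so for a chain of length 3 whose head has idx_ = 100, SizeOfChain returns 100 + (3 - 1) * 1021 = 2142 recorded slots.]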
+
+  inline bool IsFull() {
+    return idx_ == kNumberOfElements;
+  }
+
+  static void UpdateSlotsRecordedIn(SlotsBuffer* buffer) {
+    while (buffer != NULL) {
+      buffer->UpdateSlots();
+      buffer = buffer->next();
+    }
+  }
+
+  enum AdditionMode {
+    FAIL_ON_OVERFLOW,
+    IGNORE_OVERFLOW
+  };
+
+  static bool AddTo(SlotsBufferAllocator* allocator,
+                    SlotsBuffer** buffer_address,
+                    ObjectSlot slot,
+                    AdditionMode mode) {
+    SlotsBuffer* buffer = *buffer_address;
+    if (buffer == NULL || buffer->IsFull()) {
+      if (mode == FAIL_ON_OVERFLOW &&
+          buffer != NULL &&
+          buffer->chain_length_ >= kChainLengthThreshold) {
+        allocator->DeallocateChain(buffer_address);
+        return false;
+      }
+      buffer = allocator->AllocateBuffer(buffer);
+      *buffer_address = buffer;
+    }
+    buffer->Add(slot);
+    return true;
+  }
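[Note on the two modes: with IGNORE_OVERFLOW the chain grows without bound and AddTo always returns true; with FAIL_ON_OVERFLOW, once a chain already holds kChainLengthThreshold (6) buffers the whole chain is freed and the caller is told that recording failed (see RecordSlot below). A hypothetical call site for the ignore-overflow mode, assuming for illustration that the collector fields declared at the end of this diff are accessible; RecordMigratedSlot is an invented name, not part of the patch:

  // Sketch: a slot created while migrating an object must always be
  // recorded, so overflow is ignored and this AddTo call cannot fail.
  void RecordMigratedSlot(MarkCompactCollector* collector, Object** slot) {
    SlotsBuffer::AddTo(&collector->slots_buffer_allocator_,
                       &collector->migration_slots_buffer_,
                       slot,
                       SlotsBuffer::IGNORE_OVERFLOW);
  }]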
+
+  static const int kHeaderSizeWords = 3;
+  static const int kSizeWords = 1024;
+  static const int kNumberOfElements = kSizeWords - kHeaderSizeWords;
  private:
-  static const int kBufferSize = 1024;
+  ObjectSlot* AsArray() {
+    return reinterpret_cast<ObjectSlot*>(this + 1);

[Erik Corry, 2011/07/08 13:02:38: This casting seems unnecessary. You could just ha…]

+  }
-  List<ObjectSlot*> buffers_;
-  ObjectSlot* buffer_;
+  static const int kChainLengthThreshold = 6;
-  int idx_;
-  int buffer_idx_;
+  intptr_t idx_;
+  intptr_t chain_length_;
+  SlotsBuffer* next_;
 };
+STATIC_ASSERT(SlotsBuffer::kHeaderSizeWords * kPointerSize ==
+              sizeof(SlotsBuffer));
+
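[Note: this assert pins down the layout that Add() and AsArray() depend on. The three header words are idx_, chain_length_ and next_; AsArray()'s "this + 1" points at the first byte past that header, so the kNumberOfElements slot array occupies the rest of the kSizeWords * kPointerSize block handed out by the allocator, which is exactly what the bounds ASSERT in Add() re-checks.]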
+
 // -------------------------------------------------------------------------
 // Mark-Compact collector
 class MarkCompactCollector {
@@ -382,9 +457,9 @@ class MarkCompactCollector {
   // Return a number of reclaimed bytes.
   static int SweepConservatively(PagedSpace* space, Page* p);
-  INLINE(static bool IsOnEvacuationCandidateOrInNewSpace(Object** anchor)) {
+  INLINE(static bool ShouldSkipEvacuationSlotRecording(Object** anchor)) {
     return Page::FromAddress(reinterpret_cast<Address>(anchor))->
-        IsEvacuationCandidateOrNewSpace();
+        ShouldSkipEvacuationSlotRecording();
   }
   INLINE(static bool IsOnEvacuationCandidate(Object* obj)) {
@@ -393,9 +468,26 @@ class MarkCompactCollector {
   }
   INLINE(void RecordSlot(Object** anchor_slot, Object** slot, Object* object)) {
-    if (IsOnEvacuationCandidate(object) &&
-        !IsOnEvacuationCandidateOrInNewSpace(anchor_slot)) {
-      slots_buffer_.Add(slot);
+    Page* object_page = Page::FromAddress(reinterpret_cast<Address>(object));
+    if (object_page->IsEvacuationCandidate() &&
+        !ShouldSkipEvacuationSlotRecording(anchor_slot)) {
+      if (!SlotsBuffer::AddTo(&slots_buffer_allocator_,
+                              object_page->slots_buffer_address(),
+                              slot,
+                              SlotsBuffer::FAIL_ON_OVERFLOW)) {
+        if (FLAG_trace_fragmentation) {
+          PrintF("Page %p is too popular. Disabling evacuation.\n",
+                 reinterpret_cast<void*>(object_page));
+        }
+        // TODO(gc) If all evacuation candidates are too popular we
+        // should stop slots recording entirely.
+        object_page->ClearEvacuationCandidate();
+        if (object_page->owner()->identity() == OLD_DATA_SPACE) {

[Erik Corry, 2011/07/08 13:02:38: Comment required!]

+          evacuation_candidates_.RemoveElement(object_page);
+        } else {
+          object_page->SetFlag(Page::RESCAN_ON_EVACUATION);
+        }
+      }
     }
   }
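[Note on the branch the reviewer flags: the patch itself does not say why OLD_DATA_SPACE is special, but a plausible reading is that data-space pages contain no outgoing pointers, so a too-popular data page can simply be dropped from evacuation_candidates_. A pointer-containing page, by contrast, may itself hold unrecorded pointers into other candidates (ShouldSkipEvacuationSlotRecording suppresses recording inside candidates), so it is flagged RESCAN_ON_EVACUATION and rescanned when pointers are updated.]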
@@ -437,7 +529,9 @@ class MarkCompactCollector {
   // collection (NULL before and after).
   GCTracer* tracer_;
-  SlotsBuffer slots_buffer_;
+  SlotsBufferAllocator slots_buffer_allocator_;
+
+  SlotsBuffer* migration_slots_buffer_;
   // Finishes GC, performs heap verification if enabled.
   void Finish();