Chromium Code Reviews| Index: src/heap/remembered-set.h |
| diff --git a/src/heap/remembered-set.h b/src/heap/remembered-set.h |
| index 351d76edb84c18706da7fede9003948d75562a59..78094b768d149be44f5d0ebf17ed0251dca30748 100644 |
| --- a/src/heap/remembered-set.h |
| +++ b/src/heap/remembered-set.h |
| @@ -56,10 +56,12 @@ class RememberedSet { |
| } |
| // Iterates and filters the remembered set with the given callback. |
| - // The callback should take (Address slot) and return SlotSet::CallbackResult. |
| + // The callback should take (Address slot) and return SlotCallbackResult. |
| template <typename Callback> |
| static void Iterate(Heap* heap, Callback callback) { |
| - PointerChunkIterator it(heap); |
| + MemoryChunkIterator it(heap, direction == OLD_TO_OLD |
| + ? MemoryChunkIterator::ALL |
| + : MemoryChunkIterator::ALL_BUT_CODE_SPACE); |
| MemoryChunk* chunk; |
| while ((chunk = it.next()) != nullptr) { |
| SlotSet* slots = GetSlotSet(chunk); |
| @@ -89,6 +91,60 @@ class RememberedSet { |
| }); |
| } |
| + // Given a page and a typed slot in that page, this function adds the slot |
| + // to the remembered set. |
| + static void InsertTyped(Page* page, SlotType slot_type, Address slot_addr) { |
| + STATIC_ASSERT(direction == OLD_TO_OLD); |
| + TypedSlotSet* slot_set = page->typed_old_to_old_slots(); |
| + if (slot_set == nullptr) { |
| + page->AllocateTypedOldToOldSlots(); |
| + slot_set = page->typed_old_to_old_slots(); |
| + } |
| + uintptr_t offset = slot_addr - page->address(); |
| + DCHECK_LT(offset, static_cast<uintptr_t>(TypedSlotSet::kMaxOffset)); |
| + slot_set->Insert(slot_type, static_cast<uint32_t>(offset)); |
| + } |
| + |
| + // Given a page and a range of typed slots in that page, this function removes |
| + // the slots from the remembered set. |
| + static void RemoveRangeTyped(Page* page, Address start, Address end) { |
| + TypedSlotSet* slots = page->typed_old_to_old_slots(); |
| + if (slots != nullptr) { |
| + slots->Iterate([start, end](SlotType slot_type, Address slot_addr) { |
| + return start <= slot_addr && slot_addr < end ? REMOVE_SLOT : KEEP_SLOT; |
| + }); |
| + } |
| + } |
| + |
| + // Iterates and filters typed old to old pointers with the given callback. |
| + // The callback should take (SlotType slot_type, Address slot_addr) and |
| + // return SlotCallbackResult. |
| + template <typename Callback> |
| + static void IterateTyped(Heap* heap, Callback callback) { |
| + MemoryChunkIterator it(heap, MemoryChunkIterator::ALL_BUT_MAP_SPACE); |
| + MemoryChunk* chunk; |
| + while ((chunk = it.next()) != nullptr) { |
| + TypedSlotSet* slots = chunk->typed_old_to_old_slots(); |
| + if (slots != nullptr) { |
| + int new_count = slots->Iterate(callback); |
| + if (new_count == 0) { |
| + chunk->ReleaseTypedOldToOldSlots(); |
| + } |
| + } |
| + } |
| + } |
| + |
| + // Clear all old to old slots from the remembered set. |
| + static void ClearAll(Heap* heap) { |
| + STATIC_ASSERT(direction == OLD_TO_OLD); |
| + MemoryChunkIterator it(heap, MemoryChunkIterator::ALL); |
| + MemoryChunk* chunk; |
| + while ((chunk = it.next()) != nullptr) { |
| + chunk->ReleaseOldToOldSlots(); |
| + chunk->ReleaseTypedOldToOldSlots(); |
| + } |
| + } |
| + |
| // Eliminates all stale slots from the remembered set, i.e. |
| // slots that are not part of live objects anymore. This method must be |
| // called after marking, when the whole transitive closure is known and |
| @@ -125,8 +181,8 @@ class RememberedSet { |
| } |
| template <typename Callback> |
| - static SlotSet::CallbackResult Wrapper(Heap* heap, Address slot_address, |
| - Callback slot_callback) { |
| + static SlotCallbackResult Wrapper(Heap* heap, Address slot_address, |
| + Callback slot_callback) { |
| STATIC_ASSERT(direction == OLD_TO_NEW); |
| Object** slot = reinterpret_cast<Object**>(slot_address); |
| Object* object = *slot; |
| @@ -140,17 +196,97 @@ class RememberedSet { |
| // Unfortunately, we do not know about the slot. It could be in a |
| // just freed free space object. |
| if (heap->InToSpace(object)) { |
| - return SlotSet::KEEP_SLOT; |
| + return KEEP_SLOT; |
| } |
| } else { |
| DCHECK(!heap->InNewSpace(object)); |
| } |
| - return SlotSet::REMOVE_SLOT; |
| + return REMOVE_SLOT; |
| } |
| static bool IsValidSlot(Heap* heap, Object** slot); |
| }; |
| +// Buffer for keeping thread local migration slots during compaction. |
|
jochen (gone - plz use gerrit)
2016/02/23 13:10:54
compaction
ulan
2016/02/23 13:19:07
Done.
|
| +// TODO(ulan): Remove this once every thread gets local pages in compaction |
| +// space. |
| +class LocalSlotsBuffer BASE_EMBEDDED { |
| + public: |
| + LocalSlotsBuffer() : top_(new Node(nullptr)) {} |
| + |
| + ~LocalSlotsBuffer() { |
| + Node* current = top_; |
| + while (current != nullptr) { |
| + Node* tmp = current->next; |
| + delete current; |
| + current = tmp; |
| + } |
| + } |
| + |
| + void Record(Address addr) { |
| + EnsureSpaceFor(1); |
| + uintptr_t entry = reinterpret_cast<uintptr_t>(addr); |
| + DCHECK_GT(entry, static_cast<uintptr_t>(NUMBER_OF_SLOT_TYPES)); |
|
jochen (gone - plz use gerrit)
2016/02/23 13:10:54
GE would be enough, no?
ulan
2016/02/23 13:19:07
Yes. Done.
|
| + Insert(entry); |
| + } |
| + |
| + void Record(SlotType type, Address addr) { |
| + EnsureSpaceFor(2); |
| + Insert(static_cast<uintptr_t>(type)); |
| + uintptr_t entry = reinterpret_cast<uintptr_t>(addr); |
| + DCHECK_GT(entry, static_cast<uintptr_t>(NUMBER_OF_SLOT_TYPES)); |
| + Insert(entry); |
| + } |
| + |
| + template <typename UntypedCallback, typename TypedCallback> |
| + void Iterate(UntypedCallback untyped_callback, TypedCallback typed_callback) { |
| + Node* current = top_; |
| + bool typed = false; |
| + SlotType type; |
| + Address addr; |
| + while (current != nullptr) { |
| + for (int i = 0; i < current->count; i++) { |
| + uintptr_t entry = current->buffer[i]; |
| + if (entry < NUMBER_OF_SLOT_TYPES) { |
| + DCHECK(!typed); |
| + typed = true; |
| + type = static_cast<SlotType>(entry); |
| + } else { |
| + addr = reinterpret_cast<Address>(entry); |
| + if (typed) { |
| + typed_callback(type, addr); |
| + typed = false; |
| + } else { |
| + untyped_callback(addr); |
| + } |
| + } |
| + } |
| + current = current->next; |
| + } |
| + } |
| + |
| + private: |
| + void EnsureSpaceFor(int count) { |
| + if (top_->remaining_free_slots() < count) top_ = new Node(top_); |
| + } |
| + |
| + void Insert(uintptr_t entry) { top_->buffer[top_->count++] = entry; } |
| + |
| + static const int kBufferSize = 16 * KB; |
| + |
| + struct Node : Malloced { |
| + explicit Node(Node* next_node) : next(next_node), count(0) {} |
| + |
| + inline int remaining_free_slots() { return kBufferSize - count; } |
| + |
| + Node* next; |
| + uintptr_t buffer[kBufferSize]; |
| + int count; |
| + }; |
| + |
| + Node* top_; |
| +}; |
| + |
| } // namespace internal |
| } // namespace v8 |