OLD | NEW |
1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_REMEMBERED_SET_H | 5 #ifndef V8_REMEMBERED_SET_H |
6 #define V8_REMEMBERED_SET_H | 6 #define V8_REMEMBERED_SET_H |
7 | 7 |
8 #include "src/heap/heap.h" | 8 #include "src/heap/heap.h" |
9 #include "src/heap/slot-set.h" | 9 #include "src/heap/slot-set.h" |
10 #include "src/heap/spaces.h" | 10 #include "src/heap/spaces.h" |
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
49 uintptr_t start_offset = start - page->address(); | 49 uintptr_t start_offset = start - page->address(); |
50 uintptr_t end_offset = end - page->address(); | 50 uintptr_t end_offset = end - page->address(); |
51 DCHECK_LT(start_offset, end_offset); | 51 DCHECK_LT(start_offset, end_offset); |
52 DCHECK_LE(end_offset, static_cast<uintptr_t>(Page::kPageSize)); | 52 DCHECK_LE(end_offset, static_cast<uintptr_t>(Page::kPageSize)); |
53 slot_set->RemoveRange(static_cast<uint32_t>(start_offset), | 53 slot_set->RemoveRange(static_cast<uint32_t>(start_offset), |
54 static_cast<uint32_t>(end_offset)); | 54 static_cast<uint32_t>(end_offset)); |
55 } | 55 } |
56 } | 56 } |
57 | 57 |
58 // Iterates and filters the remembered set with the given callback. | 58 // Iterates and filters the remembered set with the given callback. |
59 // The callback should take (Address slot) and return SlotSet::CallbackResult. | 59 // The callback should take (Address slot) and return SlotCallbackResult. |
60 template <typename Callback> | 60 template <typename Callback> |
61 static void Iterate(Heap* heap, Callback callback) { | 61 static void Iterate(Heap* heap, Callback callback) { |
62 PointerChunkIterator it(heap); | 62 MemoryChunkIterator it(heap, direction == OLD_TO_OLD |
| 63 ? MemoryChunkIterator::ALL |
| 64 : MemoryChunkIterator::ALL_BUT_CODE_SPACE); |
63 MemoryChunk* chunk; | 65 MemoryChunk* chunk; |
64 while ((chunk = it.next()) != nullptr) { | 66 while ((chunk = it.next()) != nullptr) { |
65 SlotSet* slots = GetSlotSet(chunk); | 67 SlotSet* slots = GetSlotSet(chunk); |
66 if (slots != nullptr) { | 68 if (slots != nullptr) { |
67 size_t pages = (chunk->size() + Page::kPageSize - 1) / Page::kPageSize; | 69 size_t pages = (chunk->size() + Page::kPageSize - 1) / Page::kPageSize; |
68 int new_count = 0; | 70 int new_count = 0; |
69 for (size_t page = 0; page < pages; page++) { | 71 for (size_t page = 0; page < pages; page++) { |
70 new_count += slots[page].Iterate(callback); | 72 new_count += slots[page].Iterate(callback); |
71 } | 73 } |
72 if (new_count == 0) { | 74 if (new_count == 0) { |
73 ReleaseSlotSet(chunk); | 75 ReleaseSlotSet(chunk); |
74 } | 76 } |
75 } | 77 } |
76 } | 78 } |
77 } | 79 } |
78 | 80 |
79 // Iterates and filters the remembered set with the given callback. | 81 // Iterates and filters the remembered set with the given callback. |
80 // The callback should take (HeapObject** slot, HeapObject* target) and | 82 // The callback should take (HeapObject** slot, HeapObject* target) and |
81 // update the slot. | 83 // update the slot. |
82 // A special wrapper takes care of filtering the slots based on their values. | 84 // A special wrapper takes care of filtering the slots based on their values. |
83 // For OLD_TO_NEW case: slots that do not point to the ToSpace after | 85 // For OLD_TO_NEW case: slots that do not point to the ToSpace after |
84 // callback invocation will be removed from the set. | 86 // callback invocation will be removed from the set. |
85 template <typename Callback> | 87 template <typename Callback> |
86 static void IterateWithWrapper(Heap* heap, Callback callback) { | 88 static void IterateWithWrapper(Heap* heap, Callback callback) { |
87 Iterate(heap, [heap, callback](Address addr) { | 89 Iterate(heap, [heap, callback](Address addr) { |
88 return Wrapper(heap, addr, callback); | 90 return Wrapper(heap, addr, callback); |
89 }); | 91 }); |
90 } | 92 } |
91 | 93 |
| 94 // Given a page and a typed slot in that page, this function adds the slot |
| 95 // to the remembered set. |
| 96 static void InsertTyped(Page* page, SlotType slot_type, Address slot_addr) { |
| 97 STATIC_ASSERT(direction == OLD_TO_OLD); |
| 98 TypedSlotSet* slot_set = page->typed_old_to_old_slots(); |
| 99 if (slot_set == nullptr) { |
| 100 page->AllocateTypedOldToOldSlots(); |
| 101 slot_set = page->typed_old_to_old_slots(); |
| 102 } |
| 103 uintptr_t offset = slot_addr - page->address(); |
| 104 DCHECK_LT(offset, static_cast<uintptr_t>(TypedSlotSet::kMaxOffset)); |
| 105 slot_set->Insert(slot_type, static_cast<uint32_t>(offset)); |
| 106 } |
| 107 |
| 108 // Given a page and a range of typed slots in that page, this function removes |
| 109 // the slots from the remembered set. |
| 110 static void RemoveRangeTyped(Page* page, Address start, Address end) { |
| 111 TypedSlotSet* slots = page->typed_old_to_old_slots(); |
| 112 if (slots != nullptr) { |
| 113 slots->Iterate([start, end](SlotType slot_type, Address slot_addr) { |
| 114 return start <= slot_addr && slot_addr < end ? REMOVE_SLOT : KEEP_SLOT; |
| 115 }); |
| 116 } |
| 117 } |
| 118 |
| 119 // Iterates and filters typed old to old pointers with the given callback. |
| 120 // The callback should take (SlotType slot_type, Address slot_addr) and |
| 121 // return SlotCallbackResult. |
| 122 template <typename Callback> |
| 123 static void IterateTyped(Heap* heap, Callback callback) { |
| 124 MemoryChunkIterator it(heap, MemoryChunkIterator::ALL_BUT_MAP_SPACE); |
| 125 MemoryChunk* chunk; |
| 126 while ((chunk = it.next()) != nullptr) { |
| 127 TypedSlotSet* slots = chunk->typed_old_to_old_slots(); |
| 128 if (slots != nullptr) { |
| 129 int new_count = slots->Iterate(callback); |
| 130 if (new_count == 0) { |
| 131 chunk->ReleaseTypedOldToOldSlots(); |
| 132 } |
| 133 } |
| 134 } |
| 135 } |
| 136 |
| 137 // Clear all old to old slots from the remembered set. |
| 138 static void ClearAll(Heap* heap) { |
| 139 STATIC_ASSERT(direction == OLD_TO_OLD); |
| 140 MemoryChunkIterator it(heap, MemoryChunkIterator::ALL); |
| 141 MemoryChunk* chunk; |
| 142 while ((chunk = it.next()) != nullptr) { |
| 143 chunk->ReleaseOldToOldSlots(); |
| 144 chunk->ReleaseTypedOldToOldSlots(); |
| 145 } |
| 146 } |
| 147 |
92 // Eliminates all stale slots from the remembered set, i.e. | 148 // Eliminates all stale slots from the remembered set, i.e. |
93 // slots that are not part of live objects anymore. This method must be | 149 // slots that are not part of live objects anymore. This method must be |
94 // called after marking, when the whole transitive closure is known and | 150 // called after marking, when the whole transitive closure is known and |
95 // must be called before sweeping when mark bits are still intact. | 151 // must be called before sweeping when mark bits are still intact. |
96 static void ClearInvalidSlots(Heap* heap); | 152 static void ClearInvalidSlots(Heap* heap); |
97 | 153 |
98 static void VerifyValidSlots(Heap* heap); | 154 static void VerifyValidSlots(Heap* heap); |
99 | 155 |
100 private: | 156 private: |
101 static SlotSet* GetSlotSet(MemoryChunk* chunk) { | 157 static SlotSet* GetSlotSet(MemoryChunk* chunk) { |
(...skipping 16 matching lines...) Expand all Loading... |
118 if (direction == OLD_TO_OLD) { | 174 if (direction == OLD_TO_OLD) { |
119 chunk->AllocateOldToOldSlots(); | 175 chunk->AllocateOldToOldSlots(); |
120 return chunk->old_to_old_slots(); | 176 return chunk->old_to_old_slots(); |
121 } else { | 177 } else { |
122 chunk->AllocateOldToNewSlots(); | 178 chunk->AllocateOldToNewSlots(); |
123 return chunk->old_to_new_slots(); | 179 return chunk->old_to_new_slots(); |
124 } | 180 } |
125 } | 181 } |
126 | 182 |
127 template <typename Callback> | 183 template <typename Callback> |
128 static SlotSet::CallbackResult Wrapper(Heap* heap, Address slot_address, | 184 static SlotCallbackResult Wrapper(Heap* heap, Address slot_address, |
129 Callback slot_callback) { | 185 Callback slot_callback) { |
130 STATIC_ASSERT(direction == OLD_TO_NEW); | 186 STATIC_ASSERT(direction == OLD_TO_NEW); |
131 Object** slot = reinterpret_cast<Object**>(slot_address); | 187 Object** slot = reinterpret_cast<Object**>(slot_address); |
132 Object* object = *slot; | 188 Object* object = *slot; |
133 if (heap->InFromSpace(object)) { | 189 if (heap->InFromSpace(object)) { |
134 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); | 190 HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); |
135 DCHECK(heap_object->IsHeapObject()); | 191 DCHECK(heap_object->IsHeapObject()); |
136 slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); | 192 slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); |
137 object = *slot; | 193 object = *slot; |
138 // If the object was in from space before and is after executing the | 194 // If the object was in from space before and is after executing the |
139 // callback in to space, the object is still live. | 195 // callback in to space, the object is still live. |
140 // Unfortunately, we do not know about the slot. It could be in a | 196 // Unfortunately, we do not know about the slot. It could be in a |
141 // just freed free space object. | 197 // just freed free space object. |
142 if (heap->InToSpace(object)) { | 198 if (heap->InToSpace(object)) { |
143 return SlotSet::KEEP_SLOT; | 199 return KEEP_SLOT; |
144 } | 200 } |
145 } else { | 201 } else { |
146 DCHECK(!heap->InNewSpace(object)); | 202 DCHECK(!heap->InNewSpace(object)); |
147 } | 203 } |
148 return SlotSet::REMOVE_SLOT; | 204 return REMOVE_SLOT; |
149 } | 205 } |
150 | 206 |
151 static bool IsValidSlot(Heap* heap, Object** slot); | 207 static bool IsValidSlot(Heap* heap, Object** slot); |
152 }; | 208 }; |
153 | 209 |
 | 210 // Buffer for keeping thread local migration slots during compaction. |
| 211 // TODO(ulan): Remove this once every thread gets local pages in compaction |
| 212 // space. |
| 213 class LocalSlotsBuffer BASE_EMBEDDED { |
| 214 public: |
| 215 LocalSlotsBuffer() : top_(new Node(nullptr)) {} |
| 216 |
| 217 ~LocalSlotsBuffer() { |
| 218 Node* current = top_; |
| 219 while (current != nullptr) { |
| 220 Node* tmp = current->next; |
| 221 delete current; |
| 222 current = tmp; |
| 223 } |
| 224 } |
| 225 |
| 226 void Record(Address addr) { |
| 227 EnsureSpaceFor(1); |
| 228 uintptr_t entry = reinterpret_cast<uintptr_t>(addr); |
| 229 DCHECK_GE(entry, static_cast<uintptr_t>(NUMBER_OF_SLOT_TYPES)); |
| 230 Insert(entry); |
| 231 } |
| 232 |
| 233 void Record(SlotType type, Address addr) { |
| 234 EnsureSpaceFor(2); |
| 235 Insert(static_cast<uintptr_t>(type)); |
| 236 uintptr_t entry = reinterpret_cast<uintptr_t>(addr); |
| 237 DCHECK_GE(entry, static_cast<uintptr_t>(NUMBER_OF_SLOT_TYPES)); |
| 238 Insert(entry); |
| 239 } |
| 240 |
| 241 template <typename UntypedCallback, typename TypedCallback> |
| 242 void Iterate(UntypedCallback untyped_callback, TypedCallback typed_callback) { |
| 243 Node* current = top_; |
| 244 bool typed = false; |
| 245 SlotType type; |
| 246 Address addr; |
| 247 while (current != nullptr) { |
| 248 for (int i = 0; i < current->count; i++) { |
| 249 uintptr_t entry = current->buffer[i]; |
| 250 if (entry < NUMBER_OF_SLOT_TYPES) { |
| 251 DCHECK(!typed); |
| 252 typed = true; |
| 253 type = static_cast<SlotType>(entry); |
| 254 } else { |
| 255 addr = reinterpret_cast<Address>(entry); |
| 256 if (typed) { |
| 257 typed_callback(type, addr); |
| 258 typed = false; |
| 259 } else { |
| 260 untyped_callback(addr); |
| 261 } |
| 262 } |
| 263 } |
| 264 current = current->next; |
| 265 } |
| 266 } |
| 267 |
| 268 private: |
| 269 void EnsureSpaceFor(int count) { |
| 270 if (top_->remaining_free_slots() < count) top_ = new Node(top_); |
| 271 } |
| 272 |
| 273 void Insert(uintptr_t entry) { top_->buffer[top_->count++] = entry; } |
| 274 |
| 275 static const int kBufferSize = 16 * KB; |
| 276 |
| 277 struct Node : Malloced { |
| 278 explicit Node(Node* next_node) : next(next_node), count(0) {} |
| 279 |
| 280 inline int remaining_free_slots() { return kBufferSize - count; } |
| 281 |
| 282 Node* next; |
| 283 uintptr_t buffer[kBufferSize]; |
| 284 int count; |
| 285 }; |
| 286 |
| 287 Node* top_; |
| 288 }; |
| 289 |
154 } // namespace internal | 290 } // namespace internal |
155 } // namespace v8 | 291 } // namespace v8 |
156 | 292 |
157 #endif // V8_REMEMBERED_SET_H | 293 #endif // V8_REMEMBERED_SET_H |
OLD | NEW |