OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/heap/objects-visiting.h" | 7 #include "src/heap/objects-visiting.h" |
8 | 8 |
9 namespace v8 { | 9 namespace v8 { |
10 namespace internal { | 10 namespace internal { |
(...skipping 173 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
184 return heap->gc_state() == Heap::MARK_COMPACT && | 184 return heap->gc_state() == Heap::MARK_COMPACT && |
185 heap->mark_compact_collector()->is_compacting(); | 185 heap->mark_compact_collector()->is_compacting(); |
186 } | 186 } |
187 | 187 |
188 | 188 |
189 template <class T> | 189 template <class T> |
190 struct WeakListVisitor; | 190 struct WeakListVisitor; |
191 | 191 |
192 | 192 |
193 template <class T> | 193 template <class T> |
194 Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) { | 194 Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer, |
| 195 bool stop_after_young) { |
195 Object* undefined = heap->undefined_value(); | 196 Object* undefined = heap->undefined_value(); |
196 Object* head = undefined; | 197 Object* head = undefined; |
197 T* tail = NULL; | 198 T* tail = NULL; |
198 MarkCompactCollector* collector = heap->mark_compact_collector(); | 199 MarkCompactCollector* collector = heap->mark_compact_collector(); |
199 bool record_slots = MustRecordSlots(heap); | 200 bool record_slots = MustRecordSlots(heap); |
| 201 |
200 while (list != undefined) { | 202 while (list != undefined) { |
201 // Check whether to keep the candidate in the list. | 203 // Check whether to keep the candidate in the list. |
202 T* candidate = reinterpret_cast<T*>(list); | 204 T* candidate = reinterpret_cast<T*>(list); |
| 205 T* original_candidate = candidate; |
| 206 |
203 Object* retained = retainer->RetainAs(list); | 207 Object* retained = retainer->RetainAs(list); |
204 if (retained != NULL) { | 208 if (retained != NULL) { |
205 if (head == undefined) { | 209 if (head == undefined) { |
206 // First element in the list. | 210 // First element in the list. |
207 head = retained; | 211 head = retained; |
208 } else { | 212 } else { |
209 // Subsequent elements in the list. | 213 // Subsequent elements in the list. |
210 DCHECK(tail != NULL); | 214 DCHECK(tail != NULL); |
211 WeakListVisitor<T>::SetWeakNext(tail, retained); | 215 WeakListVisitor<T>::SetWeakNext(tail, retained); |
212 if (record_slots) { | 216 if (record_slots) { |
213 Object** next_slot = | 217 Object** next_slot = |
214 HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset()); | 218 HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset()); |
215 collector->RecordSlot(next_slot, next_slot, retained); | 219 collector->RecordSlot(next_slot, next_slot, retained); |
216 } | 220 } |
217 } | 221 } |
218 // Retained object is new tail. | 222 // Retained object is new tail. |
219 DCHECK(!retained->IsUndefined()); | 223 DCHECK(!retained->IsUndefined()); |
220 candidate = reinterpret_cast<T*>(retained); | 224 candidate = reinterpret_cast<T*>(retained); |
221 tail = candidate; | 225 tail = candidate; |
222 | 226 |
223 | |
224 // tail is a live object, visit it. | 227 // tail is a live object, visit it. |
225 WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer); | 228 WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer); |
| 229 |
 | 230 // The list of weak objects is usually ordered. It starts with objects |
| 231 // recently allocated in the young generation followed by objects |
| 232 // allocated in the old generation. When a migration failure happened, |
| 233 // the list is not ordered until the next GC that has no migration |
| 234 // failure. |
| 235 // For young generation collections we just have to visit until the last |
 | 236 // young generation object. |
| 237 if (stop_after_young && !heap->migration_failure() && |
| 238 !heap->previous_migration_failure() && |
| 239 !heap->InNewSpace(original_candidate)) { |
| 240 return head; |
| 241 } |
226 } else { | 242 } else { |
227 WeakListVisitor<T>::VisitPhantomObject(heap, candidate); | 243 WeakListVisitor<T>::VisitPhantomObject(heap, candidate); |
228 } | 244 } |
229 | 245 |
230 // Move to next element in the list. | 246 // Move to next element in the list. |
231 list = WeakListVisitor<T>::WeakNext(candidate); | 247 list = WeakListVisitor<T>::WeakNext(candidate); |
232 } | 248 } |
233 | 249 |
234 // Terminate the list if there is one or more elements. | 250 // Terminate the list if there is one or more elements. |
235 if (tail != NULL) { | 251 if (tail != NULL) { |
236 WeakListVisitor<T>::SetWeakNext(tail, undefined); | 252 WeakListVisitor<T>::SetWeakNext(tail, undefined); |
237 } | 253 } |
238 return head; | 254 return head; |
239 } | 255 } |
240 | 256 |
241 | 257 |
| 258 Object* VisitNewArrayBufferViewsWeakList(Heap* heap, Object* list, |
| 259 WeakObjectRetainer* retainer) { |
| 260 Object* undefined = heap->undefined_value(); |
| 261 Object* previous = undefined; |
| 262 Object* head = undefined; |
| 263 Object* next; |
| 264 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 265 bool record_slots = MustRecordSlots(heap); |
| 266 |
| 267 for (Object* o = list; o != undefined;) { |
| 268 JSArrayBufferView* view = JSArrayBufferView::cast(o); |
| 269 next = view->weak_next(); |
| 270 if (!heap->InNewSpace(view)) { |
| 271 if (previous != undefined) { |
| 272 // We are in the middle of the list, skip the old space element. |
| 273 JSArrayBufferView* previous_view = JSArrayBufferView::cast(previous); |
| 274 previous_view->set_weak_next(next); |
| 275 if (record_slots) { |
| 276 Object** next_slot = HeapObject::RawField( |
| 277 previous_view, JSArrayBufferView::kWeakNextOffset); |
| 278 collector->RecordSlot(next_slot, next_slot, next); |
| 279 } |
| 280 } |
| 281 JSArrayBuffer* buffer = JSArrayBuffer::cast(view->buffer()); |
| 282 view->set_weak_next(buffer->weak_first_view()); |
| 283 if (record_slots) { |
| 284 Object** next_slot = |
| 285 HeapObject::RawField(view, JSArrayBufferView::kWeakNextOffset); |
| 286 collector->RecordSlot(next_slot, next_slot, buffer->weak_first_view()); |
| 287 } |
| 288 buffer->set_weak_first_view(view); |
| 289 if (record_slots) { |
| 290 Object** slot = |
| 291 HeapObject::RawField(buffer, JSArrayBuffer::kWeakFirstViewOffset); |
| 292 heap->mark_compact_collector()->RecordSlot(slot, slot, view); |
| 293 } |
| 294 } else { |
| 295 // We found a valid new space view, remember it. |
| 296 previous = view; |
| 297 if (head == undefined) { |
| 298 // We are at the list head. |
| 299 head = view; |
| 300 } |
| 301 } |
| 302 o = next; |
| 303 } |
| 304 return head; |
| 305 } |
| 306 |
| 307 |
242 template <class T> | 308 template <class T> |
243 static void ClearWeakList(Heap* heap, Object* list) { | 309 static void ClearWeakList(Heap* heap, Object* list) { |
244 Object* undefined = heap->undefined_value(); | 310 Object* undefined = heap->undefined_value(); |
245 while (list != undefined) { | 311 while (list != undefined) { |
246 T* candidate = reinterpret_cast<T*>(list); | 312 T* candidate = reinterpret_cast<T*>(list); |
247 list = WeakListVisitor<T>::WeakNext(candidate); | 313 list = WeakListVisitor<T>::WeakNext(candidate); |
248 WeakListVisitor<T>::SetWeakNext(candidate, undefined); | 314 WeakListVisitor<T>::SetWeakNext(candidate, undefined); |
249 } | 315 } |
250 } | 316 } |
251 | 317 |
(...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
309 if (heap->gc_state() == Heap::MARK_COMPACT) { | 375 if (heap->gc_state() == Heap::MARK_COMPACT) { |
310 DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST); | 376 DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST); |
311 DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST); | 377 DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST); |
312 } | 378 } |
313 } | 379 } |
314 | 380 |
315 template <class T> | 381 template <class T> |
316 static void DoWeakList(Heap* heap, Context* context, | 382 static void DoWeakList(Heap* heap, Context* context, |
317 WeakObjectRetainer* retainer, int index) { | 383 WeakObjectRetainer* retainer, int index) { |
318 // Visit the weak list, removing dead intermediate elements. | 384 // Visit the weak list, removing dead intermediate elements. |
319 Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer); | 385 Object* list_head = |
| 386 VisitWeakList<T>(heap, context->get(index), retainer, false); |
320 | 387 |
321 // Update the list head. | 388 // Update the list head. |
322 context->set(index, list_head, UPDATE_WRITE_BARRIER); | 389 context->set(index, list_head, UPDATE_WRITE_BARRIER); |
323 | 390 |
324 if (MustRecordSlots(heap)) { | 391 if (MustRecordSlots(heap)) { |
325 // Record the updated slot if necessary. | 392 // Record the updated slot if necessary. |
326 Object** head_slot = | 393 Object** head_slot = |
327 HeapObject::RawField(context, FixedArray::SizeFor(index)); | 394 HeapObject::RawField(context, FixedArray::SizeFor(index)); |
328 heap->mark_compact_collector()->RecordSlot(head_slot, head_slot, | 395 heap->mark_compact_collector()->RecordSlot(head_slot, head_slot, |
329 list_head); | 396 list_head); |
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
361 obj->set_weak_next(next); | 428 obj->set_weak_next(next); |
362 } | 429 } |
363 | 430 |
364 static Object* WeakNext(JSArrayBuffer* obj) { return obj->weak_next(); } | 431 static Object* WeakNext(JSArrayBuffer* obj) { return obj->weak_next(); } |
365 | 432 |
366 static int WeakNextOffset() { return JSArrayBuffer::kWeakNextOffset; } | 433 static int WeakNextOffset() { return JSArrayBuffer::kWeakNextOffset; } |
367 | 434 |
368 static void VisitLiveObject(Heap* heap, JSArrayBuffer* array_buffer, | 435 static void VisitLiveObject(Heap* heap, JSArrayBuffer* array_buffer, |
369 WeakObjectRetainer* retainer) { | 436 WeakObjectRetainer* retainer) { |
370 Object* typed_array_obj = VisitWeakList<JSArrayBufferView>( | 437 Object* typed_array_obj = VisitWeakList<JSArrayBufferView>( |
371 heap, array_buffer->weak_first_view(), retainer); | 438 heap, array_buffer->weak_first_view(), retainer, false); |
372 array_buffer->set_weak_first_view(typed_array_obj); | 439 array_buffer->set_weak_first_view(typed_array_obj); |
373 if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) { | 440 if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) { |
374 Object** slot = HeapObject::RawField(array_buffer, | 441 Object** slot = HeapObject::RawField(array_buffer, |
375 JSArrayBuffer::kWeakFirstViewOffset); | 442 JSArrayBuffer::kWeakFirstViewOffset); |
376 heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj); | 443 heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj); |
377 } | 444 } |
378 } | 445 } |
379 | 446 |
380 static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) { | 447 static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) { |
381 Runtime::FreeArrayBuffer(heap->isolate(), phantom); | 448 Runtime::FreeArrayBuffer(heap->isolate(), phantom); |
(...skipping 10 matching lines...) Expand all Loading... |
392 static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); } | 459 static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); } |
393 | 460 |
394 static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; } | 461 static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; } |
395 | 462 |
396 static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {} | 463 static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {} |
397 | 464 |
398 static void VisitPhantomObject(Heap*, AllocationSite*) {} | 465 static void VisitPhantomObject(Heap*, AllocationSite*) {} |
399 }; | 466 }; |
400 | 467 |
401 | 468 |
402 template Object* VisitWeakList<Code>(Heap* heap, Object* list, | |
403 WeakObjectRetainer* retainer); | |
404 | |
405 | |
406 template Object* VisitWeakList<JSFunction>(Heap* heap, Object* list, | |
407 WeakObjectRetainer* retainer); | |
408 | |
409 | |
410 template Object* VisitWeakList<Context>(Heap* heap, Object* list, | 469 template Object* VisitWeakList<Context>(Heap* heap, Object* list, |
411 WeakObjectRetainer* retainer); | 470 WeakObjectRetainer* retainer, |
| 471 bool stop_after_young); |
412 | 472 |
413 | 473 |
414 template Object* VisitWeakList<JSArrayBuffer>(Heap* heap, Object* list, | 474 template Object* VisitWeakList<JSArrayBuffer>(Heap* heap, Object* list, |
415 WeakObjectRetainer* retainer); | 475 WeakObjectRetainer* retainer, |
| 476 bool stop_after_young); |
416 | 477 |
| 478 template Object* VisitWeakList<JSArrayBufferView>(Heap* heap, Object* list, |
| 479 WeakObjectRetainer* retainer, |
| 480 bool stop_after_young); |
417 | 481 |
418 template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list, | 482 template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list, |
419 WeakObjectRetainer* retainer); | 483 WeakObjectRetainer* retainer, |
| 484 bool stop_after_young); |
420 } | 485 } |
421 } // namespace v8::internal | 486 } // namespace v8::internal |
OLD | NEW |