OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/objects-visiting.h" | 5 #include "src/heap/objects-visiting.h" |
6 | 6 |
7 #include "src/heap/mark-compact-inl.h" | 7 #include "src/heap/mark-compact-inl.h" |
8 #include "src/heap/objects-visiting-inl.h" | 8 #include "src/heap/objects-visiting-inl.h" |
9 | 9 |
10 namespace v8 { | 10 namespace v8 { |
(...skipping 289 matching lines...) |
300 static int WeakNextOffset() { | 300 static int WeakNextOffset() { |
301 return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK); | 301 return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK); |
302 } | 302 } |
303 | 303 |
304 static void VisitLiveObject(Heap* heap, Context* context, | 304 static void VisitLiveObject(Heap* heap, Context* context, |
305 WeakObjectRetainer* retainer) { | 305 WeakObjectRetainer* retainer) { |
306 // Process the three weak lists linked off the context. | 306 // Process the three weak lists linked off the context. |
307 DoWeakList<JSFunction>(heap, context, retainer, | 307 DoWeakList<JSFunction>(heap, context, retainer, |
308 Context::OPTIMIZED_FUNCTIONS_LIST); | 308 Context::OPTIMIZED_FUNCTIONS_LIST); |
309 | 309 |
310 // Code objects are always allocated in Code space, we do not have to visit | |
311 // them during scavenges. | |
312 if (heap->gc_state() == Heap::MARK_COMPACT) { | 310 if (heap->gc_state() == Heap::MARK_COMPACT) { |
| 311 // Record the slots of the weak entries in the native context. |
| 312 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 313 for (int idx = Context::FIRST_WEAK_SLOT; |
| 314 idx < Context::NATIVE_CONTEXT_SLOTS; ++idx) { |
| 315 Object** slot = Context::cast(context)->RawFieldOfElementAt(idx); |
| 316 collector->RecordSlot(context, slot, *slot); |
| 317 } |
| 318 // Code objects are always allocated in Code space, |
| 319 // so we do not have to visit them |
| 320 // during scavenges. |
313 DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST); | 321 DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST); |
314 DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST); | 322 DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST); |
315 } | 323 } |
316 } | 324 } |
317 | 325 |
318 template <class T> | 326 template <class T> |
319 static void DoWeakList(Heap* heap, Context* context, | 327 static void DoWeakList(Heap* heap, Context* context, |
320 WeakObjectRetainer* retainer, int index) { | 328 WeakObjectRetainer* retainer, int index) { |
321 // Visit the weak list, removing dead intermediate elements. | 329 // Visit the weak list, removing dead intermediate elements. |
322 Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer); | 330 Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer); |
(...skipping 34 matching lines...) |
357 }; | 365 }; |
358 | 366 |
359 | 367 |
360 template Object* VisitWeakList<Context>(Heap* heap, Object* list, | 368 template Object* VisitWeakList<Context>(Heap* heap, Object* list, |
361 WeakObjectRetainer* retainer); | 369 WeakObjectRetainer* retainer); |
362 | 370 |
363 template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list, | 371 template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list, |
364 WeakObjectRetainer* retainer); | 372 WeakObjectRetainer* retainer); |
365 } // namespace internal | 373 } // namespace internal |
366 } // namespace v8 | 374 } // namespace v8 |
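
Note on the pattern under review: the file above implements V8's intrusive weak-list traversal, where each element carries a "weak next" pointer at a fixed offset (WeakNextOffset), a WeakObjectRetainer decides which elements survive the current GC, and VisitWeakList unlinks dead intermediate elements while invoking VisitLiveObject on the survivors. The following is a minimal, self-contained sketch of that traversal under simplified assumptions; Node, Retainer, and NodeVisitor are hypothetical stand-ins, not V8's real classes or signatures.

    // weak_list_sketch.cc -- illustrative only; simplified stand-ins for
    // V8's Heap, WeakObjectRetainer and WeakListVisitor<T>.
    #include <cassert>
    #include <cstdio>

    // A node that participates in an intrusive weak list via `weak_next`.
    struct Node {
      int id;
      bool alive;       // stand-in for "still reachable after GC marking"
      Node* weak_next;  // the intrusive "weak next" link
    };

    // Stand-in for WeakObjectRetainer: returns the retained object, or
    // nullptr if the object is dead and must be dropped from the list.
    struct Retainer {
      Node* RetainAs(Node* object) { return object->alive ? object : nullptr; }
    };

    // Stand-in for the per-type visitor policy: it knows where the weak
    // link lives and what to do with each live element.
    struct NodeVisitor {
      static Node* WeakNext(Node* n) { return n->weak_next; }
      static void SetWeakNext(Node* n, Node* next) { n->weak_next = next; }
      static void VisitLiveObject(Node* n) {
        std::printf("retained node %d\n", n->id);
      }
    };

    // Core of the pattern: walk the list, ask the retainer about each
    // element, relink survivors so dead intermediate elements are skipped,
    // and return the (possibly new) list head.
    template <class Visitor>
    Node* VisitWeakList(Node* list, Retainer* retainer) {
      Node* head = nullptr;
      Node* tail = nullptr;
      for (Node* candidate = list; candidate != nullptr;
           candidate = Visitor::WeakNext(candidate)) {
        Node* retained = retainer->RetainAs(candidate);
        if (retained == nullptr) continue;  // dead: drop by not appending
        if (head == nullptr) {
          head = retained;  // first survivor becomes the new head
        } else {
          Visitor::SetWeakNext(tail, retained);  // splice over dead elements
        }
        tail = retained;
        Visitor::VisitLiveObject(retained);
      }
      if (tail != nullptr) Visitor::SetWeakNext(tail, nullptr);  // terminate
      return head;
    }

    int main() {
      // List: 1 -> 2 -> 3, where node 2 is dead and must be unlinked.
      Node n3{3, true, nullptr};
      Node n2{2, false, &n3};
      Node n1{1, true, &n2};
      Retainer retainer;
      Node* head = VisitWeakList<NodeVisitor>(&n1, &retainer);
      assert(head == &n1 && n1.weak_next == &n3 && n3.weak_next == nullptr);
      return 0;
    }

In the real file, NativeContextVisitor plays the role of NodeVisitor for Context, and DoWeakList applies this traversal to each of the context's weak list slots before writing the surviving head back into the context.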