OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
3 * | 3 * |
4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
6 * met: | 6 * met: |
7 * | 7 * |
8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
(...skipping 169 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
180 #if ENABLE(ASSERT) | 180 #if ENABLE(ASSERT) |
181 static void checkGCInfo(Visitor* visitor, const T* t) | 181 static void checkGCInfo(Visitor* visitor, const T* t) |
182 { | 182 { |
183 DefaultTraceTrait<T>::checkGCInfo(visitor, t); | 183 DefaultTraceTrait<T>::checkGCInfo(visitor, t); |
184 } | 184 } |
185 #endif | 185 #endif |
186 }; | 186 }; |
187 | 187 |
188 template<typename T> class TraceTrait<const T> : public TraceTrait<T> { }; | 188 template<typename T> class TraceTrait<const T> : public TraceTrait<T> { }; |
189 | 189 |
190 // If ENABLE_EAGER_TRACING_BY_DEFAULT is set to 1, GCed objects will | |
191 // be eagerly traced by default. A class type can opt out by declaring | |
192 // a TraceEagerlyTrait<> specialization, mapping the value to 'false' | |
193 // (see the WILL_NOT_BE_EAGERLY_TRACED() macro below.) | |
194 #define ENABLE_EAGER_TRACING_BY_DEFAULT 0 | |
195 | |
196 // DISABLE_ALL_EAGER_TRACING provides the "kill switch" for eager | |
197 // tracing; setting it to 1 will disable the use of eager tracing | |
198 // entirely. That is, eager tracing is disabled even if traits have | |
199 // been declared. | |
200 #define DISABLE_ALL_EAGER_TRACING 0 | |
201 | |
202 // If TraceEagerlyTrait<T>::value is true, then the marker thread should | |
203 // invoke trace() on unmarked objects deriving from class T right away, | |
haraken
2014/12/02 06:16:05
unmarked objects => not-yet-marked objects
sof
2014/12/02 09:52:15
Done.
| |
204 // and not queue its trace callback on the marker stack. | |
haraken
2014/12/02 06:16:05
its trace callback => their trace callbacks
sof
2014/12/02 09:52:15
Done.
| |
205 // | |
206 // Specific template specializations of TraceEagerlyTrait<T> can be used | |
207 // to declare that eager tracing should always be used when tracing over | |
208 // GCed objects with class type T. If the trait's boolean 'value' is | |
209 // mapped to 'true' that is; declare it as 'false' to disable eager tracing. | |
210 // | |
211 // The trait can be declared to enable/disable eager tracing for a class T | |
212 // and any of its subclasses, or just to the class T (but none of its subclasses.) | |
213 // | |
214 template<typename T, typename Enabled = void> | |
215 class TraceEagerlyTrait { | |
216 public: | |
217 static const bool value = ENABLE_EAGER_TRACING_BY_DEFAULT; | |
218 }; | |
219 | |
220 template<typename T> | |
221 class TraceEagerlyTrait<Member<T>> { | |
haraken
2014/12/02 06:16:05
Don't we need the TraceEagerlyTrait for Persistent
sof
2014/12/02 09:52:15
That would be preferable; thanks for catching thei
| |
222 public: | |
223 static const bool value = TraceEagerlyTrait<T>::value; | |
224 }; | |
225 | |
226 template<typename T> | |
227 class TraceEagerlyTrait<WeakMember<T>> { | |
228 public: | |
229 static const bool value = TraceEagerlyTrait<T>::value; | |
230 }; | |
231 | |
232 #define WILL_BE_EAGERLY_TRACED(TYPE) \ | |
233 template<typename U> \ | |
234 class TraceEagerlyTrait<U, typename WTF::EnableIf<WTF::IsSubclass<U, TYPE>::value>::Type> { \ | |
235 public: \ | |
236 static const bool value = true; \ | |
237 } | |
238 | |
239 #define WILL_NOT_BE_EAGERLY_TRACED(TYPE) \ | |
240 template<typename U> \ | |
241 class TraceEagerlyTrait<U, typename WTF::EnableIf<WTF::IsSubclass<U, TYPE>::value>::Type> { \ | |
242 public: \ | |
243 static const bool value = false; \ | |
244 } | |
245 | |
246 // Limit eager tracing to only apply to TYPE (but not any of its subclasses.) | |
247 #define WILL_BE_EAGERLY_TRACED_CLASS(TYPE) \ | |
248 template<> \ | |
249 class TraceEagerlyTrait<TYPE> { \ | |
250 public: \ | |
251 static const bool value = true; \ | |
252 } | |
253 | |
254 #define WILL_NOT_BE_EAGERLY_TRACED_CLASS(TYPE) \ | |
255 template<> \ | |
256 class TraceEagerlyTrait<TYPE> { \ | |
257 public: \ | |
258 static const bool value = false; \ | |
259 } | |
260 | |
261 // Set to 1 if you want collections to be eagerly traced regardless | |
262 // of whether the elements are eagerly traceable or not. | |
263 #define ENABLE_EAGER_HEAP_COLLECTION_TRACING ENABLE_EAGER_TRACING_BY_DEFAULT | |
264 | |
265 #if ENABLE_EAGER_HEAP_COLLECTION_TRACING | |
266 #define IS_EAGERLY_TRACED_HEAP_COLLECTION(Type) true | |
267 #else | |
268 #define IS_EAGERLY_TRACED_HEAP_COLLECTION(Type) TraceEagerlyTrait<Type>::value | |
269 #endif | |
270 | |
190 template<typename Collection> | 271 template<typename Collection> |
191 struct OffHeapCollectionTraceTrait; | 272 struct OffHeapCollectionTraceTrait; |
192 | 273 |
193 template<typename T> | 274 template<typename T> |
194 struct ObjectAliveTrait { | 275 struct ObjectAliveTrait { |
195 static bool isHeapObjectAlive(Visitor*, T*); | 276 static bool isHeapObjectAlive(Visitor*, T*); |
196 }; | 277 }; |
197 | 278 |
198 // Visitor is used to traverse the Blink object graph. Used for the | 279 // Visitor is used to traverse the Blink object graph. Used for the |
199 // marking phase of the mark-sweep garbage collector. | 280 // marking phase of the mark-sweep garbage collector. |
(...skipping 122 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
322 template<typename T> void trace(const WeakPtr<T>&) { } | 403 template<typename T> void trace(const WeakPtr<T>&) { } |
323 #endif | 404 #endif |
324 | 405 |
325 // This method marks an object and adds it to the set of objects | 406 // This method marks an object and adds it to the set of objects |
326 // that should have their trace method called. Since not all | 407 // that should have their trace method called. Since not all |
327 // objects have vtables we have to have the callback as an | 408 // objects have vtables we have to have the callback as an |
328 // explicit argument, but we can use the templated one-argument | 409 // explicit argument, but we can use the templated one-argument |
329 // mark method above to automatically provide the callback | 410 // mark method above to automatically provide the callback |
330 // function. | 411 // function. |
331 virtual void mark(const void*, TraceCallback) = 0; | 412 virtual void mark(const void*, TraceCallback) = 0; |
413 | |
414 template<typename T> void markNoTracing(const T* pointer) { mark(pointer, reinterpret_cast<TraceCallback>(0)); } | |
332 void markNoTracing(const void* pointer) { mark(pointer, reinterpret_cast<TraceCallback>(0)); } | 415 void markNoTracing(const void* pointer) { mark(pointer, reinterpret_cast<TraceCallback>(0)); } |
333 void markNoTracing(HeapObjectHeader* header) { mark(header, reinterpret_cast<TraceCallback>(0)); } | 416 void markNoTracing(HeapObjectHeader* header) { mark(header, reinterpret_cast<TraceCallback>(0)); } |
334 void markNoTracing(FinalizedHeapObjectHeader* header) { mark(header, reinterpret_cast<TraceCallback>(0)); } | 417 void markNoTracing(FinalizedHeapObjectHeader* header) { mark(header, reinterpret_cast<TraceCallback>(0)); } |
335 | 418 |
336 // Used to mark objects during conservative scanning. | 419 // Used to mark objects during conservative scanning. |
337 virtual void mark(HeapObjectHeader*, TraceCallback) = 0; | 420 virtual void mark(HeapObjectHeader*, TraceCallback) = 0; |
338 virtual void mark(FinalizedHeapObjectHeader*, TraceCallback) = 0; | 421 virtual void mark(FinalizedHeapObjectHeader*, TraceCallback) = 0; |
339 | 422 |
340 // Used to delay the marking of objects until the usual marking | 423 // Used to delay the marking of objects until the usual marking |
341 // including ephemeron iteration is done. This is used to delay | 424 // including ephemeron iteration is done. This is used to delay |
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
387 { | 470 { |
388 registerWeakCell(reinterpret_cast<void**>(cell), &handleWeakCell<T>); | 471 registerWeakCell(reinterpret_cast<void**>(cell), &handleWeakCell<T>); |
389 } | 472 } |
390 | 473 |
391 virtual void registerWeakTable(const void*, EphemeronCallback, EphemeronCallback) = 0; | 474 virtual void registerWeakTable(const void*, EphemeronCallback, EphemeronCallback) = 0; |
392 #if ENABLE(ASSERT) | 475 #if ENABLE(ASSERT) |
393 virtual bool weakTableRegistered(const void*) = 0; | 476 virtual bool weakTableRegistered(const void*) = 0; |
394 #endif | 477 #endif |
395 | 478 |
396 virtual bool isMarked(const void*) = 0; | 479 virtual bool isMarked(const void*) = 0; |
480 virtual bool ensureMarked(const void*) = 0; | |
397 | 481 |
398 template<typename T> inline bool isAlive(T* obj) | 482 template<typename T> inline bool isAlive(T* obj) |
399 { | 483 { |
400 // Check that we actually know the definition of T when tracing. | 484 // Check that we actually know the definition of T when tracing. |
401 COMPILE_ASSERT(sizeof(T), WeNeedToKnowTheDefinitionOfTheTypeWeAreTracing); | 485 COMPILE_ASSERT(sizeof(T), WeNeedToKnowTheDefinitionOfTheTypeWeAreTracing); |
402 // The strongification of collections relies on the fact that once a | 486 // The strongification of collections relies on the fact that once a |
403 // collection has been strongified, there is no way that it can contain | 487 // collection has been strongified, there is no way that it can contain |
404 // non-live entries, so no entries will be removed. Since you can't set | 488 // non-live entries, so no entries will be removed. Since you can't set |
405 // the mark bit on a null pointer, that means that null pointers are | 489 // the mark bit on a null pointer, that means that null pointers are |
406 // always 'alive'. | 490 // always 'alive'. |
407 if (!obj) | 491 if (!obj) |
408 return true; | 492 return true; |
409 return ObjectAliveTrait<T>::isHeapObjectAlive(this, obj); | 493 return ObjectAliveTrait<T>::isHeapObjectAlive(this, obj); |
410 } | 494 } |
411 template<typename T> inline bool isAlive(const Member<T>& member) | 495 template<typename T> inline bool isAlive(const Member<T>& member) |
412 { | 496 { |
413 return isAlive(member.get()); | 497 return isAlive(member.get()); |
414 } | 498 } |
415 template<typename T> inline bool isAlive(RawPtr<T> ptr) | 499 template<typename T> inline bool isAlive(RawPtr<T> ptr) |
416 { | 500 { |
417 return isAlive(ptr.get()); | 501 return isAlive(ptr.get()); |
418 } | 502 } |
419 | 503 |
420 #if ENABLE(ASSERT) | 504 #if ENABLE(ASSERT) |
421 void checkGCInfo(const void*, const GCInfo*); | 505 void checkGCInfo(const void*, const GCInfo*); |
422 #endif | 506 #endif |
423 | 507 |
424 // Macro to declare methods needed for each typed heap. | 508 // Macro to declare methods needed for each typed heap. |
425 #define DECLARE_VISITOR_METHODS(Type) \ | 509 #define DECLARE_VISITOR_METHODS(Type) \ |
426 DEBUG_ONLY(void checkGCInfo(const Type*, const GCInfo*);) \ | 510 DEBUG_ONLY(void checkGCInfo(const Type*, const GCInfo*);) \ |
427 virtual void mark(const Type*, TraceCallback) = 0; \ | 511 virtual void mark(const Type*, TraceCallback) = 0; \ |
428 virtual bool isMarked(const Type*) = 0; | 512 virtual bool isMarked(const Type*) = 0; \ |
513 virtual bool ensureMarked(const Type*) = 0; | |
429 | 514 |
430 FOR_EACH_TYPED_HEAP(DECLARE_VISITOR_METHODS) | 515 FOR_EACH_TYPED_HEAP(DECLARE_VISITOR_METHODS) |
431 #undef DECLARE_VISITOR_METHODS | 516 #undef DECLARE_VISITOR_METHODS |
432 | 517 |
433 #if ENABLE(GC_PROFILE_MARKING) | 518 #if ENABLE(GC_PROFILE_MARKING) |
434 void setHostInfo(void* object, const String& name) | 519 void setHostInfo(void* object, const String& name) |
435 { | 520 { |
436 m_hostObject = object; | 521 m_hostObject = object; |
437 m_hostName = name; | 522 m_hostName = name; |
438 } | 523 } |
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
495 | 580 |
496 template<typename T> | 581 template<typename T> |
497 class DefaultTraceTrait<T, false> { | 582 class DefaultTraceTrait<T, false> { |
498 public: | 583 public: |
499 static void mark(Visitor* visitor, const T* t) | 584 static void mark(Visitor* visitor, const T* t) |
500 { | 585 { |
501 // Default mark method of the trait just calls the two-argument mark | 586 // Default mark method of the trait just calls the two-argument mark |
502 // method on the visitor. The second argument is the static trace method | 587 // method on the visitor. The second argument is the static trace method |
503 // of the trait, which by default calls the instance method | 588 // of the trait, which by default calls the instance method |
504 // trace(Visitor*) on the object. | 589 // trace(Visitor*) on the object. |
590 // | |
591 // If trait allows it, invoke trace callback right here on unmarked objects. | |
haraken
2014/12/02 06:16:05
unmarked objects => not-yet-marked objects
sof
2014/12/02 09:52:15
Thanks, done.
| |
592 if (!DISABLE_ALL_EAGER_TRACING && TraceEagerlyTrait<T>::value) { | |
593 if (visitor->ensureMarked(t)) | |
594 TraceTrait<T>::trace(visitor, const_cast<T*>(t)); | |
595 return; | |
596 } | |
505 visitor->mark(const_cast<T*>(t), &TraceTrait<T>::trace); | 597 visitor->mark(const_cast<T*>(t), &TraceTrait<T>::trace); |
506 } | 598 } |
507 | 599 |
508 #if ENABLE(ASSERT) | 600 #if ENABLE(ASSERT) |
509 static void checkGCInfo(Visitor* visitor, const T* t) | 601 static void checkGCInfo(Visitor* visitor, const T* t) |
510 { | 602 { |
511 visitor->checkGCInfo(const_cast<T*>(t), GCInfoTrait<T>::get()); | 603 visitor->checkGCInfo(const_cast<T*>(t), GCInfoTrait<T>::get()); |
512 } | 604 } |
513 #endif | 605 #endif |
514 }; | 606 }; |
(...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
585 | 677 |
586 class PLATFORM_EXPORT GarbageCollectedMixin { | 678 class PLATFORM_EXPORT GarbageCollectedMixin { |
587 public: | 679 public: |
588 virtual void adjustAndMark(Visitor*) const = 0; | 680 virtual void adjustAndMark(Visitor*) const = 0; |
589 virtual bool isHeapObjectAlive(Visitor*) const = 0; | 681 virtual bool isHeapObjectAlive(Visitor*) const = 0; |
590 virtual void trace(Visitor*) { } | 682 virtual void trace(Visitor*) { } |
591 }; | 683 }; |
592 | 684 |
593 #define USING_GARBAGE_COLLECTED_MIXIN(TYPE) \ | 685 #define USING_GARBAGE_COLLECTED_MIXIN(TYPE) \ |
594 public: \ | 686 public: \ |
595 virtual void adjustAndMark(blink::Visitor* visitor) const override \ | 687 virtual void adjustAndMark(blink::Visitor* visitor) const override \ |
596 { \ | 688 { \ |
597 typedef WTF::IsSubclassOfTemplate<typename WTF::RemoveConst<TYPE>::Type, blink::GarbageCollected> IsSubclassOfGarbageCollected; \ | 689 typedef WTF::IsSubclassOfTemplate<typename WTF::RemoveConst<TYPE>::Type, blink::GarbageCollected> IsSubclassOfGarbageCollected; \ |
598 COMPILE_ASSERT(IsSubclassOfGarbageCollected::value, OnlyGarbageCollectedObjectsCanHaveGarbageCollectedMixins); \ | 690 COMPILE_ASSERT(IsSubclassOfGarbageCollected::value, OnlyGarbageCollectedObjectsCanHaveGarbageCollectedMixins); \ |
691 if (!DISABLE_ALL_EAGER_TRACING && TraceEagerlyTrait<TYPE>::value) { \ | |
692 if (visitor->ensureMarked(static_cast<const TYPE*>(this))) \ | |
693 TraceTrait<TYPE>::trace(visitor, const_cast<TYPE*>(this)); \ | |
694 return; \ | |
695 } \ | |
599 visitor->mark(static_cast<const TYPE*>(this), &blink::TraceTrait<TYPE>::trace); \ | 696 visitor->mark(static_cast<const TYPE*>(this), &blink::TraceTrait<TYPE>::trace); \ |
600 } \ | 697 } \ |
601 virtual bool isHeapObjectAlive(blink::Visitor* visitor) const override \ | 698 virtual bool isHeapObjectAlive(blink::Visitor* visitor) const override \ |
602 { \ | 699 { \ |
603 return visitor->isAlive(this); \ | 700 return visitor->isAlive(this); \ |
604 } \ | 701 } \ |
605 private: | 702 private: |
606 | 703 |
607 #if ENABLE(OILPAN) | 704 #if ENABLE(OILPAN) |
608 #define WILL_BE_USING_GARBAGE_COLLECTED_MIXIN(TYPE) USING_GARBAGE_COLLECTED_MIXIN(TYPE) | 705 #define WILL_BE_USING_GARBAGE_COLLECTED_MIXIN(TYPE) USING_GARBAGE_COLLECTED_MIXIN(TYPE) |
609 #else | 706 #else |
610 #define WILL_BE_USING_GARBAGE_COLLECTED_MIXIN(TYPE) | 707 #define WILL_BE_USING_GARBAGE_COLLECTED_MIXIN(TYPE) |
611 #endif | 708 #endif |
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
655 struct GCInfoTrait { | 752 struct GCInfoTrait { |
656 static const GCInfo* get() | 753 static const GCInfo* get() |
657 { | 754 { |
658 return GCInfoAtBase<typename GetGarbageCollectedBase<T>::type>::get(); | 755 return GCInfoAtBase<typename GetGarbageCollectedBase<T>::type>::get(); |
659 } | 756 } |
660 }; | 757 }; |
661 | 758 |
662 } | 759 } |
663 | 760 |
664 #endif | 761 #endif |
OLD | NEW |