OLD | NEW |
1 /* | 1 /* |
2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
3 * | 3 * |
4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
6 * met: | 6 * met: |
7 * | 7 * |
8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
(...skipping 173 matching lines...)
184 } | 184 } |
185 #endif | 185 #endif |
186 }; | 186 }; |
187 | 187 |
188 template<typename T> class TraceTrait<const T> : public TraceTrait<T> { }; | 188 template<typename T> class TraceTrait<const T> : public TraceTrait<T> { }; |
189 | 189 |
190 // If ENABLE_EAGER_TRACING_BY_DEFAULT is set to 1, GCed objects will | 190 // If ENABLE_EAGER_TRACING_BY_DEFAULT is set to 1, GCed objects will |
191 // be eagerly traced by default. A class type can opt out by declaring | 191 // be eagerly traced by default. A class type can opt out by declaring |
192 // a TraceEagerlyTrait<> specialization, mapping the value to 'false' | 192 // a TraceEagerlyTrait<> specialization, mapping the value to 'false' |
193 // (see the WILL_NOT_BE_EAGERLY_TRACED() macro below.) | 193 // (see the WILL_NOT_BE_EAGERLY_TRACED() macro below.) |
194 #define ENABLE_EAGER_TRACING_BY_DEFAULT 0 | 194 #define ENABLE_EAGER_TRACING_BY_DEFAULT 1 |
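With the default flipped to 1, classes now opt out of eager tracing rather than in. For context, a minimal sketch of what the WILL_NOT_BE_EAGERLY_TRACED() macro referenced above plausibly expands to; the exact expansion lives outside the hunks shown here, and DeepTreeNode is a hypothetical example type:

    // Sketch only: specialize TraceEagerlyTrait<> to 'false' so the
    // marker queues this type's trace callbacks instead of unrolling
    // them on the system stack.
    #define WILL_NOT_BE_EAGERLY_TRACED(TYPE)       \
        template<> class TraceEagerlyTrait<TYPE> { \
        public:                                    \
            static const bool value = false;       \
        }

    // Illustrative use: a type with deep recursive structure that could
    // otherwise overflow the stack when traced eagerly.
    WILL_NOT_BE_EAGERLY_TRACED(DeepTreeNode);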
195 | 195 |
196 // DISABLE_ALL_EAGER_TRACING provides the "kill switch" for eager | 196 // DISABLE_ALL_EAGER_TRACING provides the "kill switch" for eager |
197 // tracing; setting it to 1 will disable the use of eager tracing | 197 // tracing; setting it to 1 will disable the use of eager tracing |
198 // entirely. That is, eager tracing is disabled even if traits have | 198 // entirely. That is, eager tracing is disabled even if traits have |
199 // been declared. | 199 // been declared. |
200 #define DISABLE_ALL_EAGER_TRACING 0 | 200 #define DISABLE_ALL_EAGER_TRACING 0 |
201 | 201 |
202 // If TraceEagerlyTrait<T>::value is true, then the marker thread should | 202 // If TraceEagerlyTrait<T>::value is true, then the marker thread should |
203 // invoke trace() on not-yet-marked objects deriving from class T right | 203 // invoke trace() on not-yet-marked objects deriving from class T right |
204 // away, and not queue their trace callbacks on its marker stack. | 204 // away, and not queue their trace callbacks on its marker stack. |
(...skipping 52 matching lines...)
257 #endif | 257 #endif |
258 | 258 |
259 template<typename Collection> | 259 template<typename Collection> |
260 struct OffHeapCollectionTraceTrait; | 260 struct OffHeapCollectionTraceTrait; |
261 | 261 |
262 template<typename T> | 262 template<typename T> |
263 struct ObjectAliveTrait { | 263 struct ObjectAliveTrait { |
264 static bool isHeapObjectAlive(Visitor*, T*); | 264 static bool isHeapObjectAlive(Visitor*, T*); |
265 }; | 265 }; |
266 | 266 |
| 267 enum MarkingMode { |
| 268 GlobalMarking, |
| 269 ThreadLocalMarking, |
| 270 }; |
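The new MarkingMode is threaded through the Visitor constructor added below. A minimal sketch of a concrete visitor forwarding it; MarkingVisitor and the comments on each mode are illustrative, not part of this patch:

    // Sketch only: a real subclass would also implement Visitor's pure
    // virtual methods (mark, registerWeakCell, registerWeakTable, ...).
    class MarkingVisitor : public Visitor {
    public:
        explicit MarkingVisitor(MarkingMode mode) : Visitor(mode) { }
        // GlobalMarking: marking spans the whole cross-thread heap.
        // ThreadLocalMarking: marking is restricted to one thread's heap.
    };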
| 271 |
267 // Visitor is used to traverse the Blink object graph. Used for the | 272 // Visitor is used to traverse the Blink object graph. Used for the |
268 // marking phase of the mark-sweep garbage collector. | 273 // marking phase of the mark-sweep garbage collector. |
269 // | 274 // |
270 // Pointers are marked and pushed on the marking stack by calling the | 275 // Pointers are marked and pushed on the marking stack by calling the |
271 // |mark| method with the pointer as an argument. | 276 // |mark| method with the pointer as an argument. |
272 // | 277 // |
273 // Pointers within objects are traced by calling the |trace| methods | 278 // Pointers within objects are traced by calling the |trace| methods |
274 // with the object as an argument. Tracing objects will mark all of the | 279 // with the object as an argument. Tracing objects will mark all of the |
275 // contained pointers and push them on the marking stack. | 280 // contained pointers and push them on the marking stack. |
276 class PLATFORM_EXPORT Visitor { | 281 class PLATFORM_EXPORT Visitor { |
277 public: | 282 public: |
| 283 Visitor(MarkingMode mode) : m_mode(mode) {} |
278 virtual ~Visitor() { } | 284 virtual ~Visitor() { } |
279 | 285 |
280 template<typename T> | 286 template<typename T> |
281 static void verifyGarbageCollectedIfMember(T*) | 287 static void verifyGarbageCollectedIfMember(T*) |
282 { | 288 { |
283 } | 289 } |
284 | 290 |
285 template<typename T> | 291 template<typename T> |
286 static void verifyGarbageCollectedIfMember(Member<T>* t) | 292 static void verifyGarbageCollectedIfMember(Member<T>* t) |
287 { | 293 { |
(...skipping 170 matching lines...)
458 { | 464 { |
459 registerWeakCell(reinterpret_cast<void**>(cell), &handleWeakCell<T>); | 465 registerWeakCell(reinterpret_cast<void**>(cell), &handleWeakCell<T>); |
460 } | 466 } |
461 | 467 |
462 virtual void registerWeakTable(const void*, EphemeronCallback, EphemeronCallback) = 0; | 468 virtual void registerWeakTable(const void*, EphemeronCallback, EphemeronCallback) = 0; |
463 #if ENABLE(ASSERT) | 469 #if ENABLE(ASSERT) |
464 virtual bool weakTableRegistered(const void*) = 0; | 470 virtual bool weakTableRegistered(const void*) = 0; |
465 #endif | 471 #endif |
466 | 472 |
467 virtual bool isMarked(const void*) = 0; | 473 virtual bool isMarked(const void*) = 0; |
468 virtual bool ensureMarked(const void*) = 0; | 474 |
| 475 bool ensureMarked(const void*); |
469 | 476 |
470 template<typename T> inline bool isAlive(T* obj) | 477 template<typename T> inline bool isAlive(T* obj) |
471 { | 478 { |
472 // Check that we actually know the definition of T when tracing. | 479 // Check that we actually know the definition of T when tracing. |
473 COMPILE_ASSERT(sizeof(T), WeNeedToKnowTheDefinitionOfTheTypeWeAreTracing); | 480 COMPILE_ASSERT(sizeof(T), WeNeedToKnowTheDefinitionOfTheTypeWeAreTracing); |
474 // The strongification of collections relies on the fact that once a | 481 // The strongification of collections relies on the fact that once a |
475 // collection has been strongified, there is no way that it can contain | 482 // collection has been strongified, there is no way that it can contain |
476 // non-live entries, so no entries will be removed. Since you can't set | 483 // non-live entries, so no entries will be removed. Since you can't set |
477 // the mark bit on a null pointer, that means that null pointers are | 484 // the mark bit on a null pointer, that means that null pointers are |
478 // always 'alive'. | 485 // always 'alive'. |
(...skipping 11 matching lines...)
490 } | 497 } |
491 | 498 |
492 #if ENABLE(ASSERT) | 499 #if ENABLE(ASSERT) |
493 void checkGCInfo(const void*, const GCInfo*); | 500 void checkGCInfo(const void*, const GCInfo*); |
494 #endif | 501 #endif |
495 | 502 |
496 // Macro to declare methods needed for each typed heap. | 503 // Macro to declare methods needed for each typed heap. |
497 #define DECLARE_VISITOR_METHODS(Type) \ | 504 #define DECLARE_VISITOR_METHODS(Type) \ |
498 DEBUG_ONLY(void checkGCInfo(const Type*, const GCInfo*);) \ | 505 DEBUG_ONLY(void checkGCInfo(const Type*, const GCInfo*);) \ |
499 virtual void mark(const Type*, TraceCallback) = 0; \ | 506 virtual void mark(const Type*, TraceCallback) = 0; \ |
500 virtual bool isMarked(const Type*) = 0; \ | 507 virtual bool isMarked(const Type*) = 0; \ |
501 virtual bool ensureMarked(const Type*) = 0; | 508 bool ensureMarked(const Type*); |
502 | 509 |
503 FOR_EACH_TYPED_HEAP(DECLARE_VISITOR_METHODS) | 510 FOR_EACH_TYPED_HEAP(DECLARE_VISITOR_METHODS) |
504 #undef DECLARE_VISITOR_METHODS | 511 #undef DECLARE_VISITOR_METHODS |
505 | 512 |
506 #if ENABLE(GC_PROFILE_MARKING) | 513 #if ENABLE(GC_PROFILE_MARKING) |
507 void setHostInfo(void* object, const String& name) | 514 void setHostInfo(void* object, const String& name) |
508 { | 515 { |
509 m_hostObject = object; | 516 m_hostObject = object; |
510 m_hostName = name; | 517 m_hostName = name; |
511 } | 518 } |
512 #endif | 519 #endif |
513 | 520 |
514 inline bool canTraceEagerly() const { return m_traceDepth < kMaxEagerTraceDepth; } |
515 inline void incrementTraceDepth() { m_traceDepth++; } |
516 inline void decrementTraceDepth() { ASSERT(m_traceDepth > 0); m_traceDepth--; } |
517 | |
518 protected: | 521 protected: |
519 Visitor() | |
520 : m_traceDepth(0) | |
521 { | |
522 } | |
523 | |
524 virtual void registerWeakCell(void**, WeakPointerCallback) = 0; | 522 virtual void registerWeakCell(void**, WeakPointerCallback) = 0; |
525 #if ENABLE(GC_PROFILE_MARKING) | 523 #if ENABLE(GC_PROFILE_MARKING) |
526 void* m_hostObject; | 524 void* m_hostObject; |
527 String m_hostName; | 525 String m_hostName; |
528 #endif | 526 #endif |
529 | 527 |
530 private: | 528 private: |
531 template<typename T> | 529 template<typename T> |
532 static void handleWeakCell(Visitor* self, void* obj) | 530 static void handleWeakCell(Visitor* self, void* obj) |
533 { | 531 { |
534 T** cell = reinterpret_cast<T**>(obj); | 532 T** cell = reinterpret_cast<T**>(obj); |
535 if (*cell && !self->isAlive(*cell)) | 533 if (*cell && !self->isAlive(*cell)) |
536 *cell = 0; | 534 *cell = 0; |
537 } | 535 } |
538 | 536 |
539 // The maximum depth of eager, unrolled trace() calls that is | 537 MarkingMode m_mode; |
540 // considered safe and allowed. | |
541 const int kMaxEagerTraceDepth = 100; | |
542 | |
543 int m_traceDepth; | |
544 }; | 538 }; |
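For context, the tracing idiom this visitor drives looks roughly like the following; the Node class and its members are illustrative only, not part of this patch:

    // A garbage-collected class exposes trace(Visitor*), which hands
    // each of its Member<> fields to the visitor.
    class Node : public GarbageCollected<Node> {
    public:
        void trace(Visitor* visitor)
        {
            // Marks each pointee and either queues its trace callback
            // on the marking stack or, with eager tracing, runs it now.
            visitor->trace(m_next);
            visitor->trace(m_parent);
        }
    private:
        Member<Node> m_next;
        Member<Node> m_parent;
    };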
545 | 539 |
546 // We trace vectors by using the trace trait on each element, which means you | 540 // We trace vectors by using the trace trait on each element, which means you |
547 // can have vectors of general objects (not just pointers to objects) that can | 541 // can have vectors of general objects (not just pointers to objects) that can |
548 // be traced. | 542 // be traced. |
549 template<typename T, size_t N> | 543 template<typename T, size_t N> |
550 struct OffHeapCollectionTraceTrait<WTF::Vector<T, N, WTF::DefaultAllocator> > { | 544 struct OffHeapCollectionTraceTrait<WTF::Vector<T, N, WTF::DefaultAllocator> > { |
551 typedef WTF::Vector<T, N, WTF::DefaultAllocator> Vector; | 545 typedef WTF::Vector<T, N, WTF::DefaultAllocator> Vector; |
552 | 546 |
553 static void trace(Visitor* visitor, const Vector& vector) | 547 static void trace(Visitor* visitor, const Vector& vector) |
(...skipping 33 matching lines...)
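The elided trace() body applies the element's trace trait to every slot, which is what lets vectors of general objects (not just pointers) be traced. A sketch of its likely shape; the exact code is in the skipped lines:

    static void trace(Visitor* visitor, const Vector& vector)
    {
        // Trace each element in place via its TraceTrait.
        for (size_t i = 0; i < vector.size(); ++i)
            TraceTrait<T>::trace(visitor, const_cast<T*>(&vector[i]));
    }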
587 static void mark(Visitor* visitor, const T* t) | 581 static void mark(Visitor* visitor, const T* t) |
588 { | 582 { |
589 // Default mark method of the trait just calls the two-argument mark | 583 // Default mark method of the trait just calls the two-argument mark |
590 // method on the visitor. The second argument is the static trace method | 584 // method on the visitor. The second argument is the static trace method |
591 // of the trait, which by default calls the instance method | 585 // of the trait, which by default calls the instance method |
592 // trace(Visitor*) on the object. | 586 // trace(Visitor*) on the object. |
593 // | 587 // |
594 // If the trait allows it, invoke the trace callback right here on the | 588 // If the trait allows it, invoke the trace callback right here on the |
595 // not-yet-marked object. | 589 // not-yet-marked object. |
596 if (!DISABLE_ALL_EAGER_TRACING && TraceEagerlyTrait<T>::value) { | 590 if (!DISABLE_ALL_EAGER_TRACING && TraceEagerlyTrait<T>::value) { |
597 // Protect against too deep trace call chains, and the | 591 if (visitor->ensureMarked(t)) |
598 // unbounded system stack usage they can bring about. | 592 TraceTrait<T>::trace(visitor, const_cast<T*>(t)); |
599 // | 593 return; |
600 // Assert against deep stacks so as to flush them out, | |
601 // but test and appropriately handle them should they occur | |
602 // in release builds. | |
603 ASSERT(visitor->canTraceEagerly()); | |
604 if (LIKELY(visitor->canTraceEagerly())) { | |
605 if (visitor->ensureMarked(t)) { | |
606 visitor->incrementTraceDepth(); | |
607 TraceTrait<T>::trace(visitor, const_cast<T*>(t)); | |
608 visitor->decrementTraceDepth(); | |
609 } | |
610 return; | |
611 } | |
612 } | 594 } |
613 visitor->mark(const_cast<T*>(t), &TraceTrait<T>::trace); | 595 visitor->mark(const_cast<T*>(t), &TraceTrait<T>::trace); |
614 } | 596 } |
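// For reference (a sketch, not part of this patch): the static trace
// passed above is, per the comment in mark(), expected to reduce by
// default to
//     static void trace(Visitor* visitor, void* self)
//     {
//         static_cast<T*>(self)->trace(visitor);
//     }
// so eager calls and queued marker-stack callbacks end up running the
// same instance method trace(Visitor*).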
615 | 597 |
616 #if ENABLE(ASSERT) | 598 #if ENABLE(ASSERT) |
617 static void checkGCInfo(Visitor* visitor, const T* t) | 599 static void checkGCInfo(Visitor* visitor, const T* t) |
618 { | 600 { |
619 visitor->checkGCInfo(const_cast<T*>(t), GCInfoTrait<T>::get()); | 601 visitor->checkGCInfo(const_cast<T*>(t), GCInfoTrait<T>::get()); |
620 } | 602 } |
621 #endif | 603 #endif |
(...skipping 146 matching lines...)
768 struct GCInfoTrait { | 750 struct GCInfoTrait { |
769 static const GCInfo* get() | 751 static const GCInfo* get() |
770 { | 752 { |
771 return GCInfoAtBase<typename GetGarbageCollectedBase<T>::type>::get(); | 753 return GCInfoAtBase<typename GetGarbageCollectedBase<T>::type>::get(); |
772 } | 754 } |
773 }; | 755 }; |
774 | 756 |
775 } | 757 } |
776 | 758 |
777 #endif | 759 #endif |