| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
| 6 * met: | 6 * met: |
| 7 * | 7 * |
| 8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
| (...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 83 // HeapHashMap<WeakMember<Foo>, std::unique_ptr<Disposer>> idiom usages with the | 83 // HeapHashMap<WeakMember<Foo>, std::unique_ptr<Disposer>> idiom usages with the |
| 84 // pre-finalizer if the replacement won't cause performance regressions. | 84 // pre-finalizer if the replacement won't cause performance regressions. |
| 85 // | 85 // |
| 86 // Usage: | 86 // Usage: |
| 87 // | 87 // |
| 88 // class Foo : GarbageCollected<Foo> { | 88 // class Foo : GarbageCollected<Foo> { |
| 89 // USING_PRE_FINALIZER(Foo, dispose); | 89 // USING_PRE_FINALIZER(Foo, dispose); |
| 90 // private: | 90 // private: |
| 91 // void dispose() | 91 // void dispose() |
| 92 // { | 92 // { |
| 93 // m_bar->...; // It is safe to touch other on-heap objects. | 93 // bar_->...; // It is safe to touch other on-heap objects. |
| 94 // } | 94 // } |
| 95 // Member<Bar> m_bar; | 95 // Member<Bar> bar_; |
| 96 // }; | 96 // }; |
| 97 #define USING_PRE_FINALIZER(Class, preFinalizer) \ | 97 #define USING_PRE_FINALIZER(Class, preFinalizer) \ |
| 98 public: \ | 98 public: \ |
| 99 static bool InvokePreFinalizer(void* object) { \ | 99 static bool InvokePreFinalizer(void* object) { \ |
| 100 Class* self = reinterpret_cast<Class*>(object); \ | 100 Class* self = reinterpret_cast<Class*>(object); \ |
| 101 if (ThreadHeap::IsHeapObjectAlive(self)) \ | 101 if (ThreadHeap::IsHeapObjectAlive(self)) \ |
| 102 return false; \ | 102 return false; \ |
| 103 self->Class::preFinalizer(); \ | 103 self->Class::preFinalizer(); \ |
| 104 return true; \ | 104 return true; \ |
| 105 } \ | 105 } \ |
| (...skipping 365 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 471 // - a vector that meets the condition (*) is allocated on the arena | 471 // - a vector that meets the condition (*) is allocated on the arena |
| 472 // | 472 // |
| 473 // (*) More than 33% of the same type of vectors have been promptly | 473 // (*) More than 33% of the same type of vectors have been promptly |
| 474 // freed since the last GC. | 474 // freed since the last GC. |
| 475 // | 475 // |
| 476 BaseArena* VectorBackingArena(size_t gc_info_index) { | 476 BaseArena* VectorBackingArena(size_t gc_info_index) { |
| 477 DCHECK(CheckThread()); | 477 DCHECK(CheckThread()); |
| 478 size_t entry_index = gc_info_index & kLikelyToBePromptlyFreedArrayMask; | 478 size_t entry_index = gc_info_index & kLikelyToBePromptlyFreedArrayMask; |
| 479 --likely_to_be_promptly_freed_[entry_index]; | 479 --likely_to_be_promptly_freed_[entry_index]; |
| 480 int arena_index = vector_backing_arena_index_; | 480 int arena_index = vector_backing_arena_index_; |
| 481 // If m_likelyToBePromptlyFreed[entryIndex] > 0, that means that | 481 // If likely_to_be_promptly_freed_[entry_index] > 0, that means that |
| 482 // more than 33% of vectors of the type have been promptly freed | 482 // more than 33% of vectors of the type have been promptly freed |
| 483 // since the last GC. | 483 // since the last GC. |
| 484 if (likely_to_be_promptly_freed_[entry_index] > 0) { | 484 if (likely_to_be_promptly_freed_[entry_index] > 0) { |
| 485 arena_ages_[arena_index] = ++current_arena_ages_; | 485 arena_ages_[arena_index] = ++current_arena_ages_; |
| 486 vector_backing_arena_index_ = | 486 vector_backing_arena_index_ = |
| 487 ArenaIndexOfVectorArenaLeastRecentlyExpanded( | 487 ArenaIndexOfVectorArenaLeastRecentlyExpanded( |
| 488 BlinkGC::kVector1ArenaIndex, BlinkGC::kVector4ArenaIndex); | 488 BlinkGC::kVector1ArenaIndex, BlinkGC::kVector4ArenaIndex); |
| 489 } | 489 } |
| 490 DCHECK(IsVectorArenaIndex(arena_index)); | 490 DCHECK(IsVectorArenaIndex(arena_index)); |
| 491 return arenas_[arena_index]; | 491 return arenas_[arena_index]; |
| (...skipping 173 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 665 GarbageCollectedMixinConstructorMarker* gc_mixin_marker_; | 665 GarbageCollectedMixinConstructorMarker* gc_mixin_marker_; |
| 666 | 666 |
| 667 bool should_flush_heap_does_not_contain_cache_; | 667 bool should_flush_heap_does_not_contain_cache_; |
| 668 GCState gc_state_; | 668 GCState gc_state_; |
| 669 | 669 |
| 670 using PreFinalizerCallback = bool (*)(void*); | 670 using PreFinalizerCallback = bool (*)(void*); |
| 671 using PreFinalizer = std::pair<void*, PreFinalizerCallback>; | 671 using PreFinalizer = std::pair<void*, PreFinalizerCallback>; |
| 672 | 672 |
| 673 // Pre-finalizers are called in the reverse order in which they are | 673 // Pre-finalizers are called in the reverse order in which they are |
| 674 // registered by the constructors (including constructors of Mixin objects) | 674 // registered by the constructors (including constructors of Mixin objects) |
| 675 // for an object, by processing the m_orderedPreFinalizers back-to-front. | 675 // for an object, by processing the ordered_pre_finalizers_ back-to-front. |
| 676 ListHashSet<PreFinalizer> ordered_pre_finalizers_; | 676 ListHashSet<PreFinalizer> ordered_pre_finalizers_; |
| 677 | 677 |
| 678 v8::Isolate* isolate_; | 678 v8::Isolate* isolate_; |
| 679 void (*trace_dom_wrappers_)(v8::Isolate*, Visitor*); | 679 void (*trace_dom_wrappers_)(v8::Isolate*, Visitor*); |
| 680 void (*invalidate_dead_objects_in_wrappers_marking_deque_)(v8::Isolate*); | 680 void (*invalidate_dead_objects_in_wrappers_marking_deque_)(v8::Isolate*); |
| 681 void (*perform_cleanup_)(v8::Isolate*); | 681 void (*perform_cleanup_)(v8::Isolate*); |
| 682 | 682 |
| 683 #if defined(ADDRESS_SANITIZER) | 683 #if defined(ADDRESS_SANITIZER) |
| 684 void* asan_fake_stack_; | 684 void* asan_fake_stack_; |
| 685 #endif | 685 #endif |
| 686 | 686 |
| 687 // PersistentNodes that are stored in static references; | 687 // PersistentNodes that are stored in static references; |
| 688 // references that either have to be cleared upon the thread | 688 // references that either have to be cleared upon the thread |
| 689 // detaching from Oilpan and shutting down or references we | 689 // detaching from Oilpan and shutting down or references we |
| 690 // have to clear before initiating LSan's leak detection. | 690 // have to clear before initiating LSan's leak detection. |
| 691 HashMap<PersistentNode*, PersistentClearCallback> static_persistents_; | 691 HashMap<PersistentNode*, PersistentClearCallback> static_persistents_; |
| 692 | 692 |
| 693 #if defined(LEAK_SANITIZER) | 693 #if defined(LEAK_SANITIZER) |
| 694 // Count that controls scoped disabling of persistent registration. | 694 // Count that controls scoped disabling of persistent registration. |
| 695 size_t m_disabledStaticPersistentsRegistration; | 695 size_t disabled_static_persistent_registration_; |
| 696 #endif | 696 #endif |
| 697 | 697 |
| 698 // Ideally we want to allocate an array of size |gcInfoTableMax| but it will | 698 // Ideally we want to allocate an array of size |gcInfoTableMax| but it will |
| 699 // waste memory. Thus we limit the array size to 2^8 and share one entry | 699 // waste memory. Thus we limit the array size to 2^8 and share one entry |
| 700 // with multiple types of vectors. This won't be an issue in practice, | 700 // with multiple types of vectors. This won't be an issue in practice, |
| 701 // since there will be less than 2^8 types of objects in common cases. | 701 // since there will be less than 2^8 types of objects in common cases. |
| 702 static const int kLikelyToBePromptlyFreedArraySize = (1 << 8); | 702 static const int kLikelyToBePromptlyFreedArraySize = (1 << 8); |
| 703 static const int kLikelyToBePromptlyFreedArrayMask = | 703 static const int kLikelyToBePromptlyFreedArrayMask = |
| 704 kLikelyToBePromptlyFreedArraySize - 1; | 704 kLikelyToBePromptlyFreedArraySize - 1; |
| 705 std::unique_ptr<int[]> likely_to_be_promptly_freed_; | 705 std::unique_ptr<int[]> likely_to_be_promptly_freed_; |
| (...skipping 23 matching lines...) Expand all Loading... |
| 729 class ThreadStateFor<kAnyThread> { | 729 class ThreadStateFor<kAnyThread> { |
| 730 STATIC_ONLY(ThreadStateFor); | 730 STATIC_ONLY(ThreadStateFor); |
| 731 | 731 |
| 732 public: | 732 public: |
| 733 static ThreadState* GetState() { return ThreadState::Current(); } | 733 static ThreadState* GetState() { return ThreadState::Current(); } |
| 734 }; | 734 }; |
| 735 | 735 |
| 736 } // namespace blink | 736 } // namespace blink |
| 737 | 737 |
| 738 #endif // ThreadState_h | 738 #endif // ThreadState_h |
| OLD | NEW |