OLD | NEW |
1 /* | 1 /* |
2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
3 * | 3 * |
4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
6 * met: | 6 * met: |
7 * | 7 * |
8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
(...skipping 120 matching lines...)
131 | 131 |
132 typedef std::pair<Member<IntWrapper>, WeakMember<IntWrapper>> StrongWeakPair; | 132 typedef std::pair<Member<IntWrapper>, WeakMember<IntWrapper>> StrongWeakPair; |
133 | 133 |
134 struct PairWithWeakHandling : public StrongWeakPair { | 134 struct PairWithWeakHandling : public StrongWeakPair { |
135 DISALLOW_NEW_EXCEPT_PLACEMENT_NEW(); | 135 DISALLOW_NEW_EXCEPT_PLACEMENT_NEW(); |
136 | 136 |
137 public: | 137 public: |
138 // Regular constructor. | 138 // Regular constructor. |
139 PairWithWeakHandling(IntWrapper* one, IntWrapper* two) | 139 PairWithWeakHandling(IntWrapper* one, IntWrapper* two) |
140 : StrongWeakPair(one, two) { | 140 : StrongWeakPair(one, two) { |
141 ASSERT(one); // We use null first field to indicate empty slots in the hash | 141 DCHECK(one); // We use null first field to indicate empty slots in the hash |
142 // table. | 142 // table. |
143 } | 143 } |
144 | 144 |
145 // The HashTable (via the HashTrait) calls this constructor with a | 145 // The HashTable (via the HashTrait) calls this constructor with a |
146 // placement new to mark slots in the hash table as being deleted. We will | 146 // placement new to mark slots in the hash table as being deleted. We will |
147 // never call trace or the destructor on these slots. We mark ourselves | 147 // never call trace or the destructor on these slots. We mark ourselves |
148 // deleted | 148 // deleted |
149 // with a pointer to -1 in the first field. | 149 // with a pointer to -1 in the first field. |
150 PairWithWeakHandling(WTF::HashTableDeletedValueType) | 150 PairWithWeakHandling(WTF::HashTableDeletedValueType) |
151 : StrongWeakPair(reinterpret_cast<IntWrapper*>(-1), nullptr) {} | 151 : StrongWeakPair(reinterpret_cast<IntWrapper*>(-1), nullptr) {} |
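Note: the two constructors above implement the usual open-addressing slot convention the comments describe: a null first field marks a never-used (empty) slot, and a first field of -1 marks a deleted slot that must never be traced or destructed. A standalone sketch of that convention, with illustrative names only (not WTF's real HashTraits API):

    struct Slot {
      void* first = nullptr;  // nullptr => slot has never been used
      bool isEmpty() const { return first == nullptr; }
      bool isDeleted() const {
        // -1 sentinel => slot held an entry that was since removed.
        return first == reinterpret_cast<void*>(-1);
      }
    };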
(...skipping 122 matching lines...)
274 } // namespace WTF | 274 } // namespace WTF |
275 | 275 |
276 namespace blink { | 276 namespace blink { |
277 | 277 |
278 class TestGCScope { | 278 class TestGCScope { |
279 public: | 279 public: |
280 explicit TestGCScope(BlinkGC::StackState state) | 280 explicit TestGCScope(BlinkGC::StackState state) |
281 : m_state(ThreadState::current()), | 281 : m_state(ThreadState::current()), |
282 m_safePointScope(state), | 282 m_safePointScope(state), |
283 m_parkedAllThreads(false) { | 283 m_parkedAllThreads(false) { |
284 ASSERT(m_state->checkThread()); | 284 DCHECK(m_state->checkThread()); |
285 if (LIKELY(m_state->heap().park())) { | 285 if (LIKELY(m_state->heap().park())) { |
286 m_state->heap().preGC(); | 286 m_state->heap().preGC(); |
287 m_parkedAllThreads = true; | 287 m_parkedAllThreads = true; |
288 } | 288 } |
289 } | 289 } |
290 | 290 |
291 bool allThreadsParked() { return m_parkedAllThreads; } | 291 bool allThreadsParked() { return m_parkedAllThreads; } |
292 | 292 |
293 ~TestGCScope() { | 293 ~TestGCScope() { |
294 // Only cleanup if we parked all threads in which case the GC happened | 294 // Only cleanup if we parked all threads in which case the GC happened |
(...skipping 26 matching lines...)
321 : Visitor(state, Visitor::ThreadLocalMarking), | 321 : Visitor(state, Visitor::ThreadLocalMarking), |
322 m_scope(&state->heap().stackFrameDepth()), | 322 m_scope(&state->heap().stackFrameDepth()), |
323 m_count(0) {} | 323 m_count(0) {} |
324 | 324 |
325 void mark(const void* object, TraceCallback) override { | 325 void mark(const void* object, TraceCallback) override { |
326 if (object) | 326 if (object) |
327 m_count++; | 327 m_count++; |
328 } | 328 } |
329 | 329 |
330 void markHeader(HeapObjectHeader* header, TraceCallback callback) override { | 330 void markHeader(HeapObjectHeader* header, TraceCallback callback) override { |
331 ASSERT(header->payload()); | 331 DCHECK(header->payload()); |
332 m_count++; | 332 m_count++; |
333 } | 333 } |
334 | 334 |
335 void registerDelayedMarkNoTracing(const void*) override {} | 335 void registerDelayedMarkNoTracing(const void*) override {} |
336 void registerWeakMembers(const void*, const void*, WeakCallback) override {} | 336 void registerWeakMembers(const void*, const void*, WeakCallback) override {} |
337 void registerWeakTable(const void*, | 337 void registerWeakTable(const void*, |
338 EphemeronCallback, | 338 EphemeronCallback, |
339 EphemeronCallback) override {} | 339 EphemeronCallback) override {} |
340 #if ENABLE(ASSERT) | 340 #if DCHECK_IS_ON() |
341 bool weakTableRegistered(const void*) override { return false; } | 341 bool weakTableRegistered(const void*) override { return false; } |
342 #endif | 342 #endif |
343 void registerWeakCellWithCallback(void**, WeakCallback) override {} | 343 void registerWeakCellWithCallback(void**, WeakCallback) override {} |
344 bool ensureMarked(const void* objectPointer) override { | 344 bool ensureMarked(const void* objectPointer) override { |
345 if (!objectPointer || | 345 if (!objectPointer || |
346 HeapObjectHeader::fromPayload(objectPointer)->isMarked()) | 346 HeapObjectHeader::fromPayload(objectPointer)->isMarked()) |
347 return false; | 347 return false; |
348 markNoTracing(objectPointer); | 348 markNoTracing(objectPointer); |
349 return true; | 349 return true; |
350 } | 350 } |
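Note: ensureMarked() above follows the mark-once idiom: skip null or already-marked objects, otherwise set the mark bit and report that the caller still needs to trace the object's fields. A minimal standalone sketch, using a hypothetical FakeHeader type rather than Blink's HeapObjectHeader:

    struct FakeHeader {
      bool marked = false;
    };

    bool ensureMarked(FakeHeader* header) {
      if (!header || header->marked)
        return false;         // nothing to do: null, or already visited
      header->marked = true;  // first visit: mark now ...
      return true;            // ... and tell the caller to trace its fields
    }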
(...skipping 180 matching lines...)
531 const int ThreadedTesterBase::numberOfThreads; | 531 const int ThreadedTesterBase::numberOfThreads; |
532 | 532 |
533 class ThreadedHeapTester : public ThreadedTesterBase { | 533 class ThreadedHeapTester : public ThreadedTesterBase { |
534 public: | 534 public: |
535 static void test() { ThreadedTesterBase::test(new ThreadedHeapTester); } | 535 static void test() { ThreadedTesterBase::test(new ThreadedHeapTester); } |
536 | 536 |
537 ~ThreadedHeapTester() override { | 537 ~ThreadedHeapTester() override { |
538 // Verify that the threads cleared their CTPs when | 538 // Verify that the threads cleared their CTPs when |
539 // terminating, preventing access to a finalized heap. | 539 // terminating, preventing access to a finalized heap. |
540 for (auto& globalIntWrapper : m_crossPersistents) { | 540 for (auto& globalIntWrapper : m_crossPersistents) { |
541 ASSERT(globalIntWrapper.get()); | 541 DCHECK(globalIntWrapper.get()); |
542 EXPECT_FALSE(globalIntWrapper.get()->get()); | 542 EXPECT_FALSE(globalIntWrapper.get()->get()); |
543 } | 543 } |
544 } | 544 } |
545 | 545 |
546 protected: | 546 protected: |
547 using GlobalIntWrapperPersistent = CrossThreadPersistent<IntWrapper>; | 547 using GlobalIntWrapperPersistent = CrossThreadPersistent<IntWrapper>; |
548 | 548 |
549 Mutex m_mutex; | 549 Mutex m_mutex; |
550 Vector<std::unique_ptr<GlobalIntWrapperPersistent>> m_crossPersistents; | 550 Vector<std::unique_ptr<GlobalIntWrapperPersistent>> m_crossPersistents; |
551 | 551 |
(...skipping 403 matching lines...)
955 : public GarbageCollectedFinalized<RefCountedAndGarbageCollected> { | 955 : public GarbageCollectedFinalized<RefCountedAndGarbageCollected> { |
956 public: | 956 public: |
957 static RefCountedAndGarbageCollected* create() { | 957 static RefCountedAndGarbageCollected* create() { |
958 return new RefCountedAndGarbageCollected; | 958 return new RefCountedAndGarbageCollected; |
959 } | 959 } |
960 | 960 |
961 ~RefCountedAndGarbageCollected() { ++s_destructorCalls; } | 961 ~RefCountedAndGarbageCollected() { ++s_destructorCalls; } |
962 | 962 |
963 void ref() { | 963 void ref() { |
964 if (UNLIKELY(!m_refCount)) { | 964 if (UNLIKELY(!m_refCount)) { |
965 ASSERT(ThreadState::current()->findPageFromAddress( | 965 DCHECK(ThreadState::current()->findPageFromAddress( |
966 reinterpret_cast<Address>(this))); | 966 reinterpret_cast<Address>(this))); |
967 m_keepAlive = this; | 967 m_keepAlive = this; |
968 } | 968 } |
969 ++m_refCount; | 969 ++m_refCount; |
970 } | 970 } |
971 | 971 |
972 void deref() { | 972 void deref() { |
973 ASSERT(m_refCount > 0); | 973 DCHECK_GT(m_refCount, 0); |
974 if (!--m_refCount) | 974 if (!--m_refCount) |
975 m_keepAlive.clear(); | 975 m_keepAlive.clear(); |
976 } | 976 } |
977 | 977 |
978 DEFINE_INLINE_TRACE() {} | 978 DEFINE_INLINE_TRACE() {} |
979 | 979 |
980 static int s_destructorCalls; | 980 static int s_destructorCalls; |
981 | 981 |
982 private: | 982 private: |
983 RefCountedAndGarbageCollected() : m_refCount(0) {} | 983 RefCountedAndGarbageCollected() : m_refCount(0) {} |
984 | 984 |
985 int m_refCount; | 985 int m_refCount; |
986 SelfKeepAlive<RefCountedAndGarbageCollected> m_keepAlive; | 986 SelfKeepAlive<RefCountedAndGarbageCollected> m_keepAlive; |
987 }; | 987 }; |
988 | 988 |
989 int RefCountedAndGarbageCollected::s_destructorCalls = 0; | 989 int RefCountedAndGarbageCollected::s_destructorCalls = 0; |
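Note: ref()/deref() above show the self-keep-alive idiom for objects that are both ref-counted and garbage collected: the first ref() plants a strong self-reference (SelfKeepAlive) so the GC cannot reclaim the object while external clients still hold it, and the last deref() clears that reference again. A minimal sketch with illustrative names; the raw pointer below only stands in for a real Persistent-like handle, so it does not actually pin anything:

    class PinnedWhileReferenced {
     public:
      void ref() {
        if (m_refCount++ == 0)
          m_keepAlive = this;     // 0 -> 1: pin ourselves against GC
      }
      void deref() {
        if (--m_refCount == 0)
          m_keepAlive = nullptr;  // 1 -> 0: unpin; the next GC may reclaim us
      }

     private:
      int m_refCount = 0;
      PinnedWhileReferenced* m_keepAlive = nullptr;  // stands in for SelfKeepAlive<T>
    };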
990 | 990 |
991 class RefCountedAndGarbageCollected2 | 991 class RefCountedAndGarbageCollected2 |
992 : public HeapTestOtherSuperClass, | 992 : public HeapTestOtherSuperClass, |
993 public GarbageCollectedFinalized<RefCountedAndGarbageCollected2> { | 993 public GarbageCollectedFinalized<RefCountedAndGarbageCollected2> { |
994 public: | 994 public: |
995 static RefCountedAndGarbageCollected2* create() { | 995 static RefCountedAndGarbageCollected2* create() { |
996 return new RefCountedAndGarbageCollected2; | 996 return new RefCountedAndGarbageCollected2; |
997 } | 997 } |
998 | 998 |
999 ~RefCountedAndGarbageCollected2() { ++s_destructorCalls; } | 999 ~RefCountedAndGarbageCollected2() { ++s_destructorCalls; } |
1000 | 1000 |
1001 void ref() { | 1001 void ref() { |
1002 if (UNLIKELY(!m_refCount)) { | 1002 if (UNLIKELY(!m_refCount)) { |
1003 ASSERT(ThreadState::current()->findPageFromAddress( | 1003 DCHECK(ThreadState::current()->findPageFromAddress( |
1004 reinterpret_cast<Address>(this))); | 1004 reinterpret_cast<Address>(this))); |
1005 m_keepAlive = this; | 1005 m_keepAlive = this; |
1006 } | 1006 } |
1007 ++m_refCount; | 1007 ++m_refCount; |
1008 } | 1008 } |
1009 | 1009 |
1010 void deref() { | 1010 void deref() { |
1011 ASSERT(m_refCount > 0); | 1011 DCHECK_GT(m_refCount, 0); |
1012 if (!--m_refCount) | 1012 if (!--m_refCount) |
1013 m_keepAlive.clear(); | 1013 m_keepAlive.clear(); |
1014 } | 1014 } |
1015 | 1015 |
1016 DEFINE_INLINE_TRACE() {} | 1016 DEFINE_INLINE_TRACE() {} |
1017 | 1017 |
1018 static int s_destructorCalls; | 1018 static int s_destructorCalls; |
1019 | 1019 |
1020 private: | 1020 private: |
1021 RefCountedAndGarbageCollected2() : m_refCount(0) {} | 1021 RefCountedAndGarbageCollected2() : m_refCount(0) {} |
(...skipping 267 matching lines...)
1289 s_didCallWillFinalize = true; | 1289 s_didCallWillFinalize = true; |
1290 } | 1290 } |
1291 | 1291 |
1292 static ObserverMap& observe(Observable& target) { | 1292 static ObserverMap& observe(Observable& target) { |
1293 ObserverMap& map = observers(); | 1293 ObserverMap& map = observers(); |
1294 ObserverMap::AddResult result = map.add(&target, nullptr); | 1294 ObserverMap::AddResult result = map.add(&target, nullptr); |
1295 if (result.isNewEntry) { | 1295 if (result.isNewEntry) { |
1296 result.storedValue->value = | 1296 result.storedValue->value = |
1297 WTF::makeUnique<FinalizationObserverWithHashMap>(target); | 1297 WTF::makeUnique<FinalizationObserverWithHashMap>(target); |
1298 } else { | 1298 } else { |
1299 ASSERT(result.storedValue->value); | 1299 DCHECK(result.storedValue->value); |
1300 } | 1300 } |
1301 return map; | 1301 return map; |
1302 } | 1302 } |
1303 | 1303 |
1304 static void clearObservers() { | 1304 static void clearObservers() { |
1305 delete s_observerMap; | 1305 delete s_observerMap; |
1306 s_observerMap = nullptr; | 1306 s_observerMap = nullptr; |
1307 } | 1307 } |
1308 | 1308 |
1309 static bool s_didCallWillFinalize; | 1309 static bool s_didCallWillFinalize; |
(...skipping 515 matching lines...)
1825 SimpleFinalizedObject::create(); | 1825 SimpleFinalizedObject::create(); |
1826 EXPECT_EQ(0, SimpleFinalizedObject::s_destructorCalls); | 1826 EXPECT_EQ(0, SimpleFinalizedObject::s_destructorCalls); |
1827 preciselyCollectGarbage(); | 1827 preciselyCollectGarbage(); |
1828 EXPECT_EQ(0, SimpleFinalizedObject::s_destructorCalls); | 1828 EXPECT_EQ(0, SimpleFinalizedObject::s_destructorCalls); |
1829 } | 1829 } |
1830 | 1830 |
1831 preciselyCollectGarbage(); | 1831 preciselyCollectGarbage(); |
1832 EXPECT_EQ(1, SimpleFinalizedObject::s_destructorCalls); | 1832 EXPECT_EQ(1, SimpleFinalizedObject::s_destructorCalls); |
1833 } | 1833 } |
1834 | 1834 |
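Note: the hunk below swaps the compile-time guard ENABLE(ASSERT) for DCHECK_IS_ON(), matching the ASSERT -> DCHECK conversion throughout this file. The comparison forms used elsewhere in the CL (DCHECK_EQ, DCHECK_GT, DCHECK_NE) also log both operand values on failure, which plain ASSERT(a > b) never did. A small sketch of the idiom; in Chromium these macros come from base/logging.h:

    #include "base/logging.h"

    void checkRefCount(int refCount) {
      DCHECK_GE(refCount, 0);  // compiled out in builds where DCHECK_IS_ON() is 0
    #if DCHECK_IS_ON()
      // Extra verification that is too expensive for release builds.
    #endif
    }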
1835 #if ENABLE(ASSERT) || defined(LEAK_SANITIZER) || defined(ADDRESS_SANITIZER) | 1835 #if DCHECK_IS_ON() || defined(LEAK_SANITIZER) || defined(ADDRESS_SANITIZER) |
1836 TEST(HeapTest, FreelistReuse) { | 1836 TEST(HeapTest, FreelistReuse) { |
1837 clearOutOldGarbage(); | 1837 clearOutOldGarbage(); |
1838 | 1838 |
1839 for (int i = 0; i < 100; i++) | 1839 for (int i = 0; i < 100; i++) |
1840 new IntWrapper(i); | 1840 new IntWrapper(i); |
1841 IntWrapper* p1 = new IntWrapper(100); | 1841 IntWrapper* p1 = new IntWrapper(100); |
1842 preciselyCollectGarbage(); | 1842 preciselyCollectGarbage(); |
1843 // In non-production builds, we delay reusing freed memory for at least | 1843 // In non-production builds, we delay reusing freed memory for at least |
1844 // one GC cycle. | 1844 // one GC cycle. |
1845 for (int i = 0; i < 100; i++) { | 1845 for (int i = 0; i < 100; i++) { |
(...skipping 204 matching lines...)
2050 EXPECT_EQ(1u, Bar::s_live); | 2050 EXPECT_EQ(1u, Bar::s_live); |
2051 } | 2051 } |
2052 preciselyCollectGarbage(); | 2052 preciselyCollectGarbage(); |
2053 EXPECT_EQ(0u, Bar::s_live); | 2053 EXPECT_EQ(0u, Bar::s_live); |
2054 } | 2054 } |
2055 | 2055 |
2056 TEST(HeapTest, MarkTest) { | 2056 TEST(HeapTest, MarkTest) { |
2057 { | 2057 { |
2058 Bar::s_live = 0; | 2058 Bar::s_live = 0; |
2059 Persistent<Bar> bar = Bar::create(); | 2059 Persistent<Bar> bar = Bar::create(); |
2060 ASSERT(ThreadState::current()->findPageFromAddress(bar)); | 2060 DCHECK(ThreadState::current()->findPageFromAddress(bar)); |
2061 EXPECT_EQ(1u, Bar::s_live); | 2061 EXPECT_EQ(1u, Bar::s_live); |
2062 { | 2062 { |
2063 Foo* foo = Foo::create(bar); | 2063 Foo* foo = Foo::create(bar); |
2064 ASSERT(ThreadState::current()->findPageFromAddress(foo)); | 2064 DCHECK(ThreadState::current()->findPageFromAddress(foo)); |
2065 EXPECT_EQ(2u, Bar::s_live); | 2065 EXPECT_EQ(2u, Bar::s_live); |
2066 EXPECT_TRUE(reinterpret_cast<Address>(foo) != | 2066 EXPECT_TRUE(reinterpret_cast<Address>(foo) != |
2067 reinterpret_cast<Address>(bar.get())); | 2067 reinterpret_cast<Address>(bar.get())); |
2068 conservativelyCollectGarbage(); | 2068 conservativelyCollectGarbage(); |
2069 EXPECT_TRUE(foo != bar); // To make sure foo is kept alive. | 2069 EXPECT_TRUE(foo != bar); // To make sure foo is kept alive. |
2070 EXPECT_EQ(2u, Bar::s_live); | 2070 EXPECT_EQ(2u, Bar::s_live); |
2071 } | 2071 } |
2072 preciselyCollectGarbage(); | 2072 preciselyCollectGarbage(); |
2073 EXPECT_EQ(1u, Bar::s_live); | 2073 EXPECT_EQ(1u, Bar::s_live); |
2074 } | 2074 } |
2075 preciselyCollectGarbage(); | 2075 preciselyCollectGarbage(); |
2076 EXPECT_EQ(0u, Bar::s_live); | 2076 EXPECT_EQ(0u, Bar::s_live); |
2077 } | 2077 } |
2078 | 2078 |
2079 TEST(HeapTest, DeepTest) { | 2079 TEST(HeapTest, DeepTest) { |
2080 const unsigned depth = 100000; | 2080 const unsigned depth = 100000; |
2081 Bar::s_live = 0; | 2081 Bar::s_live = 0; |
2082 { | 2082 { |
2083 Bar* bar = Bar::create(); | 2083 Bar* bar = Bar::create(); |
2084 ASSERT(ThreadState::current()->findPageFromAddress(bar)); | 2084 DCHECK(ThreadState::current()->findPageFromAddress(bar)); |
2085 Foo* foo = Foo::create(bar); | 2085 Foo* foo = Foo::create(bar); |
2086 ASSERT(ThreadState::current()->findPageFromAddress(foo)); | 2086 DCHECK(ThreadState::current()->findPageFromAddress(foo)); |
2087 EXPECT_EQ(2u, Bar::s_live); | 2087 EXPECT_EQ(2u, Bar::s_live); |
2088 for (unsigned i = 0; i < depth; i++) { | 2088 for (unsigned i = 0; i < depth; i++) { |
2089 Foo* foo2 = Foo::create(foo); | 2089 Foo* foo2 = Foo::create(foo); |
2090 foo = foo2; | 2090 foo = foo2; |
2091 ASSERT(ThreadState::current()->findPageFromAddress(foo)); | 2091 DCHECK(ThreadState::current()->findPageFromAddress(foo)); |
2092 } | 2092 } |
2093 EXPECT_EQ(depth + 2, Bar::s_live); | 2093 EXPECT_EQ(depth + 2, Bar::s_live); |
2094 conservativelyCollectGarbage(); | 2094 conservativelyCollectGarbage(); |
2095 EXPECT_TRUE(foo != bar); // To make sure foo and bar are kept alive. | 2095 EXPECT_TRUE(foo != bar); // To make sure foo and bar are kept alive. |
2096 EXPECT_EQ(depth + 2, Bar::s_live); | 2096 EXPECT_EQ(depth + 2, Bar::s_live); |
2097 } | 2097 } |
2098 preciselyCollectGarbage(); | 2098 preciselyCollectGarbage(); |
2099 EXPECT_EQ(0u, Bar::s_live); | 2099 EXPECT_EQ(0u, Bar::s_live); |
2100 } | 2100 } |
2101 | 2101 |
(...skipping 122 matching lines...)
2224 ThreadHeap& heap = ThreadState::current()->heap(); | 2224 ThreadHeap& heap = ThreadState::current()->heap(); |
2225 clearOutOldGarbage(); | 2225 clearOutOldGarbage(); |
2226 size_t initialObjectPayloadSize = heap.objectPayloadSizeForTesting(); | 2226 size_t initialObjectPayloadSize = heap.objectPayloadSizeForTesting(); |
2227 size_t initialAllocatedSpace = heap.heapStats().allocatedSpace(); | 2227 size_t initialAllocatedSpace = heap.heapStats().allocatedSpace(); |
2228 IntWrapper::s_destructorCalls = 0; | 2228 IntWrapper::s_destructorCalls = 0; |
2229 LargeHeapObject::s_destructorCalls = 0; | 2229 LargeHeapObject::s_destructorCalls = 0; |
2230 { | 2230 { |
2231 int slack = | 2231 int slack = |
2232 8; // LargeHeapObject points to an IntWrapper that is also allocated. | 2232 8; // LargeHeapObject points to an IntWrapper that is also allocated. |
2233 Persistent<LargeHeapObject> object = LargeHeapObject::create(); | 2233 Persistent<LargeHeapObject> object = LargeHeapObject::create(); |
2234 ASSERT(ThreadState::current()->findPageFromAddress(object)); | 2234 DCHECK(ThreadState::current()->findPageFromAddress(object)); |
2235 ASSERT(ThreadState::current()->findPageFromAddress( | 2235 DCHECK(ThreadState::current()->findPageFromAddress( |
2236 reinterpret_cast<char*>(object.get()) + sizeof(LargeHeapObject) - 1)); | 2236 reinterpret_cast<char*>(object.get()) + sizeof(LargeHeapObject) - 1)); |
2237 clearOutOldGarbage(); | 2237 clearOutOldGarbage(); |
2238 size_t afterAllocation = heap.heapStats().allocatedSpace(); | 2238 size_t afterAllocation = heap.heapStats().allocatedSpace(); |
2239 { | 2239 { |
2240 object->set(0, 'a'); | 2240 object->set(0, 'a'); |
2241 EXPECT_EQ('a', object->get(0)); | 2241 EXPECT_EQ('a', object->get(0)); |
2242 object->set(object->length() - 1, 'b'); | 2242 object->set(object->length() - 1, 'b'); |
2243 EXPECT_EQ('b', object->get(object->length() - 1)); | 2243 EXPECT_EQ('b', object->get(object->length() - 1)); |
2244 size_t expectedLargeHeapObjectPayloadSize = | 2244 size_t expectedLargeHeapObjectPayloadSize = |
2245 ThreadHeap::allocationSizeFromSize(sizeof(LargeHeapObject)); | 2245 ThreadHeap::allocationSizeFromSize(sizeof(LargeHeapObject)); |
(...skipping 1366 matching lines...)
3612 if (deleteAfterwards) | 3612 if (deleteAfterwards) |
3613 EXPECT_EQ(i, | 3613 EXPECT_EQ(i, |
3614 strongWeak->take(keepNumbersAlive[i + 1])->value()); | 3614 strongWeak->take(keepNumbersAlive[i + 1])->value()); |
3615 } else if (collectionNumber == weakWeakIndex) { | 3615 } else if (collectionNumber == weakWeakIndex) { |
3616 if (deleteAfterwards) | 3616 if (deleteAfterwards) |
3617 EXPECT_EQ(i + 1, weakWeak->take(keepNumbersAlive[i])->value()); | 3617 EXPECT_EQ(i + 1, weakWeak->take(keepNumbersAlive[i])->value()); |
3618 } | 3618 } |
3619 if (!deleteAfterwards) | 3619 if (!deleteAfterwards) |
3620 count++; | 3620 count++; |
3621 } else if (collectionNumber == weakSetIndex && firstAlive) { | 3621 } else if (collectionNumber == weakSetIndex && firstAlive) { |
3622 ASSERT_TRUE(weakSet->contains(keepNumbersAlive[i])); | 3622 DCHECK(weakSet->contains(keepNumbersAlive[i])); |
3623 if (deleteAfterwards) | 3623 if (deleteAfterwards) |
3624 weakSet->remove(keepNumbersAlive[i]); | 3624 weakSet->remove(keepNumbersAlive[i]); |
3625 else | 3625 else |
3626 count++; | 3626 count++; |
3627 } else if (collectionNumber == weakOrderedSetIndex && firstAlive) { | 3627 } else if (collectionNumber == weakOrderedSetIndex && firstAlive) { |
3628 ASSERT_TRUE(weakOrderedSet->contains(keepNumbersAlive[i])); | 3628 DCHECK(weakOrderedSet->contains(keepNumbersAlive[i])); |
3629 if (deleteAfterwards) | 3629 if (deleteAfterwards) |
3630 weakOrderedSet->remove(keepNumbersAlive[i]); | 3630 weakOrderedSet->remove(keepNumbersAlive[i]); |
3631 else | 3631 else |
3632 count++; | 3632 count++; |
3633 } | 3633 } |
3634 } | 3634 } |
3635 if (addAfterwards) { | 3635 if (addAfterwards) { |
3636 for (int i = 1000; i < 1100; i++) { | 3636 for (int i = 1000; i < 1100; i++) { |
3637 IntWrapper* wrapped = IntWrapper::create(i); | 3637 IntWrapper* wrapped = IntWrapper::create(i); |
3638 keepNumbersAlive.append(wrapped); | 3638 keepNumbersAlive.append(wrapped); |
(...skipping 54 matching lines...)
3693 set.add(new IntWrapper(1)); | 3693 set.add(new IntWrapper(1)); |
3694 set.add(new IntWrapper(2)); | 3694 set.add(new IntWrapper(2)); |
3695 | 3695 |
3696 copyToVector(set, vector); | 3696 copyToVector(set, vector); |
3697 EXPECT_EQ(3u, vector.size()); | 3697 EXPECT_EQ(3u, vector.size()); |
3698 | 3698 |
3699 Vector<int> intVector; | 3699 Vector<int> intVector; |
3700 for (const auto& i : vector) | 3700 for (const auto& i : vector) |
3701 intVector.append(i->value()); | 3701 intVector.append(i->value()); |
3702 std::sort(intVector.begin(), intVector.end()); | 3702 std::sort(intVector.begin(), intVector.end()); |
3703 ASSERT_EQ(3u, intVector.size()); | 3703 DCHECK_EQ(3u, intVector.size()); |
3704 EXPECT_EQ(1, intVector[0]); | 3704 EXPECT_EQ(1, intVector[0]); |
3705 EXPECT_EQ(1, intVector[1]); | 3705 EXPECT_EQ(1, intVector[1]); |
3706 EXPECT_EQ(2, intVector[2]); | 3706 EXPECT_EQ(2, intVector[2]); |
3707 } | 3707 } |
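Note: Member<IntWrapper> keys hash by object identity (the pointer), not by the wrapped integer, so each `new IntWrapper(1)` above is a distinct entry; that is why wrappers holding the same value can coexist in these set-to-vector tests. A standalone sketch of the same effect with a plain pointer set:

    #include <cassert>
    #include <unordered_set>

    int main() {
      int* a = new int(1);
      int* b = new int(1);  // same value, different object
      int* c = new int(2);

      std::unordered_set<int*> set{a, b, c};  // keys are pointers, so no collapsing
      assert(set.size() == 3);

      delete a;
      delete b;
      delete c;
    }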
3708 | 3708 |
3709 TEST(HeapTest, WeakHeapHashCountedSetToVector) { | 3709 TEST(HeapTest, WeakHeapHashCountedSetToVector) { |
3710 HeapHashCountedSet<WeakMember<IntWrapper>> set; | 3710 HeapHashCountedSet<WeakMember<IntWrapper>> set; |
3711 HeapVector<Member<IntWrapper>> vector; | 3711 HeapVector<Member<IntWrapper>> vector; |
3712 set.add(new IntWrapper(1)); | 3712 set.add(new IntWrapper(1)); |
3713 set.add(new IntWrapper(1)); | 3713 set.add(new IntWrapper(1)); |
(...skipping 100 matching lines...)
3814 EXPECT_EQ(1, RefCountedAndGarbageCollected2::s_destructorCalls); | 3814 EXPECT_EQ(1, RefCountedAndGarbageCollected2::s_destructorCalls); |
3815 } | 3815 } |
3816 | 3816 |
3817 TEST(HeapTest, WeakMembers) { | 3817 TEST(HeapTest, WeakMembers) { |
3818 Bar::s_live = 0; | 3818 Bar::s_live = 0; |
3819 { | 3819 { |
3820 Persistent<Bar> h1 = Bar::create(); | 3820 Persistent<Bar> h1 = Bar::create(); |
3821 Persistent<Weak> h4; | 3821 Persistent<Weak> h4; |
3822 Persistent<WithWeakMember> h5; | 3822 Persistent<WithWeakMember> h5; |
3823 preciselyCollectGarbage(); | 3823 preciselyCollectGarbage(); |
3824 ASSERT_EQ(1u, Bar::s_live); // h1 is live. | 3824 DCHECK_EQ(1u, Bar::s_live); // h1 is live. |
3825 { | 3825 { |
3826 Bar* h2 = Bar::create(); | 3826 Bar* h2 = Bar::create(); |
3827 Bar* h3 = Bar::create(); | 3827 Bar* h3 = Bar::create(); |
3828 h4 = Weak::create(h2, h3); | 3828 h4 = Weak::create(h2, h3); |
3829 h5 = WithWeakMember::create(h2, h3); | 3829 h5 = WithWeakMember::create(h2, h3); |
3830 conservativelyCollectGarbage(); | 3830 conservativelyCollectGarbage(); |
3831 EXPECT_EQ(5u, Bar::s_live); // The on-stack pointer keeps h3 alive. | 3831 EXPECT_EQ(5u, Bar::s_live); // The on-stack pointer keeps h3 alive. |
3832 EXPECT_FALSE(h3->hasBeenFinalized()); | 3832 EXPECT_FALSE(h3->hasBeenFinalized()); |
3833 EXPECT_TRUE(h4->strongIsThere()); | 3833 EXPECT_TRUE(h4->strongIsThere()); |
3834 EXPECT_TRUE(h4->weakIsThere()); | 3834 EXPECT_TRUE(h4->weakIsThere()); |
(...skipping 1843 matching lines...)
5678 TEST(HeapTest, ThreadedStrongification) { | 5678 TEST(HeapTest, ThreadedStrongification) { |
5679 ThreadedStrongificationTester::test(); | 5679 ThreadedStrongificationTester::test(); |
5680 } | 5680 } |
5681 | 5681 |
5682 static bool allocateAndReturnBool() { | 5682 static bool allocateAndReturnBool() { |
5683 conservativelyCollectGarbage(); | 5683 conservativelyCollectGarbage(); |
5684 return true; | 5684 return true; |
5685 } | 5685 } |
5686 | 5686 |
5687 static bool checkGCForbidden() { | 5687 static bool checkGCForbidden() { |
5688 ASSERT(ThreadState::current()->isGCForbidden()); | 5688 DCHECK(ThreadState::current()->isGCForbidden()); |
5689 return true; | 5689 return true; |
5690 } | 5690 } |
5691 | 5691 |
5692 class MixinClass : public GarbageCollectedMixin { | 5692 class MixinClass : public GarbageCollectedMixin { |
5693 public: | 5693 public: |
5694 MixinClass() : m_dummy(checkGCForbidden()) {} | 5694 MixinClass() : m_dummy(checkGCForbidden()) {} |
5695 | 5695 |
5696 private: | 5696 private: |
5697 bool m_dummy; | 5697 bool m_dummy; |
5698 }; | 5698 }; |
(...skipping 438 matching lines...)
6137 static int sTraceLazy; | 6137 static int sTraceLazy; |
6138 }; | 6138 }; |
6139 | 6139 |
6140 int DeepEagerly::sTraceCalls = 0; | 6140 int DeepEagerly::sTraceCalls = 0; |
6141 int DeepEagerly::sTraceLazy = 0; | 6141 int DeepEagerly::sTraceLazy = 0; |
6142 | 6142 |
6143 TEST(HeapTest, TraceDeepEagerly) { | 6143 TEST(HeapTest, TraceDeepEagerly) { |
6144 // The allocation & GC overhead is considerable for this test, | 6144 // The allocation & GC overhead is considerable for this test, |
6145 // straining debug builds and lower-end targets too much to be | 6145 // straining debug builds and lower-end targets too much to be |
6146 // worth running. | 6146 // worth running. |
6147 #if !ENABLE(ASSERT) && !OS(ANDROID) | 6147 #if !DCHECK_IS_ON() && !OS(ANDROID) |
6148 DeepEagerly* obj = nullptr; | 6148 DeepEagerly* obj = nullptr; |
6149 for (int i = 0; i < 10000000; i++) | 6149 for (int i = 0; i < 10000000; i++) |
6150 obj = new DeepEagerly(obj); | 6150 obj = new DeepEagerly(obj); |
6151 | 6151 |
6152 Persistent<DeepEagerly> persistent(obj); | 6152 Persistent<DeepEagerly> persistent(obj); |
6153 preciselyCollectGarbage(); | 6153 preciselyCollectGarbage(); |
6154 | 6154 |
6155 // Verify that the DeepEagerly chain isn't completely unravelled | 6155 // Verify that the DeepEagerly chain isn't completely unravelled |
6156 // by performing eager trace() calls, but the explicit mark | 6156 // by performing eager trace() calls, but the explicit mark |
6157 // stack is switched once some nesting limit is exceeded. | 6157 // stack is switched once some nesting limit is exceeded. |
(...skipping 189 matching lines...)
6347 // Disable ASan, otherwise its stack checking (use-after-return) will | 6347 // Disable ASan, otherwise its stack checking (use-after-return) will |
6348 // confuse the direction check. | 6348 // confuse the direction check. |
6349 static char* previous = nullptr; | 6349 static char* previous = nullptr; |
6350 char dummy; | 6350 char dummy; |
6351 if (!previous) { | 6351 if (!previous) { |
6352 previous = &dummy; | 6352 previous = &dummy; |
6353 GrowthDirection result = stackGrowthDirection(); | 6353 GrowthDirection result = stackGrowthDirection(); |
6354 previous = nullptr; | 6354 previous = nullptr; |
6355 return result; | 6355 return result; |
6356 } | 6356 } |
6357 ASSERT(&dummy != previous); | 6357 DCHECK_NE(&dummy, previous); |
6358 return &dummy < previous ? GrowsTowardsLower : GrowsTowardsHigher; | 6358 return &dummy < previous ? GrowsTowardsLower : GrowsTowardsHigher; |
6359 } | 6359 } |
6360 | 6360 |
6361 } // namespace | 6361 } // namespace |
6362 | 6362 |
6363 TEST(HeapTest, StackGrowthDirection) { | 6363 TEST(HeapTest, StackGrowthDirection) { |
6364 // The implementation of marking probes stack usage as it runs, | 6364 // The implementation of marking probes stack usage as it runs, |
6365 // and has a builtin assumption that the stack grows towards | 6365 // and has a builtin assumption that the stack grows towards |
6366 // lower addresses. | 6366 // lower addresses. |
6367 EXPECT_EQ(GrowsTowardsLower, stackGrowthDirection()); | 6367 EXPECT_EQ(GrowsTowardsLower, stackGrowthDirection()); |
6368 } | 6368 } |
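Note: stackGrowthDirection() above probes the direction by comparing the address of a local variable across two nested frames, with ASan's use-after-return detection disabled so fake frames do not skew the comparison. A self-contained sketch of the same probe; strictly speaking the pointer comparison is unspecified behaviour in ISO C++ and aggressive inlining could defeat it, but it holds on the mainstream ABIs the test cares about:

    #include <cstdio>

    static bool innerFrameIsLower(const char* outerLocal) {
      char innerLocal = 0;
      return &innerLocal < outerLocal;  // is the inner frame at a lower address?
    }

    int main() {
      char outerLocal = 0;
      std::printf("stack grows towards %s addresses\n",
                  innerFrameIsLower(&outerLocal) ? "lower" : "higher");
    }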
6369 | 6369 |
6370 class TestMixinAllocationA : public GarbageCollected<TestMixinAllocationA>, | 6370 class TestMixinAllocationA : public GarbageCollected<TestMixinAllocationA>, |
6371 public GarbageCollectedMixin { | 6371 public GarbageCollectedMixin { |
6372 USING_GARBAGE_COLLECTED_MIXIN(TestMixinAllocationA); | 6372 USING_GARBAGE_COLLECTED_MIXIN(TestMixinAllocationA); |
6373 | 6373 |
6374 public: | 6374 public: |
6375 TestMixinAllocationA() { | 6375 TestMixinAllocationA() { |
6376 // Completely wrong in general, but test only | 6376 // Completely wrong in general, but test only |
6377 // runs this constructor while constructing another mixin. | 6377 // runs this constructor while constructing another mixin. |
6378 ASSERT(ThreadState::current()->isGCForbidden()); | 6378 DCHECK(ThreadState::current()->isGCForbidden()); |
6379 } | 6379 } |
6380 | 6380 |
6381 DEFINE_INLINE_VIRTUAL_TRACE() {} | 6381 DEFINE_INLINE_VIRTUAL_TRACE() {} |
6382 }; | 6382 }; |
6383 | 6383 |
6384 class TestMixinAllocationB : public TestMixinAllocationA { | 6384 class TestMixinAllocationB : public TestMixinAllocationA { |
6385 USING_GARBAGE_COLLECTED_MIXIN(TestMixinAllocationB); | 6385 USING_GARBAGE_COLLECTED_MIXIN(TestMixinAllocationB); |
6386 | 6386 |
6387 public: | 6387 public: |
6388 TestMixinAllocationB() | 6388 TestMixinAllocationB() |
6389 : m_a(new TestMixinAllocationA()) // Construct object during a mixin | 6389 : m_a(new TestMixinAllocationA()) // Construct object during a mixin |
6390 // construction. | 6390 // construction. |
6391 { | 6391 { |
6392 // Completely wrong in general, but test only | 6392 // Completely wrong in general, but test only |
6393 // runs this constructor while constructing another mixin. | 6393 // runs this constructor while constructing another mixin. |
6394 ASSERT(ThreadState::current()->isGCForbidden()); | 6394 DCHECK(ThreadState::current()->isGCForbidden()); |
6395 } | 6395 } |
6396 | 6396 |
6397 DEFINE_INLINE_TRACE() { | 6397 DEFINE_INLINE_TRACE() { |
6398 visitor->trace(m_a); | 6398 visitor->trace(m_a); |
6399 TestMixinAllocationA::trace(visitor); | 6399 TestMixinAllocationA::trace(visitor); |
6400 } | 6400 } |
6401 | 6401 |
6402 private: | 6402 private: |
6403 Member<TestMixinAllocationA> m_a; | 6403 Member<TestMixinAllocationA> m_a; |
6404 }; | 6404 }; |
6405 | 6405 |
6406 class TestMixinAllocationC final : public TestMixinAllocationB { | 6406 class TestMixinAllocationC final : public TestMixinAllocationB { |
6407 USING_GARBAGE_COLLECTED_MIXIN(TestMixinAllocationC); | 6407 USING_GARBAGE_COLLECTED_MIXIN(TestMixinAllocationC); |
6408 | 6408 |
6409 public: | 6409 public: |
6410 TestMixinAllocationC() { ASSERT(!ThreadState::current()->isGCForbidden()); } | 6410 TestMixinAllocationC() { DCHECK(!ThreadState::current()->isGCForbidden()); } |
6411 | 6411 |
6412 DEFINE_INLINE_TRACE() { TestMixinAllocationB::trace(visitor); } | 6412 DEFINE_INLINE_TRACE() { TestMixinAllocationB::trace(visitor); } |
6413 }; | 6413 }; |
6414 | 6414 |
6415 TEST(HeapTest, NestedMixinConstruction) { | 6415 TEST(HeapTest, NestedMixinConstruction) { |
6416 TestMixinAllocationC* object = new TestMixinAllocationC(); | 6416 TestMixinAllocationC* object = new TestMixinAllocationC(); |
6417 EXPECT_TRUE(object); | 6417 EXPECT_TRUE(object); |
6418 } | 6418 } |
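Note: the mixin tests above rely on object construction entering a "GC forbidden" scope that only unwinds when the outermost mixin-derived object finishes constructing; nested mixin construction (TestMixinAllocationB building a TestMixinAllocationA member) simply deepens that scope. A generic sketch of such a nestable, per-thread guard, with illustrative names rather than Blink's ThreadState API:

    class GCForbiddenScope {
     public:
      GCForbiddenScope() { ++depth(); }
      ~GCForbiddenScope() { --depth(); }
      static bool isGCForbidden() { return depth() > 0; }

     private:
      static int& depth() {
        thread_local int d = 0;  // per-thread nesting counter
        return d;
      }
    };

A constructor chain would hold one such scope per nesting level, so isGCForbidden() stays true until the outermost constructor returns.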
6419 | 6419 |
6420 class ObjectWithLargeAmountsOfAllocationInConstructor { | 6420 class ObjectWithLargeAmountsOfAllocationInConstructor { |
6421 public: | 6421 public: |
6422 ObjectWithLargeAmountsOfAllocationInConstructor( | 6422 ObjectWithLargeAmountsOfAllocationInConstructor( |
6423 size_t numberOfLargeObjectsToAllocate, | 6423 size_t numberOfLargeObjectsToAllocate, |
6424 ClassWithMember* member) { | 6424 ClassWithMember* member) { |
6425 // Should a constructor allocate plenty in its constructor, | 6425 // Should a constructor allocate plenty in its constructor, |
6426 // and it is a base of GC mixin, GCs will remain locked out | 6426 // and it is a base of GC mixin, GCs will remain locked out |
6427 // regardless, as we cannot safely trace the leftmost GC | 6427 // regardless, as we cannot safely trace the leftmost GC |
6428 // mixin base. | 6428 // mixin base. |
6429 ASSERT(ThreadState::current()->isGCForbidden()); | 6429 DCHECK(ThreadState::current()->isGCForbidden()); |
6430 for (size_t i = 0; i < numberOfLargeObjectsToAllocate; i++) { | 6430 for (size_t i = 0; i < numberOfLargeObjectsToAllocate; i++) { |
6431 LargeHeapObject* largeObject = LargeHeapObject::create(); | 6431 LargeHeapObject* largeObject = LargeHeapObject::create(); |
6432 EXPECT_TRUE(largeObject); | 6432 EXPECT_TRUE(largeObject); |
6433 EXPECT_EQ(0, member->traceCount()); | 6433 EXPECT_EQ(0, member->traceCount()); |
6434 } | 6434 } |
6435 } | 6435 } |
6436 }; | 6436 }; |
6437 | 6437 |
6438 class TestMixinAllocatingObject final | 6438 class TestMixinAllocatingObject final |
6439 : public TestMixinAllocationB, | 6439 : public TestMixinAllocationB, |
6440 public ObjectWithLargeAmountsOfAllocationInConstructor { | 6440 public ObjectWithLargeAmountsOfAllocationInConstructor { |
6441 USING_GARBAGE_COLLECTED_MIXIN(TestMixinAllocatingObject); | 6441 USING_GARBAGE_COLLECTED_MIXIN(TestMixinAllocatingObject); |
6442 | 6442 |
6443 public: | 6443 public: |
6444 static TestMixinAllocatingObject* create(ClassWithMember* member) { | 6444 static TestMixinAllocatingObject* create(ClassWithMember* member) { |
6445 return new TestMixinAllocatingObject(member); | 6445 return new TestMixinAllocatingObject(member); |
6446 } | 6446 } |
6447 | 6447 |
6448 DEFINE_INLINE_TRACE() { | 6448 DEFINE_INLINE_TRACE() { |
6449 visitor->trace(m_traceCounter); | 6449 visitor->trace(m_traceCounter); |
6450 TestMixinAllocationB::trace(visitor); | 6450 TestMixinAllocationB::trace(visitor); |
6451 } | 6451 } |
6452 | 6452 |
6453 int traceCount() const { return m_traceCounter->traceCount(); } | 6453 int traceCount() const { return m_traceCounter->traceCount(); } |
6454 | 6454 |
6455 private: | 6455 private: |
6456 TestMixinAllocatingObject(ClassWithMember* member) | 6456 TestMixinAllocatingObject(ClassWithMember* member) |
6457 : ObjectWithLargeAmountsOfAllocationInConstructor(600, member), | 6457 : ObjectWithLargeAmountsOfAllocationInConstructor(600, member), |
6458 m_traceCounter(TraceCounter::create()) { | 6458 m_traceCounter(TraceCounter::create()) { |
6459 ASSERT(!ThreadState::current()->isGCForbidden()); | 6459 DCHECK(!ThreadState::current()->isGCForbidden()); |
6460 conservativelyCollectGarbage(); | 6460 conservativelyCollectGarbage(); |
6461 EXPECT_GT(member->traceCount(), 0); | 6461 EXPECT_GT(member->traceCount(), 0); |
6462 EXPECT_GT(traceCount(), 0); | 6462 EXPECT_GT(traceCount(), 0); |
6463 } | 6463 } |
6464 | 6464 |
6465 Member<TraceCounter> m_traceCounter; | 6465 Member<TraceCounter> m_traceCounter; |
6466 }; | 6466 }; |
6467 | 6467 |
6468 TEST(HeapTest, MixinConstructionNoGC) { | 6468 TEST(HeapTest, MixinConstructionNoGC) { |
6469 Persistent<ClassWithMember> object = ClassWithMember::create(); | 6469 Persistent<ClassWithMember> object = ClassWithMember::create(); |
(...skipping 63 matching lines...)
6533 std::unique_ptr<WebThread> workerThread = | 6533 std::unique_ptr<WebThread> workerThread = |
6534 WTF::wrapUnique(Platform::current()->createThread("Test Worker Thread")); | 6534 WTF::wrapUnique(Platform::current()->createThread("Test Worker Thread")); |
6535 DestructorLockingObject* object = nullptr; | 6535 DestructorLockingObject* object = nullptr; |
6536 workerThread->getWebTaskRunner()->postTask( | 6536 workerThread->getWebTaskRunner()->postTask( |
6537 BLINK_FROM_HERE, | 6537 BLINK_FROM_HERE, |
6538 crossThreadBind(workerThreadMainForCrossThreadWeakPersistentTest, | 6538 crossThreadBind(workerThreadMainForCrossThreadWeakPersistentTest, |
6539 crossThreadUnretained(&object))); | 6539 crossThreadUnretained(&object))); |
6540 parkMainThread(); | 6540 parkMainThread(); |
6541 | 6541 |
6542 // Step 3: Set up a CrossThreadWeakPersistent. | 6542 // Step 3: Set up a CrossThreadWeakPersistent. |
6543 ASSERT_TRUE(object); | 6543 DCHECK(object); |
6544 CrossThreadWeakPersistent<DestructorLockingObject> crossThreadWeakPersistent( | 6544 CrossThreadWeakPersistent<DestructorLockingObject> crossThreadWeakPersistent( |
6545 object); | 6545 object); |
6546 object = nullptr; | 6546 object = nullptr; |
6547 { | 6547 { |
6548 SafePointAwareMutexLocker recursiveMutexLocker(recursiveMutex()); | 6548 SafePointAwareMutexLocker recursiveMutexLocker(recursiveMutex()); |
6549 EXPECT_EQ(0, DestructorLockingObject::s_destructorCalls); | 6549 EXPECT_EQ(0, DestructorLockingObject::s_destructorCalls); |
6550 } | 6550 } |
6551 | 6551 |
6552 { | 6552 { |
6553 // Pretend we have no pointers on stack during the step 4. | 6553 // Pretend we have no pointers on stack during the step 4. |
(...skipping 251 matching lines...)
6805 "HeapVector"); | 6805 "HeapVector"); |
6806 static_assert( | 6806 static_assert( |
6807 WTF::IsGarbageCollectedType<HeapDeque<Member<IntWrapper>>>::value, | 6807 WTF::IsGarbageCollectedType<HeapDeque<Member<IntWrapper>>>::value, |
6808 "HeapDeque"); | 6808 "HeapDeque"); |
6809 static_assert(WTF::IsGarbageCollectedType< | 6809 static_assert(WTF::IsGarbageCollectedType< |
6810 HeapTerminatedArray<Member<IntWrapper>>>::value, | 6810 HeapTerminatedArray<Member<IntWrapper>>>::value, |
6811 "HeapTerminatedArray"); | 6811 "HeapTerminatedArray"); |
6812 } | 6812 } |
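Note: the static_asserts above exercise the WTF::IsGarbageCollectedType trait for heap collections. A generic sketch of how such a trait can be built by detecting a nested marker type at compile time; the marker name below is hypothetical, not necessarily what Oilpan uses:

    #include <type_traits>

    template <typename T, typename = void>
    struct IsGCType : std::false_type {};

    template <typename T>
    struct IsGCType<T, std::void_t<typename T::IsGarbageCollectedTypeMarker>>
        : std::true_type {};

    struct NotGC {};
    struct GCThing { using IsGarbageCollectedTypeMarker = int; };

    static_assert(!IsGCType<NotGC>::value, "plain types are not GC types");
    static_assert(IsGCType<GCThing>::value, "the marker makes it a GC type");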
6813 | 6813 |
6814 } // namespace blink | 6814 } // namespace blink |