Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(304)

Side by Side Diff: third_party/WebKit/Source/platform/heap/HeapTest.cpp

Issue 1477023003: Refactor the Heap into ThreadHeap to prepare for per thread heaps Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 4 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 /* 1 /*
2 * Copyright (C) 2013 Google Inc. All rights reserved. 2 * Copyright (C) 2013 Google Inc. All rights reserved.
3 * 3 *
4 * Redistribution and use in source and binary forms, with or without 4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions are 5 * modification, are permitted provided that the following conditions are
6 * met: 6 * met:
7 * 7 *
8 * * Redistributions of source code must retain the above copyright 8 * * Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer. 9 * notice, this list of conditions and the following disclaimer.
10 * * Redistributions in binary form must reproduce the above 10 * * Redistributions in binary form must reproduce the above
(...skipping 30 matching lines...) Expand all
41 #include "public/platform/WebTaskRunner.h" 41 #include "public/platform/WebTaskRunner.h"
42 #include "public/platform/WebTraceLocation.h" 42 #include "public/platform/WebTraceLocation.h"
43 #include "testing/gtest/include/gtest/gtest.h" 43 #include "testing/gtest/include/gtest/gtest.h"
44 #include "wtf/HashTraits.h" 44 #include "wtf/HashTraits.h"
45 #include "wtf/LinkedHashSet.h" 45 #include "wtf/LinkedHashSet.h"
46 46
47 namespace blink { 47 namespace blink {
48 48
49 static void preciselyCollectGarbage() 49 static void preciselyCollectGarbage()
50 { 50 {
51 Heap::collectGarbage(BlinkGC::NoHeapPointersOnStack, BlinkGC::GCWithSweep, BlinkGC::ForcedGC); 51 ThreadHeap::collectGarbage(BlinkGC::NoHeapPointersOnStack, BlinkGC::GCWithSweep, BlinkGC::ForcedGC);
52 } 52 }
53 53
54 static void conservativelyCollectGarbage() 54 static void conservativelyCollectGarbage()
55 { 55 {
56 Heap::collectGarbage(BlinkGC::HeapPointersOnStack, BlinkGC::GCWithSweep, BlinkGC::ForcedGC); 56 ThreadHeap::collectGarbage(BlinkGC::HeapPointersOnStack, BlinkGC::GCWithSweep, BlinkGC::ForcedGC);
57 } 57 }
58 58
59 class IntWrapper : public GarbageCollectedFinalized<IntWrapper> { 59 class IntWrapper : public GarbageCollectedFinalized<IntWrapper> {
60 public: 60 public:
61 static IntWrapper* create(int x) 61 static IntWrapper* create(int x)
62 { 62 {
63 return new IntWrapper(x); 63 return new IntWrapper(x);
64 } 64 }
65 65
66 virtual ~IntWrapper() 66 virtual ~IntWrapper()
(...skipping 83 matching lines...) Expand 10 before | Expand all | Expand 10 after
150 bool isHashTableDeletedValue() const { return first == reinterpret_cast<IntWrapper*>(-1); } 150 bool isHashTableDeletedValue() const { return first == reinterpret_cast<IntWrapper*>(-1); }
151 151
152 // Since we don't allocate independent objects of this type, we don't need 152 // Since we don't allocate independent objects of this type, we don't need
153 // a regular trace method. Instead, we use a traceInCollection method. If 153 // a regular trace method. Instead, we use a traceInCollection method. If
154 // the entry should be deleted from the collection we return true and don't 154 // the entry should be deleted from the collection we return true and don't
155 // trace the strong pointer. 155 // trace the strong pointer.
156 template<typename VisitorDispatcher> 156 template<typename VisitorDispatcher>
157 bool traceInCollection(VisitorDispatcher visitor, WTF::ShouldWeakPointersBeMarkedStrongly strongify) 157 bool traceInCollection(VisitorDispatcher visitor, WTF::ShouldWeakPointersBeMarkedStrongly strongify)
158 { 158 {
159 visitor->traceInCollection(second, strongify); 159 visitor->traceInCollection(second, strongify);
160 if (!Heap::isHeapObjectAlive(second)) 160 if (!ThreadHeap::isHeapObjectAlive(second))
161 return true; 161 return true;
162 // FIXME: traceInCollection is also called from WeakProcessing to check if the entry is dead. 162 // FIXME: traceInCollection is also called from WeakProcessing to check if the entry is dead.
163 // The below if avoids calling trace in that case by only calling trace when |first| is not yet marked. 163 // The below if avoids calling trace in that case by only calling trace when |first| is not yet marked.
164 if (!Heap::isHeapObjectAlive(first)) 164 if (!ThreadHeap::isHeapObjectAlive(first))
165 visitor->trace(first); 165 visitor->trace(first);
166 return false; 166 return false;
167 } 167 }
168 }; 168 };
169 169
170 template<typename T> struct WeakHandlingHashTraits : WTF::SimpleClassHashTraits<T> { 170 template<typename T> struct WeakHandlingHashTraits : WTF::SimpleClassHashTraits<T> {
171 // We want to treat the object as a weak object in the sense that it can 171 // We want to treat the object as a weak object in the sense that it can
172 // disappear from hash sets and hash maps. 172 // disappear from hash sets and hash maps.
173 static const WTF::WeakHandlingFlag weakHandlingFlag = WTF::WeakHandlingInCollections; 173 static const WTF::WeakHandlingFlag weakHandlingFlag = WTF::WeakHandlingInCollections;
174 // Normally whether or not an object needs tracing is inferred 174 // Normally whether or not an object needs tracing is inferred
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after
237 namespace blink { 237 namespace blink {
238 238
239 class TestGCScope { 239 class TestGCScope {
240 public: 240 public:
241 explicit TestGCScope(BlinkGC::StackState state) 241 explicit TestGCScope(BlinkGC::StackState state)
242 : m_state(ThreadState::current()) 242 : m_state(ThreadState::current())
243 , m_safePointScope(state) 243 , m_safePointScope(state)
244 , m_parkedAllThreads(false) 244 , m_parkedAllThreads(false)
245 { 245 {
246 ASSERT(m_state->checkThread()); 246 ASSERT(m_state->checkThread());
247 if (LIKELY(ThreadState::stopThreads())) { 247 if (LIKELY(m_state->heap().park())) {
248 Heap::preGC(); 248 m_state->heap().preGC();
249 m_parkedAllThreads = true; 249 m_parkedAllThreads = true;
250 } 250 }
251 } 251 }
252 252
253 bool allThreadsParked() { return m_parkedAllThreads; } 253 bool allThreadsParked() { return m_parkedAllThreads; }
254 254
255 ~TestGCScope() 255 ~TestGCScope()
256 { 256 {
257 // Only cleanup if we parked all threads in which case the GC happened 257 // Only cleanup if we parked all threads in which case the GC happened
258 // and we need to resume the other threads. 258 // and we need to resume the other threads.
259 if (LIKELY(m_parkedAllThreads)) { 259 if (LIKELY(m_parkedAllThreads)) {
260 Heap::postGC(BlinkGC::GCWithSweep); 260 m_state->heap().postGC(BlinkGC::GCWithSweep);
261 ThreadState::resumeThreads(); 261 m_state->heap().resume();
262 } 262 }
263 } 263 }
264 264
265 private: 265 private:
266 ThreadState* m_state; 266 ThreadState* m_state;
267 SafePointScope m_safePointScope; 267 SafePointScope m_safePointScope;
268 bool m_parkedAllThreads; // False if we fail to park all threads 268 bool m_parkedAllThreads; // False if we fail to park all threads
269 }; 269 };
270 270
271 #define DEFINE_VISITOR_METHODS(Type) \ 271 #define DEFINE_VISITOR_METHODS(Type) \
(...skipping 135 matching lines...) Expand 10 before | Expand all | Expand 10 after
407 DEFINE_INLINE_TRACE() { } 407 DEFINE_INLINE_TRACE() { }
408 private: 408 private:
409 static const int s_arraySize = 1000; 409 static const int s_arraySize = 1000;
410 int8_t m_array[s_arraySize]; 410 int8_t m_array[s_arraySize];
411 }; 411 };
412 412
413 // Do several GCs to make sure that later GCs don't free up old memory from 413 // Do several GCs to make sure that later GCs don't free up old memory from
414 // previously run tests in this process. 414 // previously run tests in this process.
415 static void clearOutOldGarbage() 415 static void clearOutOldGarbage()
416 { 416 {
417 ThreadHeap& heap = ThreadState::current()->heap();
417 while (true) { 418 while (true) {
418 size_t used = Heap::objectPayloadSizeForTesting(); 419 size_t used = heap.objectPayloadSizeForTesting();
419 preciselyCollectGarbage(); 420 preciselyCollectGarbage();
420 if (Heap::objectPayloadSizeForTesting() >= used) 421 if (heap.objectPayloadSizeForTesting() >= used)
421 break; 422 break;
422 } 423 }
423 } 424 }
424 425
425 class OffHeapInt : public RefCounted<OffHeapInt> { 426 class OffHeapInt : public RefCounted<OffHeapInt> {
426 public: 427 public:
427 static RefPtr<OffHeapInt> create(int x) 428 static RefPtr<OffHeapInt> create(int x)
428 { 429 {
429 return adoptRef(new OffHeapInt(x)); 430 return adoptRef(new OffHeapInt(x));
430 } 431 }
(...skipping 106 matching lines...) Expand 10 before | Expand all | Expand 10 after
537 538
538 if (gcCount < gcPerThread) { 539 if (gcCount < gcPerThread) {
539 preciselyCollectGarbage(); 540 preciselyCollectGarbage();
540 gcCount++; 541 gcCount++;
541 atomicIncrement(&m_gcCount); 542 atomicIncrement(&m_gcCount);
542 } 543 }
543 544
544 // Taking snapshot shouldn't have any bad side effect. 545 // Taking snapshot shouldn't have any bad side effect.
545 // TODO(haraken): This snapshot GC causes crashes, so disable 546 // TODO(haraken): This snapshot GC causes crashes, so disable
546 // it at the moment. Fix the crash and enable it. 547 // it at the moment. Fix the crash and enable it.
547 // Heap::collectGarbage(BlinkGC::NoHeapPointersOnStack, BlinkGC::TakeSnapshot, BlinkGC::ForcedGC); 548 // ThreadHeap::collectGarbage(BlinkGC::NoHeapPointersOnStack, BlinkGC::TakeSnapshot, BlinkGC::ForcedGC);
548 preciselyCollectGarbage(); 549 preciselyCollectGarbage();
549 EXPECT_EQ(wrapper->value(), 0x0bbac0de); 550 EXPECT_EQ(wrapper->value(), 0x0bbac0de);
550 EXPECT_EQ((*globalPersistent)->value(), 0x0ed0cabb); 551 EXPECT_EQ((*globalPersistent)->value(), 0x0ed0cabb);
551 } 552 }
552 SafePointScope scope(BlinkGC::NoHeapPointersOnStack); 553 SafePointScope scope(BlinkGC::NoHeapPointersOnStack);
553 testing::yieldCurrentThread(); 554 testing::yieldCurrentThread();
554 } 555 }
555 556
556 // Intentionally leak the cross-thread persistent so as to verify 557 // Intentionally leak the cross-thread persistent so as to verify
557 // that later GCs correctly handle cross-thread persistents that 558 // that later GCs correctly handle cross-thread persistents that
558 // refer to finalized objects after their heaps have been detached 559 // refer to finalized objects after their heaps have been detached
559 // and freed. 560 // and freed.
560 EXPECT_TRUE(longLivingPersistent.leakPtr()); 561 EXPECT_TRUE(longLivingPersistent.leakPtr());
561 562
562 ThreadState::detach(); 563 ThreadState::detachCurrentThread();
563 atomicDecrement(&m_threadsToFinish); 564 atomicDecrement(&m_threadsToFinish);
564 } 565 }
565 }; 566 };
566 567
567 class ThreadedWeaknessTester : public ThreadedTesterBase { 568 class ThreadedWeaknessTester : public ThreadedTesterBase {
568 public: 569 public:
569 static void test() 570 static void test()
570 { 571 {
571 ThreadedTesterBase::test(new ThreadedWeaknessTester); 572 ThreadedTesterBase::test(new ThreadedWeaknessTester);
572 } 573 }
(...skipping 19 matching lines...) Expand all
592 593
593 if (gcCount < gcPerThread) { 594 if (gcCount < gcPerThread) {
594 preciselyCollectGarbage(); 595 preciselyCollectGarbage();
595 gcCount++; 596 gcCount++;
596 atomicIncrement(&m_gcCount); 597 atomicIncrement(&m_gcCount);
597 } 598 }
598 599
599 // Taking snapshot shouldn't have any bad side effect. 600 // Taking snapshot shouldn't have any bad side effect.
600 // TODO(haraken): This snapshot GC causes crashes, so disable 601 // TODO(haraken): This snapshot GC causes crashes, so disable
601 // it at the moment. Fix the crash and enable it. 602 // it at the moment. Fix the crash and enable it.
602 // Heap::collectGarbage(BlinkGC::NoHeapPointersOnStack, BlinkGC::TakeSnapshot, BlinkGC::ForcedGC); 603 // ThreadHeap::collectGarbage(BlinkGC::NoHeapPointersOnStack, BlinkGC::TakeSnapshot, BlinkGC::ForcedGC);
603 preciselyCollectGarbage(); 604 preciselyCollectGarbage();
604 EXPECT_TRUE(weakMap->isEmpty()); 605 EXPECT_TRUE(weakMap->isEmpty());
605 EXPECT_TRUE(weakMap2.isEmpty()); 606 EXPECT_TRUE(weakMap2.isEmpty());
606 } 607 }
607 SafePointScope scope(BlinkGC::NoHeapPointersOnStack); 608 SafePointScope scope(BlinkGC::NoHeapPointersOnStack);
608 testing::yieldCurrentThread(); 609 testing::yieldCurrentThread();
609 } 610 }
610 ThreadState::detach(); 611 ThreadState::detachCurrentThread();
611 atomicDecrement(&m_threadsToFinish); 612 atomicDecrement(&m_threadsToFinish);
612 } 613 }
613 }; 614 };
614 615
615 class ThreadPersistentHeapTester : public ThreadedTesterBase { 616 class ThreadPersistentHeapTester : public ThreadedTesterBase {
616 public: 617 public:
617 static void test() 618 static void test()
618 { 619 {
619 ThreadedTesterBase::test(new ThreadPersistentHeapTester); 620 ThreadedTesterBase::test(new ThreadPersistentHeapTester);
620 } 621 }
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after
669 void runThread() override 670 void runThread() override
670 { 671 {
671 ThreadState::attach(); 672 ThreadState::attach();
672 673
673 PersistentChain::create(100); 674 PersistentChain::create(100);
674 675
675 // Upon thread detach, GCs will run until all persistents have been 676 // Upon thread detach, GCs will run until all persistents have been
676 // released. We verify that the draining of persistents proceeds 677 // released. We verify that the draining of persistents proceeds
677 // as expected by dropping one Persistent<> per GC until there 678 // as expected by dropping one Persistent<> per GC until there
678 // are none left. 679 // are none left.
679 ThreadState::detach(); 680 ThreadState::detachCurrentThread();
680 atomicDecrement(&m_threadsToFinish); 681 atomicDecrement(&m_threadsToFinish);
681 } 682 }
682 }; 683 };
683 684
684 // The accounting for memory includes the memory used by rounding up object 685 // The accounting for memory includes the memory used by rounding up object
685 // sizes. This is done in a different way on 32 bit and 64 bit, so we have to 686 // sizes. This is done in a different way on 32 bit and 64 bit, so we have to
686 // have some slack in the tests. 687 // have some slack in the tests.
687 template<typename T> 688 template<typename T>
688 void CheckWithSlack(T expected, T actual, int slack) 689 void CheckWithSlack(T expected, T actual, int slack)
689 { 690 {
(...skipping 23 matching lines...) Expand all
713 714
714 class ClassWithMember : public GarbageCollected<ClassWithMember> { 715 class ClassWithMember : public GarbageCollected<ClassWithMember> {
715 public: 716 public:
716 static ClassWithMember* create() 717 static ClassWithMember* create()
717 { 718 {
718 return new ClassWithMember(); 719 return new ClassWithMember();
719 } 720 }
720 721
721 DEFINE_INLINE_TRACE() 722 DEFINE_INLINE_TRACE()
722 { 723 {
723 EXPECT_TRUE(Heap::isHeapObjectAlive(this)); 724 EXPECT_TRUE(ThreadHeap::isHeapObjectAlive(this));
724 if (!traceCount()) 725 if (!traceCount())
725 EXPECT_FALSE(Heap::isHeapObjectAlive(m_traceCounter)); 726 EXPECT_FALSE(ThreadHeap::isHeapObjectAlive(m_traceCounter));
726 else 727 else
727 EXPECT_TRUE(Heap::isHeapObjectAlive(m_traceCounter)); 728 EXPECT_TRUE(ThreadHeap::isHeapObjectAlive(m_traceCounter));
728 729
729 visitor->trace(m_traceCounter); 730 visitor->trace(m_traceCounter);
730 } 731 }
731 732
732 int traceCount() { return m_traceCounter->traceCount(); } 733 int traceCount() { return m_traceCounter->traceCount(); }
733 734
734 private: 735 private:
735 ClassWithMember() 736 ClassWithMember()
736 : m_traceCounter(TraceCounter::create()) 737 : m_traceCounter(TraceCounter::create())
737 { } 738 { }
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
769 // Override operator new to allocate IntNode subtype objects onto 770 // Override operator new to allocate IntNode subtype objects onto
770 // the dedicated heap for blink::Node. 771 // the dedicated heap for blink::Node.
771 // 772 //
772 // TODO(haraken): untangling the heap unit tests from Blink would 773 // TODO(haraken): untangling the heap unit tests from Blink would
773 // simplify and avoid running into this problem - http://crbug.com/425381 774 // simplify and avoid running into this problem - http://crbug.com/425381
774 GC_PLUGIN_IGNORE("crbug.com/443854") 775 GC_PLUGIN_IGNORE("crbug.com/443854")
775 void* operator new(size_t size) 776 void* operator new(size_t size)
776 { 777 {
777 ThreadState* state = ThreadState::current(); 778 ThreadState* state = ThreadState::current();
778 const char* typeName = WTF_HEAP_PROFILER_TYPE_NAME(IntNode); 779 const char* typeName = WTF_HEAP_PROFILER_TYPE_NAME(IntNode);
779 return Heap::allocateOnArenaIndex(state, size, BlinkGC::NodeArenaIndex, GCInfoTrait<IntNode>::index(), typeName); 780 return ThreadHeap::allocateOnArenaIndex(state, size, BlinkGC::NodeArenaIndex, GCInfoTrait<IntNode>::index(), typeName);
780 } 781 }
781 782
782 static IntNode* create(int i) 783 static IntNode* create(int i)
783 { 784 {
784 return new IntNode(i); 785 return new IntNode(i);
785 } 786 }
786 787
787 DEFINE_INLINE_TRACE() { } 788 DEFINE_INLINE_TRACE() { }
788 789
789 int value() { return m_value; } 790 int value() { return m_value; }
(...skipping 303 matching lines...) Expand 10 before | Expand all | Expand 10 after
1093 } 1094 }
1094 1095
1095 DEFINE_INLINE_VIRTUAL_TRACE() 1096 DEFINE_INLINE_VIRTUAL_TRACE()
1096 { 1097 {
1097 visitor->trace(m_strongBar); 1098 visitor->trace(m_strongBar);
1098 visitor->template registerWeakMembers<Weak, &Weak::zapWeakMembers>(this) ; 1099 visitor->template registerWeakMembers<Weak, &Weak::zapWeakMembers>(this) ;
1099 } 1100 }
1100 1101
1101 void zapWeakMembers(Visitor* visitor) 1102 void zapWeakMembers(Visitor* visitor)
1102 { 1103 {
1103 if (!Heap::isHeapObjectAlive(m_weakBar)) 1104 if (!ThreadHeap::isHeapObjectAlive(m_weakBar))
1104 m_weakBar = 0; 1105 m_weakBar = 0;
1105 } 1106 }
1106 1107
1107 bool strongIsThere() { return !!m_strongBar; } 1108 bool strongIsThere() { return !!m_strongBar; }
1108 bool weakIsThere() { return !!m_weakBar; } 1109 bool weakIsThere() { return !!m_weakBar; }
1109 1110
1110 private: 1111 private:
1111 Weak(Bar* strongBar, Bar* weakBar) 1112 Weak(Bar* strongBar, Bar* weakBar)
1112 : Bar() 1113 : Bar()
1113 , m_strongBar(strongBar) 1114 , m_strongBar(strongBar)
(...skipping 174 matching lines...) Expand 10 before | Expand all | Expand 10 after
1288 static FinalizationObserver* create(T* data) { return new FinalizationObserv er(data); } 1289 static FinalizationObserver* create(T* data) { return new FinalizationObserv er(data); }
1289 bool didCallWillFinalize() const { return m_didCallWillFinalize; } 1290 bool didCallWillFinalize() const { return m_didCallWillFinalize; }
1290 1291
1291 DEFINE_INLINE_TRACE() 1292 DEFINE_INLINE_TRACE()
1292 { 1293 {
1293 visitor->template registerWeakMembers<FinalizationObserver<T>, &Finaliza tionObserver<T>::zapWeakMembers>(this); 1294 visitor->template registerWeakMembers<FinalizationObserver<T>, &Finaliza tionObserver<T>::zapWeakMembers>(this);
1294 } 1295 }
1295 1296
1296 void zapWeakMembers(Visitor* visitor) 1297 void zapWeakMembers(Visitor* visitor)
1297 { 1298 {
1298 if (m_data && !Heap::isHeapObjectAlive(m_data)) { 1299 if (m_data && !ThreadHeap::isHeapObjectAlive(m_data)) {
1299 m_data->willFinalize(); 1300 m_data->willFinalize();
1300 m_data = nullptr; 1301 m_data = nullptr;
1301 m_didCallWillFinalize = true; 1302 m_didCallWillFinalize = true;
1302 } 1303 }
1303 } 1304 }
1304 1305
1305 private: 1306 private:
1306 FinalizationObserver(T* data) 1307 FinalizationObserver(T* data)
1307 : m_data(data) 1308 : m_data(data)
1308 , m_didCallWillFinalize(false) 1309 , m_didCallWillFinalize(false)
(...skipping 303 matching lines...) Expand 10 before | Expand all | Expand 10 after
1612 static const size_t s_length = 1024; 1613 static const size_t s_length = 1024;
1613 char m_data[s_length]; 1614 char m_data[s_length];
1614 }; 1615 };
1615 1616
1616 int OneKiloByteObject::s_destructorCalls = 0; 1617 int OneKiloByteObject::s_destructorCalls = 0;
1617 1618
1618 class DynamicallySizedObject : public GarbageCollected<DynamicallySizedObject> { 1619 class DynamicallySizedObject : public GarbageCollected<DynamicallySizedObject> {
1619 public: 1620 public:
1620 static DynamicallySizedObject* create(size_t size) 1621 static DynamicallySizedObject* create(size_t size)
1621 { 1622 {
1622 void* slot = Heap::allocate<DynamicallySizedObject>(size); 1623 void* slot = ThreadHeap::allocate<DynamicallySizedObject>(size);
1623 return new (slot) DynamicallySizedObject(); 1624 return new (slot) DynamicallySizedObject();
1624 } 1625 }
1625 1626
1626 void* operator new(std::size_t, void* location) 1627 void* operator new(std::size_t, void* location)
1627 { 1628 {
1628 return location; 1629 return location;
1629 } 1630 }
1630 1631
1631 uint8_t get(int i) 1632 uint8_t get(int i)
1632 { 1633 {
(...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after
1757 ThreadedWeaknessTester::test(); 1758 ThreadedWeaknessTester::test();
1758 } 1759 }
1759 1760
1760 TEST(HeapTest, ThreadPersistent) 1761 TEST(HeapTest, ThreadPersistent)
1761 { 1762 {
1762 ThreadPersistentHeapTester::test(); 1763 ThreadPersistentHeapTester::test();
1763 } 1764 }
1764 1765
1765 TEST(HeapTest, BasicFunctionality) 1766 TEST(HeapTest, BasicFunctionality)
1766 { 1767 {
1768 ThreadHeap& heap = ThreadState::current()->heap();
1767 clearOutOldGarbage(); 1769 clearOutOldGarbage();
1768 size_t initialObjectPayloadSize = Heap::objectPayloadSizeForTesting(); 1770 size_t initialObjectPayloadSize = heap.objectPayloadSizeForTesting();
1769 { 1771 {
1770 size_t slack = 0; 1772 size_t slack = 0;
1771 1773
1772 // When the test starts there may already have been leaked some memory 1774 // When the test starts there may already have been leaked some memory
1773 // on the heap, so we establish a base line. 1775 // on the heap, so we establish a base line.
1774 size_t baseLevel = initialObjectPayloadSize; 1776 size_t baseLevel = initialObjectPayloadSize;
1775 bool testPagesAllocated = !baseLevel; 1777 bool testPagesAllocated = !baseLevel;
1776 if (testPagesAllocated) 1778 if (testPagesAllocated)
1777 EXPECT_EQ(Heap::allocatedSpace(), 0ul); 1779 EXPECT_EQ(heap.heapStats().allocatedSpace(), 0ul);
1778 1780
1779 // This allocates objects on the general heap which should add a page of memory. 1781 // This allocates objects on the general heap which should add a page of memory.
1780 DynamicallySizedObject* alloc32 = DynamicallySizedObject::create(32); 1782 DynamicallySizedObject* alloc32 = DynamicallySizedObject::create(32);
1781 slack += 4; 1783 slack += 4;
1782 memset(alloc32, 40, 32); 1784 memset(alloc32, 40, 32);
1783 DynamicallySizedObject* alloc64 = DynamicallySizedObject::create(64); 1785 DynamicallySizedObject* alloc64 = DynamicallySizedObject::create(64);
1784 slack += 4; 1786 slack += 4;
1785 memset(alloc64, 27, 64); 1787 memset(alloc64, 27, 64);
1786 1788
1787 size_t total = 96; 1789 size_t total = 96;
1788 1790
1789 CheckWithSlack(baseLevel + total, Heap::objectPayloadSizeForTesting(), s lack); 1791 CheckWithSlack(baseLevel + total, heap.objectPayloadSizeForTesting(), sl ack);
1790 if (testPagesAllocated) 1792 if (testPagesAllocated)
1791 EXPECT_EQ(Heap::allocatedSpace(), blinkPageSize * 2); 1793 EXPECT_EQ(heap.heapStats().allocatedSpace(), blinkPageSize * 2);
1792 1794
1793 EXPECT_EQ(alloc32->get(0), 40); 1795 EXPECT_EQ(alloc32->get(0), 40);
1794 EXPECT_EQ(alloc32->get(31), 40); 1796 EXPECT_EQ(alloc32->get(31), 40);
1795 EXPECT_EQ(alloc64->get(0), 27); 1797 EXPECT_EQ(alloc64->get(0), 27);
1796 EXPECT_EQ(alloc64->get(63), 27); 1798 EXPECT_EQ(alloc64->get(63), 27);
1797 1799
1798 conservativelyCollectGarbage(); 1800 conservativelyCollectGarbage();
1799 1801
1800 EXPECT_EQ(alloc32->get(0), 40); 1802 EXPECT_EQ(alloc32->get(0), 40);
1801 EXPECT_EQ(alloc32->get(31), 40); 1803 EXPECT_EQ(alloc32->get(31), 40);
1802 EXPECT_EQ(alloc64->get(0), 27); 1804 EXPECT_EQ(alloc64->get(0), 27);
1803 EXPECT_EQ(alloc64->get(63), 27); 1805 EXPECT_EQ(alloc64->get(63), 27);
1804 } 1806 }
1805 1807
1806 clearOutOldGarbage(); 1808 clearOutOldGarbage();
1807 size_t total = 0; 1809 size_t total = 0;
1808 size_t slack = 0; 1810 size_t slack = 0;
1809 size_t baseLevel = Heap::objectPayloadSizeForTesting(); 1811 size_t baseLevel = heap.objectPayloadSizeForTesting();
1810 bool testPagesAllocated = !baseLevel; 1812 bool testPagesAllocated = !baseLevel;
1811 if (testPagesAllocated) 1813 if (testPagesAllocated)
1812 EXPECT_EQ(Heap::allocatedSpace(), 0ul); 1814 EXPECT_EQ(heap.heapStats().allocatedSpace(), 0ul);
1813 1815
1814 size_t big = 1008; 1816 size_t big = 1008;
1815 Persistent<DynamicallySizedObject> bigArea = DynamicallySizedObject::create( big); 1817 Persistent<DynamicallySizedObject> bigArea = DynamicallySizedObject::create( big);
1816 total += big; 1818 total += big;
1817 slack += 4; 1819 slack += 4;
1818 1820
1819 size_t persistentCount = 0; 1821 size_t persistentCount = 0;
1820 const size_t numPersistents = 100000; 1822 const size_t numPersistents = 100000;
1821 Persistent<DynamicallySizedObject>* persistents[numPersistents]; 1823 Persistent<DynamicallySizedObject>* persistents[numPersistents];
1822 1824
1823 for (int i = 0; i < 1000; i++) { 1825 for (int i = 0; i < 1000; i++) {
1824 size_t size = 128 + i * 8; 1826 size_t size = 128 + i * 8;
1825 total += size; 1827 total += size;
1826 persistents[persistentCount++] = new Persistent<DynamicallySizedObject>( DynamicallySizedObject::create(size)); 1828 persistents[persistentCount++] = new Persistent<DynamicallySizedObject>( DynamicallySizedObject::create(size));
1827 slack += 4; 1829 slack += 4;
1828 CheckWithSlack(baseLevel + total, Heap::objectPayloadSizeForTesting(), s lack); 1830 CheckWithSlack(baseLevel + total, heap.objectPayloadSizeForTesting(), sl ack);
1829 if (testPagesAllocated) 1831 if (testPagesAllocated)
1830 EXPECT_EQ(0ul, Heap::allocatedSpace() & (blinkPageSize - 1)); 1832 EXPECT_EQ(0ul, heap.heapStats().allocatedSpace() & (blinkPageSize - 1));
1831 } 1833 }
1832 1834
1833 { 1835 {
1834 DynamicallySizedObject* alloc32b(DynamicallySizedObject::create(32)); 1836 DynamicallySizedObject* alloc32b(DynamicallySizedObject::create(32));
1835 slack += 4; 1837 slack += 4;
1836 memset(alloc32b, 40, 32); 1838 memset(alloc32b, 40, 32);
1837 DynamicallySizedObject* alloc64b(DynamicallySizedObject::create(64)); 1839 DynamicallySizedObject* alloc64b(DynamicallySizedObject::create(64));
1838 slack += 4; 1840 slack += 4;
1839 memset(alloc64b, 27, 64); 1841 memset(alloc64b, 27, 64);
1840 EXPECT_TRUE(alloc32b != alloc64b); 1842 EXPECT_TRUE(alloc32b != alloc64b);
1841 1843
1842 total += 96; 1844 total += 96;
1843 CheckWithSlack(baseLevel + total, Heap::objectPayloadSizeForTesting(), s lack); 1845 CheckWithSlack(baseLevel + total, heap.objectPayloadSizeForTesting(), sl ack);
1844 if (testPagesAllocated) 1846 if (testPagesAllocated)
1845 EXPECT_EQ(0ul, Heap::allocatedSpace() & (blinkPageSize - 1)); 1847 EXPECT_EQ(0ul, heap.heapStats().allocatedSpace() & (blinkPageSize - 1));
1846 } 1848 }
1847 1849
1848 clearOutOldGarbage(); 1850 clearOutOldGarbage();
1849 total -= 96; 1851 total -= 96;
1850 slack -= 8; 1852 slack -= 8;
1851 if (testPagesAllocated) 1853 if (testPagesAllocated)
1852 EXPECT_EQ(0ul, Heap::allocatedSpace() & (blinkPageSize - 1)); 1854 EXPECT_EQ(0ul, heap.heapStats().allocatedSpace() & (blinkPageSize - 1));
1853 1855
1854 // Clear the persistent, so that the big area will be garbage collected. 1856 // Clear the persistent, so that the big area will be garbage collected.
1855 bigArea.release(); 1857 bigArea.release();
1856 clearOutOldGarbage(); 1858 clearOutOldGarbage();
1857 1859
1858 total -= big; 1860 total -= big;
1859 slack -= 4; 1861 slack -= 4;
1860 CheckWithSlack(baseLevel + total, Heap::objectPayloadSizeForTesting(), slack ); 1862 CheckWithSlack(baseLevel + total, heap.objectPayloadSizeForTesting(), slack) ;
1861 if (testPagesAllocated) 1863 if (testPagesAllocated)
1862 EXPECT_EQ(0ul, Heap::allocatedSpace() & (blinkPageSize - 1)); 1864 EXPECT_EQ(0ul, heap.heapStats().allocatedSpace() & (blinkPageSize - 1));
1863 1865
1864 CheckWithSlack(baseLevel + total, Heap::objectPayloadSizeForTesting(), slack ); 1866 CheckWithSlack(baseLevel + total, heap.objectPayloadSizeForTesting(), slack) ;
1865 if (testPagesAllocated) 1867 if (testPagesAllocated)
1866 EXPECT_EQ(0ul, Heap::allocatedSpace() & (blinkPageSize - 1)); 1868 EXPECT_EQ(0ul, heap.heapStats().allocatedSpace() & (blinkPageSize - 1));
1867 1869
1868 for (size_t i = 0; i < persistentCount; i++) { 1870 for (size_t i = 0; i < persistentCount; i++) {
1869 delete persistents[i]; 1871 delete persistents[i];
1870 persistents[i] = 0; 1872 persistents[i] = 0;
1871 } 1873 }
1872 1874
1873 uint8_t* address = reinterpret_cast<uint8_t*>(Heap::allocate<DynamicallySizedObject>(100)); 1875 uint8_t* address = reinterpret_cast<uint8_t*>(ThreadHeap::allocate<DynamicallySizedObject>(100));
1874 for (int i = 0; i < 100; i++) 1876 for (int i = 0; i < 100; i++)
1875 address[i] = i; 1877 address[i] = i;
1876 address = reinterpret_cast<uint8_t*>(Heap::reallocate<DynamicallySizedObject>(address, 100000)); 1878 address = reinterpret_cast<uint8_t*>(ThreadHeap::reallocate<DynamicallySizedObject>(address, 100000));
1877 for (int i = 0; i < 100; i++) 1879 for (int i = 0; i < 100; i++)
1878 EXPECT_EQ(address[i], i); 1880 EXPECT_EQ(address[i], i);
1879 address = reinterpret_cast<uint8_t*>(Heap::reallocate<DynamicallySizedObject>(address, 50)); 1881 address = reinterpret_cast<uint8_t*>(ThreadHeap::reallocate<DynamicallySizedObject>(address, 50));
1880 for (int i = 0; i < 50; i++) 1882 for (int i = 0; i < 50; i++)
1881 EXPECT_EQ(address[i], i); 1883 EXPECT_EQ(address[i], i);
1882 // This should be equivalent to free(address). 1884 // This should be equivalent to free(address).
1883 EXPECT_EQ(reinterpret_cast<uintptr_t>(Heap::reallocate<DynamicallySizedObject>(address, 0)), 0ul); 1885 EXPECT_EQ(reinterpret_cast<uintptr_t>(ThreadHeap::reallocate<DynamicallySizedObject>(address, 0)), 0ul);
1884 // This should be equivalent to malloc(0). 1886 // This should be equivalent to malloc(0).
1885 EXPECT_EQ(reinterpret_cast<uintptr_t>(Heap::reallocate<DynamicallySizedObject>(0, 0)), 0ul); 1887 EXPECT_EQ(reinterpret_cast<uintptr_t>(ThreadHeap::reallocate<DynamicallySizedObject>(0, 0)), 0ul);
1886 } 1888 }
1887 1889
1888 TEST(HeapTest, SimpleAllocation) 1890 TEST(HeapTest, SimpleAllocation)
1889 { 1891 {
1892 ThreadHeap& heap = ThreadState::current()->heap();
1890 clearOutOldGarbage(); 1893 clearOutOldGarbage();
1891 EXPECT_EQ(0ul, Heap::objectPayloadSizeForTesting()); 1894 EXPECT_EQ(0ul, heap.objectPayloadSizeForTesting());
1892 1895
1893 // Allocate an object in the heap. 1896 // Allocate an object in the heap.
1894 HeapAllocatedArray* array = new HeapAllocatedArray(); 1897 HeapAllocatedArray* array = new HeapAllocatedArray();
1895 EXPECT_TRUE(Heap::objectPayloadSizeForTesting() >= sizeof(HeapAllocatedArray )); 1898 EXPECT_TRUE(heap.objectPayloadSizeForTesting() >= sizeof(HeapAllocatedArray) );
1896 1899
1897 // Sanity check of the contents in the heap. 1900 // Sanity check of the contents in the heap.
1898 EXPECT_EQ(0, array->at(0)); 1901 EXPECT_EQ(0, array->at(0));
1899 EXPECT_EQ(42, array->at(42)); 1902 EXPECT_EQ(42, array->at(42));
1900 EXPECT_EQ(0, array->at(128)); 1903 EXPECT_EQ(0, array->at(128));
1901 EXPECT_EQ(999 % 128, array->at(999)); 1904 EXPECT_EQ(999 % 128, array->at(999));
1902 } 1905 }
1903 1906
1904 TEST(HeapTest, SimplePersistent) 1907 TEST(HeapTest, SimplePersistent)
1905 { 1908 {
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after
1962 #endif 1965 #endif
1963 1966
1964 TEST(HeapTest, LazySweepingPages) 1967 TEST(HeapTest, LazySweepingPages)
1965 { 1968 {
1966 clearOutOldGarbage(); 1969 clearOutOldGarbage();
1967 1970
1968 SimpleFinalizedObject::s_destructorCalls = 0; 1971 SimpleFinalizedObject::s_destructorCalls = 0;
1969 EXPECT_EQ(0, SimpleFinalizedObject::s_destructorCalls); 1972 EXPECT_EQ(0, SimpleFinalizedObject::s_destructorCalls);
1970 for (int i = 0; i < 1000; i++) 1973 for (int i = 0; i < 1000; i++)
1971 SimpleFinalizedObject::create(); 1974 SimpleFinalizedObject::create();
1972 Heap::collectGarbage(BlinkGC::NoHeapPointersOnStack, BlinkGC::GCWithoutSweep , BlinkGC::ForcedGC); 1975 ThreadHeap::collectGarbage(BlinkGC::NoHeapPointersOnStack, BlinkGC::GCWithou tSweep, BlinkGC::ForcedGC);
1973 EXPECT_EQ(0, SimpleFinalizedObject::s_destructorCalls); 1976 EXPECT_EQ(0, SimpleFinalizedObject::s_destructorCalls);
1974 for (int i = 0; i < 10000; i++) 1977 for (int i = 0; i < 10000; i++)
1975 SimpleFinalizedObject::create(); 1978 SimpleFinalizedObject::create();
1976 EXPECT_EQ(1000, SimpleFinalizedObject::s_destructorCalls); 1979 EXPECT_EQ(1000, SimpleFinalizedObject::s_destructorCalls);
1977 preciselyCollectGarbage(); 1980 preciselyCollectGarbage();
1978 EXPECT_EQ(11000, SimpleFinalizedObject::s_destructorCalls); 1981 EXPECT_EQ(11000, SimpleFinalizedObject::s_destructorCalls);
1979 } 1982 }
1980 1983
1981 TEST(HeapTest, LazySweepingLargeObjectPages) 1984 TEST(HeapTest, LazySweepingLargeObjectPages)
1982 { 1985 {
1983 clearOutOldGarbage(); 1986 clearOutOldGarbage();
1984 1987
1985 // Create free lists that can be reused for IntWrappers created in 1988 // Create free lists that can be reused for IntWrappers created in
1986 // LargeHeapObject::create(). 1989 // LargeHeapObject::create().
1987 Persistent<IntWrapper> p1 = new IntWrapper(1); 1990 Persistent<IntWrapper> p1 = new IntWrapper(1);
1988 for (int i = 0; i < 100; i++) { 1991 for (int i = 0; i < 100; i++) {
1989 new IntWrapper(i); 1992 new IntWrapper(i);
1990 } 1993 }
1991 Persistent<IntWrapper> p2 = new IntWrapper(2); 1994 Persistent<IntWrapper> p2 = new IntWrapper(2);
1992 preciselyCollectGarbage(); 1995 preciselyCollectGarbage();
1993 preciselyCollectGarbage(); 1996 preciselyCollectGarbage();
1994 1997
1995 LargeHeapObject::s_destructorCalls = 0; 1998 LargeHeapObject::s_destructorCalls = 0;
1996 EXPECT_EQ(0, LargeHeapObject::s_destructorCalls); 1999 EXPECT_EQ(0, LargeHeapObject::s_destructorCalls);
1997 for (int i = 0; i < 10; i++) 2000 for (int i = 0; i < 10; i++)
1998 LargeHeapObject::create(); 2001 LargeHeapObject::create();
1999 Heap::collectGarbage(BlinkGC::NoHeapPointersOnStack, BlinkGC::GCWithoutSweep , BlinkGC::ForcedGC); 2002 ThreadHeap::collectGarbage(BlinkGC::NoHeapPointersOnStack, BlinkGC::GCWithou tSweep, BlinkGC::ForcedGC);
2000 EXPECT_EQ(0, LargeHeapObject::s_destructorCalls); 2003 EXPECT_EQ(0, LargeHeapObject::s_destructorCalls);
2001 for (int i = 0; i < 10; i++) { 2004 for (int i = 0; i < 10; i++) {
2002 LargeHeapObject::create(); 2005 LargeHeapObject::create();
2003 EXPECT_EQ(i + 1, LargeHeapObject::s_destructorCalls); 2006 EXPECT_EQ(i + 1, LargeHeapObject::s_destructorCalls);
2004 } 2007 }
2005 LargeHeapObject::create(); 2008 LargeHeapObject::create();
2006 LargeHeapObject::create(); 2009 LargeHeapObject::create();
2007 EXPECT_EQ(10, LargeHeapObject::s_destructorCalls); 2010 EXPECT_EQ(10, LargeHeapObject::s_destructorCalls);
2008 Heap::collectGarbage(BlinkGC::NoHeapPointersOnStack, BlinkGC::GCWithoutSweep , BlinkGC::ForcedGC); 2011 ThreadHeap::collectGarbage(BlinkGC::NoHeapPointersOnStack, BlinkGC::GCWithou tSweep, BlinkGC::ForcedGC);
2009 EXPECT_EQ(10, LargeHeapObject::s_destructorCalls); 2012 EXPECT_EQ(10, LargeHeapObject::s_destructorCalls);
2010 preciselyCollectGarbage(); 2013 preciselyCollectGarbage();
2011 EXPECT_EQ(22, LargeHeapObject::s_destructorCalls); 2014 EXPECT_EQ(22, LargeHeapObject::s_destructorCalls);
2012 } 2015 }
2013 2016
2014 class SimpleFinalizedEagerObjectBase : public GarbageCollectedFinalized<SimpleFi nalizedEagerObjectBase> { 2017 class SimpleFinalizedEagerObjectBase : public GarbageCollectedFinalized<SimpleFi nalizedEagerObjectBase> {
2015 public: 2018 public:
2016 virtual ~SimpleFinalizedEagerObjectBase() { } 2019 virtual ~SimpleFinalizedEagerObjectBase() { }
2017 DEFINE_INLINE_TRACE() { } 2020 DEFINE_INLINE_TRACE() { }
2018 2021
(...skipping 54 matching lines...) Expand 10 before | Expand all | Expand 10 after
2073 SimpleFinalizedEagerObject::s_destructorCalls = 0; 2076 SimpleFinalizedEagerObject::s_destructorCalls = 0;
2074 SimpleFinalizedObjectInstanceOfTemplate::s_destructorCalls = 0; 2077 SimpleFinalizedObjectInstanceOfTemplate::s_destructorCalls = 0;
2075 EXPECT_EQ(0, SimpleFinalizedObject::s_destructorCalls); 2078 EXPECT_EQ(0, SimpleFinalizedObject::s_destructorCalls);
2076 EXPECT_EQ(0, SimpleFinalizedEagerObject::s_destructorCalls); 2079 EXPECT_EQ(0, SimpleFinalizedEagerObject::s_destructorCalls);
2077 for (int i = 0; i < 1000; i++) 2080 for (int i = 0; i < 1000; i++)
2078 SimpleFinalizedObject::create(); 2081 SimpleFinalizedObject::create();
2079 for (int i = 0; i < 100; i++) 2082 for (int i = 0; i < 100; i++)
2080 SimpleFinalizedEagerObject::create(); 2083 SimpleFinalizedEagerObject::create();
2081 for (int i = 0; i < 100; i++) 2084 for (int i = 0; i < 100; i++)
2082 SimpleFinalizedObjectInstanceOfTemplate::create(); 2085 SimpleFinalizedObjectInstanceOfTemplate::create();
2083 Heap::collectGarbage(BlinkGC::NoHeapPointersOnStack, BlinkGC::GCWithoutSweep , BlinkGC::ForcedGC); 2086 ThreadHeap::collectGarbage(BlinkGC::NoHeapPointersOnStack, BlinkGC::GCWithou tSweep, BlinkGC::ForcedGC);
2084 EXPECT_EQ(0, SimpleFinalizedObject::s_destructorCalls); 2087 EXPECT_EQ(0, SimpleFinalizedObject::s_destructorCalls);
2085 EXPECT_EQ(100, SimpleFinalizedEagerObject::s_destructorCalls); 2088 EXPECT_EQ(100, SimpleFinalizedEagerObject::s_destructorCalls);
2086 EXPECT_EQ(100, SimpleFinalizedObjectInstanceOfTemplate::s_destructorCalls); 2089 EXPECT_EQ(100, SimpleFinalizedObjectInstanceOfTemplate::s_destructorCalls);
2087 } 2090 }
2088 2091
2089 TEST(HeapTest, Finalization) 2092 TEST(HeapTest, Finalization)
2090 { 2093 {
2091 { 2094 {
2092 HeapTestSubClass* t1 = HeapTestSubClass::create(); 2095 HeapTestSubClass* t1 = HeapTestSubClass::create();
2093 HeapTestSubClass* t2 = HeapTestSubClass::create(); 2096 HeapTestSubClass* t2 = HeapTestSubClass::create();
(...skipping 119 matching lines...) Expand 10 before | Expand all | Expand 10 after
2213 // for the conservative stack scan to find. 2216 // for the conservative stack scan to find.
2214 EXPECT_EQ(width, bars->getWidth()); 2217 EXPECT_EQ(width, bars->getWidth());
2215 } 2218 }
2216 EXPECT_EQ(Bars::width + 1, Bar::s_live); 2219 EXPECT_EQ(Bars::width + 1, Bar::s_live);
2217 preciselyCollectGarbage(); 2220 preciselyCollectGarbage();
2218 EXPECT_EQ(0u, Bar::s_live); 2221 EXPECT_EQ(0u, Bar::s_live);
2219 } 2222 }
2220 2223
2221 TEST(HeapTest, HashMapOfMembers) 2224 TEST(HeapTest, HashMapOfMembers)
2222 { 2225 {
2226 ThreadHeap& heap = ThreadState::current()->heap();
2223 IntWrapper::s_destructorCalls = 0; 2227 IntWrapper::s_destructorCalls = 0;
2224 2228
2225 clearOutOldGarbage(); 2229 clearOutOldGarbage();
2226 size_t initialObjectPayloadSize = Heap::objectPayloadSizeForTesting(); 2230 size_t initialObjectPayloadSize = heap.objectPayloadSizeForTesting();
2227 { 2231 {
2228 typedef HeapHashMap< 2232 typedef HeapHashMap<
2229 Member<IntWrapper>, 2233 Member<IntWrapper>,
2230 Member<IntWrapper>, 2234 Member<IntWrapper>,
2231 DefaultHash<Member<IntWrapper>>::Hash, 2235 DefaultHash<Member<IntWrapper>>::Hash,
2232 HashTraits<Member<IntWrapper>>, 2236 HashTraits<Member<IntWrapper>>,
2233 HashTraits<Member<IntWrapper>>> HeapObjectIdentityMap; 2237 HashTraits<Member<IntWrapper>>> HeapObjectIdentityMap;
2234 2238
2235 Persistent<HeapObjectIdentityMap> map = new HeapObjectIdentityMap(); 2239 Persistent<HeapObjectIdentityMap> map = new HeapObjectIdentityMap();
2236 2240
2237 map->clear(); 2241 map->clear();
2238 size_t afterSetWasCreated = Heap::objectPayloadSizeForTesting(); 2242 size_t afterSetWasCreated = heap.objectPayloadSizeForTesting();
2239 EXPECT_TRUE(afterSetWasCreated > initialObjectPayloadSize); 2243 EXPECT_TRUE(afterSetWasCreated > initialObjectPayloadSize);
2240 2244
2241 preciselyCollectGarbage(); 2245 preciselyCollectGarbage();
2242 size_t afterGC = Heap::objectPayloadSizeForTesting(); 2246 size_t afterGC = heap.objectPayloadSizeForTesting();
2243 EXPECT_EQ(afterGC, afterSetWasCreated); 2247 EXPECT_EQ(afterGC, afterSetWasCreated);
2244 2248
2245 // If the additions below cause garbage collections, these 2249 // If the additions below cause garbage collections, these
2246 // pointers should be found by conservative stack scanning. 2250 // pointers should be found by conservative stack scanning.
2247 IntWrapper* one(IntWrapper::create(1)); 2251 IntWrapper* one(IntWrapper::create(1));
2248 IntWrapper* anotherOne(IntWrapper::create(1)); 2252 IntWrapper* anotherOne(IntWrapper::create(1));
2249 2253
2250 map->add(one, one); 2254 map->add(one, one);
2251 2255
2252 size_t afterOneAdd = Heap::objectPayloadSizeForTesting(); 2256 size_t afterOneAdd = heap.objectPayloadSizeForTesting();
2253 EXPECT_TRUE(afterOneAdd > afterGC); 2257 EXPECT_TRUE(afterOneAdd > afterGC);
2254 2258
2255 HeapObjectIdentityMap::iterator it(map->begin()); 2259 HeapObjectIdentityMap::iterator it(map->begin());
2256 HeapObjectIdentityMap::iterator it2(map->begin()); 2260 HeapObjectIdentityMap::iterator it2(map->begin());
2257 ++it; 2261 ++it;
2258 ++it2; 2262 ++it2;
2259 2263
2260 map->add(anotherOne, one); 2264 map->add(anotherOne, one);
2261 2265
2262 // The addition above can cause an allocation of a new 2266 // The addition above can cause an allocation of a new
2263 // backing store. We therefore garbage collect before 2267 // backing store. We therefore garbage collect before
2264 // taking the heap stats in order to get rid of the old 2268 // taking the heap stats in order to get rid of the old
2265 // backing store. We make sure to not use conservative 2269 // backing store. We make sure to not use conservative
2266 // stack scanning as that could find a pointer to the 2270 // stack scanning as that could find a pointer to the
2267 // old backing. 2271 // old backing.
2268 preciselyCollectGarbage(); 2272 preciselyCollectGarbage();
2269 size_t afterAddAndGC = Heap::objectPayloadSizeForTesting(); 2273 size_t afterAddAndGC = heap.objectPayloadSizeForTesting();
2270 EXPECT_TRUE(afterAddAndGC >= afterOneAdd); 2274 EXPECT_TRUE(afterAddAndGC >= afterOneAdd);
2271 2275
2272 EXPECT_EQ(map->size(), 2u); // Two different wrappings of '1' are distin ct. 2276 EXPECT_EQ(map->size(), 2u); // Two different wrappings of '1' are distin ct.
2273 2277
2274 preciselyCollectGarbage(); 2278 preciselyCollectGarbage();
2275 EXPECT_TRUE(map->contains(one)); 2279 EXPECT_TRUE(map->contains(one));
2276 EXPECT_TRUE(map->contains(anotherOne)); 2280 EXPECT_TRUE(map->contains(anotherOne));
2277 2281
2278 IntWrapper* gotten(map->get(one)); 2282 IntWrapper* gotten(map->get(one));
2279 EXPECT_EQ(gotten->value(), one->value()); 2283 EXPECT_EQ(gotten->value(), one->value());
2280 EXPECT_EQ(gotten, one); 2284 EXPECT_EQ(gotten, one);
2281 2285
2282 size_t afterGC2 = Heap::objectPayloadSizeForTesting(); 2286 size_t afterGC2 = heap.objectPayloadSizeForTesting();
2283 EXPECT_EQ(afterGC2, afterAddAndGC); 2287 EXPECT_EQ(afterGC2, afterAddAndGC);
2284 2288
2285 IntWrapper* dozen = 0; 2289 IntWrapper* dozen = 0;
2286 2290
2287 for (int i = 1; i < 1000; i++) { // 999 iterations. 2291 for (int i = 1; i < 1000; i++) { // 999 iterations.
2288 IntWrapper* iWrapper(IntWrapper::create(i)); 2292 IntWrapper* iWrapper(IntWrapper::create(i));
2289 IntWrapper* iSquared(IntWrapper::create(i * i)); 2293 IntWrapper* iSquared(IntWrapper::create(i * i));
2290 map->add(iWrapper, iSquared); 2294 map->add(iWrapper, iSquared);
2291 if (i == 12) 2295 if (i == 12)
2292 dozen = iWrapper; 2296 dozen = iWrapper;
2293 } 2297 }
2294 size_t afterAdding1000 = Heap::objectPayloadSizeForTesting(); 2298 size_t afterAdding1000 = heap.objectPayloadSizeForTesting();
2295 EXPECT_TRUE(afterAdding1000 > afterGC2); 2299 EXPECT_TRUE(afterAdding1000 > afterGC2);
2296 2300
2297 IntWrapper* gross(map->get(dozen)); 2301 IntWrapper* gross(map->get(dozen));
2298 EXPECT_EQ(gross->value(), 144); 2302 EXPECT_EQ(gross->value(), 144);
2299 2303
2300 // This should clear out any junk backings created by all the adds. 2304 // This should clear out any junk backings created by all the adds.
2301 preciselyCollectGarbage(); 2305 preciselyCollectGarbage();
2302 size_t afterGC3 = Heap::objectPayloadSizeForTesting(); 2306 size_t afterGC3 = heap.objectPayloadSizeForTesting();
2303 EXPECT_TRUE(afterGC3 <= afterAdding1000); 2307 EXPECT_TRUE(afterGC3 <= afterAdding1000);
2304 } 2308 }
2305 2309
2306 preciselyCollectGarbage(); 2310 preciselyCollectGarbage();
2307 // The objects 'one', anotherOne, and the 999 other pairs. 2311 // The objects 'one', anotherOne, and the 999 other pairs.
2308 EXPECT_EQ(IntWrapper::s_destructorCalls, 2000); 2312 EXPECT_EQ(IntWrapper::s_destructorCalls, 2000);
2309 size_t afterGC4 = Heap::objectPayloadSizeForTesting(); 2313 size_t afterGC4 = heap.objectPayloadSizeForTesting();
2310 EXPECT_EQ(afterGC4, initialObjectPayloadSize); 2314 EXPECT_EQ(afterGC4, initialObjectPayloadSize);
2311 } 2315 }
2312 2316
2313 TEST(HeapTest, NestedAllocation) 2317 TEST(HeapTest, NestedAllocation)
2314 { 2318 {
2319 ThreadHeap& heap = ThreadState::current()->heap();
2315 clearOutOldGarbage(); 2320 clearOutOldGarbage();
2316 size_t initialObjectPayloadSize = Heap::objectPayloadSizeForTesting(); 2321 size_t initialObjectPayloadSize = heap.objectPayloadSizeForTesting();
2317 { 2322 {
2318 Persistent<ConstructorAllocation> constructorAllocation = ConstructorAll ocation::create(); 2323 Persistent<ConstructorAllocation> constructorAllocation = ConstructorAll ocation::create();
2319 } 2324 }
2320 clearOutOldGarbage(); 2325 clearOutOldGarbage();
2321 size_t afterFree = Heap::objectPayloadSizeForTesting(); 2326 size_t afterFree = heap.objectPayloadSizeForTesting();
2322 EXPECT_TRUE(initialObjectPayloadSize == afterFree); 2327 EXPECT_TRUE(initialObjectPayloadSize == afterFree);
2323 } 2328 }
2324 2329
2325 TEST(HeapTest, LargeHeapObjects) 2330 TEST(HeapTest, LargeHeapObjects)
2326 { 2331 {
2332 ThreadHeap& heap = ThreadState::current()->heap();
2327 clearOutOldGarbage(); 2333 clearOutOldGarbage();
2328 size_t initialObjectPayloadSize = Heap::objectPayloadSizeForTesting(); 2334 size_t initialObjectPayloadSize = heap.objectPayloadSizeForTesting();
2329 size_t initialAllocatedSpace = Heap::allocatedSpace(); 2335 size_t initialAllocatedSpace = heap.heapStats().allocatedSpace();
2330 IntWrapper::s_destructorCalls = 0; 2336 IntWrapper::s_destructorCalls = 0;
2331 LargeHeapObject::s_destructorCalls = 0; 2337 LargeHeapObject::s_destructorCalls = 0;
2332 { 2338 {
2333 int slack = 8; // LargeHeapObject points to an IntWrapper that is also a llocated. 2339 int slack = 8; // LargeHeapObject points to an IntWrapper that is also a llocated.
2334 Persistent<LargeHeapObject> object = LargeHeapObject::create(); 2340 Persistent<LargeHeapObject> object = LargeHeapObject::create();
2335 ASSERT(ThreadState::current()->findPageFromAddress(object)); 2341 ASSERT(ThreadState::current()->findPageFromAddress(object));
2336 ASSERT(ThreadState::current()->findPageFromAddress(reinterpret_cast<char *>(object.get()) + sizeof(LargeHeapObject) - 1)); 2342 ASSERT(ThreadState::current()->findPageFromAddress(reinterpret_cast<char *>(object.get()) + sizeof(LargeHeapObject) - 1));
2337 clearOutOldGarbage(); 2343 clearOutOldGarbage();
2338 size_t afterAllocation = Heap::allocatedSpace(); 2344 size_t afterAllocation = heap.heapStats().allocatedSpace();
2339 { 2345 {
2340 object->set(0, 'a'); 2346 object->set(0, 'a');
2341 EXPECT_EQ('a', object->get(0)); 2347 EXPECT_EQ('a', object->get(0));
2342 object->set(object->length() - 1, 'b'); 2348 object->set(object->length() - 1, 'b');
2343 EXPECT_EQ('b', object->get(object->length() - 1)); 2349 EXPECT_EQ('b', object->get(object->length() - 1));
2344 size_t expectedLargeHeapObjectPayloadSize = Heap::allocationSizeFrom Size(sizeof(LargeHeapObject)); 2350 size_t expectedLargeHeapObjectPayloadSize = ThreadHeap::allocationSi zeFromSize(sizeof(LargeHeapObject));
2345 size_t expectedObjectPayloadSize = expectedLargeHeapObjectPayloadSiz e + sizeof(IntWrapper); 2351 size_t expectedObjectPayloadSize = expectedLargeHeapObjectPayloadSiz e + sizeof(IntWrapper);
2346 size_t actualObjectPayloadSize = Heap::objectPayloadSizeForTesting() - initialObjectPayloadSize; 2352 size_t actualObjectPayloadSize = heap.objectPayloadSizeForTesting() - initialObjectPayloadSize;
2347 CheckWithSlack(expectedObjectPayloadSize, actualObjectPayloadSize, s lack); 2353 CheckWithSlack(expectedObjectPayloadSize, actualObjectPayloadSize, s lack);
2348 // There is probably space for the IntWrapper in a heap page without 2354 // There is probably space for the IntWrapper in a heap page without
2349 // allocating extra pages. However, the IntWrapper allocation might cause 2355 // allocating extra pages. However, the IntWrapper allocation might cause
2350 // the addition of a heap page. 2356 // the addition of a heap page.
2351 size_t largeObjectAllocationSize = sizeof(LargeObjectPage) + expecte dLargeHeapObjectPayloadSize; 2357 size_t largeObjectAllocationSize = sizeof(LargeObjectPage) + expecte dLargeHeapObjectPayloadSize;
2352 size_t allocatedSpaceLowerBound = initialAllocatedSpace + largeObjec tAllocationSize; 2358 size_t allocatedSpaceLowerBound = initialAllocatedSpace + largeObjec tAllocationSize;
2353 size_t allocatedSpaceUpperBound = allocatedSpaceLowerBound + slack + blinkPageSize; 2359 size_t allocatedSpaceUpperBound = allocatedSpaceLowerBound + slack + blinkPageSize;
2354 EXPECT_LE(allocatedSpaceLowerBound, afterAllocation); 2360 EXPECT_LE(allocatedSpaceLowerBound, afterAllocation);
2355 EXPECT_LE(afterAllocation, allocatedSpaceUpperBound); 2361 EXPECT_LE(afterAllocation, allocatedSpaceUpperBound);
2356 EXPECT_EQ(0, IntWrapper::s_destructorCalls); 2362 EXPECT_EQ(0, IntWrapper::s_destructorCalls);
2357 EXPECT_EQ(0, LargeHeapObject::s_destructorCalls); 2363 EXPECT_EQ(0, LargeHeapObject::s_destructorCalls);
2358 for (int i = 0; i < 10; i++) 2364 for (int i = 0; i < 10; i++)
2359 object = LargeHeapObject::create(); 2365 object = LargeHeapObject::create();
2360 } 2366 }
2361 clearOutOldGarbage(); 2367 clearOutOldGarbage();
2362 EXPECT_TRUE(Heap::allocatedSpace() == afterAllocation); 2368 EXPECT_TRUE(ProcessHeap::totalAllocatedSpace() == afterAllocation);
2363 EXPECT_EQ(10, IntWrapper::s_destructorCalls); 2369 EXPECT_EQ(10, IntWrapper::s_destructorCalls);
2364 EXPECT_EQ(10, LargeHeapObject::s_destructorCalls); 2370 EXPECT_EQ(10, LargeHeapObject::s_destructorCalls);
2365 } 2371 }
2366 clearOutOldGarbage(); 2372 clearOutOldGarbage();
2367 EXPECT_TRUE(initialObjectPayloadSize == Heap::objectPayloadSizeForTesting()) ; 2373 EXPECT_TRUE(initialObjectPayloadSize == heap.objectPayloadSizeForTesting());
2368 EXPECT_TRUE(initialAllocatedSpace == Heap::allocatedSpace()); 2374 EXPECT_TRUE(initialAllocatedSpace == heap.heapStats().allocatedSpace());
2369 EXPECT_EQ(11, IntWrapper::s_destructorCalls); 2375 EXPECT_EQ(11, IntWrapper::s_destructorCalls);
2370 EXPECT_EQ(11, LargeHeapObject::s_destructorCalls); 2376 EXPECT_EQ(11, LargeHeapObject::s_destructorCalls);
2371 preciselyCollectGarbage(); 2377 preciselyCollectGarbage();
2372 } 2378 }
2373 2379
2374 typedef std::pair<Member<IntWrapper>, int> PairWrappedUnwrapped; 2380 typedef std::pair<Member<IntWrapper>, int> PairWrappedUnwrapped;
2375 typedef std::pair<int, Member<IntWrapper>> PairUnwrappedWrapped; 2381 typedef std::pair<int, Member<IntWrapper>> PairUnwrappedWrapped;
2376 typedef std::pair<WeakMember<IntWrapper>, Member<IntWrapper>> PairWeakStrong; 2382 typedef std::pair<WeakMember<IntWrapper>, Member<IntWrapper>> PairWeakStrong;
2377 typedef std::pair<Member<IntWrapper>, WeakMember<IntWrapper>> PairStrongWeak; 2383 typedef std::pair<Member<IntWrapper>, WeakMember<IntWrapper>> PairStrongWeak;
2378 typedef std::pair<WeakMember<IntWrapper>, int> PairWeakUnwrapped; 2384 typedef std::pair<WeakMember<IntWrapper>, int> PairWeakUnwrapped;
(...skipping 1380 matching lines...) Expand 10 before | Expand all | Expand 10 after
3759 { 3765 {
3760 Persistent<Bar> barPersistent = Bar::create(); 3766 Persistent<Bar> barPersistent = Bar::create();
3761 Persistent<Foo> fooPersistent = Foo::create(barPersistent); 3767 Persistent<Foo> fooPersistent = Foo::create(barPersistent);
3762 EXPECT_TRUE(barPersistent != fooPersistent); 3768 EXPECT_TRUE(barPersistent != fooPersistent);
3763 barPersistent = fooPersistent; 3769 barPersistent = fooPersistent;
3764 EXPECT_TRUE(barPersistent == fooPersistent); 3770 EXPECT_TRUE(barPersistent == fooPersistent);
3765 } 3771 }
3766 3772
3767 TEST(HeapTest, CheckAndMarkPointer) 3773 TEST(HeapTest, CheckAndMarkPointer)
3768 { 3774 {
3775 ThreadHeap& heap = ThreadState::current()->heap();
3769 clearOutOldGarbage(); 3776 clearOutOldGarbage();
3770 3777
3771 Vector<Address> objectAddresses; 3778 Vector<Address> objectAddresses;
3772 Vector<Address> endAddresses; 3779 Vector<Address> endAddresses;
3773 Address largeObjectAddress; 3780 Address largeObjectAddress;
3774 Address largeObjectEndAddress; 3781 Address largeObjectEndAddress;
3775 for (int i = 0; i < 10; i++) { 3782 for (int i = 0; i < 10; i++) {
3776 SimpleObject* object = SimpleObject::create(); 3783 SimpleObject* object = SimpleObject::create();
3777 Address objectAddress = reinterpret_cast<Address>(object); 3784 Address objectAddress = reinterpret_cast<Address>(object);
3778 objectAddresses.append(objectAddress); 3785 objectAddresses.append(objectAddress);
3779 endAddresses.append(objectAddress + sizeof(SimpleObject) - 1); 3786 endAddresses.append(objectAddress + sizeof(SimpleObject) - 1);
3780 } 3787 }
3781 LargeHeapObject* largeObject = LargeHeapObject::create(); 3788 LargeHeapObject* largeObject = LargeHeapObject::create();
3782 largeObjectAddress = reinterpret_cast<Address>(largeObject); 3789 largeObjectAddress = reinterpret_cast<Address>(largeObject);
3783 largeObjectEndAddress = largeObjectAddress + sizeof(LargeHeapObject) - 1; 3790 largeObjectEndAddress = largeObjectAddress + sizeof(LargeHeapObject) - 1;
3784 3791
3785 // This is a low-level test where we call checkAndMarkPointer. This method 3792 // This is a low-level test where we call checkAndMarkPointer. This method
3786 // causes the object start bitmap to be computed which requires the heap 3793 // causes the object start bitmap to be computed which requires the heap
3787 // to be in a consistent state (e.g. the free allocation area must be put 3794 // to be in a consistent state (e.g. the free allocation area must be put
3788 // into a free list header). However when we call makeConsistentForGC it 3795 // into a free list header). However when we call makeConsistentForGC it
3789 // also clears out the freelists so we have to rebuild those before trying 3796 // also clears out the freelists so we have to rebuild those before trying
3790 // to allocate anything again. We do this by forcing a GC after doing the 3797 // to allocate anything again. We do this by forcing a GC after doing the
3791 // checkAndMarkPointer tests. 3798 // checkAndMarkPointer tests.
3792 { 3799 {
3793 TestGCScope scope(BlinkGC::HeapPointersOnStack); 3800 TestGCScope scope(BlinkGC::HeapPointersOnStack);
3794 CountingVisitor visitor(ThreadState::current()); 3801 CountingVisitor visitor(ThreadState::current());
3795 EXPECT_TRUE(scope.allThreadsParked()); // Fail the test if we could not park all threads. 3802 EXPECT_TRUE(scope.allThreadsParked()); // Fail the test if we could not park all threads.
3796 Heap::flushHeapDoesNotContainCache(); 3803 heap.flushHeapDoesNotContainCache();
3797 for (size_t i = 0; i < objectAddresses.size(); i++) { 3804 for (size_t i = 0; i < objectAddresses.size(); i++) {
3798 EXPECT_TRUE(Heap::checkAndMarkPointer(&visitor, objectAddresses[i])) ; 3805 EXPECT_TRUE(heap.checkAndMarkPointer(&visitor, objectAddresses[i]));
3799 EXPECT_TRUE(Heap::checkAndMarkPointer(&visitor, endAddresses[i])); 3806 EXPECT_TRUE(heap.checkAndMarkPointer(&visitor, endAddresses[i]));
3800 } 3807 }
3801 EXPECT_EQ(objectAddresses.size() * 2, visitor.count()); 3808 EXPECT_EQ(objectAddresses.size() * 2, visitor.count());
3802 visitor.reset(); 3809 visitor.reset();
3803 EXPECT_TRUE(Heap::checkAndMarkPointer(&visitor, largeObjectAddress)); 3810 EXPECT_TRUE(heap.checkAndMarkPointer(&visitor, largeObjectAddress));
3804 EXPECT_TRUE(Heap::checkAndMarkPointer(&visitor, largeObjectEndAddress)); 3811 EXPECT_TRUE(heap.checkAndMarkPointer(&visitor, largeObjectEndAddress));
3805 EXPECT_EQ(2ul, visitor.count()); 3812 EXPECT_EQ(2ul, visitor.count());
3806 visitor.reset(); 3813 visitor.reset();
3807 } 3814 }
3808 // This forces a GC without stack scanning which results in the objects 3815 // This forces a GC without stack scanning which results in the objects
3809 // being collected. This will also rebuild the above mentioned freelists, 3816 // being collected. This will also rebuild the above mentioned freelists,
3810 // however we don't rely on that below since we don't have any allocations. 3817 // however we don't rely on that below since we don't have any allocations.
3811 clearOutOldGarbage(); 3818 clearOutOldGarbage();
3812 { 3819 {
3813 TestGCScope scope(BlinkGC::HeapPointersOnStack); 3820 TestGCScope scope(BlinkGC::HeapPointersOnStack);
3814 CountingVisitor visitor(ThreadState::current()); 3821 CountingVisitor visitor(ThreadState::current());
3815 EXPECT_TRUE(scope.allThreadsParked()); 3822 EXPECT_TRUE(scope.allThreadsParked());
3816 Heap::flushHeapDoesNotContainCache(); 3823 heap.flushHeapDoesNotContainCache();
3817 for (size_t i = 0; i < objectAddresses.size(); i++) { 3824 for (size_t i = 0; i < objectAddresses.size(); i++) {
3818 // We would like to assert that checkAndMarkPointer returned false 3825 // We would like to assert that checkAndMarkPointer returned false
3819 // here because the pointers no longer point into a valid object 3826 // here because the pointers no longer point into a valid object
3820 // (it's been freed by the GCs. But checkAndMarkPointer will return 3827 // (it's been freed by the GCs. But checkAndMarkPointer will return
3821 // true for any pointer that points into a heap page, regardless of 3828 // true for any pointer that points into a heap page, regardless of
3822 // whether it points at a valid object (this ensures the 3829 // whether it points at a valid object (this ensures the
3823 // correctness of the page-based on-heap address caches), so we 3830 // correctness of the page-based on-heap address caches), so we
3824 // can't make that assert. 3831 // can't make that assert.
3825 Heap::checkAndMarkPointer(&visitor, objectAddresses[i]); 3832 heap.checkAndMarkPointer(&visitor, objectAddresses[i]);
3826 Heap::checkAndMarkPointer(&visitor, endAddresses[i]); 3833 heap.checkAndMarkPointer(&visitor, endAddresses[i]);
3827 } 3834 }
3828 EXPECT_EQ(0ul, visitor.count()); 3835 EXPECT_EQ(0ul, visitor.count());
3829 Heap::checkAndMarkPointer(&visitor, largeObjectAddress); 3836 heap.checkAndMarkPointer(&visitor, largeObjectAddress);
3830 Heap::checkAndMarkPointer(&visitor, largeObjectEndAddress); 3837 heap.checkAndMarkPointer(&visitor, largeObjectEndAddress);
3831 EXPECT_EQ(0ul, visitor.count()); 3838 EXPECT_EQ(0ul, visitor.count());
3832 } 3839 }
3833 // This round of GC is important to make sure that the object start 3840 // This round of GC is important to make sure that the object start
3834 // bitmap are cleared out and that the free lists are rebuild. 3841 // bitmap are cleared out and that the free lists are rebuild.
3835 clearOutOldGarbage(); 3842 clearOutOldGarbage();
3836 } 3843 }
3837 3844
3838 TEST(HeapTest, PersistentHeapCollectionTypes) 3845 TEST(HeapTest, PersistentHeapCollectionTypes)
3839 { 3846 {
3840 IntWrapper::s_destructorCalls = 0; 3847 IntWrapper::s_destructorCalls = 0;
(...skipping 900 matching lines...) Expand 10 before | Expand all | Expand 10 after
4741 static void sleeperMainFunc() 4748 static void sleeperMainFunc()
4742 { 4749 {
4743 ThreadState::attach(); 4750 ThreadState::attach();
4744 s_sleeperRunning = true; 4751 s_sleeperRunning = true;
4745 4752
4746 // Simulate a long running op that is not entering a safepoint. 4753 // Simulate a long running op that is not entering a safepoint.
4747 while (!s_sleeperDone) { 4754 while (!s_sleeperDone) {
4748 testing::yieldCurrentThread(); 4755 testing::yieldCurrentThread();
4749 } 4756 }
4750 4757
4751 ThreadState::detach(); 4758 ThreadState::detachCurrentThread();
4752 s_sleeperRunning = false; 4759 s_sleeperRunning = false;
4753 } 4760 }
4754 4761
4755 static volatile bool s_sleeperRunning; 4762 static volatile bool s_sleeperRunning;
4756 static volatile bool s_sleeperDone; 4763 static volatile bool s_sleeperDone;
4757 }; 4764 };
4758 4765
4759 volatile bool GCParkingThreadTester::s_sleeperRunning = false; 4766 volatile bool GCParkingThreadTester::s_sleeperRunning = false;
4760 volatile bool GCParkingThreadTester::s_sleeperDone = false; 4767 volatile bool GCParkingThreadTester::s_sleeperDone = false;
4761 4768
(...skipping 257 matching lines...) Expand 10 before | Expand all | Expand 10 after
5019 5026
5020 // These special traits will remove a set from a map when the set is empty. 5027 // These special traits will remove a set from a map when the set is empty.
5021 struct EmptyClearingHashSetTraits : HashTraits<WeakSet> { 5028 struct EmptyClearingHashSetTraits : HashTraits<WeakSet> {
5022 static const WTF::WeakHandlingFlag weakHandlingFlag = WTF::WeakHandlingInCol lections; 5029 static const WTF::WeakHandlingFlag weakHandlingFlag = WTF::WeakHandlingInCol lections;
5023 template<typename VisitorDispatcher> 5030 template<typename VisitorDispatcher>
5024 static bool traceInCollection(VisitorDispatcher visitor, WeakSet& set, WTF:: ShouldWeakPointersBeMarkedStrongly strongify) 5031 static bool traceInCollection(VisitorDispatcher visitor, WeakSet& set, WTF:: ShouldWeakPointersBeMarkedStrongly strongify)
5025 { 5032 {
5026 bool liveEntriesFound = false; 5033 bool liveEntriesFound = false;
5027 WeakSet::iterator end = set.end(); 5034 WeakSet::iterator end = set.end();
5028 for (WeakSet::iterator it = set.begin(); it != end; ++it) { 5035 for (WeakSet::iterator it = set.begin(); it != end; ++it) {
5029 if (Heap::isHeapObjectAlive(*it)) { 5036 if (ThreadHeap::isHeapObjectAlive(*it)) {
5030 liveEntriesFound = true; 5037 liveEntriesFound = true;
5031 break; 5038 break;
5032 } 5039 }
5033 } 5040 }
5034 // If there are live entries in the set then the set cannot be removed 5041 // If there are live entries in the set then the set cannot be removed
5035 // from the map it is contained in, and we need to mark it (and its 5042 // from the map it is contained in, and we need to mark it (and its
5036 // backing) live. We just trace normally, which will invoke the normal 5043 // backing) live. We just trace normally, which will invoke the normal
5037 // weak handling for any entries that are not live. 5044 // weak handling for any entries that are not live.
5038 if (liveEntriesFound) 5045 if (liveEntriesFound)
5039 set.trace(visitor); 5046 set.trace(visitor);
(...skipping 412 matching lines...) Expand 10 before | Expand all | Expand 10 after
5452 5459
5453 // Wake up the main thread when done sweeping. 5460 // Wake up the main thread when done sweeping.
5454 wakeMainThread(); 5461 wakeMainThread();
5455 5462
5456 // Wait with detach until the main thread says so. This is not strictly 5463 // Wait with detach until the main thread says so. This is not strictly
5457 // necessary, but it means the worker thread will not do its thread loca l 5464 // necessary, but it means the worker thread will not do its thread loca l
5458 // GCs just yet, making it easier to reason about that no new GC has occ urred 5465 // GCs just yet, making it easier to reason about that no new GC has occ urred
5459 // and the above sweep was the one finalizing the worker object. 5466 // and the above sweep was the one finalizing the worker object.
5460 parkWorkerThread(); 5467 parkWorkerThread();
5461 5468
5462 ThreadState::detach(); 5469 ThreadState::detachCurrentThread();
5463 } 5470 }
5464 5471
5465 static volatile uintptr_t s_workerObjectPointer; 5472 static volatile uintptr_t s_workerObjectPointer;
5466 }; 5473 };
5467 5474
5468 volatile uintptr_t DeadBitTester::s_workerObjectPointer = 0; 5475 volatile uintptr_t DeadBitTester::s_workerObjectPointer = 0;
5469 5476
5470 TEST(HeapTest, ObjectDeadBit) 5477 TEST(HeapTest, ObjectDeadBit)
5471 { 5478 {
5472 DeadBitTester::test(); 5479 DeadBitTester::test();
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after
5565 EXPECT_EQ(32, it->value->value()); 5572 EXPECT_EQ(32, it->value->value());
5566 } 5573 }
5567 5574
5568 // Disregarding the iterator but keeping the collection alive 5575 // Disregarding the iterator but keeping the collection alive
5569 // with a persistent should lead to weak processing. 5576 // with a persistent should lead to weak processing.
5570 preciselyCollectGarbage(); 5577 preciselyCollectGarbage();
5571 EXPECT_EQ(0u, collection->size()); 5578 EXPECT_EQ(0u, collection->size());
5572 } 5579 }
5573 5580
5574 wakeMainThread(); 5581 wakeMainThread();
5575 ThreadState::detach(); 5582 ThreadState::detachCurrentThread();
5576 } 5583 }
5577 5584
5578 static volatile uintptr_t s_workerObjectPointer; 5585 static volatile uintptr_t s_workerObjectPointer;
5579 }; 5586 };
5580 5587
5581 TEST(HeapTest, ThreadedStrongification) 5588 TEST(HeapTest, ThreadedStrongification)
5582 { 5589 {
5583 ThreadedStrongificationTester::test(); 5590 ThreadedStrongificationTester::test();
5584 } 5591 }
5585 5592
(...skipping 147 matching lines...) Expand 10 before | Expand all | Expand 10 after
5733 parkWorkerThread(); 5740 parkWorkerThread();
5734 SafePointAwareMutexLocker recursiveLocker(recursiveMutex(), BlinkGC::NoH eapPointersOnStack); 5741 SafePointAwareMutexLocker recursiveLocker(recursiveMutex(), BlinkGC::NoH eapPointersOnStack);
5735 5742
5736 // We won't get here unless the lock is recursive since the sweep done 5743 // We won't get here unless the lock is recursive since the sweep done
5737 // in the constructor of SafePointAwareMutexLocker after 5744 // in the constructor of SafePointAwareMutexLocker after
5738 // getting the lock will not complete given the "dlo" destructor is 5745 // getting the lock will not complete given the "dlo" destructor is
5739 // waiting to get the same lock. 5746 // waiting to get the same lock.
5740 // Tell the main thread the worker has done its sweep. 5747 // Tell the main thread the worker has done its sweep.
5741 wakeMainThread(); 5748 wakeMainThread();
5742 5749
5743 ThreadState::detach(); 5750 ThreadState::detachCurrentThread();
5744 } 5751 }
5745 5752
5746 static volatile IntWrapper* s_workerObjectPointer; 5753 static volatile IntWrapper* s_workerObjectPointer;
5747 }; 5754 };
5748 5755
5749 TEST(HeapTest, RecursiveMutex) 5756 TEST(HeapTest, RecursiveMutex)
5750 { 5757 {
5751 RecursiveLockingTester::test(); 5758 RecursiveLockingTester::test();
5752 } 5759 }
5753 5760
(...skipping 193 matching lines...) Expand 10 before | Expand all | Expand 10 after
5947 { 5954 {
5948 } 5955 }
5949 }; 5956 };
5950 5957
5951 // Regression test for crbug.com/404511. Tests conservative marking of 5958 // Regression test for crbug.com/404511. Tests conservative marking of
5952 // an object with an uninitialized vtable. 5959 // an object with an uninitialized vtable.
5953 TEST(HeapTest, AllocationInSuperConstructorArgument) 5960 TEST(HeapTest, AllocationInSuperConstructorArgument)
5954 { 5961 {
5955 AllocInSuperConstructorArgument* object = new AllocInSuperConstructorArgumen t(); 5962 AllocInSuperConstructorArgument* object = new AllocInSuperConstructorArgumen t();
5956 EXPECT_TRUE(object); 5963 EXPECT_TRUE(object);
5957 Heap::collectAllGarbage(); 5964 ThreadHeap::collectAllGarbage();
5958 } 5965 }
5959 5966
5960 class NonNodeAllocatingNodeInDestructor : public GarbageCollectedFinalized<NonNo deAllocatingNodeInDestructor> { 5967 class NonNodeAllocatingNodeInDestructor : public GarbageCollectedFinalized<NonNo deAllocatingNodeInDestructor> {
5961 public: 5968 public:
5962 ~NonNodeAllocatingNodeInDestructor() 5969 ~NonNodeAllocatingNodeInDestructor()
5963 { 5970 {
5964 s_node = new Persistent<IntNode>(IntNode::create(10)); 5971 s_node = new Persistent<IntNode>(IntNode::create(10));
5965 } 5972 }
5966 5973
5967 DEFINE_INLINE_TRACE() { } 5974 DEFINE_INLINE_TRACE() { }
(...skipping 431 matching lines...) Expand 10 before | Expand all | Expand 10 after
6399 void workerThreadMainForCrossThreadWeakPersistentTest(DestructorLockingObject** object) 6406 void workerThreadMainForCrossThreadWeakPersistentTest(DestructorLockingObject** object)
6400 { 6407 {
6401 // Step 2: Create an object and store the pointer. 6408 // Step 2: Create an object and store the pointer.
6402 MutexLocker locker(workerThreadMutex()); 6409 MutexLocker locker(workerThreadMutex());
6403 ThreadState::attach(); 6410 ThreadState::attach();
6404 *object = DestructorLockingObject::create(); 6411 *object = DestructorLockingObject::create();
6405 wakeMainThread(); 6412 wakeMainThread();
6406 parkWorkerThread(); 6413 parkWorkerThread();
6407 6414
6408 // Step 4: Run a GC. 6415 // Step 4: Run a GC.
6409 Heap::collectGarbage(BlinkGC::NoHeapPointersOnStack, BlinkGC::GCWithSweep, B linkGC::ForcedGC); 6416 ThreadHeap::collectGarbage(BlinkGC::NoHeapPointersOnStack, BlinkGC::GCWithSw eep, BlinkGC::ForcedGC);
6410 wakeMainThread(); 6417 wakeMainThread();
6411 parkWorkerThread(); 6418 parkWorkerThread();
6412 6419
6413 // Step 6: Finish. 6420 // Step 6: Finish.
6414 ThreadState::detach(); 6421 ThreadState::detachCurrentThread();
6415 wakeMainThread(); 6422 wakeMainThread();
6416 } 6423 }
6417 6424
6418 } // anonymous namespace 6425 } // anonymous namespace
6419 6426
6420 TEST(HeapTest, CrossThreadWeakPersistent) 6427 TEST(HeapTest, CrossThreadWeakPersistent)
6421 { 6428 {
6422 // Create an object in the worker thread, have a CrossThreadWeakPersistent p ointing to it on the main thread, 6429 // Create an object in the worker thread, have a CrossThreadWeakPersistent p ointing to it on the main thread,
6423 // clear the reference in the worker thread, run a GC in the worker thread, and see if the 6430 // clear the reference in the worker thread, run a GC in the worker thread, and see if the
6424 // CrossThreadWeakPersistent is cleared. 6431 // CrossThreadWeakPersistent is cleared.
(...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after
6483 EXPECT_EQ(1u, vector2.size()); 6490 EXPECT_EQ(1u, vector2.size());
6484 // TODO(Oilpan): when Vector.h's contiguous container support no longer disables 6491 // TODO(Oilpan): when Vector.h's contiguous container support no longer disables
6485 // Vector<>s with inline capacity, remove. 6492 // Vector<>s with inline capacity, remove.
6486 #if !defined(ANNOTATE_CONTIGUOUS_CONTAINER) 6493 #if !defined(ANNOTATE_CONTIGUOUS_CONTAINER)
6487 EXPECT_EQ(16u, vector1.capacity()); 6494 EXPECT_EQ(16u, vector1.capacity());
6488 EXPECT_EQ(16u, vector2.capacity()); 6495 EXPECT_EQ(16u, vector2.capacity());
6489 #endif 6496 #endif
6490 } 6497 }
6491 6498
6492 } // namespace blink 6499 } // namespace blink
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698