OLD | NEW |
1 /* | 1 /* |
2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
3 * | 3 * |
4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
6 * met: | 6 * met: |
7 * | 7 * |
8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
(...skipping 531 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
542 EXPECT_TRUE(weakMap2.isEmpty()); | 542 EXPECT_TRUE(weakMap2.isEmpty()); |
543 } | 543 } |
544 ThreadState::SafePointScope scope(ThreadState::NoHeapPointersOnStack
); | 544 ThreadState::SafePointScope scope(ThreadState::NoHeapPointersOnStack
); |
545 Platform::current()->yieldCurrentThread(); | 545 Platform::current()->yieldCurrentThread(); |
546 } | 546 } |
547 ThreadState::detach(); | 547 ThreadState::detach(); |
548 atomicDecrement(&m_threadsToFinish); | 548 atomicDecrement(&m_threadsToFinish); |
549 } | 549 } |
550 }; | 550 }; |
551 | 551 |
// Exercises draining of thread-local Persistent<> handles during
// ThreadState::detach(): each detach-time GC is expected to release
// exactly one Persistent<Local>, so the GC count observed at the end
// must equal the number of handles created.
class ThreadPersistentHeapTester : public ThreadedTesterBase {
public:
    static void test()
    {
        ThreadedTesterBase::test(new ThreadPersistentHeapTester);
    }

protected:
    // Trivial garbage-collected object; only the Persistent<> handle
    // referring to it matters for this test.
    class Local final : public GarbageCollected<Local> {
    public:
        Local() { }

        void trace(Visitor* visitor) { }
    };

    class BookEnd;

    // Off-heap store owning a batch of Persistent<Local> handles plus one
    // Persistent<BookEnd>. advance() drops one Local handle per call, and
    // releases the BookEnd handle once the batch is empty.
    class PersistentStore {
    public:
        static PersistentStore* create(int count, int* gcCount, BookEnd* bookend)
        {
            return new PersistentStore(count, gcCount, bookend);
        }

        // Called from BookEnd::trace() on every GC: count the GC and
        // release one Persistent<Local>.
        void advance()
        {
            (*m_gcCount)++;
            m_store.removeLast();
            // Remove reference to BookEnd when there are no Persistent<Local>s left.
            // The BookEnd object will then be swept out at the next GC, and pre-finalized,
            // causing this PersistentStore instance to be destructed, along with
            // the Persistent<BookEnd>. It being the very last Persistent<>, causing the
            // GC loop in ThreadState::detach() to terminate.
            if (!m_store.size())
                m_bookend = nullptr;
        }

    private:
        PersistentStore(int count, int* gcCount, BookEnd* bookend)
        {
            m_gcCount = gcCount;
            m_bookend = bookend;
            for (int i = 0; i < count; ++i)
                m_store.append(Persistent<ThreadPersistentHeapTester::Local>(new ThreadPersistentHeapTester::Local()));
        }

        Vector<Persistent<Local>> m_store; // One handle is dropped per advance().
        Persistent<BookEnd> m_bookend;     // Keeps the BookEnd alive until m_store drains.
        int* m_gcCount;                    // Out-parameter; incremented once per advance().
    };

    // Kept alive solely by PersistentStore::m_bookend; its trace() hook
    // drives the store forward on every GC, and its pre-finalizer tears
    // the store down once the BookEnd itself becomes garbage.
    class BookEnd final : public GarbageCollected<BookEnd> {
        USING_PRE_FINALIZER(BookEnd, dispose);
    public:
        BookEnd()
            : m_store(nullptr)
        {
            ThreadState::current()->registerPreFinalizer(*this);
        }

        // Two-step wiring: PersistentStore::create() needs the BookEnd*,
        // so the back-pointer is installed after both objects exist.
        void initialize(PersistentStore* store)
        {
            m_store = store;
        }

        // Pre-finalizer (see USING_PRE_FINALIZER above): frees the off-heap
        // store, which releases its remaining Persistent<> handles.
        void dispose()
        {
            delete m_store;
        }

        // Deliberately side-effecting trace: every GC that marks this object
        // advances the store by one released Persistent<Local>.
        void trace(Visitor* visitor)
        {
            ASSERT(m_store);
            m_store->advance();
        }

    private:
        PersistentStore* m_store;
    };

    virtual void runThread() override
    {
        ThreadState::attach();

        const int iterations = 5;
        int gcCount = 0;
        BookEnd* bookend = new BookEnd();
        PersistentStore* store = PersistentStore::create(iterations, &gcCount, bookend);
        bookend->initialize(store);

        // Drop the raw on-stack pointers so the only remaining references
        // are the Persistent<> handles held by the store.
        bookend = nullptr;
        store = nullptr;

        // Upon thread detach, GCs will run until all persistents have been
        // released. We verify that the draining of persistents proceeds
        // as expected by dropping one Persistent<> per GC until there
        // are none left.
        ThreadState::detach();
        EXPECT_EQ(iterations, gcCount);
        atomicDecrement(&m_threadsToFinish);
    }
};
| 654 |
552 // The accounting for memory includes the memory used by rounding up object | 655 // The accounting for memory includes the memory used by rounding up object |
553 // sizes. This is done in a different way on 32 bit and 64 bit, so we have to | 656 // sizes. This is done in a different way on 32 bit and 64 bit, so we have to |
554 // have some slack in the tests. | 657 // have some slack in the tests. |
555 template<typename T> | 658 template<typename T> |
556 void CheckWithSlack(T expected, T actual, int slack) | 659 void CheckWithSlack(T expected, T actual, int slack) |
557 { | 660 { |
558 EXPECT_LE(expected, actual); | 661 EXPECT_LE(expected, actual); |
559 EXPECT_GE((intptr_t)expected + slack, (intptr_t)actual); | 662 EXPECT_GE((intptr_t)expected + slack, (intptr_t)actual); |
560 } | 663 } |
561 | 664 |
(...skipping 949 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
// Runs ThreadedHeapTester (defined earlier in this file); exercises
// multi-threaded use of the GC heap.
TEST(HeapTest, Threading)
{
    ThreadedHeapTester::test();
}
1515 | 1618 |
// Runs ThreadedWeaknessTester (defined earlier in this file); exercises
// weak references under multi-threaded GC.
TEST(HeapTest, ThreadedWeakness)
{
    ThreadedWeaknessTester::test();
}
1520 | 1623 |
// Runs ThreadPersistentHeapTester: verifies that a detaching thread's
// remaining Persistent<> handles are drained one per GC by the
// ThreadState::detach() GC loop.
TEST(HeapTest, ThreadPersistent)
{
    ThreadPersistentHeapTester::test();
}
| 1628 |
1521 TEST(HeapTest, BasicFunctionality) | 1629 TEST(HeapTest, BasicFunctionality) |
1522 { | 1630 { |
1523 clearOutOldGarbage(); | 1631 clearOutOldGarbage(); |
1524 size_t initialObjectPayloadSize = Heap::objectPayloadSizeForTesting(); | 1632 size_t initialObjectPayloadSize = Heap::objectPayloadSizeForTesting(); |
1525 { | 1633 { |
1526 size_t slack = 0; | 1634 size_t slack = 0; |
1527 | 1635 |
1528 // When the test starts there may already have been leaked some memory | 1636 // When the test starts there may already have been leaked some memory |
1529 // on the heap, so we establish a base line. | 1637 // on the heap, so we establish a base line. |
1530 size_t baseLevel = initialObjectPayloadSize; | 1638 size_t baseLevel = initialObjectPayloadSize; |
(...skipping 3810 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5341 Heap::collectGarbage(ThreadState::NoHeapPointersOnStack); | 5449 Heap::collectGarbage(ThreadState::NoHeapPointersOnStack); |
5342 | 5450 |
5343 // Verify that the DeepEagerly chain isn't completely unravelled | 5451 // Verify that the DeepEagerly chain isn't completely unravelled |
5344 // by performing eager trace() calls, but the explicit mark | 5452 // by performing eager trace() calls, but the explicit mark |
5345 // stack is switched once some nesting limit is exceeded. | 5453 // stack is switched once some nesting limit is exceeded. |
5346 EXPECT_GT(DeepEagerly::sTraceLazy, 2); | 5454 EXPECT_GT(DeepEagerly::sTraceLazy, 2); |
5347 #endif | 5455 #endif |
5348 } | 5456 } |
5349 | 5457 |
5350 } // namespace blink | 5458 } // namespace blink |
OLD | NEW |