| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
| 6 * met: | 6 * met: |
| 7 * | 7 * |
| 8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
| (...skipping 505 matching lines...) |
| 516 SafePointScope scope(ThreadState::NoHeapPointersOnStack); | 516 SafePointScope scope(ThreadState::NoHeapPointersOnStack); |
| 517 Platform::current()->yieldCurrentThread(); | 517 Platform::current()->yieldCurrentThread(); |
| 518 } | 518 } |
| 519 | 519 |
| 520 if (gcCount < gcPerThread) { | 520 if (gcCount < gcPerThread) { |
| 521             Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC); | 521             Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC); |
| 522 gcCount++; | 522 gcCount++; |
| 523 atomicIncrement(&m_gcCount); | 523 atomicIncrement(&m_gcCount); |
| 524 } | 524 } |
| 525 | 525 |
| 526 // Taking snapshot shouldn't have any bad side effect. |
| 527             Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::TakeSnapshot, Heap::ForcedGC); |
| 526             Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC); | 528             Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC); |
| 527 EXPECT_EQ(wrapper->value(), 0x0bbac0de); | 529 EXPECT_EQ(wrapper->value(), 0x0bbac0de); |
| 528 EXPECT_EQ((*globalPersistent)->value(), 0x0ed0cabb); | 530 EXPECT_EQ((*globalPersistent)->value(), 0x0ed0cabb); |
| 529 } | 531 } |
| 530 SafePointScope scope(ThreadState::NoHeapPointersOnStack); | 532 SafePointScope scope(ThreadState::NoHeapPointersOnStack); |
| 531 Platform::current()->yieldCurrentThread(); | 533 Platform::current()->yieldCurrentThread(); |
| 532 } | 534 } |
| 533 ThreadState::detach(); | 535 ThreadState::detach(); |
| 534 atomicDecrement(&m_threadsToFinish); | 536 atomicDecrement(&m_threadsToFinish); |
| 535 } | 537 } |
| (...skipping 24 matching lines...) |
| 560 SafePointScope scope(ThreadState::NoHeapPointersOnStack); | 562 SafePointScope scope(ThreadState::NoHeapPointersOnStack); |
| 561 Platform::current()->yieldCurrentThread(); | 563 Platform::current()->yieldCurrentThread(); |
| 562 } | 564 } |
| 563 | 565 |
| 564 if (gcCount < gcPerThread) { | 566 if (gcCount < gcPerThread) { |
| 565             Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC); | 567             Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC); |
| 566 gcCount++; | 568 gcCount++; |
| 567 atomicIncrement(&m_gcCount); | 569 atomicIncrement(&m_gcCount); |
| 568 } | 570 } |
| 569 | 571 |
| 572 // Taking snapshot shouldn't have any bad side effect. |
| 573             Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::TakeSnapshot, Heap::ForcedGC); |
| 570             Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC); | 574             Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC); |
| 571 EXPECT_TRUE(weakMap->isEmpty()); | 575 EXPECT_TRUE(weakMap->isEmpty()); |
| 572 EXPECT_TRUE(weakMap2.isEmpty()); | 576 EXPECT_TRUE(weakMap2.isEmpty()); |
| 573 } | 577 } |
| 574 SafePointScope scope(ThreadState::NoHeapPointersOnStack); | 578 SafePointScope scope(ThreadState::NoHeapPointersOnStack); |
| 575 Platform::current()->yieldCurrentThread(); | 579 Platform::current()->yieldCurrentThread(); |
| 576 } | 580 } |
| 577 ThreadState::detach(); | 581 ThreadState::detach(); |
| 578 atomicDecrement(&m_threadsToFinish); | 582 atomicDecrement(&m_threadsToFinish); |
| 579 } | 583 } |
| (...skipping 3048 matching lines...) |
| 3628 objectAddresses.append(objectAddress); | 3632 objectAddresses.append(objectAddress); |
| 3629 endAddresses.append(objectAddress + sizeof(SimpleObject) - 1); | 3633 endAddresses.append(objectAddress + sizeof(SimpleObject) - 1); |
| 3630 } | 3634 } |
| 3631 LargeHeapObject* largeObject = LargeHeapObject::create(); | 3635 LargeHeapObject* largeObject = LargeHeapObject::create(); |
| 3632 largeObjectAddress = reinterpret_cast<Address>(largeObject); | 3636 largeObjectAddress = reinterpret_cast<Address>(largeObject); |
| 3633 largeObjectEndAddress = largeObjectAddress + sizeof(LargeHeapObject) - 1; | 3637 largeObjectEndAddress = largeObjectAddress + sizeof(LargeHeapObject) - 1; |
| 3634 | 3638 |
| 3635 // This is a low-level test where we call checkAndMarkPointer. This method | 3639 // This is a low-level test where we call checkAndMarkPointer. This method |
| 3636 // causes the object start bitmap to be computed which requires the heap | 3640 // causes the object start bitmap to be computed which requires the heap |
| 3637 // to be in a consistent state (e.g. the free allocation area must be put | 3641 // to be in a consistent state (e.g. the free allocation area must be put |
| 3638 // into a free list header). However when we call makeConsistentForSweeping it | 3642 // into a free list header). However when we call makeConsistentForGC it |
| 3639 // also clears out the freelists so we have to rebuild those before trying | 3643 // also clears out the freelists so we have to rebuild those before trying |
| 3640 // to allocate anything again. We do this by forcing a GC after doing the | 3644 // to allocate anything again. We do this by forcing a GC after doing the |
| 3641 // checkAndMarkPointer tests. | 3645 // checkAndMarkPointer tests. |
| 3642 { | 3646 { |
| 3643 TestGCScope scope(ThreadState::HeapPointersOnStack); | 3647 TestGCScope scope(ThreadState::HeapPointersOnStack); |
| 3644     EXPECT_TRUE(scope.allThreadsParked()); // Fail the test if we could not park all threads. | 3648     EXPECT_TRUE(scope.allThreadsParked()); // Fail the test if we could not park all threads. |
| 3645 Heap::flushHeapDoesNotContainCache(); | 3649 Heap::flushHeapDoesNotContainCache(); |
| 3646 for (size_t i = 0; i < objectAddresses.size(); i++) { | 3650 for (size_t i = 0; i < objectAddresses.size(); i++) { |
| 3647         EXPECT_TRUE(Heap::checkAndMarkPointer(&visitor, objectAddresses[i])); | 3651         EXPECT_TRUE(Heap::checkAndMarkPointer(&visitor, objectAddresses[i])); |
| 3648 EXPECT_TRUE(Heap::checkAndMarkPointer(&visitor, endAddresses[i])); | 3652 EXPECT_TRUE(Heap::checkAndMarkPointer(&visitor, endAddresses[i])); |
| (...skipping 2461 matching lines...) |
| 6110 { | 6114 { |
| 6111 Persistent<ClassWithMember> object = ClassWithMember::create(); | 6115 Persistent<ClassWithMember> object = ClassWithMember::create(); |
| 6112 EXPECT_EQ(0, object->traceCount()); | 6116 EXPECT_EQ(0, object->traceCount()); |
| 6113 TestMixinAllocatingObject* mixin = TestMixinAllocatingObject::create(object.get()); | 6117 TestMixinAllocatingObject* mixin = TestMixinAllocatingObject::create(object.get()); |
| 6114 EXPECT_TRUE(mixin); | 6118 EXPECT_TRUE(mixin); |
| 6115 EXPECT_GT(object->traceCount(), 0); | 6119 EXPECT_GT(object->traceCount(), 0); |
| 6116 EXPECT_GT(mixin->traceCount(), 0); | 6120 EXPECT_GT(mixin->traceCount(), 0); |
| 6117 } | 6121 } |
| 6118 | 6122 |
| 6119 } // namespace blink | 6123 } // namespace blink |
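
The substance of the patch is easier to read outside the two-column layout: in each threaded stress test, a forced snapshot GC is inserted before the usual sweeping GC so the test can assert that taking a snapshot has no observable side effect. The sketch below condenses that pattern from the first hunk; it is a reading aid, not a compilable excerpt, since it relies on the HeapTest.cpp fixtures visible in the diff (gcCount, gcPerThread, m_gcCount, wrapper, globalPersistent). The second hunk applies the same pattern to the weak-collection test and asserts weakMap->isEmpty() and weakMap2.isEmpty() afterwards.

```cpp
// Per-thread body of the threaded heap test, condensed from the hunks above.
if (gcCount < gcPerThread) {
    // Pre-existing sweeping GC performed by each worker thread.
    Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC);
    gcCount++;
    atomicIncrement(&m_gcCount);
}

// New in this patch: taking a snapshot shouldn't have any bad side effect,
// so a snapshot GC is forced right before the usual sweeping GC.
Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::TakeSnapshot, Heap::ForcedGC);
Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC);

// Objects kept alive across both collections must still hold their marker values.
EXPECT_EQ(wrapper->value(), 0x0bbac0de);
EXPECT_EQ((*globalPersistent)->value(), 0x0ed0cabb);
```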
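The comment block in the checkAndMarkPointer hunk describes an ordering constraint whose tail is elided by the viewer. The sketch below pieces together only the calls visible in this file; the visitor's type and the exact code following the scope are not shown in the excerpt, so the trailing forced GC is an assumption based on the comment's own wording and the collectGarbage form used elsewhere in this test file.

```cpp
// Low-level conservative-marking check, as described by the comment above.
// Assumes the HeapTest.cpp fixtures from the hunk: objectAddresses and
// endAddresses recorded at allocation time, and a marking visitor whose
// concrete type is not visible in this excerpt.
{
    TestGCScope scope(ThreadState::HeapPointersOnStack);
    EXPECT_TRUE(scope.allThreadsParked()); // Fail the test if we could not park all threads.
    Heap::flushHeapDoesNotContainCache();
    for (size_t i = 0; i < objectAddresses.size(); i++) {
        // checkAndMarkPointer computes the object start bitmap, which requires
        // the heap to be consistent (free areas placed in free list headers).
        EXPECT_TRUE(Heap::checkAndMarkPointer(&visitor, objectAddresses[i]));
        EXPECT_TRUE(Heap::checkAndMarkPointer(&visitor, endAddresses[i]));
    }
}
// Making the heap consistent for GC also cleared the free lists, so (per the
// comment) a GC is forced here to rebuild them before allocating again.
// The argument form is assumed from the other collectGarbage calls in this file.
Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC);
```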