| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
| 6 * met: | 6 * met: |
| 7 * | 7 * |
| 8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
| (...skipping 504 matching lines...) |
| 515 SafePointScope scope(ThreadState::NoHeapPointersOnStack); | 515 SafePointScope scope(ThreadState::NoHeapPointersOnStack); |
| 516 Platform::current()->yieldCurrentThread(); | 516 Platform::current()->yieldCurrentThread(); |
| 517 } | 517 } |
| 518 | 518 |
| 519 if (gcCount < gcPerThread) { | 519 if (gcCount < gcPerThread) { |
| 520 Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC); | 520 Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC); |
| 521 gcCount++; | 521 gcCount++; |
| 522 atomicIncrement(&m_gcCount); | 522 atomicIncrement(&m_gcCount); |
| 523 } | 523 } |
| 524 | 524 |
| 525 // Taking snapshot shouldn't have any bad side effect. |
| 526 Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::TakeSnapshot, Heap::ForcedGC); |
| 525 Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC); | 527 Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC); |
| 526 EXPECT_EQ(wrapper->value(), 0x0bbac0de); | 528 EXPECT_EQ(wrapper->value(), 0x0bbac0de); |
| 527 EXPECT_EQ((*globalPersistent)->value(), 0x0ed0cabb); | 529 EXPECT_EQ((*globalPersistent)->value(), 0x0ed0cabb); |
| 528 } | 530 } |
| 529 SafePointScope scope(ThreadState::NoHeapPointersOnStack); | 531 SafePointScope scope(ThreadState::NoHeapPointersOnStack); |
| 530 Platform::current()->yieldCurrentThread(); | 532 Platform::current()->yieldCurrentThread(); |
| 531 } | 533 } |
| 532 ThreadState::detach(); | 534 ThreadState::detach(); |
| 533 atomicDecrement(&m_threadsToFinish); | 535 atomicDecrement(&m_threadsToFinish); |
| 534 } | 536 } |
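The two lines added in this hunk (new 525-526) interleave a snapshot-only collection ahead of the existing sweeping collection, so the assertions that follow now also exercise the claim in the new comment: taking a heap snapshot must not disturb live objects. A minimal single-threaded sketch of that pattern, assuming the IntWrapper fixture used elsewhere in this test file (only wrapper->value() and the collectGarbage signature are visible in the hunk itself):

```cpp
// Sketch only: IntWrapper / IntWrapper::create are assumed test fixtures;
// the collectGarbage arguments mirror the calls shown in the hunk above.
{
    Persistent<IntWrapper> wrapper = IntWrapper::create(0x0bbac0de);

    // Snapshot pass: must not move, free, or otherwise mutate live objects.
    Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::TakeSnapshot, Heap::ForcedGC);

    // A regular sweeping pass afterwards still sees the object intact.
    Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC);

    EXPECT_EQ(wrapper->value(), 0x0bbac0de);
}
```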
| (...skipping 24 matching lines...) |
| 559 SafePointScope scope(ThreadState::NoHeapPointersOnStack); | 561 SafePointScope scope(ThreadState::NoHeapPointersOnStack); |
| 560 Platform::current()->yieldCurrentThread(); | 562 Platform::current()->yieldCurrentThread(); |
| 561 } | 563 } |
| 562 | 564 |
| 563 if (gcCount < gcPerThread) { | 565 if (gcCount < gcPerThread) { |
| 564 Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC); | 566 Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC); |
| 565 gcCount++; | 567 gcCount++; |
| 566 atomicIncrement(&m_gcCount); | 568 atomicIncrement(&m_gcCount); |
| 567 } | 569 } |
| 568 | 570 |
| 571 // Taking snapshot shouldn't have any bad side effect. |
| 572 Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::TakeSnapshot, Heap::ForcedGC); |
| 569 Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC); | 573 Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC); |
| 570 EXPECT_TRUE(weakMap->isEmpty()); | 574 EXPECT_TRUE(weakMap->isEmpty()); |
| 571 EXPECT_TRUE(weakMap2.isEmpty()); | 575 EXPECT_TRUE(weakMap2.isEmpty()); |
| 572 } | 576 } |
| 573 SafePointScope scope(ThreadState::NoHeapPointersOnStack); | 577 SafePointScope scope(ThreadState::NoHeapPointersOnStack); |
| 574 Platform::current()->yieldCurrentThread(); | 578 Platform::current()->yieldCurrentThread(); |
| 575 } | 579 } |
| 576 ThreadState::detach(); | 580 ThreadState::detach(); |
| 577 atomicDecrement(&m_threadsToFinish); | 581 atomicDecrement(&m_threadsToFinish); |
| 578 } | 582 } |
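The second hunk applies the same snapshot-before-sweep interleaving inside the weak-map thread body; its assertions additionally check that a TakeSnapshot pass does not strengthen weak references, since both maps are still empty after the sweeping collection. A hedged sketch of that invariant, assuming a simple weak-keyed HeapHashMap (the fixture's actual map types are not visible in this hunk):

```cpp
// Sketch only: the map type and the IntWrapper fixture are assumptions; the
// GC calls mirror the hunk above. With no Persistent reference to the key,
// the entry must be gone once weak processing has run.
{
    Persistent<HeapHashMap<WeakMember<IntWrapper>, Member<IntWrapper>>> map =
        new HeapHashMap<WeakMember<IntWrapper>, Member<IntWrapper>>();
    map->add(IntWrapper::create(1), IntWrapper::create(2));

    // Snapshotting must not retain the weakly held key...
    Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::TakeSnapshot, Heap::ForcedGC);
    // ...so the sweeping collection that follows clears the entry.
    Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC);
    EXPECT_TRUE(map->isEmpty());
}
```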
| (...skipping 3049 matching lines...) |
| 3628 objectAddresses.append(objectAddress); | 3632 objectAddresses.append(objectAddress); |
| 3629 endAddresses.append(objectAddress + sizeof(SimpleObject) - 1); | 3633 endAddresses.append(objectAddress + sizeof(SimpleObject) - 1); |
| 3630 } | 3634 } |
| 3631 LargeHeapObject* largeObject = LargeHeapObject::create(); | 3635 LargeHeapObject* largeObject = LargeHeapObject::create(); |
| 3632 largeObjectAddress = reinterpret_cast<Address>(largeObject); | 3636 largeObjectAddress = reinterpret_cast<Address>(largeObject); |
| 3633 largeObjectEndAddress = largeObjectAddress + sizeof(LargeHeapObject) - 1; | 3637 largeObjectEndAddress = largeObjectAddress + sizeof(LargeHeapObject) - 1; |
| 3634 | 3638 |
| 3635 // This is a low-level test where we call checkAndMarkPointer. This method | 3639 // This is a low-level test where we call checkAndMarkPointer. This method |
| 3636 // causes the object start bitmap to be computed which requires the heap | 3640 // causes the object start bitmap to be computed which requires the heap |
| 3637 // to be in a consistent state (e.g. the free allocation area must be put | 3641 // to be in a consistent state (e.g. the free allocation area must be put |
| 3638 // into a free list header). However when we call makeConsistentForSweeping it | 3642 // into a free list header). However when we call makeConsistentForGC it |
| 3639 // also clears out the freelists so we have to rebuild those before trying | 3643 // also clears out the freelists so we have to rebuild those before trying |
| 3640 // to allocate anything again. We do this by forcing a GC after doing the | 3644 // to allocate anything again. We do this by forcing a GC after doing the |
| 3641 // checkAndMarkPointer tests. | 3645 // checkAndMarkPointer tests. |
| 3642 { | 3646 { |
| 3643 TestGCScope scope(ThreadState::HeapPointersOnStack); | 3647 TestGCScope scope(ThreadState::HeapPointersOnStack); |
| 3644 EXPECT_TRUE(scope.allThreadsParked()); // Fail the test if we could not park all threads. | 3648 EXPECT_TRUE(scope.allThreadsParked()); // Fail the test if we could not park all threads. |
| 3645 Heap::flushHeapDoesNotContainCache(); | 3649 Heap::flushHeapDoesNotContainCache(); |
| 3646 for (size_t i = 0; i < objectAddresses.size(); i++) { | 3650 for (size_t i = 0; i < objectAddresses.size(); i++) { |
| 3647 EXPECT_TRUE(Heap::checkAndMarkPointer(&visitor, objectAddresses[i])); | 3651 EXPECT_TRUE(Heap::checkAndMarkPointer(&visitor, objectAddresses[i])); |
| 3648 EXPECT_TRUE(Heap::checkAndMarkPointer(&visitor, endAddresses[i])); | 3652 EXPECT_TRUE(Heap::checkAndMarkPointer(&visitor, endAddresses[i])); |
| (...skipping 2461 matching lines...) |
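The reworded comment in the hunk above (makeConsistentForSweeping becomes makeConsistentForGC) carries the reasoning behind the test's shape: checkAndMarkPointer forces the object-start bitmap to be computed, which needs a consistent heap, and making the heap consistent empties the freelists, so a forced GC has to follow the probes before anything is allocated again. A condensed sketch of that probe-then-rebuild sequence, using the calls visible in the hunk (the declarations of visitor and the address vectors sit in the elided lines above):

```cpp
// Condensed sketch; 'visitor', 'objectAddresses' and 'endAddresses' come from
// the surrounding test, whose declarations are elided in this hunk.
{
    // Park every thread and allow conservative stack scanning; computing the
    // object-start bitmap below requires a consistent heap.
    TestGCScope scope(ThreadState::HeapPointersOnStack);
    EXPECT_TRUE(scope.allThreadsParked());
    Heap::flushHeapDoesNotContainCache();
    for (size_t i = 0; i < objectAddresses.size(); i++) {
        // Both the first and the last byte of each object must resolve to its
        // header via the object-start bitmap and get marked.
        EXPECT_TRUE(Heap::checkAndMarkPointer(&visitor, objectAddresses[i]));
        EXPECT_TRUE(Heap::checkAndMarkPointer(&visitor, endAddresses[i]));
    }
}
// Making the heap consistent cleared the freelists; force a GC so they are
// rebuilt before the test allocates again.
Heap::collectGarbage(ThreadState::NoHeapPointersOnStack, ThreadState::GCWithSweep, Heap::ForcedGC);
```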
| 6110 { | 6114 { |
| 6111 Persistent<ClassWithMember> object = ClassWithMember::create(); | 6115 Persistent<ClassWithMember> object = ClassWithMember::create(); |
| 6112 EXPECT_EQ(0, object->traceCount()); | 6116 EXPECT_EQ(0, object->traceCount()); |
| 6113 TestMixinAllocatingObject* mixin = TestMixinAllocatingObject::create(object.get()); | 6117 TestMixinAllocatingObject* mixin = TestMixinAllocatingObject::create(object.get()); |
| 6114 EXPECT_TRUE(mixin); | 6118 EXPECT_TRUE(mixin); |
| 6115 EXPECT_GT(object->traceCount(), 0); | 6119 EXPECT_GT(object->traceCount(), 0); |
| 6116 EXPECT_GT(mixin->traceCount(), 0); | 6120 EXPECT_GT(mixin->traceCount(), 0); |
| 6117 } | 6121 } |
| 6118 | 6122 |
| 6119 } // namespace blink | 6123 } // namespace blink |