Chromium Code Reviews| Index: runtime/vm/heap.cc |
| diff --git a/runtime/vm/heap.cc b/runtime/vm/heap.cc |
| index 27d39d345e3d9e9be463032d02b7fbd9716ac90b..1f166388e802c2b94fd0d1e2abc247f026b2eb35 100644 |
| --- a/runtime/vm/heap.cc |
| +++ b/runtime/vm/heap.cc |
| @@ -219,6 +219,38 @@ void Heap::VisitObjects(ObjectVisitor* visitor) const { |
| } |
| +HeapIterationScope::HeapIterationScope() |
| + : StackResource(Thread::Current()->isolate()), |
| + old_space_(isolate()->heap()->old_space()) { |
| + // It's not yet safe to iterate over a paged space while it's concurrently |
| + // sweeping, so wait for any such task to complete first. |
| + MonitorLocker ml(old_space_->tasks_lock()); |
| + while (old_space_->tasks() > 0) { |
| + ml.Wait(); |
| + } |
| + old_space_->set_tasks(1); |
|
siva
2015/07/07 18:10:24
How do we ensure that people don't end up with nested HeapIterationScopes?
koda
2015/07/07 22:46:39
Done.
|
| +} |
| + |
| + |
| +HeapIterationScope::~HeapIterationScope() { |
| + old_space_->set_tasks(0); |
|
siva
2015/07/07 18:10:24
Doesn't this need to be set under a lock as well?
koda
2015/07/07 22:46:39
Done.
|
| +} |
| + |
| + |
| +void Heap::IterateObjects(ObjectVisitor* visitor) const { |
| + // The visitor must not allocate from the heap. |
| + NoSafepointScope no_safepoint_scope_; |
| + new_space_->VisitObjects(visitor); |
| + IterateOldObjects(visitor); |
| +} |
| + |
| + |
| +void Heap::IterateOldObjects(ObjectVisitor* visitor) const { |
| + HeapIterationScope heap_iteration_scope; |
| + old_space_->VisitObjects(visitor); |
| +} |
| + |
| + |
| void Heap::VisitObjectPointers(ObjectPointerVisitor* visitor) const { |
| new_space_->VisitObjectPointers(visitor); |
| old_space_->VisitObjectPointers(visitor); |
| @@ -235,11 +267,7 @@ RawInstructions* Heap::FindObjectInCodeSpace(FindObjectVisitor* visitor) const { |
| RawObject* Heap::FindOldObject(FindObjectVisitor* visitor) const { |
| - // Wait for any concurrent GC tasks to finish before walking. |
| - MonitorLocker ml(old_space_->tasks_lock()); |
| - while (old_space_->tasks() > 0) { |
| - ml.Wait(); |
| - } |
| + HeapIterationScope heap_iteration_scope; |
| return old_space_->FindObject(visitor, HeapPage::kData); |
| } |
| @@ -250,7 +278,8 @@ RawObject* Heap::FindNewObject(FindObjectVisitor* visitor) const { |
| RawObject* Heap::FindObject(FindObjectVisitor* visitor) const { |
| - ASSERT(isolate()->no_safepoint_scope_depth() != 0); |
| + // The visitor must not allocate from the heap. |
| + NoSafepointScope no_safepoint_scope; |
| RawObject* raw_obj = FindNewObject(visitor); |
| if (raw_obj != Object::null()) { |
| return raw_obj; |
| @@ -489,6 +518,12 @@ ObjectSet* Heap::CreateAllocatedObjectSet( |
| bool Heap::Verify(MarkExpectation mark_expectation) const { |
| + HeapIterationScope heap_iteration_scope; |
| + return VerifyGC(mark_expectation); |
| +} |
| + |
| + |
| +bool Heap::VerifyGC(MarkExpectation mark_expectation) const { |
| ObjectSet* allocated_set = CreateAllocatedObjectSet(mark_expectation); |
| VerifyPointersVisitor visitor(isolate(), allocated_set); |
| VisitObjectPointers(&visitor); |