Index: runtime/vm/handles_impl.h |
diff --git a/runtime/vm/handles_impl.h b/runtime/vm/handles_impl.h |
index 327c28bfa2848eb42a708737b39b475ea4831809..06e3aca3b814816069c3e4a0a76eeca7de6e6793 100644 |
--- a/runtime/vm/handles_impl.h |
+++ b/runtime/vm/handles_impl.h |
@@ -5,6 +5,8 @@ |
#ifndef VM_HANDLES_IMPL_H_ |
#define VM_HANDLES_IMPL_H_ |
+#include "vm/heap.h" |
+#include "vm/heap_trace.h" |
#include "vm/visitor.h" |
namespace dart { |
@@ -24,7 +26,16 @@ void Handles<kHandleSizeInWords, |
} |
// Visit all scoped handles. |
- block = &first_scoped_block_; |
+ VisitScopedHandles(visitor); |
+} |
+ |
+ |
+template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
+void Handles<kHandleSizeInWords, |
+ kHandlesPerChunk, |
+ kOffsetOfRawPtr>::VisitScopedHandles( |
+ ObjectPointerVisitor* visitor) { |
+ HandlesBlock* block = &first_scoped_block_; |
do { |
block->VisitObjectPointers(visitor); |
block = block->next_block(); |
@@ -35,6 +46,23 @@ void Handles<kHandleSizeInWords, |
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
void Handles<kHandleSizeInWords, |
kHandlesPerChunk, |
+ kOffsetOfRawPtr>::VisitUnvisitedScopedHandles( |
+ ObjectPointerVisitor* visitor) { |
+ HandlesBlock* block = &first_scoped_block_; |
+ while (block != NULL && block != last_visited_block_) { |
+ block->VisitUnvisitedObjectPointers(visitor); |
+ block = block->next_block(); |
+ } |
+ // We want last_visited_block_ to point to first_scoped_block_.next_block(), |
+ // because pointers are still being added to first_scoped_block_, so it |
+ // may be "partially new" and require a partial scan next time. |
+ last_visited_block_ = first_scoped_block_.next_block(); |
+} |
+ |
+ |
+template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
+void Handles<kHandleSizeInWords, |
+ kHandlesPerChunk, |
kOffsetOfRawPtr>::Visit(HandleVisitor* visitor) { |
// Visit all zone handles. |
HandlesBlock* block = zone_blocks_; |
@@ -83,7 +111,12 @@ uword Handles<kHandleSizeInWords, |
ASSERT(isolate->no_handle_scope_depth() == 0); |
Handles* handles = isolate->current_zone()->handles(); |
ASSERT(handles != NULL); |
- return handles->AllocateHandleInZone(); |
+ uword address = handles->AllocateHandleInZone(); |
+ if (HeapTrace::is_enabled()) { |
+ uword zone_addr = reinterpret_cast<uword>(isolate->current_zone()); |
+ isolate->heap()->trace()->TraceAllocateZoneHandle(address, zone_addr); |
+ } |
+ return address; |
} |
@@ -109,10 +142,20 @@ void Handles<kHandleSizeInWords, |
kHandlesPerChunk, |
kOffsetOfRawPtr>::DeleteAll() { |
// Delete all the zone allocated handle blocks. |
+ // Heap tracing does not need to record this call to DeleteHandleBlocks, |
+ // since the individual zone deletions will be caught by instrumentation |
+ // in the BaseZone destructor. |
DeleteHandleBlocks(zone_blocks_); |
zone_blocks_ = NULL; |
// Delete all the scoped handle blocks. |
+ // Do not trace if there is no current isolate. This can happen during |
+ // isolate shutdown. |
+ if (HeapTrace::is_enabled() && Isolate::Current() != NULL) { |
+ Isolate::Current()->heap()->trace()->TraceDeleteScopedHandles(); |
+ } |
+ |
+ |
scoped_blocks_ = first_scoped_block_.next_block(); |
DeleteHandleBlocks(scoped_blocks_); |
first_scoped_block_.ReInit(); |
@@ -302,6 +345,24 @@ void Handles<kHandleSizeInWords, |
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
void Handles<kHandleSizeInWords, |
kHandlesPerChunk, |
+ kOffsetOfRawPtr>::HandlesBlock::VisitUnvisitedObjectPointers( |
+ ObjectPointerVisitor* visitor) { |
+ ASSERT(visitor != NULL); |
+ |
+ // last_visited_handle_ resumes where the previous scan stopped, so this |
+ // loop deliberately has no initialization clause. Visit the current slot |
+ // before advancing, so the resumed slot is scanned and the not-yet- |
+ // allocated slot at next_handle_slot_ is not. |
+ while (last_visited_handle_ < next_handle_slot_) { |
+ uword* addr = &data_[last_visited_handle_ + kOffsetOfRawPtr / kWordSize]; |
+ visitor->VisitPointer(reinterpret_cast<RawObject**>(addr)); |
+ last_visited_handle_ += kHandleSizeInWords; |
+ } |
+ |
+ |
+template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
+void Handles<kHandleSizeInWords, |
+ kHandlesPerChunk, |
kOffsetOfRawPtr>::HandlesBlock::Visit(HandleVisitor* visitor) { |
ASSERT(visitor != NULL); |
for (intptr_t i = 0; i < next_handle_slot_; i += kHandleSizeInWords) { |