Index: src/profile-generator.cc
===================================================================
--- src/profile-generator.cc (revision 9327)
+++ src/profile-generator.cc (working copy)
@@ -1812,12 +1812,13 @@
}
-int V8HeapExplorer::EstimateObjectsCount() {
- HeapIterator iterator(HeapIterator::kFilterUnreachable);
+int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
int objects_count = 0;
- for (HeapObject* obj = iterator.next();
+ for (HeapObject* obj = iterator->next();
obj != NULL;
- obj = iterator.next(), ++objects_count) {}
+ obj = iterator->next()) {
+ objects_count++;
+ }
return objects_count;
}
@@ -1945,6 +1946,14 @@
"descriptors", map->instance_descriptors(),
Map::kInstanceDescriptorsOrBitField3Offset);
}
+ if (map->prototype_transitions() != heap_->empty_fixed_array()) {
+ TagObject(map->prototype_transitions(), "(prototype transitions)");
+ SetInternalReference(obj,
+ entry,
+ "prototype_transitions",
+ map->prototype_transitions(),
+ Map::kPrototypeTransitionsOffset);
+ }
SetInternalReference(obj, entry,
"code_cache", map->code_cache(),
Map::kCodeCacheOffset);
@@ -2175,9 +2184,11 @@
bool V8HeapExplorer::IterateAndExtractReferences(
SnapshotFillerInterface* filler) {
+ HeapIterator iterator(HeapIterator::kFilterUnreachable);
+
filler_ = filler;
- HeapIterator iterator(HeapIterator::kFilterUnreachable);
bool interrupted = false;
+
// Heap iteration with filtering must be finished in any case.
for (HeapObject* obj = iterator.next();
obj != NULL;
@@ -2743,13 +2754,43 @@
bool HeapSnapshotGenerator::GenerateSnapshot() {
v8_heap_explorer_.TagGlobalObjects();
+ // TODO(1562) Profiler assumes that any object that is in the heap after
+ // full GC is reachable from the root when computing dominators.
+ // This is not true for weakly reachable objects.
+ // As a temporary solution we call GC twice.
+ Isolate::Current()->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ Isolate::Current()->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+
+#ifdef DEBUG
+ Heap* debug_heap = Isolate::Current()->heap();
+ ASSERT(!debug_heap->old_data_space()->was_swept_conservatively());
+ ASSERT(!debug_heap->old_pointer_space()->was_swept_conservatively());
+ ASSERT(!debug_heap->code_space()->was_swept_conservatively());
+ ASSERT(!debug_heap->cell_space()->was_swept_conservatively());
+ ASSERT(!debug_heap->map_space()->was_swept_conservatively());
+#endif
+
+ // The following code uses heap iterators, so we want the heap to be
+ // stable. It should follow TagGlobalObjects as that can allocate.
AssertNoAllocation no_alloc;
+#ifdef DEBUG
+ debug_heap->Verify();
+#endif
+
SetProgressTotal(4); // 2 passes + dominators + sizes.
+#ifdef DEBUG
+ debug_heap->Verify();
+#endif
+
// Pass 1. Iterate heap contents to count entries and references.
if (!CountEntriesAndReferences()) return false;
+#ifdef DEBUG
+ debug_heap->Verify();
+#endif
+
// Allocate and fill entries in the snapshot, allocate references.
snapshot_->AllocateEntries(entries_.entries_count(),
entries_.total_children_count(),
@@ -2787,8 +2828,9 @@
void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
if (control_ == NULL) return;
+ HeapIterator iterator(HeapIterator::kFilterUnreachable);
progress_total_ = (
- v8_heap_explorer_.EstimateObjectsCount() +
+ v8_heap_explorer_.EstimateObjectsCount(&iterator) +
dom_explorer_.EstimateObjectsCount()) * iterations_count;
progress_counter_ = 0;
}
@@ -2838,7 +2880,7 @@
nodes_to_visit.RemoveLast();
}
}
- entries->Truncate(current_entry);
+ ASSERT_EQ(current_entry, entries->length());
}