OLD | NEW |
1 // Copyright 2009-2010 the V8 project authors. All rights reserved. | 1 // Copyright 2009-2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 10 matching lines...) |
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #include "v8.h" | 28 #include "v8.h" |
29 | 29 |
30 #include "heap-profiler.h" | 30 #include "heap-profiler.h" |
| 31 |
| 32 #include "allocation-tracker.h" |
31 #include "heap-snapshot-generator-inl.h" | 33 #include "heap-snapshot-generator-inl.h" |
32 | 34 |
33 namespace v8 { | 35 namespace v8 { |
34 namespace internal { | 36 namespace internal { |
35 | 37 |
36 HeapProfiler::HeapProfiler(Heap* heap) | 38 HeapProfiler::HeapProfiler(Heap* heap) |
37 : snapshots_(new HeapSnapshotsCollection(heap)), | 39 : ids_(new HeapObjectsMap(heap)), |
| 40 names_(new StringsStorage(heap)), |
38 next_snapshot_uid_(1), | 41 next_snapshot_uid_(1), |
39 is_tracking_allocations_(false), | |
40 is_tracking_object_moves_(false) { | 42 is_tracking_object_moves_(false) { |
41 } | 43 } |
42 | 44 |
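[note] This patch dissolves the HeapSnapshotsCollection facade: HeapProfiler now owns the object-id map (ids_), the string storage (names_), the snapshot list, and, while tracking, an AllocationTracker directly. The declarations below are a sketch inferred from how these members are used in this file (SmartPointer Reset()/is_empty(), List Add()/Iterate()/Clear()); the actual heap-profiler.h hunk is not part of this section, so treat names and types as assumptions.

    // Presumed new members in heap-profiler.h, inferred from usage below;
    // not copied from the real header in this patch.
    List<HeapSnapshot*> snapshots_;                       // owned raw pointers
    SmartPointer<StringsStorage> names_;                  // interned name strings
    SmartPointer<HeapObjectsMap> ids_;                    // address -> SnapshotObjectId
    SmartPointer<AllocationTracker> allocation_tracker_;  // non-empty only while tracking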
43 | 45 |
| 46 static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) { |
| 47 delete *snapshot_ptr; |
| 48 } |
| 49 |
| 50 |
44 HeapProfiler::~HeapProfiler() { | 51 HeapProfiler::~HeapProfiler() { |
45 delete snapshots_; | 52 snapshots_.Iterate(DeleteHeapSnapshot); |
| 53 snapshots_.Clear(); |
46 } | 54 } |
47 | 55 |
48 | 56 |
49 void HeapProfiler::DeleteAllSnapshots() { | 57 void HeapProfiler::DeleteAllSnapshots() { |
50 Heap* the_heap = heap(); | 58 snapshots_.Iterate(DeleteHeapSnapshot); |
51 delete snapshots_; | 59 snapshots_.Clear(); |
52 snapshots_ = new HeapSnapshotsCollection(the_heap); | 60 names_.Reset(new StringsStorage(heap())); |
53 } | 61 } |
54 | 62 |
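[note] With snapshots_ now a plain List of raw owning pointers, teardown is manual: Iterate() applies DeleteHeapSnapshot to every element, then Clear() empties the list; DeleteAllSnapshots() additionally resets names_ so interned strings are released too. A minimal self-contained analogue of the deletion pattern in standard C++ (illustrative only, not V8's List type):

    #include <vector>

    struct Snapshot {};

    // Analogue of snapshots_.Iterate(DeleteHeapSnapshot); snapshots_.Clear();
    // the container holds owning raw pointers, so each element is deleted
    // explicitly before the storage is emptied.
    void DeleteAllSnapshots(std::vector<Snapshot*>* snapshots) {
      for (Snapshot* s : *snapshots) delete s;
      snapshots->clear();
    }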
55 | 63 |
| 64 void HeapProfiler::RemoveSnapshot(HeapSnapshot* snapshot) { |
| 65 snapshots_.RemoveElement(snapshot); |
| 66 } |
| 67 |
| 68 |
56 void HeapProfiler::DefineWrapperClass( | 69 void HeapProfiler::DefineWrapperClass( |
57 uint16_t class_id, v8::HeapProfiler::WrapperInfoCallback callback) { | 70 uint16_t class_id, v8::HeapProfiler::WrapperInfoCallback callback) { |
58 ASSERT(class_id != v8::HeapProfiler::kPersistentHandleNoClassId); | 71 ASSERT(class_id != v8::HeapProfiler::kPersistentHandleNoClassId); |
59 if (wrapper_callbacks_.length() <= class_id) { | 72 if (wrapper_callbacks_.length() <= class_id) { |
60 wrapper_callbacks_.AddBlock( | 73 wrapper_callbacks_.AddBlock( |
61 NULL, class_id - wrapper_callbacks_.length() + 1); | 74 NULL, class_id - wrapper_callbacks_.length() + 1); |
62 } | 75 } |
63 wrapper_callbacks_[class_id] = callback; | 76 wrapper_callbacks_[class_id] = callback; |
64 } | 77 } |
65 | 78 |
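[note] AddBlock(NULL, n) pads the callback table with n NULL entries, so after registration class_id is always a valid index and ExecuteWrapperClassCallback can bounds-check against length(). A standalone sketch of the same grow-on-demand table in standard C++ (hypothetical names, not the V8 List API):

    #include <vector>
    #include <cstdint>

    typedef void* (*WrapperInfoCallback)(uint16_t class_id, void* wrapper);

    // Grow the table with null entries on demand, then install the callback;
    // lookups treat out-of-range or null slots as "no callback registered".
    void Register(std::vector<WrapperInfoCallback>* table,
                  uint16_t class_id, WrapperInfoCallback callback) {
      if (table->size() <= class_id) table->resize(class_id + 1, NULL);
      (*table)[class_id] = callback;
    }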
66 | 79 |
67 v8::RetainedObjectInfo* HeapProfiler::ExecuteWrapperClassCallback( | 80 v8::RetainedObjectInfo* HeapProfiler::ExecuteWrapperClassCallback( |
68 uint16_t class_id, Object** wrapper) { | 81 uint16_t class_id, Object** wrapper) { |
69 if (wrapper_callbacks_.length() <= class_id) return NULL; | 82 if (wrapper_callbacks_.length() <= class_id) return NULL; |
70 return wrapper_callbacks_[class_id]( | 83 return wrapper_callbacks_[class_id]( |
71 class_id, Utils::ToLocal(Handle<Object>(wrapper))); | 84 class_id, Utils::ToLocal(Handle<Object>(wrapper))); |
72 } | 85 } |
73 | 86 |
74 | 87 |
75 HeapSnapshot* HeapProfiler::TakeSnapshot( | 88 HeapSnapshot* HeapProfiler::TakeSnapshot( |
76 const char* name, | 89 const char* name, |
77 v8::ActivityControl* control, | 90 v8::ActivityControl* control, |
78 v8::HeapProfiler::ObjectNameResolver* resolver) { | 91 v8::HeapProfiler::ObjectNameResolver* resolver) { |
79 HeapSnapshot* result = snapshots_->NewSnapshot(name, next_snapshot_uid_++); | 92 HeapSnapshot* result = new HeapSnapshot(this, name, next_snapshot_uid_++); |
80 { | 93 { |
81 HeapSnapshotGenerator generator(result, control, resolver, heap()); | 94 HeapSnapshotGenerator generator(result, control, resolver, heap()); |
82 if (!generator.GenerateSnapshot()) { | 95 if (!generator.GenerateSnapshot()) { |
83 delete result; | 96 delete result; |
84 result = NULL; | 97 result = NULL; |
| 98 } else { |
| 99 snapshots_.Add(result); |
85 } | 100 } |
86 } | 101 } |
87 snapshots_->SnapshotGenerationFinished(result); | 102 ids_->RemoveDeadEntries(); |
88 is_tracking_object_moves_ = true; | 103 is_tracking_object_moves_ = true; |
89 return result; | 104 return result; |
90 } | 105 } |
91 | 106 |
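[note] Ownership semantics change here: the snapshot is heap-allocated up front, added to snapshots_ only if generation succeeds, and deleted (with NULL returned) if it is aborted. ids_->RemoveDeadEntries() replaces the old SnapshotGenerationFinished() call, pruning map entries for objects that died during the GC that snapshot generation forces. A hypothetical caller, assuming only the signature visible in this file:

    // Hypothetical usage; "profiler" and the NULL control/resolver
    // arguments are assumptions for illustration.
    HeapSnapshot* snapshot = profiler->TakeSnapshot("example", NULL, NULL);
    if (snapshot == NULL) {
      // Generation was cancelled via the ActivityControl; the profiler's
      // snapshot list was not modified.
    }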
92 | 107 |
93 HeapSnapshot* HeapProfiler::TakeSnapshot( | 108 HeapSnapshot* HeapProfiler::TakeSnapshot( |
94 String* name, | 109 String* name, |
95 v8::ActivityControl* control, | 110 v8::ActivityControl* control, |
96 v8::HeapProfiler::ObjectNameResolver* resolver) { | 111 v8::HeapProfiler::ObjectNameResolver* resolver) { |
97 return TakeSnapshot(snapshots_->names()->GetName(name), control, resolver); | 112 return TakeSnapshot(names_->GetName(name), control, resolver); |
98 } | 113 } |
99 | 114 |
100 | 115 |
101 void HeapProfiler::StartHeapObjectsTracking(bool track_allocations) { | 116 void HeapProfiler::StartHeapObjectsTracking(bool track_allocations) { |
102 snapshots_->StartHeapObjectsTracking(track_allocations); | 117 ids_->UpdateHeapObjectsMap(); |
103 is_tracking_object_moves_ = true; | 118 is_tracking_object_moves_ = true; |
104 ASSERT(!is_tracking_allocations_); | 119 ASSERT(!is_tracking_allocations()); |
105 if (track_allocations) { | 120 if (track_allocations) { |
| 121 allocation_tracker_.Reset(new AllocationTracker(*ids_, *names_)); |
106 heap()->DisableInlineAllocation(); | 122 heap()->DisableInlineAllocation(); |
107 is_tracking_allocations_ = true; | |
108 } | 123 } |
109 } | 124 } |
110 | 125 |
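[note] The is_tracking_allocations_ flag is gone; the tracking state is now simply whether allocation_tracker_ holds a tracker, which cannot drift out of sync with the flag. The accessor used in the ASSERT above is presumably defined in heap-profiler.h along these lines (inferred from the is_empty() check in StopHeapObjectsTracking, not shown in this diff):

    // Presumed inline accessor in heap-profiler.h: an existing tracker is
    // itself the "tracking allocations" state.
    bool is_tracking_allocations() const {
      return !allocation_tracker_.is_empty();
    }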
111 | 126 |
112 SnapshotObjectId HeapProfiler::PushHeapObjectsStats(OutputStream* stream) { | 127 SnapshotObjectId HeapProfiler::PushHeapObjectsStats(OutputStream* stream) { |
113 return snapshots_->PushHeapObjectsStats(stream); | 128 return ids_->PushHeapObjectsStats(stream); |
114 } | 129 } |
115 | 130 |
116 | 131 |
117 void HeapProfiler::StopHeapObjectsTracking() { | 132 void HeapProfiler::StopHeapObjectsTracking() { |
118 snapshots_->StopHeapObjectsTracking(); | 133 ids_->StopHeapObjectsTracking(); |
119 if (is_tracking_allocations_) { | 134 if (is_tracking_allocations()) { |
| 135 allocation_tracker_.Reset(NULL); |
120 heap()->EnableInlineAllocation(); | 136 heap()->EnableInlineAllocation(); |
121 is_tracking_allocations_ = false; | |
122 } | 137 } |
123 } | 138 } |
124 | 139 |
125 | 140 |
126 size_t HeapProfiler::GetMemorySizeUsedByProfiler() { | 141 size_t HeapProfiler::GetMemorySizeUsedByProfiler() { |
127 return snapshots_->GetUsedMemorySize(); | 142 size_t size = sizeof(*this); |
| 143 size += names_->GetUsedMemorySize(); |
| 144 size += ids_->GetUsedMemorySize(); |
| 145 size += GetMemoryUsedByList(snapshots_); |
| 146 for (int i = 0; i < snapshots_.length(); ++i) { |
| 147 size += snapshots_[i]->RawSnapshotSize(); |
| 148 } |
| 149 return size; |
128 } | 150 } |
129 | 151 |
130 | 152 |
131 int HeapProfiler::GetSnapshotsCount() { | 153 int HeapProfiler::GetSnapshotsCount() { |
132 return snapshots_->snapshots()->length(); | 154 return snapshots_.length(); |
133 } | 155 } |
134 | 156 |
135 | 157 |
136 HeapSnapshot* HeapProfiler::GetSnapshot(int index) { | 158 HeapSnapshot* HeapProfiler::GetSnapshot(int index) { |
137 return snapshots_->snapshots()->at(index); | 159 return snapshots_.at(index); |
138 } | 160 } |
139 | 161 |
140 | 162 |
141 SnapshotObjectId HeapProfiler::GetSnapshotObjectId(Handle<Object> obj) { | 163 SnapshotObjectId HeapProfiler::GetSnapshotObjectId(Handle<Object> obj) { |
142 if (!obj->IsHeapObject()) | 164 if (!obj->IsHeapObject()) |
143 return v8::HeapProfiler::kUnknownObjectId; | 165 return v8::HeapProfiler::kUnknownObjectId; |
144 return snapshots_->FindObjectId(HeapObject::cast(*obj)->address()); | 166 return ids_->FindEntry(HeapObject::cast(*obj)->address()); |
145 } | 167 } |
146 | 168 |
147 | 169 |
148 void HeapProfiler::ObjectMoveEvent(Address from, Address to, int size) { | 170 void HeapProfiler::ObjectMoveEvent(Address from, Address to, int size) { |
149 snapshots_->ObjectMoveEvent(from, to, size); | 171 ids_->MoveObject(from, to, size); |
150 } | 172 } |
151 | 173 |
152 | 174 |
153 void HeapProfiler::AllocationEvent(Address addr, int size) { | 175 void HeapProfiler::AllocationEvent(Address addr, int size) { |
154 snapshots_->AllocationEvent(addr, size); | 176 DisallowHeapAllocation no_allocation; |
| 177 if (!allocation_tracker_.is_empty()) { |
| 178 allocation_tracker_->AllocationEvent(addr, size); |
| 179 } |
155 } | 180 } |
156 | 181 |
157 | 182 |
158 void HeapProfiler::UpdateObjectSizeEvent(Address addr, int size) { | 183 void HeapProfiler::UpdateObjectSizeEvent(Address addr, int size) { |
159 snapshots_->UpdateObjectSizeEvent(addr, size); | 184 ids_->UpdateObjectSize(addr, size); |
160 } | 185 } |
161 | 186 |
162 | 187 |
163 void HeapProfiler::SetRetainedObjectInfo(UniqueId id, | 188 void HeapProfiler::SetRetainedObjectInfo(UniqueId id, |
164 RetainedObjectInfo* info) { | 189 RetainedObjectInfo* info) { |
165 // TODO(yurus, marja): Don't route this information through GlobalHandles. | 190 // TODO(yurus, marja): Don't route this information through GlobalHandles. |
166 heap()->isolate()->global_handles()->SetRetainedObjectInfo(id, info); | 191 heap()->isolate()->global_handles()->SetRetainedObjectInfo(id, info); |
167 } | 192 } |
168 | 193 |
169 | 194 |
| 195 Handle<HeapObject> HeapProfiler::FindHeapObjectById(SnapshotObjectId id) { |
| 196 heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask, |
| 197 "HeapProfiler::FindHeapObjectById"); |
| 198 DisallowHeapAllocation no_allocation; |
| 199 HeapObject* object = NULL; |
| 200 HeapIterator iterator(heap(), HeapIterator::kFilterUnreachable); |
| 201 // Make sure that the object with the given id is still reachable. |
| 202 for (HeapObject* obj = iterator.next(); |
| 203 obj != NULL; |
| 204 obj = iterator.next()) { |
| 205 if (ids_->FindEntry(obj->address()) == id) { |
| 206 ASSERT(object == NULL); |
| 207 object = obj; |
| 208 // Can't break -- kFilterUnreachable requires full heap traversal. |
| 209 } |
| 210 } |
| 211 return object != NULL ? Handle<HeapObject>(object) : Handle<HeapObject>(); |
| 212 } |
| 213 |
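[note] FindHeapObjectById first forces a full GC with kMakeHeapIterableMask, then does a linear heap scan; as the comment says, the loop never breaks early because a kFilterUnreachable iterator must traverse the entire heap to stay consistent. A hypothetical caller, assuming only what this file shows:

    // Hypothetical usage: "id" is a SnapshotObjectId previously returned
    // by GetSnapshotObjectId() or recorded in a snapshot.
    Handle<HeapObject> obj = profiler->FindHeapObjectById(id);
    if (obj.is_null()) {
      // The object has been collected since the id was recorded.
    }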
| 214 |
170 } } // namespace v8::internal | 215 } } // namespace v8::internal |