// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/heap_trace.h"

#include "include/dart_api.h"
#include "vm/dart_api_state.h"
#include "vm/debugger.h"
#include "vm/isolate.h"
#include "vm/object.h"
#include "vm/object_set.h"
#include "vm/object_store.h"
#include "vm/os.h"
#include "vm/stack_frame.h"
#include "vm/unicode.h"

namespace dart {

DEFINE_FLAG(bool, heap_trace, false, "Enable heap tracing.");

Dart_FileOpenCallback HeapTrace::open_callback_ = NULL;
Dart_FileWriteCallback HeapTrace::write_callback_ = NULL;
Dart_FileCloseCallback HeapTrace::close_callback_ = NULL;
bool HeapTrace::is_enabled_ = false;

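// Visits pointers while the root set is being traced. Every pointer that
// refers to a heap object is reported to the HeapTrace as a root, except for
// sentinel values (0x1 and 0xabababab), objects in the VM isolate heap, and
// roots already seen during this pass (tracked in object_set).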
class HeapTraceVisitor : public ObjectPointerVisitor {
 public:
  HeapTraceVisitor(Isolate* isolate,
                   HeapTrace* heap_trace,
                   ObjectSet* object_set)
      : ObjectPointerVisitor(isolate),
        heap_trace_(heap_trace),
        vm_isolate_(Dart::vm_isolate()),
        object_set_(object_set) {
  }

  void VisitPointers(RawObject** first, RawObject** last) {
    for (RawObject** current = first; current <= last; current++) {
      RawObject* raw_obj = *current;

      // We only care about objects in the heap.
      // Also, since this visitor will frequently encounter redundant roots,
      // we use an object_set to skip the duplicates.
      if (raw_obj->IsHeapObject() &&
          raw_obj != reinterpret_cast<RawObject*>(0x1) &&
          raw_obj != reinterpret_cast<RawObject*>(0xabababab) &&
          !object_set_->Contains(raw_obj) &&
          !vm_isolate_->heap()->Contains(RawObject::ToAddr(raw_obj))) {
        object_set_->Add(raw_obj);
        uword addr = RawObject::ToAddr(raw_obj);
        heap_trace_->TraceSingleRoot(addr);
      }
    }
  }

 private:
  HeapTrace* heap_trace_;
  Isolate* vm_isolate_;
  // TODO(cshapiro): replace with a sparse data structure.
  ObjectSet* object_set_;
  DISALLOW_COPY_AND_ASSIGN(HeapTraceVisitor);
};


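// Visits pointers held in scoped handles and reports every handle that points
// into the current isolate's heap as a Scoped Handle record.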
class HeapTraceScopedHandleVisitor : public ObjectPointerVisitor {
 public:
  HeapTraceScopedHandleVisitor(Isolate* isolate, HeapTrace* heap_trace)
      : ObjectPointerVisitor(isolate), heap_trace_(heap_trace) {
  }

  void VisitPointers(RawObject** first, RawObject** last) {
    for (RawObject** current = first; current <= last; current++) {
      RawObject* raw_obj = *current;
      Heap* heap = isolate()->heap();

      // We only care about objects in the heap.
      if (raw_obj->IsHeapObject() &&
          raw_obj != reinterpret_cast<RawObject*>(0x1) &&
          raw_obj != reinterpret_cast<RawObject*>(0xabababab) &&
          heap->Contains(RawObject::ToAddr(raw_obj))) {
        uword addr = RawObject::ToAddr(raw_obj);
        heap_trace_->TraceScopedHandle(addr);
      }
    }
  }

 private:
  HeapTrace* heap_trace_;
  DISALLOW_COPY_AND_ASSIGN(HeapTraceScopedHandleVisitor);
};


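// Visits the pointers held by the isolate's object store and reports each
// heap object found there as an Object Store record.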
class HeapTraceObjectStoreVisitor : public ObjectPointerVisitor {
 public:
  HeapTraceObjectStoreVisitor(Isolate* isolate, HeapTrace* heap_trace)
      : ObjectPointerVisitor(isolate), heap_trace_(heap_trace) {
  }

  void VisitPointers(RawObject** first, RawObject** last) {
    for (RawObject** current = first; current <= last; current++) {
      RawObject* raw_obj = *current;

      // We only care about objects in the heap.
      if (raw_obj->IsHeapObject() &&
          raw_obj != reinterpret_cast<RawObject*>(0x1) &&
          raw_obj != reinterpret_cast<RawObject*>(0xabababab)) {
        uword addr = RawObject::ToAddr(raw_obj);
        heap_trace_->TraceObjectStorePointer(addr);
      }
    }
  }

 private:
  HeapTrace* heap_trace_;
  DISALLOW_COPY_AND_ASSIGN(HeapTraceObjectStoreVisitor);
};


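// Visits every object already present in the heap when tracing starts and
// reports it as a Snapshot Allocation record.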
class HeapTraceInitialHeapVisitor : public ObjectVisitor {
 public:
  HeapTraceInitialHeapVisitor(Isolate* isolate, HeapTrace* heap_trace)
      : ObjectVisitor(isolate), heap_trace_(heap_trace) {}

  void VisitObject(RawObject* raw_obj) {
    heap_trace_->TraceSnapshotAlloc(raw_obj, raw_obj->Size());
  }

 private:
  HeapTrace* heap_trace_;
  DISALLOW_COPY_AND_ASSIGN(HeapTraceInitialHeapVisitor);
};


HeapTrace::HeapTrace() : isolate_initialized_(false), output_stream_(NULL) {
}


HeapTrace::~HeapTrace() {
  if (isolate_initialized_) {
    (*close_callback_)(output_stream_);
  }
}


void HeapTrace::InitOnce(Dart_FileOpenCallback open_callback,
                         Dart_FileWriteCallback write_callback,
                         Dart_FileCloseCallback close_callback) {
  ASSERT(open_callback != NULL);
  ASSERT(write_callback != NULL);
  ASSERT(close_callback != NULL);
  HeapTrace::open_callback_ = open_callback;
  HeapTrace::write_callback_ = write_callback;
  HeapTrace::close_callback_ = close_callback;
  HeapTrace::is_enabled_ = true;
}


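// Example (illustrative only, not part of the VM): an embedder could wire the
// tracing callbacks to stdio before creating an isolate. The callback shapes
// below are inferred from how this file uses them and from the typedefs in
// include/dart_api.h; the helper names are hypothetical and the parameter
// order of the write callback is an assumption.
//
//   static void* TraceFileOpen(const char* name) {
//     return fopen(name, "wb");
//   }
//
//   static void TraceFileWrite(const void* data, intptr_t length,
//                              void* stream) {
//     fwrite(data, 1, length, reinterpret_cast<FILE*>(stream));
//   }
//
//   static void TraceFileClose(void* stream) {
//     fclose(reinterpret_cast<FILE*>(stream));
//   }
//
//   ...
//   HeapTrace::InitOnce(&TraceFileOpen, &TraceFileWrite, &TraceFileClose);
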
ObjectSet* HeapTrace::CreateEmptyObjectSet() const {
  Isolate* isolate = Isolate::Current();
  uword start, end;
  isolate->heap()->StartEndAddress(&start, &end);

  Isolate* vm_isolate = Dart::vm_isolate();
  uword vm_start, vm_end;
  vm_isolate->heap()->StartEndAddress(&vm_start, &vm_end);

  ObjectSet* allocated_set = new ObjectSet(Utils::Minimum(start, vm_start),
                                           Utils::Maximum(end, vm_end));

  return allocated_set;
}


void HeapTrace::ResizeObjectSet() {
  Isolate* isolate = Isolate::Current();
  uword start, end;
  isolate->heap()->StartEndAddress(&start, &end);
  Isolate* vm_isolate = Dart::vm_isolate();
  uword vm_start, vm_end;
  vm_isolate->heap()->StartEndAddress(&vm_start, &vm_end);
  object_set_.Resize(Utils::Minimum(start, vm_start),
                     Utils::Maximum(end, vm_end));
}


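// Opens the per-isolate trace file ("<isolate name>.htrace"), records the
// object store and any objects allocated before tracing began, and then
// traces the current root set.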
void HeapTrace::Init(Isolate* isolate) {
  // Do not trace the VM isolate.
  if (isolate == Dart::vm_isolate()) {
    return;
  }
  ASSERT(isolate_initialized_ == false);
  const char* format = "%s.htrace";
  intptr_t len = OS::SNPrint(NULL, 0, format, isolate->name());
  char* filename = new char[len + 1];
  OS::SNPrint(filename, len + 1, format, isolate->name());
  output_stream_ = (*open_callback_)(filename);
  ASSERT(output_stream_ != NULL);
  delete[] filename;
  isolate_initialized_ = true;

  HeapTraceObjectStoreVisitor object_store_visitor(isolate, this);
  isolate->object_store()->VisitObjectPointers(&object_store_visitor);

  // Visit any objects that may have been allocated during startup,
  // before we started tracing.
  HeapTraceInitialHeapVisitor heap_visitor(isolate, this);
  isolate->heap()->IterateObjects(&heap_visitor);
  TraceRoots(isolate);
}


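// Trace record format: each record starts with a single tag byte (noted in
// the comment above the method that writes it), followed by a fixed number of
// uword fields, as documented below. The records are written through the
// *Record helper classes (AllocationRecord, RootRecord, and so on) declared
// in heap_trace.h.
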
// Allocation Record - 'A' (0x41)
//
// Format:
// 'A'
// uword - address of allocated object
// uword - size of allocated object
void HeapTrace::TraceAllocation(uword addr, intptr_t size) {
  if (isolate_initialized_) {
    {
      AllocationRecord rec(this);
      rec.Write(addr);
      rec.Write(size);
    }
    TraceRoots(Isolate::Current());
  }
}


// Snapshot Allocation Record - 'B' (0x42)
//
// Format:
// 'B'
// uword - address of allocated object
// uword - size of allocated object
void HeapTrace::TraceSnapshotAlloc(RawObject* obj, intptr_t size) {
  if (isolate_initialized_) {
    SnapshotAllocationRecord rec(this);
    rec.Write(RawObject::ToAddr(obj));
    rec.Write(static_cast<uword>(size));
  }
}


// Allocate Zone Handle Record - 'Z' (0x5a)
//
// Format:
// 'Z'
// uword - handle address (where the handle is pointing)
// uword - zone address (address of the zone the handle is in)
void HeapTrace::TraceAllocateZoneHandle(uword handle, uword zone_addr) {
  if (isolate_initialized_) {
    AllocZoneHandleRecord rec(this);
    rec.Write(handle);
    rec.Write(zone_addr);
  }
}


// Delete Zone Record - 'z' (0x7a)
//
// Format:
// 'z'
// uword - zone address (all the handles in that zone are now gone)
void HeapTrace::TraceDeleteZone(Zone* zone) {
  if (isolate_initialized_) {
    DeleteZoneRecord rec(this);
    rec.Write(reinterpret_cast<uword>(zone));
  }
}


// Delete Scoped Handles Record - 's' (0x73)
//
// Format:
// 's'
void HeapTrace::TraceDeleteScopedHandles() {
  if (isolate_initialized_) {
    DeleteScopedHandlesRecord rec(this);
  }
}


// Copy Record - 'C' (0x43)
//
// Format:
// 'C'
// uword - old address
// uword - new address
void HeapTrace::TraceCopy(uword from_addr, uword to_addr) {
  if (isolate_initialized_) {
    CopyRecord rec(this);
    rec.Write(from_addr);
    rec.Write(to_addr);
  }
}


// Object Store Record - 'O' (0x4f)
//
// Format:
// 'O'
// uword - address
void HeapTrace::TraceObjectStorePointer(uword addr) {
  if (isolate_initialized_) {
    ObjectStoreRecord rec(this);
    rec.Write(addr);
  }
}


// Promotion Record - 'P' (0x50)
//
// Format:
// 'P'
// uword - old address
// uword - new address
void HeapTrace::TracePromotion(uword old_addr, uword promoted_addr) {
  if (isolate_initialized_) {
    PromotionRecord rec(this);
    rec.Write(old_addr);
    rec.Write(promoted_addr);
  }
}


// Death Range Record - 'L' (0x4c)
//
// Format:
// 'L'
// uword - inclusive start address of the space being left
// uword - exclusive end address of the space being left
void HeapTrace::TraceDeathRange(uword inclusive_start, uword exclusive_end) {
  if (isolate_initialized_) {
    DeathRangeRecord rec(this);
    rec.Write(inclusive_start);
    rec.Write(exclusive_end);
  }
}


// Register Class Record - 'K' (0x4b)
//
// Format:
// 'K'
// uword - address (the address of the class)
void HeapTrace::TraceRegisterClass(const Class& cls) {
  if (isolate_initialized_) {
    RegisterClassRecord rec(this);
    rec.Write(RawObject::ToAddr(cls.raw()));
  }
}


// Scoped Handle Record - 'H' (0x48)
//
// Format:
// 'H'
// uword - address of the scoped handle (where it is pointing)
void HeapTrace::TraceScopedHandle(uword handle) {
  if (isolate_initialized_) {
    AllocScopedHandleRecord rec(this);
    rec.Write(handle);
  }
}


// Root Record - 'R' (0x52)
//
// Format:
// 'R'
// uword - address
void HeapTrace::TraceSingleRoot(uword root_addr) {
  if (isolate_initialized_) {
    RootRecord rec(this);
    rec.Write(root_addr);
  }
}


// Sweep Record - 'S' (0x53)
//
// Format:
// 'S'
// uword - address
void HeapTrace::TraceSweep(uword swept_addr) {
  if (isolate_initialized_) {
    SweepRecord rec(this);
    rec.Write(swept_addr);
  }
}


// Does not output any records directly, but triggers TraceSingleRoot and
// TraceScopedHandle calls for every root and scoped handle that is visited.
void HeapTrace::TraceRoots(Isolate* isolate) {
  if (isolate_initialized_) {
    ResizeObjectSet();
    HeapTraceVisitor visitor(isolate, this, &object_set_);
    HeapTraceScopedHandleVisitor handle_visitor(isolate, this);

    bool visit_prologue_weak_handles = true;
    bool validate_frames = false;

    // Visit objects in per-isolate stubs.
    StubCode::VisitObjectPointers(&visitor);

    // Visit objects reachable from the stack frames.
    StackFrameIterator frames_iterator(validate_frames);
    StackFrame* frame = frames_iterator.NextFrame();
    while (frame != NULL) {
      frame->VisitObjectPointers(&visitor);
      frame = frames_iterator.NextFrame();
    }

    if (isolate->api_state() != NULL) {
      isolate->api_state()->VisitObjectPointers(&visitor,
                                                visit_prologue_weak_handles);
    }

    // Visit the top context which is stored in the isolate.
    RawContext* top_context = isolate->top_context();
    visitor.VisitPointer(reinterpret_cast<RawObject**>(&top_context));

    // Visit the currently active IC data array.
    RawArray* ic_data_array = isolate->ic_data_array();
    visitor.VisitPointer(reinterpret_cast<RawObject**>(&ic_data_array));

    // Visit objects in the debugger.
    isolate->debugger()->VisitObjectPointers(&visitor);

    isolate->current_zone()->handles()->
        VisitUnvisitedScopedHandles(&handle_visitor);

    object_set_.FastClear();
  }
}


// Store Record - 'U' (0x55)
//
// Format:
// 'U'
// uword - originating object address (where a pointer is being stored)
// uword - byte offset into origin where the pointer is being stored
// uword - value of the pointer being stored
void HeapTrace::TraceStoreIntoObject(uword object,
                                     uword field_addr,
                                     uword value) {
  if (isolate_initialized_) {
    // We do not care about pointers into the VM isolate heap, so skip them.
    // There should not be any pointers out of the VM isolate, so we do not
    // check the originating object.
    if (Isolate::Current()->heap()->Contains(value)) {
      StoreRecord rec(this);
      uword slot_offset = field_addr - object;

      rec.Write(object);
      rec.Write(slot_offset);
      rec.Write(value);
    }
  }
}


// Mark Sweep Start Record - '{' (0x7b)
//
// Format:
// '{'
void HeapTrace::TraceMarkSweepStart() {
  if (isolate_initialized_) {
    MarkSweepStartRecord rec(this);
  }
}


// Mark Sweep Finish Record - '}' (0x7d)
//
// Format:
// '}'
void HeapTrace::TraceMarkSweepFinish() {
  if (isolate_initialized_) {
    MarkSweepFinishRecord rec(this);
  }
}

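// Example (illustrative only, not part of the VM): a minimal decoder for a
// trace produced by this file, assuming the records are laid out exactly as
// documented above -- one tag byte followed by a fixed number of raw uword
// fields -- and that the trace is read on a machine with the same word size
// and byte order as the one that wrote it. The names are hypothetical and
// memcpy comes from <cstring>.
//
//   static intptr_t WordCountForTag(unsigned char tag) {
//     switch (tag) {
//       case 's': case '{': case '}':
//         return 0;  // no payload
//       case 'z': case 'O': case 'K': case 'H': case 'R': case 'S':
//         return 1;  // a single address
//       case 'A': case 'B': case 'Z': case 'C': case 'P': case 'L':
//         return 2;  // an address/size or address/address pair
//       case 'U':
//         return 3;  // object address, slot offset, stored value
//       default:
//         return -1;  // unknown tag
//     }
//   }
//
//   // Walks an in-memory copy of a trace and hands each record to a
//   // caller-supplied handler, stopping at the first unknown or truncated
//   // record.
//   static void DecodeHeapTrace(const unsigned char* data, intptr_t size,
//                               void (*handle)(unsigned char tag,
//                                              const uword* fields,
//                                              intptr_t field_count)) {
//     intptr_t pos = 0;
//     while (pos < size) {
//       unsigned char tag = data[pos++];
//       intptr_t count = WordCountForTag(tag);
//       intptr_t payload = count * static_cast<intptr_t>(sizeof(uword));
//       if (count < 0 || pos + payload > size) {
//         return;
//       }
//       uword fields[3] = { 0, 0, 0 };
//       for (intptr_t i = 0; i < count; i++) {
//         memcpy(&fields[i], &data[pos], sizeof(uword));
//         pos += sizeof(uword);
//       }
//       handle(tag, fields, count);
//     }
//   }
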
}  // namespace dart