OLD | NEW |
---|---|
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #ifndef RUNTIME_VM_PROFILER_H_ | 5 #ifndef RUNTIME_VM_PROFILER_H_ |
6 #define RUNTIME_VM_PROFILER_H_ | 6 #define RUNTIME_VM_PROFILER_H_ |
7 | 7 |
8 #include "vm/allocation.h" | 8 #include "vm/allocation.h" |
9 #include "vm/bitfield.h" | 9 #include "vm/bitfield.h" |
10 #include "vm/code_observers.h" | 10 #include "vm/code_observers.h" |
11 #include "vm/globals.h" | 11 #include "vm/globals.h" |
12 #include "vm/growable_array.h" | 12 #include "vm/growable_array.h" |
13 #include "vm/malloc_hooks.h" | 13 #include "vm/malloc_hooks.h" |
14 #include "vm/native_symbol.h" | 14 #include "vm/native_symbol.h" |
15 #include "vm/object.h" | 15 #include "vm/object.h" |
16 #include "vm/tags.h" | 16 #include "vm/tags.h" |
17 #include "vm/thread_interrupter.h" | 17 #include "vm/thread_interrupter.h" |
18 | 18 |
19 // Profiler sampling and stack walking support. | 19 // Profiler sampling and stack walking support. |
20 // NOTE: For service related code, see profile_service.h. | 20 // NOTE: For service related code, see profile_service.h. |
21 | 21 |
22 namespace dart { | 22 namespace dart { |
23 | 23 |
24 // Forward declarations. | 24 // Forward declarations. |
25 class ProcessedSample; | 25 class ProcessedSample; |
26 class ProcessedSampleBuffer; | 26 class ProcessedSampleBuffer; |
27 | 27 |
28 class Sample; | 28 class Sample; |
29 class AllocationSampleBuffer; | |
29 class SampleBuffer; | 30 class SampleBuffer; |
30 class ProfileTrieNode; | 31 class ProfileTrieNode; |
31 | 32 |
32 struct ProfilerCounters { | 33 struct ProfilerCounters { |
33 // Count of bail out reasons: | 34 // Count of bail out reasons: |
34 int64_t bail_out_unknown_task; | 35 int64_t bail_out_unknown_task; |
35 int64_t bail_out_jump_to_exception_handler; | 36 int64_t bail_out_jump_to_exception_handler; |
36 int64_t bail_out_check_isolate; | 37 int64_t bail_out_check_isolate; |
37 // Count of single frame sampling reasons: | 38 // Count of single frame sampling reasons: |
38 int64_t single_frame_sample_deoptimizing; | 39 int64_t single_frame_sample_deoptimizing; |
39 int64_t single_frame_sample_register_check; | 40 int64_t single_frame_sample_register_check; |
40 int64_t single_frame_sample_get_and_validate_stack_bounds; | 41 int64_t single_frame_sample_get_and_validate_stack_bounds; |
41 // Count of stack walkers used: | 42 // Count of stack walkers used: |
42 int64_t stack_walker_native; | 43 int64_t stack_walker_native; |
43 int64_t stack_walker_dart_exit; | 44 int64_t stack_walker_dart_exit; |
44 int64_t stack_walker_dart; | 45 int64_t stack_walker_dart; |
45 int64_t stack_walker_none; | 46 int64_t stack_walker_none; |
46 // Count of failed checks: | 47 // Count of failed checks: |
47 int64_t failure_native_allocation_sample; | 48 int64_t failure_native_allocation_sample; |
48 }; | 49 }; |
49 | 50 |
50 | 51 |
51 class Profiler : public AllStatic { | 52 class Profiler : public AllStatic { |
52 public: | 53 public: |
53 static void InitOnce(); | 54 static void InitOnce(); |
55 static void InitAllocationSampleBuffer(); | |
54 static void Shutdown(); | 56 static void Shutdown(); |
55 | 57 |
56 static void SetSampleDepth(intptr_t depth); | 58 static void SetSampleDepth(intptr_t depth); |
57 static void SetSamplePeriod(intptr_t period); | 59 static void SetSamplePeriod(intptr_t period); |
58 | 60 |
59 static SampleBuffer* sample_buffer() { return sample_buffer_; } | 61 static SampleBuffer* sample_buffer() { return sample_buffer_; } |
62 static AllocationSampleBuffer* allocation_sample_buffer() { | |
63 return allocation_sample_buffer_; | |
64 } | |
60 | 65 |
61 static void DumpStackTrace(void* context); | 66 static void DumpStackTrace(void* context); |
62 static void DumpStackTrace(bool for_crash = true); | 67 static void DumpStackTrace(bool for_crash = true); |
63 | 68 |
64 static void SampleAllocation(Thread* thread, intptr_t cid); | 69 static void SampleAllocation(Thread* thread, intptr_t cid); |
65 static Sample* SampleNativeAllocation(intptr_t skip_count, | 70 static Sample* SampleNativeAllocation(intptr_t skip_count, |
66 uword address, | 71 uword address, |
67 uintptr_t allocation_size); | 72 uintptr_t allocation_size); |
68 | 73 |
69 // SampleThread is called from inside the signal handler and hence it is very | 74 // SampleThread is called from inside the signal handler and hence it is very |
(...skipping 12 matching lines...)
82 } | 87 } |
83 | 88 |
84 private: | 89 private: |
85 static void DumpStackTrace(uword sp, uword fp, uword pc, bool for_crash); | 90 static void DumpStackTrace(uword sp, uword fp, uword pc, bool for_crash); |
86 | 91 |
87 // Does not walk the thread's stack. | 92 // Does not walk the thread's stack. |
88 static void SampleThreadSingleFrame(Thread* thread, uintptr_t pc); | 93 static void SampleThreadSingleFrame(Thread* thread, uintptr_t pc); |
89 static bool initialized_; | 94 static bool initialized_; |
90 | 95 |
91 static SampleBuffer* sample_buffer_; | 96 static SampleBuffer* sample_buffer_; |
97 static AllocationSampleBuffer* allocation_sample_buffer_; | |
92 | 98 |
93 static ProfilerCounters counters_; | 99 static ProfilerCounters counters_; |
94 | 100 |
95 friend class Thread; | 101 friend class Thread; |
96 }; | 102 }; |
97 | 103 |
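Note: a rough sketch of how the entry points above might be driven, based only on the declarations in this header. The helper names (ProfilerStartupSketch, OnNativeAllocationHook), the skip count, and the SetSampleDepth/SetSamplePeriod values are illustrative assumptions, not the actual VM wiring.

    // Illustrative only; assumes #include "vm/profiler.h" and the usual VM types.
    static void ProfilerStartupSketch() {
      Profiler::InitOnce();                    // Regular sample buffer.
      Profiler::InitAllocationSampleBuffer();  // New native-allocation buffer.
      Profiler::SetSampleDepth(64);            // Example value only.
      Profiler::SetSamplePeriod(1000);         // Example value only.
    }

    // Hypothetical malloc-hook callback recording a native allocation.
    static void OnNativeAllocationHook(uword address, uintptr_t size) {
      // Skip the hook frames themselves; the skip count here is a guess.
      Sample* sample = Profiler::SampleNativeAllocation(2, address, size);
      if (sample == NULL) {
        // Buffer exhausted (assumed behavior); the allocation goes unrecorded.
        return;
      }
      // Presumably MallocHooks then remembers |sample| keyed by |address| so
      // NativeAllocationSampleFilter can validate it later.
    }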
98 | 104 |
99 class SampleVisitor : public ValueObject { | 105 class SampleVisitor : public ValueObject { |
100 public: | 106 public: |
101 explicit SampleVisitor(Dart_Port port) : port_(port), visited_(0) {} | 107 explicit SampleVisitor(Dart_Port port) : port_(port), visited_(0) {} |
(...skipping 83 matching lines...)
185 stack_buffer_[i] = 0; | 191 stack_buffer_[i] = 0; |
186 } | 192 } |
187 vm_tag_ = VMTag::kInvalidTagId; | 193 vm_tag_ = VMTag::kInvalidTagId; |
188 user_tag_ = UserTags::kDefaultUserTag; | 194 user_tag_ = UserTags::kDefaultUserTag; |
189 lr_ = 0; | 195 lr_ = 0; |
190 metadata_ = 0; | 196 metadata_ = 0; |
191 state_ = 0; | 197 state_ = 0; |
192 native_allocation_address_ = 0; | 198 native_allocation_address_ = 0; |
193 native_allocation_size_bytes_ = 0; | 199 native_allocation_size_bytes_ = 0; |
194 continuation_index_ = -1; | 200 continuation_index_ = -1; |
201 next_free_ = NULL; | |
195 uword* pcs = GetPCArray(); | 202 uword* pcs = GetPCArray(); |
196 for (intptr_t i = 0; i < pcs_length_; i++) { | 203 for (intptr_t i = 0; i < pcs_length_; i++) { |
197 pcs[i] = 0; | 204 pcs[i] = 0; |
198 } | 205 } |
199 set_head_sample(true); | 206 set_head_sample(true); |
200 } | 207 } |
201 | 208 |
202 // Timestamp sample was taken at. | 209 // Timestamp sample was taken at. |
203 int64_t timestamp() const { return timestamp_; } | 210 int64_t timestamp() const { return timestamp_; } |
204 | 211 |
(...skipping 81 matching lines...)
286 } | 293 } |
287 | 294 |
288 bool is_allocation_sample() const { | 295 bool is_allocation_sample() const { |
289 return ClassAllocationSampleBit::decode(state_); | 296 return ClassAllocationSampleBit::decode(state_); |
290 } | 297 } |
291 | 298 |
292 void set_is_allocation_sample(bool allocation_sample) { | 299 void set_is_allocation_sample(bool allocation_sample) { |
293 state_ = ClassAllocationSampleBit::update(allocation_sample, state_); | 300 state_ = ClassAllocationSampleBit::update(allocation_sample, state_); |
294 } | 301 } |
295 | 302 |
296 bool is_native_allocation_sample() const { | 303 uword native_allocation_address() const { return native_allocation_address_; } |
297 return NativeAllocationSampleBit::decode(state_); | |
298 } | |
299 | |
300 void set_is_native_allocation_sample(bool native_allocation_sample) { | |
301 state_ = | |
302 NativeAllocationSampleBit::update(native_allocation_sample, state_); | |
303 } | |
304 | 304 |
305 void set_native_allocation_address(uword address) { | 305 void set_native_allocation_address(uword address) { |
306 native_allocation_address_ = address; | 306 native_allocation_address_ = address; |
307 } | 307 } |
308 | 308 |
309 uword native_allocation_address() const { return native_allocation_address_; } | |
310 | |
311 uintptr_t native_allocation_size_bytes() const { | 309 uintptr_t native_allocation_size_bytes() const { |
312 return native_allocation_size_bytes_; | 310 return native_allocation_size_bytes_; |
313 } | 311 } |
314 | 312 |
315 void set_native_allocation_size_bytes(uintptr_t size) { | 313 void set_native_allocation_size_bytes(uintptr_t size) { |
316 native_allocation_size_bytes_ = size; | 314 native_allocation_size_bytes_ = size; |
317 } | 315 } |
318 | 316 |
317 Sample* next_free() const { return next_free_; } | |
318 void set_next_free(Sample* next_free) { next_free_ = next_free; } | |
319 | |
319 Thread::TaskKind thread_task() const { return ThreadTaskBit::decode(state_); } | 320 Thread::TaskKind thread_task() const { return ThreadTaskBit::decode(state_); } |
320 | 321 |
321 void set_thread_task(Thread::TaskKind task) { | 322 void set_thread_task(Thread::TaskKind task) { |
322 state_ = ThreadTaskBit::update(task, state_); | 323 state_ = ThreadTaskBit::update(task, state_); |
323 } | 324 } |
324 | 325 |
325 bool is_continuation_sample() const { | 326 bool is_continuation_sample() const { |
326 return ContinuationSampleBit::decode(state_); | 327 return ContinuationSampleBit::decode(state_); |
327 } | 328 } |
328 | 329 |
(...skipping 21 matching lines...)
350 | 351 |
351 bool head_sample() const { return HeadSampleBit::decode(state_); } | 352 bool head_sample() const { return HeadSampleBit::decode(state_); } |
352 | 353 |
353 void set_metadata(intptr_t metadata) { metadata_ = metadata; } | 354 void set_metadata(intptr_t metadata) { metadata_ = metadata; } |
354 | 355 |
355 void SetAllocationCid(intptr_t cid) { | 356 void SetAllocationCid(intptr_t cid) { |
356 set_is_allocation_sample(true); | 357 set_is_allocation_sample(true); |
357 set_metadata(cid); | 358 set_metadata(cid); |
358 } | 359 } |
359 | 360 |
361 | |
zra (2017/06/30 18:49:17): rm extra newline
bkonyi (2017/07/05 18:20:52): Done.
360 static void InitOnce(); | 362 static void InitOnce(); |
361 | 363 |
362 static intptr_t instance_size() { return instance_size_; } | 364 static intptr_t instance_size() { return instance_size_; } |
363 | 365 |
364 uword* GetPCArray() const; | 366 uword* GetPCArray() const; |
365 | 367 |
366 static const int kStackBufferSizeInWords = 2; | 368 static const int kStackBufferSizeInWords = 2; |
367 uword* GetStackBuffer() { return &stack_buffer_[0]; } | 369 uword* GetStackBuffer() { return &stack_buffer_[0]; } |
368 | 370 |
369 private: | 371 protected: |
zra (2017/06/30 18:49:17): I'm having trouble finding why this needs to be protected.
bkonyi (2017/07/05 18:20:52): This is just something I forgot to change back since…
370 static intptr_t instance_size_; | 372 static intptr_t instance_size_; |
371 static intptr_t pcs_length_; | 373 static intptr_t pcs_length_; |
372 enum StateBits { | 374 enum StateBits { |
373 kHeadSampleBit = 0, | 375 kHeadSampleBit = 0, |
374 kLeafFrameIsDartBit = 1, | 376 kLeafFrameIsDartBit = 1, |
375 kIgnoreBit = 2, | 377 kIgnoreBit = 2, |
376 kExitFrameBit = 3, | 378 kExitFrameBit = 3, |
377 kMissingFrameInsertedBit = 4, | 379 kMissingFrameInsertedBit = 4, |
378 kTruncatedTraceBit = 5, | 380 kTruncatedTraceBit = 5, |
379 kClassAllocationSampleBit = 6, | 381 kClassAllocationSampleBit = 6, |
380 kContinuationSampleBit = 7, | 382 kContinuationSampleBit = 7, |
381 kThreadTaskBit = 8, // 5 bits. | 383 kThreadTaskBit = 8, // 5 bits. |
382 kNativeAllocationSampleBit = 13, | 384 kNextFreeBit = 13, |
383 kNextFreeBit = 14, | |
384 }; | 385 }; |
385 class HeadSampleBit : public BitField<uword, bool, kHeadSampleBit, 1> {}; | 386 class HeadSampleBit : public BitField<uword, bool, kHeadSampleBit, 1> {}; |
386 class LeafFrameIsDart : public BitField<uword, bool, kLeafFrameIsDartBit, 1> { | 387 class LeafFrameIsDart : public BitField<uword, bool, kLeafFrameIsDartBit, 1> { |
387 }; | 388 }; |
388 class IgnoreBit : public BitField<uword, bool, kIgnoreBit, 1> {}; | 389 class IgnoreBit : public BitField<uword, bool, kIgnoreBit, 1> {}; |
389 class ExitFrameBit : public BitField<uword, bool, kExitFrameBit, 1> {}; | 390 class ExitFrameBit : public BitField<uword, bool, kExitFrameBit, 1> {}; |
390 class MissingFrameInsertedBit | 391 class MissingFrameInsertedBit |
391 : public BitField<uword, bool, kMissingFrameInsertedBit, 1> {}; | 392 : public BitField<uword, bool, kMissingFrameInsertedBit, 1> {}; |
392 class TruncatedTraceBit | 393 class TruncatedTraceBit |
393 : public BitField<uword, bool, kTruncatedTraceBit, 1> {}; | 394 : public BitField<uword, bool, kTruncatedTraceBit, 1> {}; |
394 class ClassAllocationSampleBit | 395 class ClassAllocationSampleBit |
395 : public BitField<uword, bool, kClassAllocationSampleBit, 1> {}; | 396 : public BitField<uword, bool, kClassAllocationSampleBit, 1> {}; |
396 class ContinuationSampleBit | 397 class ContinuationSampleBit |
397 : public BitField<uword, bool, kContinuationSampleBit, 1> {}; | 398 : public BitField<uword, bool, kContinuationSampleBit, 1> {}; |
398 class ThreadTaskBit | 399 class ThreadTaskBit |
399 : public BitField<uword, Thread::TaskKind, kThreadTaskBit, 5> {}; | 400 : public BitField<uword, Thread::TaskKind, kThreadTaskBit, 5> {}; |
400 class NativeAllocationSampleBit | |
401 : public BitField<uword, bool, kNativeAllocationSampleBit, 1> {}; | |
402 | 401 |
403 int64_t timestamp_; | 402 int64_t timestamp_; |
404 ThreadId tid_; | 403 ThreadId tid_; |
405 Dart_Port port_; | 404 Dart_Port port_; |
406 uword pc_marker_; | 405 uword pc_marker_; |
407 uword stack_buffer_[kStackBufferSizeInWords]; | 406 uword stack_buffer_[kStackBufferSizeInWords]; |
408 uword vm_tag_; | 407 uword vm_tag_; |
409 uword user_tag_; | 408 uword user_tag_; |
410 uword metadata_; | 409 uword metadata_; |
411 uword lr_; | 410 uword lr_; |
412 uword state_; | 411 uword state_; |
413 uword native_allocation_address_; | 412 uword native_allocation_address_; |
414 uintptr_t native_allocation_size_bytes_; | 413 uintptr_t native_allocation_size_bytes_; |
415 intptr_t continuation_index_; | 414 intptr_t continuation_index_; |
415 Sample* next_free_; | |
416 | 416 |
417 /* There are a variable number of words that follow, the words hold the | 417 /* There are a variable number of words that follow, the words hold the |
418 * sampled pc values. Access via GetPCArray() */ | 418 * sampled pc values. Access via GetPCArray() */ |
419 | 419 private: |
420 DISALLOW_COPY_AND_ASSIGN(Sample); | 420 DISALLOW_COPY_AND_ASSIGN(Sample); |
421 }; | 421 }; |
422 | 422 |
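Note: all of the per-sample flags share the single state_ word. A minimal sketch of how the BitField helpers pack and unpack it, written as if from inside Sample (the helpers are protected); the chosen values and the Thread::kMutatorTask enumerator are assumptions for illustration.

    // Sketch: packing several fields into one uword with the BitField helpers.
    uword state = 0;
    state = HeadSampleBit::update(true, state);                  // bit 0
    state = ClassAllocationSampleBit::update(true, state);       // bit 6
    state = ThreadTaskBit::update(Thread::kMutatorTask, state);  // bits 8..12

    // Decoding reads the same bit ranges back out.
    ASSERT(HeadSampleBit::decode(state));
    ASSERT(ClassAllocationSampleBit::decode(state));
    ASSERT(ThreadTaskBit::decode(state) == Thread::kMutatorTask);

This mirrors how the accessors above (set_is_allocation_sample(), thread_task()) read and write state_ without disturbing neighboring bits.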
423 | 423 |
424 class NativeAllocationSampleFilter : public SampleFilter { | 424 class NativeAllocationSampleFilter : public SampleFilter { |
425 public: | 425 public: |
426 NativeAllocationSampleFilter(int64_t time_origin_micros, | 426 NativeAllocationSampleFilter(int64_t time_origin_micros, |
427 int64_t time_extent_micros) | 427 int64_t time_extent_micros) |
428 : SampleFilter(ILLEGAL_PORT, | 428 : SampleFilter(ILLEGAL_PORT, |
429 SampleFilter::kNoTaskFilter, | 429 SampleFilter::kNoTaskFilter, |
430 time_origin_micros, | 430 time_origin_micros, |
431 time_extent_micros) {} | 431 time_extent_micros) {} |
432 | 432 |
433 bool FilterSample(Sample* sample) { | 433 bool FilterSample(Sample* sample) { |
434 if (!sample->is_native_allocation_sample()) { | |
435 return false; | |
436 } | |
437 // If the sample is an allocation sample, we need to check that the | 434 // If the sample is an allocation sample, we need to check that the |
zra (2017/06/30 18:49:17): Maybe assert that the sample is an allocation sample.
bkonyi (2017/07/05 18:20:52): I'll assert that the sample has a valid allocation…
438 // memory at the address hasn't been freed, and if the address associated | 435 // memory at the address hasn't been freed, and if the address associated |
439 // with the allocation has been freed and then reissued. | 436 // with the allocation has been freed and then reissued. |
440 void* alloc_address = | 437 void* alloc_address = |
441 reinterpret_cast<void*>(sample->native_allocation_address()); | 438 reinterpret_cast<void*>(sample->native_allocation_address()); |
442 Sample* recorded_sample = MallocHooks::GetSample(alloc_address); | 439 Sample* recorded_sample = MallocHooks::GetSample(alloc_address); |
443 return (sample == recorded_sample); | 440 return (sample == recorded_sample); |
444 } | 441 } |
445 }; | 442 }; |
446 | 443 |
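Note: the review thread above discusses asserting instead of silently filtering. A sketch of what that variant of FilterSample might look like; whether the landed code matches this is not confirmed here.

    bool FilterSample(Sample* sample) {
      // Assumed variant: every sample reaching this filter already carries a
      // native allocation address, so assert rather than returning false.
      ASSERT(sample->native_allocation_address() != 0);
      void* alloc_address =
          reinterpret_cast<void*>(sample->native_allocation_address());
      // Keep the sample only if it is still the one MallocHooks has recorded
      // for this address, i.e. the memory was not freed (or freed and reissued).
      return sample == MallocHooks::GetSample(alloc_address);
    }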
447 | 444 |
(...skipping 66 matching lines...)
514 DISALLOW_COPY_AND_ASSIGN(CodeLookupTable); | 511 DISALLOW_COPY_AND_ASSIGN(CodeLookupTable); |
515 }; | 512 }; |
516 | 513 |
517 | 514 |
518 // Ring buffer of Samples that is (usually) shared by many isolates. | 515 // Ring buffer of Samples that is (usually) shared by many isolates. |
519 class SampleBuffer { | 516 class SampleBuffer { |
520 public: | 517 public: |
521 static const intptr_t kDefaultBufferCapacity = 120000; // 2 minutes @ 1000hz. | 518 static const intptr_t kDefaultBufferCapacity = 120000; // 2 minutes @ 1000hz. |
522 | 519 |
523 explicit SampleBuffer(intptr_t capacity = kDefaultBufferCapacity); | 520 explicit SampleBuffer(intptr_t capacity = kDefaultBufferCapacity); |
524 ~SampleBuffer(); | 521 virtual ~SampleBuffer(); |
525 | 522 |
526 intptr_t capacity() const { return capacity_; } | 523 intptr_t capacity() const { return capacity_; } |
527 | 524 |
528 Sample* At(intptr_t idx) const; | 525 Sample* At(intptr_t idx) const; |
529 intptr_t ReserveSampleSlot(); | 526 virtual intptr_t ReserveSampleSlot(); |
530 Sample* ReserveSample(); | 527 virtual Sample* ReserveSample(); |
531 Sample* ReserveSampleAndLink(Sample* previous); | 528 virtual Sample* ReserveSampleAndLink(Sample* previous); |
532 | 529 |
533 void VisitSamples(SampleVisitor* visitor) { | 530 void VisitSamples(SampleVisitor* visitor) { |
534 ASSERT(visitor != NULL); | 531 ASSERT(visitor != NULL); |
535 const intptr_t length = capacity(); | 532 const intptr_t length = capacity(); |
536 for (intptr_t i = 0; i < length; i++) { | 533 for (intptr_t i = 0; i < length; i++) { |
537 Sample* sample = At(i); | 534 Sample* sample = At(i); |
538 if (!sample->head_sample()) { | 535 if (!sample->head_sample()) { |
539 // An inner sample in a chain of samples. | 536 // An inner sample in a chain of samples. |
540 continue; | 537 continue; |
541 } | 538 } |
(...skipping 13 matching lines...)
555 // No frames. | 552 // No frames. |
556 continue; | 553 continue; |
557 } | 554 } |
558 visitor->IncrementVisited(); | 555 visitor->IncrementVisited(); |
559 visitor->VisitSample(sample); | 556 visitor->VisitSample(sample); |
560 } | 557 } |
561 } | 558 } |
562 | 559 |
563 ProcessedSampleBuffer* BuildProcessedSampleBuffer(SampleFilter* filter); | 560 ProcessedSampleBuffer* BuildProcessedSampleBuffer(SampleFilter* filter); |
564 | 561 |
565 private: | 562 protected: |
566 ProcessedSample* BuildProcessedSample(Sample* sample, | 563 ProcessedSample* BuildProcessedSample(Sample* sample, |
567 const CodeLookupTable& clt); | 564 const CodeLookupTable& clt); |
568 Sample* Next(Sample* sample); | 565 Sample* Next(Sample* sample); |
569 | 566 |
570 VirtualMemory* memory_; | 567 VirtualMemory* memory_; |
571 Sample* samples_; | 568 Sample* samples_; |
572 intptr_t capacity_; | 569 intptr_t capacity_; |
573 uintptr_t cursor_; | 570 uintptr_t cursor_; |
574 | 571 |
572 private: | |
575 DISALLOW_COPY_AND_ASSIGN(SampleBuffer); | 573 DISALLOW_COPY_AND_ASSIGN(SampleBuffer); |
576 }; | 574 }; |
577 | 575 |
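Note: a hypothetical SampleVisitor subclass showing how VisitSamples() is typically consumed. It assumes VisitSample(Sample*) is a virtual hook declared in the elided part of SampleVisitor; the class name and counting logic are illustrative only.

    // Counts allocation samples recorded for one isolate's port.
    class AllocationCountVisitor : public SampleVisitor {
     public:
      explicit AllocationCountVisitor(Dart_Port port)
          : SampleVisitor(port), allocation_count_(0) {}

      virtual void VisitSample(Sample* sample) {
        if (sample->is_allocation_sample()) {
          allocation_count_++;
        }
      }

      intptr_t allocation_count() const { return allocation_count_; }

     private:
      intptr_t allocation_count_;
    };

    // Usage sketch:
    //   AllocationCountVisitor visitor(port);
    //   Profiler::sample_buffer()->VisitSamples(&visitor);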
578 | 576 |
577 class AllocationSampleBuffer : public SampleBuffer { | |
578 public: | |
579 explicit AllocationSampleBuffer(intptr_t capacity = kDefaultBufferCapacity); | |
580 virtual ~AllocationSampleBuffer(); | |
581 | |
582 virtual intptr_t ReserveSampleSlot(); | |
583 virtual Sample* ReserveSample(); | |
584 virtual Sample* ReserveSampleAndLink(Sample* previous); | |
585 void FreeAllocationSample(Sample* sample); | |
586 | |
587 private: | |
588 Mutex* mutex_; | |
589 Sample* free_sample_list_; | |
590 | |
591 DISALLOW_COPY_AND_ASSIGN(AllocationSampleBuffer); | |
592 }; | |
593 | |
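Note: a guess at how the new next_free_ link, mutex_, and free_sample_list_ members might cooperate. The real implementation lives in profiler.cc and is not shown in this diff; the sketch below only illustrates the free-list idea under that assumption.

    // Returning a sample to the free list (sketch).
    void AllocationSampleBuffer::FreeAllocationSample(Sample* sample) {
      MutexLocker ml(mutex_);
      sample->Clear();                           // Also resets next_free_.
      sample->set_next_free(free_sample_list_);  // Push onto the free list.
      free_sample_list_ = sample;
    }

    // Reserving a sample, preferring previously freed slots (sketch).
    Sample* AllocationSampleBuffer::ReserveSample() {
      MutexLocker ml(mutex_);
      if (free_sample_list_ != NULL) {
        Sample* sample = free_sample_list_;
        free_sample_list_ = sample->next_free();
        sample->set_next_free(NULL);
        return sample;
      }
      // Otherwise fall back to the ring-buffer cursor, as in the base class.
      return SampleBuffer::ReserveSample();
    }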
594 | |
579 // A |ProcessedSample| is a combination of 1 (or more) |Sample|(s) that have | 595 // A |ProcessedSample| is a combination of 1 (or more) |Sample|(s) that have |
580 // been merged into a logical sample. The raw data may have been processed to | 596 // been merged into a logical sample. The raw data may have been processed to |
581 // improve the quality of the stack trace. | 597 // improve the quality of the stack trace. |
582 class ProcessedSample : public ZoneAllocated { | 598 class ProcessedSample : public ZoneAllocated { |
583 public: | 599 public: |
584 ProcessedSample(); | 600 ProcessedSample(); |
585 | 601 |
586 // Add |pc| to stack trace. | 602 // Add |pc| to stack trace. |
587 void Add(uword pc) { pcs_.Add(pc); } | 603 void Add(uword pc) { pcs_.Add(pc); } |
588 | 604 |
(...skipping 103 matching lines...)
692 private: | 708 private: |
693 ZoneGrowableArray<ProcessedSample*> samples_; | 709 ZoneGrowableArray<ProcessedSample*> samples_; |
694 CodeLookupTable* code_lookup_table_; | 710 CodeLookupTable* code_lookup_table_; |
695 | 711 |
696 DISALLOW_COPY_AND_ASSIGN(ProcessedSampleBuffer); | 712 DISALLOW_COPY_AND_ASSIGN(ProcessedSampleBuffer); |
697 }; | 713 }; |
698 | 714 |
699 } // namespace dart | 715 } // namespace dart |
700 | 716 |
701 #endif // RUNTIME_VM_PROFILER_H_ | 717 #endif // RUNTIME_VM_PROFILER_H_ |