| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef RUNTIME_VM_PROFILER_H_ | 5 #ifndef RUNTIME_VM_PROFILER_H_ |
| 6 #define RUNTIME_VM_PROFILER_H_ | 6 #define RUNTIME_VM_PROFILER_H_ |
| 7 | 7 |
| 8 #include "vm/allocation.h" | 8 #include "vm/allocation.h" |
| 9 #include "vm/bitfield.h" | 9 #include "vm/bitfield.h" |
| 10 #include "vm/code_observers.h" | 10 #include "vm/code_observers.h" |
| 11 #include "vm/globals.h" | 11 #include "vm/globals.h" |
| 12 #include "vm/growable_array.h" | 12 #include "vm/growable_array.h" |
| 13 #include "vm/malloc_hooks.h" | 13 #include "vm/malloc_hooks.h" |
| 14 #include "vm/native_symbol.h" | 14 #include "vm/native_symbol.h" |
| 15 #include "vm/object.h" | 15 #include "vm/object.h" |
| 16 #include "vm/tags.h" | 16 #include "vm/tags.h" |
| 17 #include "vm/thread_interrupter.h" | 17 #include "vm/thread_interrupter.h" |
| 18 | 18 |
| 19 // Profiler sampling and stack walking support. | 19 // Profiler sampling and stack walking support. |
| 20 // NOTE: For service related code, see profile_service.h. | 20 // NOTE: For service related code, see profile_service.h. |
| 21 | 21 |
| 22 namespace dart { | 22 namespace dart { |
| 23 | 23 |
| 24 // Forward declarations. | 24 // Forward declarations. |
| 25 class ProcessedSample; | 25 class ProcessedSample; |
| 26 class ProcessedSampleBuffer; | 26 class ProcessedSampleBuffer; |
| 27 | 27 |
| 28 class Sample; | 28 class Sample; |
| | 29 class AllocationSampleBuffer; |
| 29 class SampleBuffer; | 30 class SampleBuffer; |
| 30 class ProfileTrieNode; | 31 class ProfileTrieNode; |
| 31 | 32 |
| 32 struct ProfilerCounters { | 33 struct ProfilerCounters { |
| 33 // Count of bail out reasons: | 34 // Count of bail out reasons: |
| 34 int64_t bail_out_unknown_task; | 35 int64_t bail_out_unknown_task; |
| 35 int64_t bail_out_jump_to_exception_handler; | 36 int64_t bail_out_jump_to_exception_handler; |
| 36 int64_t bail_out_check_isolate; | 37 int64_t bail_out_check_isolate; |
| 37 // Count of single frame sampling reasons: | 38 // Count of single frame sampling reasons: |
| 38 int64_t single_frame_sample_deoptimizing; | 39 int64_t single_frame_sample_deoptimizing; |
| 39 int64_t single_frame_sample_register_check; | 40 int64_t single_frame_sample_register_check; |
| 40 int64_t single_frame_sample_get_and_validate_stack_bounds; | 41 int64_t single_frame_sample_get_and_validate_stack_bounds; |
| 41 // Count of stack walkers used: | 42 // Count of stack walkers used: |
| 42 int64_t stack_walker_native; | 43 int64_t stack_walker_native; |
| 43 int64_t stack_walker_dart_exit; | 44 int64_t stack_walker_dart_exit; |
| 44 int64_t stack_walker_dart; | 45 int64_t stack_walker_dart; |
| 45 int64_t stack_walker_none; | 46 int64_t stack_walker_none; |
| 46 // Count of failed checks: | 47 // Count of failed checks: |
| 47 int64_t failure_native_allocation_sample; | 48 int64_t failure_native_allocation_sample; |
| 48 }; | 49 }; |
| 49 | 50 |
| 50 | 51 |
| 51 class Profiler : public AllStatic { | 52 class Profiler : public AllStatic { |
| 52 public: | 53 public: |
| 53 static void InitOnce(); | 54 static void InitOnce(); |
| | 55 static void InitAllocationSampleBuffer(); |
| 54 static void Shutdown(); | 56 static void Shutdown(); |
| 55 | 57 |
| 56 static void SetSampleDepth(intptr_t depth); | 58 static void SetSampleDepth(intptr_t depth); |
| 57 static void SetSamplePeriod(intptr_t period); | 59 static void SetSamplePeriod(intptr_t period); |
| 58 | 60 |
| 59 static SampleBuffer* sample_buffer() { return sample_buffer_; } | 61 static SampleBuffer* sample_buffer() { return sample_buffer_; } |
| | 62 static AllocationSampleBuffer* allocation_sample_buffer() { |
| | 63 return allocation_sample_buffer_; |
| | 64 } |
| 60 | 65 |
| 61 static void DumpStackTrace(void* context); | 66 static void DumpStackTrace(void* context); |
| 62 static void DumpStackTrace(bool for_crash = true); | 67 static void DumpStackTrace(bool for_crash = true); |
| 63 | 68 |
| 64 static void SampleAllocation(Thread* thread, intptr_t cid); | 69 static void SampleAllocation(Thread* thread, intptr_t cid); |
| 65 static Sample* SampleNativeAllocation(intptr_t skip_count, | 70 static Sample* SampleNativeAllocation(intptr_t skip_count, |
| 66 uword address, | 71 uword address, |
| 67 uintptr_t allocation_size); | 72 uintptr_t allocation_size); |
| 68 | 73 |
| 69 // SampleThread is called from inside the signal handler and hence it is very | 74 // SampleThread is called from inside the signal handler and hence it is very |
| (...skipping 12 matching lines...) |
| 82 } | 87 } |
| 83 | 88 |
| 84 private: | 89 private: |
| 85 static void DumpStackTrace(uword sp, uword fp, uword pc, bool for_crash); | 90 static void DumpStackTrace(uword sp, uword fp, uword pc, bool for_crash); |
| 86 | 91 |
| 87 // Does not walk the thread's stack. | 92 // Does not walk the thread's stack. |
| 88 static void SampleThreadSingleFrame(Thread* thread, uintptr_t pc); | 93 static void SampleThreadSingleFrame(Thread* thread, uintptr_t pc); |
| 89 static bool initialized_; | 94 static bool initialized_; |
| 90 | 95 |
| 91 static SampleBuffer* sample_buffer_; | 96 static SampleBuffer* sample_buffer_; |
| | 97 static AllocationSampleBuffer* allocation_sample_buffer_; |
| 92 | 98 |
| 93 static ProfilerCounters counters_; | 99 static ProfilerCounters counters_; |
| 94 | 100 |
| 95 friend class Thread; | 101 friend class Thread; |
| 96 }; | 102 }; |
| 97 | 103 |
| 98 | 104 |
| 99 class SampleVisitor : public ValueObject { | 105 class SampleVisitor : public ValueObject { |
| 100 public: | 106 public: |
| 101 explicit SampleVisitor(Dart_Port port) : port_(port), visited_(0) {} | 107 explicit SampleVisitor(Dart_Port port) : port_(port), visited_(0) {} |
| (...skipping 83 matching lines...) |
| 185 stack_buffer_[i] = 0; | 191 stack_buffer_[i] = 0; |
| 186 } | 192 } |
| 187 vm_tag_ = VMTag::kInvalidTagId; | 193 vm_tag_ = VMTag::kInvalidTagId; |
| 188 user_tag_ = UserTags::kDefaultUserTag; | 194 user_tag_ = UserTags::kDefaultUserTag; |
| 189 lr_ = 0; | 195 lr_ = 0; |
| 190 metadata_ = 0; | 196 metadata_ = 0; |
| 191 state_ = 0; | 197 state_ = 0; |
| 192 native_allocation_address_ = 0; | 198 native_allocation_address_ = 0; |
| 193 native_allocation_size_bytes_ = 0; | 199 native_allocation_size_bytes_ = 0; |
| 194 continuation_index_ = -1; | 200 continuation_index_ = -1; |
| | 201 next_free_ = NULL; |
| 195 uword* pcs = GetPCArray(); | 202 uword* pcs = GetPCArray(); |
| 196 for (intptr_t i = 0; i < pcs_length_; i++) { | 203 for (intptr_t i = 0; i < pcs_length_; i++) { |
| 197 pcs[i] = 0; | 204 pcs[i] = 0; |
| 198 } | 205 } |
| 199 set_head_sample(true); | 206 set_head_sample(true); |
| 200 } | 207 } |
| 201 | 208 |
| 202 // Timestamp sample was taken at. | 209 // Timestamp sample was taken at. |
| 203 int64_t timestamp() const { return timestamp_; } | 210 int64_t timestamp() const { return timestamp_; } |
| 204 | 211 |
| (...skipping 81 matching lines...) |
| 286 } | 293 } |
| 287 | 294 |
| 288 bool is_allocation_sample() const { | 295 bool is_allocation_sample() const { |
| 289 return ClassAllocationSampleBit::decode(state_); | 296 return ClassAllocationSampleBit::decode(state_); |
| 290 } | 297 } |
| 291 | 298 |
| 292 void set_is_allocation_sample(bool allocation_sample) { | 299 void set_is_allocation_sample(bool allocation_sample) { |
| 293 state_ = ClassAllocationSampleBit::update(allocation_sample, state_); | 300 state_ = ClassAllocationSampleBit::update(allocation_sample, state_); |
| 294 } | 301 } |
| 295 | 302 |
| 296 bool is_native_allocation_sample() const { | 303 uword native_allocation_address() const { return native_allocation_address_; } |
| 297 return NativeAllocationSampleBit::decode(state_); | |
| 298 } | |
| 299 | |
| 300 void set_is_native_allocation_sample(bool native_allocation_sample) { | |
| 301 state_ = | |
| 302 NativeAllocationSampleBit::update(native_allocation_sample, state_); | |
| 303 } | |
| 304 | 304 |
| 305 void set_native_allocation_address(uword address) { | 305 void set_native_allocation_address(uword address) { |
| 306 native_allocation_address_ = address; | 306 native_allocation_address_ = address; |
| 307 } | 307 } |
| 308 | 308 |
| 309 uword native_allocation_address() const { return native_allocation_address_; } | |
| 310 | |
| 311 uintptr_t native_allocation_size_bytes() const { | 309 uintptr_t native_allocation_size_bytes() const { |
| 312 return native_allocation_size_bytes_; | 310 return native_allocation_size_bytes_; |
| 313 } | 311 } |
| 314 | 312 |
| 315 void set_native_allocation_size_bytes(uintptr_t size) { | 313 void set_native_allocation_size_bytes(uintptr_t size) { |
| 316 native_allocation_size_bytes_ = size; | 314 native_allocation_size_bytes_ = size; |
| 317 } | 315 } |
| 318 | 316 |
| | 317 Sample* next_free() const { return next_free_; } |
| | 318 void set_next_free(Sample* next_free) { next_free_ = next_free; } |
| | 319 |
| 319 Thread::TaskKind thread_task() const { return ThreadTaskBit::decode(state_); } | 320 Thread::TaskKind thread_task() const { return ThreadTaskBit::decode(state_); } |
| 320 | 321 |
| 321 void set_thread_task(Thread::TaskKind task) { | 322 void set_thread_task(Thread::TaskKind task) { |
| 322 state_ = ThreadTaskBit::update(task, state_); | 323 state_ = ThreadTaskBit::update(task, state_); |
| 323 } | 324 } |
| 324 | 325 |
| 325 bool is_continuation_sample() const { | 326 bool is_continuation_sample() const { |
| 326 return ContinuationSampleBit::decode(state_); | 327 return ContinuationSampleBit::decode(state_); |
| 327 } | 328 } |
| 328 | 329 |
| (...skipping 43 matching lines...) |
| 372 enum StateBits { | 373 enum StateBits { |
| 373 kHeadSampleBit = 0, | 374 kHeadSampleBit = 0, |
| 374 kLeafFrameIsDartBit = 1, | 375 kLeafFrameIsDartBit = 1, |
| 375 kIgnoreBit = 2, | 376 kIgnoreBit = 2, |
| 376 kExitFrameBit = 3, | 377 kExitFrameBit = 3, |
| 377 kMissingFrameInsertedBit = 4, | 378 kMissingFrameInsertedBit = 4, |
| 378 kTruncatedTraceBit = 5, | 379 kTruncatedTraceBit = 5, |
| 379 kClassAllocationSampleBit = 6, | 380 kClassAllocationSampleBit = 6, |
| 380 kContinuationSampleBit = 7, | 381 kContinuationSampleBit = 7, |
| 381 kThreadTaskBit = 8, // 5 bits. | 382 kThreadTaskBit = 8, // 5 bits. |
| 382 kNativeAllocationSampleBit = 13, | 383 kNextFreeBit = 13, |
| 383 kNextFreeBit = 14, | |
| 384 }; | 384 }; |
| 385 class HeadSampleBit : public BitField<uword, bool, kHeadSampleBit, 1> {}; | 385 class HeadSampleBit : public BitField<uword, bool, kHeadSampleBit, 1> {}; |
| 386 class LeafFrameIsDart : public BitField<uword, bool, kLeafFrameIsDartBit, 1> { | 386 class LeafFrameIsDart : public BitField<uword, bool, kLeafFrameIsDartBit, 1> { |
| 387 }; | 387 }; |
| 388 class IgnoreBit : public BitField<uword, bool, kIgnoreBit, 1> {}; | 388 class IgnoreBit : public BitField<uword, bool, kIgnoreBit, 1> {}; |
| 389 class ExitFrameBit : public BitField<uword, bool, kExitFrameBit, 1> {}; | 389 class ExitFrameBit : public BitField<uword, bool, kExitFrameBit, 1> {}; |
| 390 class MissingFrameInsertedBit | 390 class MissingFrameInsertedBit |
| 391 : public BitField<uword, bool, kMissingFrameInsertedBit, 1> {}; | 391 : public BitField<uword, bool, kMissingFrameInsertedBit, 1> {}; |
| 392 class TruncatedTraceBit | 392 class TruncatedTraceBit |
| 393 : public BitField<uword, bool, kTruncatedTraceBit, 1> {}; | 393 : public BitField<uword, bool, kTruncatedTraceBit, 1> {}; |
| 394 class ClassAllocationSampleBit | 394 class ClassAllocationSampleBit |
| 395 : public BitField<uword, bool, kClassAllocationSampleBit, 1> {}; | 395 : public BitField<uword, bool, kClassAllocationSampleBit, 1> {}; |
| 396 class ContinuationSampleBit | 396 class ContinuationSampleBit |
| 397 : public BitField<uword, bool, kContinuationSampleBit, 1> {}; | 397 : public BitField<uword, bool, kContinuationSampleBit, 1> {}; |
| 398 class ThreadTaskBit | 398 class ThreadTaskBit |
| 399 : public BitField<uword, Thread::TaskKind, kThreadTaskBit, 5> {}; | 399 : public BitField<uword, Thread::TaskKind, kThreadTaskBit, 5> {}; |
| 400 class NativeAllocationSampleBit | |
| 401 : public BitField<uword, bool, kNativeAllocationSampleBit, 1> {}; | |
| 402 | 400 |
| 403 int64_t timestamp_; | 401 int64_t timestamp_; |
| 404 ThreadId tid_; | 402 ThreadId tid_; |
| 405 Dart_Port port_; | 403 Dart_Port port_; |
| 406 uword pc_marker_; | 404 uword pc_marker_; |
| 407 uword stack_buffer_[kStackBufferSizeInWords]; | 405 uword stack_buffer_[kStackBufferSizeInWords]; |
| 408 uword vm_tag_; | 406 uword vm_tag_; |
| 409 uword user_tag_; | 407 uword user_tag_; |
| 410 uword metadata_; | 408 uword metadata_; |
| 411 uword lr_; | 409 uword lr_; |
| 412 uword state_; | 410 uword state_; |
| 413 uword native_allocation_address_; | 411 uword native_allocation_address_; |
| 414 uintptr_t native_allocation_size_bytes_; | 412 uintptr_t native_allocation_size_bytes_; |
| 415 intptr_t continuation_index_; | 413 intptr_t continuation_index_; |
| | 414 Sample* next_free_; |
| 416 | 415 |
| 417 /* There are a variable number of words that follow, the words hold the | 416 /* There are a variable number of words that follow, the words hold the |
| 418 * sampled pc values. Access via GetPCArray() */ | 417 * sampled pc values. Access via GetPCArray() */ |
| 419 | |
| 420 DISALLOW_COPY_AND_ASSIGN(Sample); | 418 DISALLOW_COPY_AND_ASSIGN(Sample); |
| 421 }; | 419 }; |
| 422 | 420 |
| 423 | 421 |
| 424 class NativeAllocationSampleFilter : public SampleFilter { | 422 class NativeAllocationSampleFilter : public SampleFilter { |
| 425 public: | 423 public: |
| 426 NativeAllocationSampleFilter(int64_t time_origin_micros, | 424 NativeAllocationSampleFilter(int64_t time_origin_micros, |
| 427 int64_t time_extent_micros) | 425 int64_t time_extent_micros) |
| 428 : SampleFilter(ILLEGAL_PORT, | 426 : SampleFilter(ILLEGAL_PORT, |
| 429 SampleFilter::kNoTaskFilter, | 427 SampleFilter::kNoTaskFilter, |
| 430 time_origin_micros, | 428 time_origin_micros, |
| 431 time_extent_micros) {} | 429 time_extent_micros) {} |
| 432 | 430 |
| 433 bool FilterSample(Sample* sample) { | 431 bool FilterSample(Sample* sample) { |
| 434 if (!sample->is_native_allocation_sample()) { | |
| 435 return false; | |
| 436 } | |
| 437 // If the sample is an allocation sample, we need to check that the | 432 // If the sample is an allocation sample, we need to check that the |
| 438 // memory at the address hasn't been freed, and if the address associated | 433 // memory at the address hasn't been freed, and if the address associated |
| 439 // with the allocation has been freed and then reissued. | 434 // with the allocation has been freed and then reissued. |
| 440 void* alloc_address = | 435 void* alloc_address = |
| 441 reinterpret_cast<void*>(sample->native_allocation_address()); | 436 reinterpret_cast<void*>(sample->native_allocation_address()); |
| | 437 ASSERT(alloc_address != NULL); |
| 442 Sample* recorded_sample = MallocHooks::GetSample(alloc_address); | 438 Sample* recorded_sample = MallocHooks::GetSample(alloc_address); |
| 443 return (sample == recorded_sample); | 439 return (sample == recorded_sample); |
| 444 } | 440 } |
| 445 }; | 441 }; |
| 446 | 442 |
| 447 | 443 |
| 448 // A Code object descriptor. | 444 // A Code object descriptor. |
| 449 class CodeDescriptor : public ZoneAllocated { | 445 class CodeDescriptor : public ZoneAllocated { |
| 450 public: | 446 public: |
| 451 explicit CodeDescriptor(const Code& code); | 447 explicit CodeDescriptor(const Code& code); |
| (...skipping 62 matching lines...) |
| 514 DISALLOW_COPY_AND_ASSIGN(CodeLookupTable); | 510 DISALLOW_COPY_AND_ASSIGN(CodeLookupTable); |
| 515 }; | 511 }; |
| 516 | 512 |
| 517 | 513 |
| 518 // Ring buffer of Samples that is (usually) shared by many isolates. | 514 // Ring buffer of Samples that is (usually) shared by many isolates. |
| 519 class SampleBuffer { | 515 class SampleBuffer { |
| 520 public: | 516 public: |
| 521 static const intptr_t kDefaultBufferCapacity = 120000; // 2 minutes @ 1000hz. | 517 static const intptr_t kDefaultBufferCapacity = 120000; // 2 minutes @ 1000hz. |
| 522 | 518 |
| 523 explicit SampleBuffer(intptr_t capacity = kDefaultBufferCapacity); | 519 explicit SampleBuffer(intptr_t capacity = kDefaultBufferCapacity); |
| 524 ~SampleBuffer(); | 520 virtual ~SampleBuffer(); |
| 525 | 521 |
| 526 intptr_t capacity() const { return capacity_; } | 522 intptr_t capacity() const { return capacity_; } |
| 527 | 523 |
| 528 Sample* At(intptr_t idx) const; | 524 Sample* At(intptr_t idx) const; |
| 529 intptr_t ReserveSampleSlot(); | 525 intptr_t ReserveSampleSlot(); |
| 530 Sample* ReserveSample(); | 526 virtual Sample* ReserveSample(); |
| 531 Sample* ReserveSampleAndLink(Sample* previous); | 527 virtual Sample* ReserveSampleAndLink(Sample* previous); |
| 532 | 528 |
| 533 void VisitSamples(SampleVisitor* visitor) { | 529 void VisitSamples(SampleVisitor* visitor) { |
| 534 ASSERT(visitor != NULL); | 530 ASSERT(visitor != NULL); |
| 535 const intptr_t length = capacity(); | 531 const intptr_t length = capacity(); |
| 536 for (intptr_t i = 0; i < length; i++) { | 532 for (intptr_t i = 0; i < length; i++) { |
| 537 Sample* sample = At(i); | 533 Sample* sample = At(i); |
| 538 if (!sample->head_sample()) { | 534 if (!sample->head_sample()) { |
| 539 // An inner sample in a chain of samples. | 535 // An inner sample in a chain of samples. |
| 540 continue; | 536 continue; |
| 541 } | 537 } |
| (...skipping 13 matching lines...) |
| 555 // No frames. | 551 // No frames. |
| 556 continue; | 552 continue; |
| 557 } | 553 } |
| 558 visitor->IncrementVisited(); | 554 visitor->IncrementVisited(); |
| 559 visitor->VisitSample(sample); | 555 visitor->VisitSample(sample); |
| 560 } | 556 } |
| 561 } | 557 } |
| 562 | 558 |
| 563 ProcessedSampleBuffer* BuildProcessedSampleBuffer(SampleFilter* filter); | 559 ProcessedSampleBuffer* BuildProcessedSampleBuffer(SampleFilter* filter); |
| 564 | 560 |
| 565 private: | 561 protected: |
| 566 ProcessedSample* BuildProcessedSample(Sample* sample, | 562 ProcessedSample* BuildProcessedSample(Sample* sample, |
| 567 const CodeLookupTable& clt); | 563 const CodeLookupTable& clt); |
| 568 Sample* Next(Sample* sample); | 564 Sample* Next(Sample* sample); |
| 569 | 565 |
| 570 VirtualMemory* memory_; | 566 VirtualMemory* memory_; |
| 571 Sample* samples_; | 567 Sample* samples_; |
| 572 intptr_t capacity_; | 568 intptr_t capacity_; |
| 573 uintptr_t cursor_; | 569 uintptr_t cursor_; |
| 574 | 570 |
| | 571 private: |
| 575 DISALLOW_COPY_AND_ASSIGN(SampleBuffer); | 572 DISALLOW_COPY_AND_ASSIGN(SampleBuffer); |
| 576 }; | 573 }; |
| 577 | 574 |
| 578 | 575 |
| | 576 class AllocationSampleBuffer : public SampleBuffer { |
| | 577 public: |
| | 578 explicit AllocationSampleBuffer(intptr_t capacity = kDefaultBufferCapacity); |
| | 579 virtual ~AllocationSampleBuffer(); |
| | 580 |
| | 581 intptr_t ReserveSampleSlotLocked(); |
| | 582 virtual Sample* ReserveSample(); |
| | 583 virtual Sample* ReserveSampleAndLink(Sample* previous); |
| | 584 void FreeAllocationSample(Sample* sample); |
| | 585 |
| | 586 private: |
| | 587 Mutex* mutex_; |
| | 588 Sample* free_sample_list_; |
| | 589 |
| | 590 DISALLOW_COPY_AND_ASSIGN(AllocationSampleBuffer); |
| | 591 }; |
| | 592 |
| | 593 |
| 579 // A |ProcessedSample| is a combination of 1 (or more) |Sample|(s) that have | 594 // A |ProcessedSample| is a combination of 1 (or more) |Sample|(s) that have |
| 580 // been merged into a logical sample. The raw data may have been processed to | 595 // been merged into a logical sample. The raw data may have been processed to |
| 581 // improve the quality of the stack trace. | 596 // improve the quality of the stack trace. |
| 582 class ProcessedSample : public ZoneAllocated { | 597 class ProcessedSample : public ZoneAllocated { |
| 583 public: | 598 public: |
| 584 ProcessedSample(); | 599 ProcessedSample(); |
| 585 | 600 |
| 586 // Add |pc| to stack trace. | 601 // Add |pc| to stack trace. |
| 587 void Add(uword pc) { pcs_.Add(pc); } | 602 void Add(uword pc) { pcs_.Add(pc); } |
| 588 | 603 |
| (...skipping 103 matching lines...) |
| 692 private: | 707 private: |
| 693 ZoneGrowableArray<ProcessedSample*> samples_; | 708 ZoneGrowableArray<ProcessedSample*> samples_; |
| 694 CodeLookupTable* code_lookup_table_; | 709 CodeLookupTable* code_lookup_table_; |
| 695 | 710 |
| 696 DISALLOW_COPY_AND_ASSIGN(ProcessedSampleBuffer); | 711 DISALLOW_COPY_AND_ASSIGN(ProcessedSampleBuffer); |
| 697 }; | 712 }; |
| 698 | 713 |
| 699 } // namespace dart | 714 } // namespace dart |
| 700 | 715 |
| 701 #endif // RUNTIME_VM_PROFILER_H_ | 716 #endif // RUNTIME_VM_PROFILER_H_ |
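
The header above only declares the new `AllocationSampleBuffer` free-list API (`Sample::next_free_`, `ReserveSample`, `ReserveSampleAndLink`, `FreeAllocationSample`); its implementation lives in profiler.cc and is not part of this diff. As a rough illustration of the pattern, the standalone sketch below (all names hypothetical: `MiniSample`, `MiniAllocationBuffer`) shows one way a mutex-guarded free list over a fixed pool of sample slots could behave. It is an assumption-based sketch, not the VM's actual implementation.

```cpp
#include <cstddef>
#include <cstdint>
#include <mutex>
#include <vector>

// Hypothetical stand-in for Sample: only the fields relevant to the free list.
struct MiniSample {
  uintptr_t native_allocation_address = 0;
  MiniSample* next_free = nullptr;  // Mirrors Sample::next_free_ in the diff.
};

// Hypothetical stand-in for AllocationSampleBuffer: a fixed pool of slots
// threaded onto a mutex-guarded singly linked free list.
class MiniAllocationBuffer {
 public:
  explicit MiniAllocationBuffer(size_t capacity) : samples_(capacity) {
    // Initially every slot is on the free list.
    for (size_t i = 0; i + 1 < capacity; i++) {
      samples_[i].next_free = &samples_[i + 1];
    }
    free_list_ = capacity > 0 ? &samples_[0] : nullptr;
  }

  // Roughly analogous to ReserveSample(): pop a slot under the mutex,
  // returning nullptr when the pool is exhausted.
  MiniSample* Reserve() {
    std::lock_guard<std::mutex> lock(mutex_);
    MiniSample* sample = free_list_;
    if (sample != nullptr) {
      free_list_ = sample->next_free;
      sample->next_free = nullptr;
    }
    return sample;
  }

  // Roughly analogous to FreeAllocationSample(): push the slot back once the
  // tracked native allocation is released.
  void Free(MiniSample* sample) {
    std::lock_guard<std::mutex> lock(mutex_);
    sample->next_free = free_list_;
    free_list_ = sample;
  }

 private:
  std::mutex mutex_;
  std::vector<MiniSample> samples_;
  MiniSample* free_list_ = nullptr;
};
```

Presumably the point of the free list is that native-allocation samples are reclaimed explicitly when the tracked allocation is freed (cf. `MallocHooks::GetSample` in `NativeAllocationSampleFilter`), rather than being overwritten by the ring-buffer cursor as ordinary CPU samples are.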