OLD | NEW |
---|---|
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "platform/address_sanitizer.h" | 5 #include "platform/address_sanitizer.h" |
6 #include "platform/memory_sanitizer.h" | 6 #include "platform/memory_sanitizer.h" |
7 #include "platform/utils.h" | 7 #include "platform/utils.h" |
8 | 8 |
9 #include "vm/allocation.h" | 9 #include "vm/allocation.h" |
10 #include "vm/atomic.h" | 10 #include "vm/atomic.h" |
(...skipping 20 matching lines...) | |
31 DEFINE_FLAG(bool, trace_profiled_isolates, false, "Trace profiled isolates."); | 31 DEFINE_FLAG(bool, trace_profiled_isolates, false, "Trace profiled isolates."); |
32 | 32 |
33 #if defined(TARGET_OS_ANDROID) || defined(TARGET_ARCH_ARM64) || \ | 33 #if defined(TARGET_OS_ANDROID) || defined(TARGET_ARCH_ARM64) || \ |
34 defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_MIPS) | 34 defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_MIPS) |
35 DEFINE_FLAG(int, profile_period, 10000, | 35 DEFINE_FLAG(int, profile_period, 10000, |
36 "Time between profiler samples in microseconds. Minimum 50."); | 36 "Time between profiler samples in microseconds. Minimum 50."); |
37 #else | 37 #else |
38 DEFINE_FLAG(int, profile_period, 1000, | 38 DEFINE_FLAG(int, profile_period, 1000, |
39 "Time between profiler samples in microseconds. Minimum 50."); | 39 "Time between profiler samples in microseconds. Minimum 50."); |
40 #endif | 40 #endif |
41 DEFINE_FLAG(int, profile_depth, 8, | 41 DEFINE_FLAG(int, max_profile_depth, 32, |
42 "Maximum number stack frames walked. Minimum 1. Maximum 255."); | 42 "Maximum number stack frames walked. Minimum 1. Maximum 255."); |
43 #if defined(USING_SIMULATOR) | 43 #if defined(USING_SIMULATOR) |
44 DEFINE_FLAG(bool, profile_vm, true, | 44 DEFINE_FLAG(bool, profile_vm, true, |
45 "Always collect native stack traces."); | 45 "Always collect native stack traces."); |
46 #else | 46 #else |
47 DEFINE_FLAG(bool, profile_vm, false, | 47 DEFINE_FLAG(bool, profile_vm, false, |
48 "Always collect native stack traces."); | 48 "Always collect native stack traces."); |
49 #endif | 49 #endif |
50 | 50 |
51 bool Profiler::initialized_ = false; | 51 bool Profiler::initialized_ = false; |
52 SampleBuffer* Profiler::sample_buffer_ = NULL; | 52 SampleBuffer* Profiler::sample_buffer_ = NULL; |
53 | 53 static const intptr_t kSampleSize = 8; |
54 static intptr_t NumberOfFramesToCollect() { | |
55 if (FLAG_profile_depth <= 0) { | |
56 return 0; | |
57 } | |
58 // Subtract to reserve space for the possible missing frame. | |
59 return FLAG_profile_depth - 1; | |
60 } | |
61 | 54 |
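The new layout stores a fixed kSampleSize (8) program counters per Sample and chains further Samples for deeper stacks, so the default FLAG_max_profile_depth of 32 spans a chain of four Samples and the maximum of 255 spans thirty-two. A minimal sketch of that arithmetic, assuming the constants above (the helper name is illustrative, not part of the CL):

#include <cstdint>

// How many chained Samples a stack of 'depth' frames needs when each Sample
// holds 'frames_per_sample' PCs (integer division, rounded up).
intptr_t SamplesNeededForDepth(intptr_t depth, intptr_t frames_per_sample) {
  return (depth + frames_per_sample - 1) / frames_per_sample;
}
// e.g. SamplesNeededForDepth(32, 8) == 4 and SamplesNeededForDepth(255, 8) == 32.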
62 void Profiler::InitOnce() { | 55 void Profiler::InitOnce() { |
63 // Place some sane restrictions on user controlled flags. | 56 // Place some sane restrictions on user controlled flags. |
64 SetSamplePeriod(FLAG_profile_period); | 57 SetSamplePeriod(FLAG_profile_period); |
65 SetSampleDepth(FLAG_profile_depth); | 58 SetSampleDepth(FLAG_max_profile_depth); |
66 Sample::InitOnce(); | 59 Sample::InitOnce(); |
67 if (!FLAG_profile) { | 60 if (!FLAG_profile) { |
68 return; | 61 return; |
69 } | 62 } |
70 ASSERT(!initialized_); | 63 ASSERT(!initialized_); |
71 sample_buffer_ = new SampleBuffer(); | 64 sample_buffer_ = new SampleBuffer(); |
72 NativeSymbolResolver::InitOnce(); | 65 NativeSymbolResolver::InitOnce(); |
73 ThreadInterrupter::SetInterruptPeriod(FLAG_profile_period); | 66 ThreadInterrupter::SetInterruptPeriod(FLAG_profile_period); |
74 ThreadInterrupter::Startup(); | 67 ThreadInterrupter::Startup(); |
75 initialized_ = true; | 68 initialized_ = true; |
76 } | 69 } |
77 | 70 |
78 | 71 |
79 void Profiler::Shutdown() { | 72 void Profiler::Shutdown() { |
80 if (!FLAG_profile) { | 73 if (!FLAG_profile) { |
81 return; | 74 return; |
82 } | 75 } |
83 ASSERT(initialized_); | 76 ASSERT(initialized_); |
84 ThreadInterrupter::Shutdown(); | 77 ThreadInterrupter::Shutdown(); |
85 NativeSymbolResolver::ShutdownOnce(); | 78 NativeSymbolResolver::ShutdownOnce(); |
86 } | 79 } |
87 | 80 |
88 | 81 |
89 void Profiler::SetSampleDepth(intptr_t depth) { | 82 void Profiler::SetSampleDepth(intptr_t depth) { |
90 const int kMinimumDepth = 2; | 83 const int kMinimumDepth = 2; |
91 const int kMaximumDepth = 255; | 84 const int kMaximumDepth = 255; |
92 if (depth < kMinimumDepth) { | 85 if (depth < kMinimumDepth) { |
93 FLAG_profile_depth = kMinimumDepth; | 86 FLAG_max_profile_depth = kMinimumDepth; |
94 } else if (depth > kMaximumDepth) { | 87 } else if (depth > kMaximumDepth) { |
95 FLAG_profile_depth = kMaximumDepth; | 88 FLAG_max_profile_depth = kMaximumDepth; |
96 } else { | 89 } else { |
97 FLAG_profile_depth = depth; | 90 FLAG_max_profile_depth = depth; |
98 } | 91 } |
99 } | 92 } |
100 | 93 |
101 | 94 |
102 void Profiler::SetSamplePeriod(intptr_t period) { | 95 void Profiler::SetSamplePeriod(intptr_t period) { |
103 const int kMinimumProfilePeriod = 50; | 96 const int kMinimumProfilePeriod = 50; |
104 if (period < kMinimumProfilePeriod) { | 97 if (period < kMinimumProfilePeriod) { |
105 FLAG_profile_period = kMinimumProfilePeriod; | 98 FLAG_profile_period = kMinimumProfilePeriod; |
106 } else { | 99 } else { |
107 FLAG_profile_period = period; | 100 FLAG_profile_period = period; |
(...skipping 111 matching lines...) | |
219 ThreadInterrupter::WakeUp(); | 212 ThreadInterrupter::WakeUp(); |
220 } | 213 } |
221 } | 214 } |
222 | 215 |
223 | 216 |
224 intptr_t Sample::pcs_length_ = 0; | 217 intptr_t Sample::pcs_length_ = 0; |
225 intptr_t Sample::instance_size_ = 0; | 218 intptr_t Sample::instance_size_ = 0; |
226 | 219 |
227 | 220 |
228 void Sample::InitOnce() { | 221 void Sample::InitOnce() { |
229 ASSERT(FLAG_profile_depth >= 2); | 222 pcs_length_ = kSampleSize; |
230 pcs_length_ = FLAG_profile_depth; | |
231 instance_size_ = | 223 instance_size_ = |
232 sizeof(Sample) + (sizeof(uword) * pcs_length_); // NOLINT. | 224 sizeof(Sample) + (sizeof(uword) * pcs_length_); // NOLINT. |
233 } | 225 } |
234 | 226 |
235 | 227 |
236 uword* Sample::GetPCArray() const { | 228 uword* Sample::GetPCArray() const { |
237 return reinterpret_cast<uword*>( | 229 return reinterpret_cast<uword*>( |
238 reinterpret_cast<uintptr_t>(this) + sizeof(*this)); | 230 reinterpret_cast<uintptr_t>(this) + sizeof(*this)); |
239 } | 231 } |
240 | 232 |
(...skipping 23 matching lines...) | |
264 | 256 |
265 | 257 |
266 Sample* SampleBuffer::ReserveSample() { | 258 Sample* SampleBuffer::ReserveSample() { |
267 ASSERT(samples_ != NULL); | 259 ASSERT(samples_ != NULL); |
268 uintptr_t cursor = AtomicOperations::FetchAndIncrement(&cursor_); | 260 uintptr_t cursor = AtomicOperations::FetchAndIncrement(&cursor_); |
269 // Map back into sample buffer range. | 261 // Map back into sample buffer range. |
270 cursor = cursor % capacity_; | 262 cursor = cursor % capacity_; |
271 return At(cursor); | 263 return At(cursor); |
272 } | 264 } |
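ReserveSample() hands out slots from a fixed-capacity ring: an atomically incremented cursor taken modulo capacity_ wraps around and silently reuses the oldest slot. A self-contained sketch of the same idea in standard C++ (illustrative only; the VM uses its own AtomicOperations wrapper rather than std::atomic):

#include <atomic>
#include <cstdint>

class RingCursor {
 public:
  explicit RingCursor(uintptr_t capacity) : cursor_(0), capacity_(capacity) {}
  // Claim the next slot; once the buffer is full this wraps and the oldest
  // entry gets overwritten by the next writer.
  uintptr_t Claim() { return cursor_.fetch_add(1) % capacity_; }

 private:
  std::atomic<uintptr_t> cursor_;
  const uintptr_t capacity_;
};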
273 | 265 |
266 | |
267 Sample* SampleBuffer::ReserveSampleAndLink(Sample* previous) { | |
268 ASSERT(previous != NULL); | |
269 Sample* next = ReserveSample(); | |
siva (2015/08/04 21:33:09): Why not have a private function reserveSampleSlot
Cutch (2015/08/05 13:49:13): Done.
 | |
270 next->Init(previous->isolate(), previous->timestamp(), previous->tid()); | |
271 next->set_head_sample(false); | |
272 // Mark that previous continues at next. | |
273 previous->SetContinuationIndex(IndexOf(next)); | |
274 return next; | |
275 } | |
276 | |
277 | |
278 intptr_t SampleBuffer::IndexOf(Sample* sample) const { | |
279 const uintptr_t samples_addr = reinterpret_cast<uintptr_t>(samples_); | |
siva (2015/08/04 21:33:09): The name samples_addr makes this code very difficu
Cutch (2015/08/05 13:49:13): This is gone now.
 | |
280 const uintptr_t sample_addr = reinterpret_cast<uintptr_t>(sample); | |
281 #if defined(DEBUG) | |
282 // Verify that we are within the sample buffer. | |
283 const uintptr_t samples_addr_end = samples_addr + | |
284 (capacity_ * Sample::instance_size()); | |
285 const uintptr_t sample_addr_end = sample_addr + Sample::instance_size(); | |
286 ASSERT(sample_addr >= samples_addr); | |
287 ASSERT(sample_addr_end <= samples_addr_end); | |
288 #endif | |
siva (2015/08/04 21:33:09): Considering that you have ASSERT((offset % Sample:
Cutch (2015/08/05 13:49:13): Acknowledged.
 | |
289 const uintptr_t offset = sample_addr - samples_addr; | |
290 // Verify that we are aligned. | |
291 ASSERT((offset % Sample::instance_size()) == 0); | |
292 const intptr_t index = offset / Sample::instance_size(); | |
293 ASSERT(index >= 0); | |
294 ASSERT(index < capacity_); | |
295 return index; | |
296 } | |
297 | |
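ReserveSampleAndLink() copies the head sample's identity (isolate, timestamp, tid) into the freshly reserved continuation and records that continuation's ring-buffer index in the previous sample, so a chain can later be followed with plain At() lookups. A small assumed helper illustrating how such a link is consumed (SampleBuffer::Next() further below does this with additional validity checks):

// Follow one continuation link by index; returns NULL when the sample does
// not continue into another buffer slot. (Assumed helper, not part of the CL.)
Sample* ContinuationOf(SampleBuffer* buffer, Sample* sample) {
  if (!sample->is_continuation_sample()) {
    return NULL;
  }
  return buffer->At(sample->continuation_index());
}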
298 | |
274 // Attempts to find the true return address when a Dart frame is being setup | 299 // Attempts to find the true return address when a Dart frame is being setup |
275 // or torn down. | 300 // or torn down. |
276 // NOTE: Architecture specific implementations below. | 301 // NOTE: Architecture specific implementations below. |
277 class ReturnAddressLocator : public ValueObject { | 302 class ReturnAddressLocator : public ValueObject { |
278 public: | 303 public: |
279 ReturnAddressLocator(Sample* sample, const Code& code) | 304 ReturnAddressLocator(Sample* sample, const Code& code) |
280 : stack_buffer_(sample->GetStackBuffer()), | 305 : stack_buffer_(sample->GetStackBuffer()), |
281 pc_(sample->pc()), | 306 pc_(sample->pc()), |
282 code_(Code::ZoneHandle(code.raw())) { | 307 code_(Code::ZoneHandle(code.raw())) { |
283 ASSERT(!code_.IsNull()); | 308 ASSERT(!code_.IsNull()); |
(...skipping 114 matching lines...) | |
398 ClearProfileVisitor::ClearProfileVisitor(Isolate* isolate) | 423 ClearProfileVisitor::ClearProfileVisitor(Isolate* isolate) |
399 : SampleVisitor(isolate) { | 424 : SampleVisitor(isolate) { |
400 } | 425 } |
401 | 426 |
402 | 427 |
403 void ClearProfileVisitor::VisitSample(Sample* sample) { | 428 void ClearProfileVisitor::VisitSample(Sample* sample) { |
404 sample->Clear(); | 429 sample->Clear(); |
405 } | 430 } |
406 | 431 |
407 | 432 |
433 class ProfilerStackWalker : public ValueObject { | |
434 public: | |
435 ProfilerStackWalker(Isolate* isolate, | |
436 Sample* head_sample, | |
437 SampleBuffer* sample_buffer) | |
438 : isolate_(isolate), | |
439 sample_(head_sample), | |
440 sample_buffer_(sample_buffer), | |
441 frame_index_(0), | |
442 total_frames_(0) { | |
443 ASSERT(isolate_ != NULL); | |
444 ASSERT(sample_ != NULL); | |
445 ASSERT(sample_buffer_ != NULL); | |
446 ASSERT(sample_->head_sample()); | |
447 } | |
448 | |
449 bool Append(uword pc) { | |
450 if (total_frames_ >= FLAG_max_profile_depth) { | |
451 sample_->set_truncated_trace(true); | |
452 return false; | |
453 } | |
454 ASSERT(sample_ != NULL); | |
455 if (frame_index_ == kSampleSize) { | |
456 Sample* new_sample = sample_buffer_->ReserveSampleAndLink(sample_); | |
457 if (new_sample == NULL) { | |
458 // Could not reserve new sample- mark this as truncated. | |
459 sample_->set_truncated_trace(true); | |
460 return false; | |
461 } | |
462 frame_index_ = 0; | |
463 sample_ = new_sample; | |
464 } | |
465 ASSERT(frame_index_ < kSampleSize); | |
466 sample_->SetAt(frame_index_, pc); | |
467 frame_index_++; | |
468 total_frames_++; | |
469 return true; | |
470 } | |
471 | |
472 protected: | |
473 Isolate* isolate_; | |
474 Sample* sample_; | |
475 SampleBuffer* sample_buffer_; | |
476 intptr_t frame_index_; | |
477 intptr_t total_frames_; | |
478 }; | |
479 | |
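ProfilerStackWalker centralizes the depth bookkeeping: Append() stores a PC into the current sample, spills into a newly linked continuation sample when the 8-entry array fills, and returns false once FLAG_max_profile_depth is reached or no continuation could be reserved, marking the trace truncated. A hypothetical driver showing the intended contract (the function and its arguments are illustrative, not from the CL):

// Feed a pre-collected list of return addresses into a walker; stop as soon
// as Append() reports that the trace has been marked truncated.
void AppendAll(ProfilerStackWalker* walker, const uword* pcs, intptr_t count) {
  for (intptr_t i = 0; i < count; i++) {
    if (!walker->Append(pcs[i])) {
      return;
    }
  }
}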
480 | |
408 // Given an exit frame, walk the Dart stack. | 481 // Given an exit frame, walk the Dart stack. |
409 class ProfilerDartExitStackWalker : public ValueObject { | 482 class ProfilerDartExitStackWalker : public ProfilerStackWalker { |
410 public: | 483 public: |
411 ProfilerDartExitStackWalker(Isolate* isolate, Sample* sample) | 484 ProfilerDartExitStackWalker(Isolate* isolate, |
412 : sample_(sample), | 485 Sample* sample, |
486 SampleBuffer* sample_buffer) | |
487 : ProfilerStackWalker(isolate, sample, sample_buffer), | |
413 frame_iterator_(isolate) { | 488 frame_iterator_(isolate) { |
414 ASSERT(sample_ != NULL); | |
415 } | 489 } |
416 | 490 |
417 void walk() { | 491 void walk() { |
418 // Mark that this sample was collected from an exit frame. | 492 // Mark that this sample was collected from an exit frame. |
419 sample_->set_exit_frame_sample(true); | 493 sample_->set_exit_frame_sample(true); |
420 intptr_t frame_index = 0; | 494 |
421 StackFrame* frame = frame_iterator_.NextFrame(); | 495 StackFrame* frame = frame_iterator_.NextFrame(); |
422 while (frame != NULL) { | 496 while (frame != NULL) { |
423 sample_->SetAt(frame_index, frame->pc()); | 497 if (!Append(frame->pc())) { |
424 frame_index++; | 498 return; |
425 if (frame_index >= NumberOfFramesToCollect()) { | |
426 sample_->set_truncated_trace(true); | |
427 break; | |
428 } | 499 } |
429 frame = frame_iterator_.NextFrame(); | 500 frame = frame_iterator_.NextFrame(); |
430 } | 501 } |
431 } | 502 } |
432 | 503 |
433 private: | 504 private: |
434 Sample* sample_; | |
435 DartFrameIterator frame_iterator_; | 505 DartFrameIterator frame_iterator_; |
436 }; | 506 }; |
437 | 507 |
438 | 508 |
439 // Executing Dart code, walk the stack. | 509 // Executing Dart code, walk the stack. |
440 class ProfilerDartStackWalker : public ValueObject { | 510 class ProfilerDartStackWalker : public ProfilerStackWalker { |
441 public: | 511 public: |
442 ProfilerDartStackWalker(Sample* sample, | 512 ProfilerDartStackWalker(Isolate* isolate, |
513 Sample* sample, | |
514 SampleBuffer* sample_buffer, | |
443 uword stack_lower, | 515 uword stack_lower, |
444 uword stack_upper, | 516 uword stack_upper, |
445 uword pc, | 517 uword pc, |
446 uword fp, | 518 uword fp, |
447 uword sp) | 519 uword sp) |
448 : sample_(sample), | 520 : ProfilerStackWalker(isolate, sample, sample_buffer), |
449 stack_upper_(stack_upper), | 521 stack_upper_(stack_upper), |
450 stack_lower_(stack_lower) { | 522 stack_lower_(stack_lower) { |
451 ASSERT(sample_ != NULL); | |
452 pc_ = reinterpret_cast<uword*>(pc); | 523 pc_ = reinterpret_cast<uword*>(pc); |
453 fp_ = reinterpret_cast<uword*>(fp); | 524 fp_ = reinterpret_cast<uword*>(fp); |
454 sp_ = reinterpret_cast<uword*>(sp); | 525 sp_ = reinterpret_cast<uword*>(sp); |
455 } | 526 } |
456 | 527 |
457 void walk() { | 528 void walk() { |
458 sample_->set_exit_frame_sample(false); | 529 sample_->set_exit_frame_sample(false); |
459 if (!ValidFramePointer()) { | 530 if (!ValidFramePointer()) { |
460 sample_->set_ignore_sample(true); | 531 sample_->set_ignore_sample(true); |
461 return; | 532 return; |
462 } | 533 } |
463 ASSERT(ValidFramePointer()); | 534 ASSERT(ValidFramePointer()); |
464 uword return_pc = InitialReturnAddress(); | 535 uword return_pc = InitialReturnAddress(); |
465 if (StubCode::InInvocationStub(return_pc)) { | 536 if (StubCode::InInvocationStub(return_pc)) { |
466 // Edge case- we have called out from the Invocation Stub but have not | 537 // Edge case- we have called out from the Invocation Stub but have not |
467 // created the stack frame of the callee. Attempt to locate the exit | 538 // created the stack frame of the callee. Attempt to locate the exit |
468 // frame before walking the stack. | 539 // frame before walking the stack. |
469 if (!NextExit() || !ValidFramePointer()) { | 540 if (!NextExit() || !ValidFramePointer()) { |
470 // Nothing to sample. | 541 // Nothing to sample. |
471 sample_->set_ignore_sample(true); | 542 sample_->set_ignore_sample(true); |
472 return; | 543 return; |
473 } | 544 } |
474 } | 545 } |
475 for (int i = 0; i < NumberOfFramesToCollect(); i++) { | 546 while (true) { |
476 sample_->SetAt(i, reinterpret_cast<uword>(pc_)); | 547 if (!Append(reinterpret_cast<uword>(pc_))) { |
548 return; | |
549 } | |
477 if (!Next()) { | 550 if (!Next()) { |
478 return; | 551 return; |
479 } | 552 } |
480 } | 553 } |
481 sample_->set_truncated_trace(true); | |
482 } | 554 } |
483 | 555 |
484 private: | 556 private: |
485 bool Next() { | 557 bool Next() { |
486 if (!ValidFramePointer()) { | 558 if (!ValidFramePointer()) { |
487 return false; | 559 return false; |
488 } | 560 } |
489 if (StubCode::InInvocationStub(reinterpret_cast<uword>(pc_))) { | 561 if (StubCode::InInvocationStub(reinterpret_cast<uword>(pc_))) { |
490 // In invocation stub. | 562 // In invocation stub. |
491 return NextExit(); | 563 return NextExit(); |
(...skipping 82 matching lines...) | |
574 return false; | 646 return false; |
575 } | 647 } |
576 uword cursor = reinterpret_cast<uword>(fp); | 648 uword cursor = reinterpret_cast<uword>(fp); |
577 cursor += sizeof(fp); | 649 cursor += sizeof(fp); |
578 return (cursor >= stack_lower_) && (cursor < stack_upper_); | 650 return (cursor >= stack_lower_) && (cursor < stack_upper_); |
579 } | 651 } |
580 | 652 |
581 uword* pc_; | 653 uword* pc_; |
582 uword* fp_; | 654 uword* fp_; |
583 uword* sp_; | 655 uword* sp_; |
584 Sample* sample_; | |
585 const uword stack_upper_; | 656 const uword stack_upper_; |
586 uword stack_lower_; | 657 uword stack_lower_; |
587 }; | 658 }; |
588 | 659 |
589 | 660 |
590 // If the VM is compiled without frame pointers (which is the default on | 661 // If the VM is compiled without frame pointers (which is the default on |
591 // recent GCC versions with optimizing enabled) the stack walking code may | 662 // recent GCC versions with optimizing enabled) the stack walking code may |
592 // fail. | 663 // fail. |
593 // | 664 // |
594 class ProfilerNativeStackWalker : public ValueObject { | 665 class ProfilerNativeStackWalker : public ProfilerStackWalker { |
595 public: | 666 public: |
596 ProfilerNativeStackWalker(Sample* sample, | 667 ProfilerNativeStackWalker(Isolate* isolate, |
668 Sample* sample, | |
669 SampleBuffer* sample_buffer, | |
597 uword stack_lower, | 670 uword stack_lower, |
598 uword stack_upper, | 671 uword stack_upper, |
599 uword pc, | 672 uword pc, |
600 uword fp, | 673 uword fp, |
601 uword sp) | 674 uword sp) |
602 : sample_(sample), | 675 : ProfilerStackWalker(isolate, sample, sample_buffer), |
603 stack_upper_(stack_upper), | 676 stack_upper_(stack_upper), |
604 original_pc_(pc), | 677 original_pc_(pc), |
605 original_fp_(fp), | 678 original_fp_(fp), |
606 original_sp_(sp), | 679 original_sp_(sp), |
607 lower_bound_(stack_lower) { | 680 lower_bound_(stack_lower) { |
608 ASSERT(sample_ != NULL); | |
609 } | 681 } |
610 | 682 |
611 void walk() { | 683 void walk() { |
612 const uword kMaxStep = VirtualMemory::PageSize(); | 684 const uword kMaxStep = VirtualMemory::PageSize(); |
613 | 685 |
614 sample_->SetAt(0, original_pc_); | 686 Append(original_pc_); |
615 | 687 |
616 uword* pc = reinterpret_cast<uword*>(original_pc_); | 688 uword* pc = reinterpret_cast<uword*>(original_pc_); |
617 uword* fp = reinterpret_cast<uword*>(original_fp_); | 689 uword* fp = reinterpret_cast<uword*>(original_fp_); |
618 uword* previous_fp = fp; | 690 uword* previous_fp = fp; |
619 | 691 |
620 uword gap = original_fp_ - original_sp_; | 692 uword gap = original_fp_ - original_sp_; |
621 if (gap >= kMaxStep) { | 693 if (gap >= kMaxStep) { |
622 // Gap between frame pointer and stack pointer is | 694 // Gap between frame pointer and stack pointer is |
623 // too large. | 695 // too large. |
624 return; | 696 return; |
625 } | 697 } |
626 | 698 |
627 if (!ValidFramePointer(fp)) { | 699 if (!ValidFramePointer(fp)) { |
628 return; | 700 return; |
629 } | 701 } |
630 | 702 |
631 for (int i = 0; i < NumberOfFramesToCollect(); i++) { | 703 while (true) { |
632 sample_->SetAt(i, reinterpret_cast<uword>(pc)); | 704 if (!Append(reinterpret_cast<uword>(pc))) { |
705 return; | |
706 } | |
633 | 707 |
634 pc = CallerPC(fp); | 708 pc = CallerPC(fp); |
635 previous_fp = fp; | 709 previous_fp = fp; |
636 fp = CallerFP(fp); | 710 fp = CallerFP(fp); |
637 | 711 |
638 if (fp == NULL) { | 712 if (fp == NULL) { |
639 return; | 713 return; |
640 } | 714 } |
641 | 715 |
642 if (fp <= previous_fp) { | 716 if (fp <= previous_fp) { |
643 // Frame pointer did not move to a higher address. | 717 // Frame pointer did not move to a higher address. |
644 return; | 718 return; |
645 } | 719 } |
646 | 720 |
647 gap = fp - previous_fp; | 721 gap = fp - previous_fp; |
648 if (gap >= kMaxStep) { | 722 if (gap >= kMaxStep) { |
649 // Frame pointer step is too large. | 723 // Frame pointer step is too large. |
650 return; | 724 return; |
651 } | 725 } |
652 | 726 |
653 if (!ValidFramePointer(fp)) { | 727 if (!ValidFramePointer(fp)) { |
654 // Frame pointer is outside of isolate stack boundary. | 728 // Frame pointer is outside of isolate stack boundary. |
655 return; | 729 return; |
656 } | 730 } |
657 | 731 |
658 // Move the lower bound up. | 732 // Move the lower bound up. |
659 lower_bound_ = reinterpret_cast<uword>(fp); | 733 lower_bound_ = reinterpret_cast<uword>(fp); |
660 } | 734 } |
661 | |
662 sample_->set_truncated_trace(true); | |
663 } | 735 } |
664 | 736 |
665 private: | 737 private: |
666 uword* CallerPC(uword* fp) const { | 738 uword* CallerPC(uword* fp) const { |
667 ASSERT(fp != NULL); | 739 ASSERT(fp != NULL); |
668 uword* caller_pc_ptr = fp + kSavedCallerPcSlotFromFp; | 740 uword* caller_pc_ptr = fp + kSavedCallerPcSlotFromFp; |
669 // This may actually be uninitialized, by design (see class comment above). | 741 // This may actually be uninitialized, by design (see class comment above). |
670 MSAN_UNPOISON(caller_pc_ptr, kWordSize); | 742 MSAN_UNPOISON(caller_pc_ptr, kWordSize); |
671 ASAN_UNPOISON(caller_pc_ptr, kWordSize); | 743 ASAN_UNPOISON(caller_pc_ptr, kWordSize); |
672 return reinterpret_cast<uword*>(*caller_pc_ptr); | 744 return reinterpret_cast<uword*>(*caller_pc_ptr); |
(...skipping 11 matching lines...) | |
684 bool ValidFramePointer(uword* fp) const { | 756 bool ValidFramePointer(uword* fp) const { |
685 if (fp == NULL) { | 757 if (fp == NULL) { |
686 return false; | 758 return false; |
687 } | 759 } |
688 uword cursor = reinterpret_cast<uword>(fp); | 760 uword cursor = reinterpret_cast<uword>(fp); |
689 cursor += sizeof(fp); | 761 cursor += sizeof(fp); |
690 bool r = (cursor >= lower_bound_) && (cursor < stack_upper_); | 762 bool r = (cursor >= lower_bound_) && (cursor < stack_upper_); |
691 return r; | 763 return r; |
692 } | 764 } |
693 | 765 |
694 Sample* sample_; | |
695 const uword stack_upper_; | 766 const uword stack_upper_; |
696 const uword original_pc_; | 767 const uword original_pc_; |
697 const uword original_fp_; | 768 const uword original_fp_; |
698 const uword original_sp_; | 769 const uword original_sp_; |
699 uword lower_bound_; | 770 uword lower_bound_; |
700 }; | 771 }; |
701 | 772 |
702 | 773 |
703 static void CopyPCMarkerIfSafe(Sample* sample, uword fp_addr, uword sp_addr) { | 774 static void CopyPCMarkerIfSafe(Sample* sample, uword fp_addr, uword sp_addr) { |
704 ASSERT(sample != NULL); | 775 ASSERT(sample != NULL); |
(...skipping 149 matching lines...) | |
854 // Attempt to find the real runtime entry function address and use that. | 925 // Attempt to find the real runtime entry function address and use that. |
855 uword redirect_vm_tag = Simulator::FunctionForRedirect(vm_tag); | 926 uword redirect_vm_tag = Simulator::FunctionForRedirect(vm_tag); |
856 if (redirect_vm_tag != 0) { | 927 if (redirect_vm_tag != 0) { |
857 vm_tag = redirect_vm_tag; | 928 vm_tag = redirect_vm_tag; |
858 } | 929 } |
859 #endif | 930 #endif |
860 sample->set_vm_tag(vm_tag); | 931 sample->set_vm_tag(vm_tag); |
861 sample->set_user_tag(isolate->user_tag()); | 932 sample->set_user_tag(isolate->user_tag()); |
862 sample->SetAllocationCid(cid); | 933 sample->SetAllocationCid(cid); |
863 | 934 |
864 ProfilerDartExitStackWalker dart_exit_stack_walker(isolate, sample); | 935 ProfilerDartExitStackWalker |
936 dart_exit_stack_walker(isolate, sample, sample_buffer); | |
865 dart_exit_stack_walker.walk(); | 937 dart_exit_stack_walker.walk(); |
866 } | 938 } |
867 | 939 |
868 | 940 |
869 void Profiler::RecordSampleInterruptCallback( | 941 void Profiler::RecordSampleInterruptCallback( |
870 const InterruptedThreadState& state, | 942 const InterruptedThreadState& state, |
871 void* data) { | 943 void* data) { |
872 Isolate* isolate = reinterpret_cast<Isolate*>(data); | 944 Isolate* isolate = reinterpret_cast<Isolate*>(data); |
873 if ((isolate == NULL) || (Dart::vm_isolate() == NULL)) { | 945 if ((isolate == NULL) || (Dart::vm_isolate() == NULL)) { |
874 // No isolate. | 946 // No isolate. |
(...skipping 119 matching lines...) | |
994 } | 1066 } |
995 #endif | 1067 #endif |
996 // Increment counter for vm tag. | 1068 // Increment counter for vm tag. |
997 VMTagCounters* counters = isolate->vm_tag_counters(); | 1069 VMTagCounters* counters = isolate->vm_tag_counters(); |
998 ASSERT(counters != NULL); | 1070 ASSERT(counters != NULL); |
999 counters->Increment(vm_tag); | 1071 counters->Increment(vm_tag); |
1000 sample->set_vm_tag(vm_tag); | 1072 sample->set_vm_tag(vm_tag); |
1001 sample->set_user_tag(isolate->user_tag()); | 1073 sample->set_user_tag(isolate->user_tag()); |
1002 sample->set_lr(lr); | 1074 sample->set_lr(lr); |
1003 | 1075 |
1004 ProfilerNativeStackWalker native_stack_walker(sample, | 1076 ProfilerNativeStackWalker native_stack_walker(isolate, |
1077 sample, | |
1078 sample_buffer, | |
1005 stack_lower, | 1079 stack_lower, |
1006 stack_upper, | 1080 stack_upper, |
1007 pc, | 1081 pc, |
1008 fp, | 1082 fp, |
1009 sp); | 1083 sp); |
1010 | 1084 |
1011 ProfilerDartExitStackWalker dart_exit_stack_walker(isolate, sample); | 1085 ProfilerDartExitStackWalker dart_exit_stack_walker(isolate, |
1086 sample, | |
1087 sample_buffer); | |
1012 | 1088 |
1013 ProfilerDartStackWalker dart_stack_walker(sample, | 1089 ProfilerDartStackWalker dart_stack_walker(isolate, |
1090 sample, | |
1091 sample_buffer, | |
1014 stack_lower, | 1092 stack_lower, |
1015 stack_upper, | 1093 stack_upper, |
1016 pc, | 1094 pc, |
1017 fp, | 1095 fp, |
1018 sp); | 1096 sp); |
1019 | 1097 |
1020 // All memory access is done inside CollectSample. | 1098 // All memory access is done inside CollectSample. |
1021 CollectSample(isolate, | 1099 CollectSample(isolate, |
1022 exited_dart_code, | 1100 exited_dart_code, |
1023 in_dart_code, | 1101 in_dart_code, |
(...skipping 15 matching lines...) | |
1039 | 1117 |
1040 ProcessedSampleBuffer* buffer = new(zone) ProcessedSampleBuffer(); | 1118 ProcessedSampleBuffer* buffer = new(zone) ProcessedSampleBuffer(); |
1041 | 1119 |
1042 const intptr_t length = capacity(); | 1120 const intptr_t length = capacity(); |
1043 for (intptr_t i = 0; i < length; i++) { | 1121 for (intptr_t i = 0; i < length; i++) { |
1044 Sample* sample = At(i); | 1122 Sample* sample = At(i); |
1045 if (sample->ignore_sample()) { | 1123 if (sample->ignore_sample()) { |
1046 // Bad sample. | 1124 // Bad sample. |
1047 continue; | 1125 continue; |
1048 } | 1126 } |
1127 if (!sample->head_sample()) { | |
1128 // An inner sample in a chain of samples. | |
1129 continue; | |
1130 } | |
1049 if (sample->isolate() != filter->isolate()) { | 1131 if (sample->isolate() != filter->isolate()) { |
1050 // Another isolate. | 1132 // Another isolate. |
1051 continue; | 1133 continue; |
1052 } | 1134 } |
1053 if (sample->timestamp() == 0) { | 1135 if (sample->timestamp() == 0) { |
1054 // Empty. | 1136 // Empty. |
1055 continue; | 1137 continue; |
1056 } | 1138 } |
1057 if (sample->At(0) == 0) { | 1139 if (sample->At(0) == 0) { |
1058 // No frames. | 1140 // No frames. |
(...skipping 21 matching lines...) | |
1080 processed_sample->set_user_tag(sample->user_tag()); | 1162 processed_sample->set_user_tag(sample->user_tag()); |
1081 if (sample->is_allocation_sample()) { | 1163 if (sample->is_allocation_sample()) { |
1082 processed_sample->set_allocation_cid(sample->allocation_cid()); | 1164 processed_sample->set_allocation_cid(sample->allocation_cid()); |
1083 } | 1165 } |
1084 processed_sample->set_first_frame_executing(!sample->exit_frame_sample()); | 1166 processed_sample->set_first_frame_executing(!sample->exit_frame_sample()); |
1085 | 1167 |
1086 // Copy stack trace from sample(s). | 1168 // Copy stack trace from sample(s). |
1087 bool truncated = false; | 1169 bool truncated = false; |
1088 Sample* current = sample; | 1170 Sample* current = sample; |
1089 while (current != NULL) { | 1171 while (current != NULL) { |
1090 for (intptr_t i = 0; i < FLAG_profile_depth; i++) { | 1172 for (intptr_t i = 0; i < kSampleSize; i++) { |
1091 if (current->At(i) == 0) { | 1173 if (current->At(i) == 0) { |
1092 break; | 1174 break; |
1093 } | 1175 } |
1094 processed_sample->Add(current->At(i)); | 1176 processed_sample->Add(current->At(i)); |
1095 } | 1177 } |
1096 | 1178 |
1097 truncated = truncated || current->truncated_trace(); | 1179 truncated = truncated || current->truncated_trace(); |
1098 current = Next(sample); | 1180 current = Next(current); |
1099 } | 1181 } |
1100 | 1182 |
1101 if (!sample->exit_frame_sample()) { | 1183 if (!sample->exit_frame_sample()) { |
1102 Isolate* isolate = thread->isolate(); | 1184 Isolate* isolate = thread->isolate(); |
1103 Isolate* vm_isolate = Dart::vm_isolate(); | 1185 Isolate* vm_isolate = Dart::vm_isolate(); |
1104 processed_sample->FixupCaller(isolate, | 1186 processed_sample->FixupCaller(isolate, |
1105 vm_isolate, | 1187 vm_isolate, |
1106 sample->pc_marker(), | 1188 sample->pc_marker(), |
1107 sample->GetStackBuffer()); | 1189 sample->GetStackBuffer()); |
1108 } | 1190 } |
1109 | 1191 |
1110 processed_sample->set_truncated(truncated); | 1192 processed_sample->set_truncated(truncated); |
1111 return processed_sample; | 1193 return processed_sample; |
1112 } | 1194 } |
1113 | 1195 |
1114 | 1196 |
1115 Sample* SampleBuffer::Next(Sample* sample) { | 1197 Sample* SampleBuffer::Next(Sample* sample) { |
1116 // TODO(johnmccutchan): Support chaining samples for complete stack traces. | 1198 if (!sample->is_continuation_sample()) |
1117 return NULL; | 1199 return NULL; |
1200 Sample* next_sample = At(sample->continuation_index()); | |
1201 // Sanity check. | |
1202 ASSERT(sample != next_sample); | |
1203 // Detect invalid chaining. | |
1204 if (sample->isolate() != next_sample->isolate()) { | |
1205 return NULL; | |
1206 } | |
1207 if (sample->timestamp() != next_sample->timestamp()) { | |
1208 return NULL; | |
1209 } | |
1210 if (sample->tid() != next_sample->tid()) { | |
1211 return NULL; | |
1212 } | |
1213 return next_sample; | |
1118 } | 1214 } |
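Because the ring buffer wraps, a continuation slot may have been reclaimed by a later, unrelated sample before the chain is processed; the isolate/timestamp/tid comparison in Next() acts as a cheap validity check, and a broken link simply yields a shorter (truncated) trace rather than a corrupt one. A hypothetical consumer using only the accessors shown above (illustrative, not new VM API):

// Count the frames reachable from a head sample, stopping cleanly if the
// chain was broken by buffer wrap-around.
intptr_t CountChainedFrames(SampleBuffer* buffer, Sample* head) {
  intptr_t frames = 0;
  for (Sample* s = head; s != NULL; s = buffer->Next(s)) {
    for (intptr_t i = 0; i < kSampleSize; i++) {
      if (s->At(i) == 0) break;  // Unused slots in the PC array are zero.
      frames++;
    }
  }
  return frames;
}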
1119 | 1215 |
1120 | 1216 |
1121 ProcessedSample::ProcessedSample() | 1217 ProcessedSample::ProcessedSample() |
1122 : pcs_(FLAG_profile_depth), | 1218 : pcs_(kSampleSize), |
1123 timestamp_(0), | 1219 timestamp_(0), |
1124 vm_tag_(0), | 1220 vm_tag_(0), |
1125 user_tag_(0), | 1221 user_tag_(0), |
1126 allocation_cid_(-1), | 1222 allocation_cid_(-1), |
1127 truncated_(false) { | 1223 truncated_(false) { |
1128 } | 1224 } |
1129 | 1225 |
1130 | 1226 |
1131 void ProcessedSample::FixupCaller(Isolate* isolate, | 1227 void ProcessedSample::FixupCaller(Isolate* isolate, |
1132 Isolate* vm_isolate, | 1228 Isolate* vm_isolate, |
(...skipping 90 matching lines...) | |
1223 uword pc) { | 1319 uword pc) { |
1224 return vm_isolate->heap()->CodeContains(pc) | 1320 return vm_isolate->heap()->CodeContains(pc) |
1225 || isolate->heap()->CodeContains(pc); | 1321 || isolate->heap()->CodeContains(pc); |
1226 } | 1322 } |
1227 | 1323 |
1228 | 1324 |
1229 ProcessedSampleBuffer::ProcessedSampleBuffer() { | 1325 ProcessedSampleBuffer::ProcessedSampleBuffer() { |
1230 } | 1326 } |
1231 | 1327 |
1232 } // namespace dart | 1328 } // namespace dart |