OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "platform/address_sanitizer.h" | 5 #include "platform/address_sanitizer.h" |
6 #include "platform/memory_sanitizer.h" | 6 #include "platform/memory_sanitizer.h" |
7 #include "platform/utils.h" | 7 #include "platform/utils.h" |
8 | 8 |
9 #include "vm/allocation.h" | 9 #include "vm/allocation.h" |
10 #include "vm/atomic.h" | 10 #include "vm/atomic.h" |
11 #include "vm/code_patcher.h" | 11 #include "vm/code_patcher.h" |
12 #include "vm/instructions.h" | 12 #include "vm/instructions.h" |
13 #include "vm/isolate.h" | 13 #include "vm/isolate.h" |
14 #include "vm/json_stream.h" | 14 #include "vm/json_stream.h" |
15 #include "vm/lockers.h" | 15 #include "vm/lockers.h" |
16 #include "vm/native_symbol.h" | 16 #include "vm/native_symbol.h" |
17 #include "vm/object.h" | 17 #include "vm/object.h" |
18 #include "vm/os.h" | 18 #include "vm/os.h" |
19 #include "vm/profiler.h" | 19 #include "vm/profiler.h" |
20 #include "vm/reusable_handles.h" | 20 #include "vm/reusable_handles.h" |
21 #include "vm/signal_handler.h" | 21 #include "vm/signal_handler.h" |
22 #include "vm/simulator.h" | 22 #include "vm/simulator.h" |
23 #include "vm/stack_frame.h" | 23 #include "vm/stack_frame.h" |
24 | 24 |
25 namespace dart { | 25 namespace dart { |
26 | 26 |
27 | 27 |
| 28 static const intptr_t kSampleSize = 8; |
| 29 |
28 DECLARE_FLAG(bool, trace_profiler); | 30 DECLARE_FLAG(bool, trace_profiler); |
29 | 31 |
30 DEFINE_FLAG(bool, profile, true, "Enable Sampling Profiler"); | 32 DEFINE_FLAG(bool, profile, true, "Enable Sampling Profiler"); |
31 DEFINE_FLAG(bool, trace_profiled_isolates, false, "Trace profiled isolates."); | 33 DEFINE_FLAG(bool, trace_profiled_isolates, false, "Trace profiled isolates."); |
32 | 34 |
33 #if defined(TARGET_OS_ANDROID) || defined(TARGET_ARCH_ARM64) || \ | 35 #if defined(TARGET_OS_ANDROID) || defined(TARGET_ARCH_ARM64) || \ |
34 defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_MIPS) | 36 defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_MIPS) |
35 DEFINE_FLAG(int, profile_period, 10000, | 37 DEFINE_FLAG(int, profile_period, 10000, |
36 "Time between profiler samples in microseconds. Minimum 50."); | 38 "Time between profiler samples in microseconds. Minimum 50."); |
37 #else | 39 #else |
38 DEFINE_FLAG(int, profile_period, 1000, | 40 DEFINE_FLAG(int, profile_period, 1000, |
39 "Time between profiler samples in microseconds. Minimum 50."); | 41 "Time between profiler samples in microseconds. Minimum 50."); |
40 #endif | 42 #endif |
41 DEFINE_FLAG(int, profile_depth, 8, | 43 DEFINE_FLAG(int, max_profile_depth, kSampleSize, |
42 "Maximum number stack frames walked. Minimum 1. Maximum 255."); | 44 "Maximum number stack frames walked. Minimum 1. Maximum 255."); |
43 #if defined(USING_SIMULATOR) | 45 #if defined(USING_SIMULATOR) |
44 DEFINE_FLAG(bool, profile_vm, true, | 46 DEFINE_FLAG(bool, profile_vm, true, |
45 "Always collect native stack traces."); | 47 "Always collect native stack traces."); |
46 #else | 48 #else |
47 DEFINE_FLAG(bool, profile_vm, false, | 49 DEFINE_FLAG(bool, profile_vm, false, |
48 "Always collect native stack traces."); | 50 "Always collect native stack traces."); |
49 #endif | 51 #endif |
50 | 52 |
51 bool Profiler::initialized_ = false; | 53 bool Profiler::initialized_ = false; |
52 SampleBuffer* Profiler::sample_buffer_ = NULL; | 54 SampleBuffer* Profiler::sample_buffer_ = NULL; |
53 | 55 |
54 static intptr_t NumberOfFramesToCollect() { | |
55 if (FLAG_profile_depth <= 0) { | |
56 return 0; | |
57 } | |
58 // Subtract to reserve space for the possible missing frame. | |
59 return FLAG_profile_depth - 1; | |
60 } | |
61 | 56 |
62 void Profiler::InitOnce() { | 57 void Profiler::InitOnce() { |
63 // Place some sane restrictions on user controlled flags. | 58 // Place some sane restrictions on user controlled flags. |
64 SetSamplePeriod(FLAG_profile_period); | 59 SetSamplePeriod(FLAG_profile_period); |
65 SetSampleDepth(FLAG_profile_depth); | 60 SetSampleDepth(FLAG_max_profile_depth); |
66 Sample::InitOnce(); | 61 Sample::InitOnce(); |
67 if (!FLAG_profile) { | 62 if (!FLAG_profile) { |
68 return; | 63 return; |
69 } | 64 } |
70 ASSERT(!initialized_); | 65 ASSERT(!initialized_); |
71 sample_buffer_ = new SampleBuffer(); | 66 sample_buffer_ = new SampleBuffer(); |
72 NativeSymbolResolver::InitOnce(); | 67 NativeSymbolResolver::InitOnce(); |
73 ThreadInterrupter::SetInterruptPeriod(FLAG_profile_period); | 68 ThreadInterrupter::SetInterruptPeriod(FLAG_profile_period); |
74 ThreadInterrupter::Startup(); | 69 ThreadInterrupter::Startup(); |
75 initialized_ = true; | 70 initialized_ = true; |
76 } | 71 } |
77 | 72 |
78 | 73 |
79 void Profiler::Shutdown() { | 74 void Profiler::Shutdown() { |
80 if (!FLAG_profile) { | 75 if (!FLAG_profile) { |
81 return; | 76 return; |
82 } | 77 } |
83 ASSERT(initialized_); | 78 ASSERT(initialized_); |
84 ThreadInterrupter::Shutdown(); | 79 ThreadInterrupter::Shutdown(); |
85 NativeSymbolResolver::ShutdownOnce(); | 80 NativeSymbolResolver::ShutdownOnce(); |
86 } | 81 } |
87 | 82 |
88 | 83 |
89 void Profiler::SetSampleDepth(intptr_t depth) { | 84 void Profiler::SetSampleDepth(intptr_t depth) { |
90 const int kMinimumDepth = 2; | 85 const int kMinimumDepth = 2; |
91 const int kMaximumDepth = 255; | 86 const int kMaximumDepth = 255; |
92 if (depth < kMinimumDepth) { | 87 if (depth < kMinimumDepth) { |
93 FLAG_profile_depth = kMinimumDepth; | 88 FLAG_max_profile_depth = kMinimumDepth; |
94 } else if (depth > kMaximumDepth) { | 89 } else if (depth > kMaximumDepth) { |
95 FLAG_profile_depth = kMaximumDepth; | 90 FLAG_max_profile_depth = kMaximumDepth; |
96 } else { | 91 } else { |
97 FLAG_profile_depth = depth; | 92 FLAG_max_profile_depth = depth; |
98 } | 93 } |
99 } | 94 } |
100 | 95 |
101 | 96 |
102 void Profiler::SetSamplePeriod(intptr_t period) { | 97 void Profiler::SetSamplePeriod(intptr_t period) { |
103 const int kMinimumProfilePeriod = 50; | 98 const int kMinimumProfilePeriod = 50; |
104 if (period < kMinimumProfilePeriod) { | 99 if (period < kMinimumProfilePeriod) { |
105 FLAG_profile_period = kMinimumProfilePeriod; | 100 FLAG_profile_period = kMinimumProfilePeriod; |
106 } else { | 101 } else { |
107 FLAG_profile_period = period; | 102 FLAG_profile_period = period; |
(...skipping 113 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
221 ThreadInterrupter::WakeUp(); | 216 ThreadInterrupter::WakeUp(); |
222 } | 217 } |
223 } | 218 } |
224 | 219 |
225 | 220 |
226 intptr_t Sample::pcs_length_ = 0; | 221 intptr_t Sample::pcs_length_ = 0; |
227 intptr_t Sample::instance_size_ = 0; | 222 intptr_t Sample::instance_size_ = 0; |
228 | 223 |
229 | 224 |
230 void Sample::InitOnce() { | 225 void Sample::InitOnce() { |
231 ASSERT(FLAG_profile_depth >= 2); | 226 pcs_length_ = kSampleSize; |
232 pcs_length_ = FLAG_profile_depth; | |
233 instance_size_ = | 227 instance_size_ = |
234 sizeof(Sample) + (sizeof(uword) * pcs_length_); // NOLINT. | 228 sizeof(Sample) + (sizeof(uword) * pcs_length_); // NOLINT. |
235 } | 229 } |
236 | 230 |
237 | 231 |
238 uword* Sample::GetPCArray() const { | 232 uword* Sample::GetPCArray() const { |
239 return reinterpret_cast<uword*>( | 233 return reinterpret_cast<uword*>( |
240 reinterpret_cast<uintptr_t>(this) + sizeof(*this)); | 234 reinterpret_cast<uintptr_t>(this) + sizeof(*this)); |
241 } | 235 } |
242 | 236 |
(...skipping 15 matching lines...) Expand all Loading... |
258 | 252 |
259 Sample* SampleBuffer::At(intptr_t idx) const { | 253 Sample* SampleBuffer::At(intptr_t idx) const { |
260 ASSERT(idx >= 0); | 254 ASSERT(idx >= 0); |
261 ASSERT(idx < capacity_); | 255 ASSERT(idx < capacity_); |
262 intptr_t offset = idx * Sample::instance_size(); | 256 intptr_t offset = idx * Sample::instance_size(); |
263 uint8_t* samples = reinterpret_cast<uint8_t*>(samples_); | 257 uint8_t* samples = reinterpret_cast<uint8_t*>(samples_); |
264 return reinterpret_cast<Sample*>(samples + offset); | 258 return reinterpret_cast<Sample*>(samples + offset); |
265 } | 259 } |
266 | 260 |
267 | 261 |
268 Sample* SampleBuffer::ReserveSample() { | 262 intptr_t SampleBuffer::ReserveSampleSlot() { |
269 ASSERT(samples_ != NULL); | 263 ASSERT(samples_ != NULL); |
270 uintptr_t cursor = AtomicOperations::FetchAndIncrement(&cursor_); | 264 uintptr_t cursor = AtomicOperations::FetchAndIncrement(&cursor_); |
271 // Map back into sample buffer range. | 265 // Map back into sample buffer range. |
272 cursor = cursor % capacity_; | 266 cursor = cursor % capacity_; |
273 return At(cursor); | 267 return cursor; |
274 } | 268 } |
275 | 269 |
| 270 Sample* SampleBuffer::ReserveSample() { |
| 271 return At(ReserveSampleSlot()); |
| 272 } |
| 273 |
| 274 |
| 275 Sample* SampleBuffer::ReserveSampleAndLink(Sample* previous) { |
| 276 ASSERT(previous != NULL); |
| 277 intptr_t next_index = ReserveSampleSlot(); |
| 278 Sample* next = At(next_index); |
| 279 next->Init(previous->isolate(), previous->timestamp(), previous->tid()); |
| 280 next->set_head_sample(false); |
| 281 // Mark that previous continues at next. |
| 282 previous->SetContinuationIndex(next_index); |
| 283 return next; |
| 284 } |
| 285 |
| 286 |
276 // Attempts to find the true return address when a Dart frame is being setup | 287 // Attempts to find the true return address when a Dart frame is being setup |
277 // or torn down. | 288 // or torn down. |
278 // NOTE: Architecture specific implementations below. | 289 // NOTE: Architecture specific implementations below. |
279 class ReturnAddressLocator : public ValueObject { | 290 class ReturnAddressLocator : public ValueObject { |
280 public: | 291 public: |
281 ReturnAddressLocator(Sample* sample, const Code& code) | 292 ReturnAddressLocator(Sample* sample, const Code& code) |
282 : stack_buffer_(sample->GetStackBuffer()), | 293 : stack_buffer_(sample->GetStackBuffer()), |
283 pc_(sample->pc()), | 294 pc_(sample->pc()), |
284 code_(Code::ZoneHandle(code.raw())) { | 295 code_(Code::ZoneHandle(code.raw())) { |
285 ASSERT(!code_.IsNull()); | 296 ASSERT(!code_.IsNull()); |
(...skipping 114 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
400 ClearProfileVisitor::ClearProfileVisitor(Isolate* isolate) | 411 ClearProfileVisitor::ClearProfileVisitor(Isolate* isolate) |
401 : SampleVisitor(isolate) { | 412 : SampleVisitor(isolate) { |
402 } | 413 } |
403 | 414 |
404 | 415 |
405 void ClearProfileVisitor::VisitSample(Sample* sample) { | 416 void ClearProfileVisitor::VisitSample(Sample* sample) { |
406 sample->Clear(); | 417 sample->Clear(); |
407 } | 418 } |
408 | 419 |
409 | 420 |
| 421 class ProfilerStackWalker : public ValueObject { |
| 422 public: |
| 423 ProfilerStackWalker(Isolate* isolate, |
| 424 Sample* head_sample, |
| 425 SampleBuffer* sample_buffer) |
| 426 : isolate_(isolate), |
| 427 sample_(head_sample), |
| 428 sample_buffer_(sample_buffer), |
| 429 frame_index_(0), |
| 430 total_frames_(0) { |
| 431 ASSERT(isolate_ != NULL); |
| 432 ASSERT(sample_ != NULL); |
| 433 ASSERT(sample_buffer_ != NULL); |
| 434 ASSERT(sample_->head_sample()); |
| 435 } |
| 436 |
| 437 bool Append(uword pc) { |
| 438 if (total_frames_ >= FLAG_max_profile_depth) { |
| 439 sample_->set_truncated_trace(true); |
| 440 return false; |
| 441 } |
| 442 ASSERT(sample_ != NULL); |
| 443 if (frame_index_ == kSampleSize) { |
| 444 Sample* new_sample = sample_buffer_->ReserveSampleAndLink(sample_); |
| 445 if (new_sample == NULL) { |
 | 446 // Could not reserve a new sample - mark this as truncated. |
| 447 sample_->set_truncated_trace(true); |
| 448 return false; |
| 449 } |
| 450 frame_index_ = 0; |
| 451 sample_ = new_sample; |
| 452 } |
| 453 ASSERT(frame_index_ < kSampleSize); |
| 454 sample_->SetAt(frame_index_, pc); |
| 455 frame_index_++; |
| 456 total_frames_++; |
| 457 return true; |
| 458 } |
| 459 |
| 460 protected: |
| 461 Isolate* isolate_; |
| 462 Sample* sample_; |
| 463 SampleBuffer* sample_buffer_; |
| 464 intptr_t frame_index_; |
| 465 intptr_t total_frames_; |
| 466 }; |
| 467 |
| 468 |
410 // Given an exit frame, walk the Dart stack. | 469 // Given an exit frame, walk the Dart stack. |
411 class ProfilerDartExitStackWalker : public ValueObject { | 470 class ProfilerDartExitStackWalker : public ProfilerStackWalker { |
412 public: | 471 public: |
413 ProfilerDartExitStackWalker(Isolate* isolate, Sample* sample) | 472 ProfilerDartExitStackWalker(Isolate* isolate, |
414 : sample_(sample), | 473 Sample* sample, |
| 474 SampleBuffer* sample_buffer) |
| 475 : ProfilerStackWalker(isolate, sample, sample_buffer), |
415 frame_iterator_(isolate) { | 476 frame_iterator_(isolate) { |
416 ASSERT(sample_ != NULL); | |
417 } | 477 } |
418 | 478 |
419 void walk() { | 479 void walk() { |
420 // Mark that this sample was collected from an exit frame. | 480 // Mark that this sample was collected from an exit frame. |
421 sample_->set_exit_frame_sample(true); | 481 sample_->set_exit_frame_sample(true); |
422 intptr_t frame_index = 0; | 482 |
423 StackFrame* frame = frame_iterator_.NextFrame(); | 483 StackFrame* frame = frame_iterator_.NextFrame(); |
424 while (frame != NULL) { | 484 while (frame != NULL) { |
425 sample_->SetAt(frame_index, frame->pc()); | 485 if (!Append(frame->pc())) { |
426 frame_index++; | 486 return; |
427 if (frame_index >= NumberOfFramesToCollect()) { | |
428 sample_->set_truncated_trace(true); | |
429 break; | |
430 } | 487 } |
431 frame = frame_iterator_.NextFrame(); | 488 frame = frame_iterator_.NextFrame(); |
432 } | 489 } |
433 } | 490 } |
434 | 491 |
435 private: | 492 private: |
436 Sample* sample_; | |
437 DartFrameIterator frame_iterator_; | 493 DartFrameIterator frame_iterator_; |
438 }; | 494 }; |
439 | 495 |
440 | 496 |
441 // Executing Dart code, walk the stack. | 497 // Executing Dart code, walk the stack. |
442 class ProfilerDartStackWalker : public ValueObject { | 498 class ProfilerDartStackWalker : public ProfilerStackWalker { |
443 public: | 499 public: |
444 ProfilerDartStackWalker(Sample* sample, | 500 ProfilerDartStackWalker(Isolate* isolate, |
| 501 Sample* sample, |
| 502 SampleBuffer* sample_buffer, |
445 uword stack_lower, | 503 uword stack_lower, |
446 uword stack_upper, | 504 uword stack_upper, |
447 uword pc, | 505 uword pc, |
448 uword fp, | 506 uword fp, |
449 uword sp) | 507 uword sp) |
450 : sample_(sample), | 508 : ProfilerStackWalker(isolate, sample, sample_buffer), |
451 stack_upper_(stack_upper), | 509 stack_upper_(stack_upper), |
452 stack_lower_(stack_lower) { | 510 stack_lower_(stack_lower) { |
453 ASSERT(sample_ != NULL); | |
454 pc_ = reinterpret_cast<uword*>(pc); | 511 pc_ = reinterpret_cast<uword*>(pc); |
455 fp_ = reinterpret_cast<uword*>(fp); | 512 fp_ = reinterpret_cast<uword*>(fp); |
456 sp_ = reinterpret_cast<uword*>(sp); | 513 sp_ = reinterpret_cast<uword*>(sp); |
457 } | 514 } |
458 | 515 |
459 void walk() { | 516 void walk() { |
460 sample_->set_exit_frame_sample(false); | 517 sample_->set_exit_frame_sample(false); |
461 if (!ValidFramePointer()) { | 518 if (!ValidFramePointer()) { |
462 sample_->set_ignore_sample(true); | 519 sample_->set_ignore_sample(true); |
463 return; | 520 return; |
464 } | 521 } |
465 ASSERT(ValidFramePointer()); | 522 ASSERT(ValidFramePointer()); |
466 uword return_pc = InitialReturnAddress(); | 523 uword return_pc = InitialReturnAddress(); |
467 if (StubCode::InInvocationStub(return_pc)) { | 524 if (StubCode::InInvocationStub(return_pc)) { |
468 // Edge case - we have called out from the Invocation Stub but have not | 525 // Edge case - we have called out from the Invocation Stub but have not |
469 // created the stack frame of the callee. Attempt to locate the exit | 526 // created the stack frame of the callee. Attempt to locate the exit |
470 // frame before walking the stack. | 527 // frame before walking the stack. |
471 if (!NextExit() || !ValidFramePointer()) { | 528 if (!NextExit() || !ValidFramePointer()) { |
472 // Nothing to sample. | 529 // Nothing to sample. |
473 sample_->set_ignore_sample(true); | 530 sample_->set_ignore_sample(true); |
474 return; | 531 return; |
475 } | 532 } |
476 } | 533 } |
477 for (int i = 0; i < NumberOfFramesToCollect(); i++) { | 534 while (true) { |
478 sample_->SetAt(i, reinterpret_cast<uword>(pc_)); | 535 if (!Append(reinterpret_cast<uword>(pc_))) { |
| 536 return; |
| 537 } |
479 if (!Next()) { | 538 if (!Next()) { |
480 return; | 539 return; |
481 } | 540 } |
482 } | 541 } |
483 sample_->set_truncated_trace(true); | |
484 } | 542 } |
485 | 543 |
486 private: | 544 private: |
487 bool Next() { | 545 bool Next() { |
488 if (!ValidFramePointer()) { | 546 if (!ValidFramePointer()) { |
489 return false; | 547 return false; |
490 } | 548 } |
491 if (StubCode::InInvocationStub(reinterpret_cast<uword>(pc_))) { | 549 if (StubCode::InInvocationStub(reinterpret_cast<uword>(pc_))) { |
492 // In invocation stub. | 550 // In invocation stub. |
493 return NextExit(); | 551 return NextExit(); |
(...skipping 82 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
576 return false; | 634 return false; |
577 } | 635 } |
578 uword cursor = reinterpret_cast<uword>(fp); | 636 uword cursor = reinterpret_cast<uword>(fp); |
579 cursor += sizeof(fp); | 637 cursor += sizeof(fp); |
580 return (cursor >= stack_lower_) && (cursor < stack_upper_); | 638 return (cursor >= stack_lower_) && (cursor < stack_upper_); |
581 } | 639 } |
582 | 640 |
583 uword* pc_; | 641 uword* pc_; |
584 uword* fp_; | 642 uword* fp_; |
585 uword* sp_; | 643 uword* sp_; |
586 Sample* sample_; | |
587 const uword stack_upper_; | 644 const uword stack_upper_; |
588 uword stack_lower_; | 645 uword stack_lower_; |
589 }; | 646 }; |
590 | 647 |
591 | 648 |
592 // If the VM is compiled without frame pointers (which is the default on | 649 // If the VM is compiled without frame pointers (which is the default on |
593 // recent GCC versions with optimizing enabled) the stack walking code may | 650 // recent GCC versions with optimizing enabled) the stack walking code may |
594 // fail. | 651 // fail. |
595 // | 652 // |
596 class ProfilerNativeStackWalker : public ValueObject { | 653 class ProfilerNativeStackWalker : public ProfilerStackWalker { |
597 public: | 654 public: |
598 ProfilerNativeStackWalker(Sample* sample, | 655 ProfilerNativeStackWalker(Isolate* isolate, |
| 656 Sample* sample, |
| 657 SampleBuffer* sample_buffer, |
599 uword stack_lower, | 658 uword stack_lower, |
600 uword stack_upper, | 659 uword stack_upper, |
601 uword pc, | 660 uword pc, |
602 uword fp, | 661 uword fp, |
603 uword sp) | 662 uword sp) |
604 : sample_(sample), | 663 : ProfilerStackWalker(isolate, sample, sample_buffer), |
605 stack_upper_(stack_upper), | 664 stack_upper_(stack_upper), |
606 original_pc_(pc), | 665 original_pc_(pc), |
607 original_fp_(fp), | 666 original_fp_(fp), |
608 original_sp_(sp), | 667 original_sp_(sp), |
609 lower_bound_(stack_lower) { | 668 lower_bound_(stack_lower) { |
610 ASSERT(sample_ != NULL); | |
611 } | 669 } |
612 | 670 |
613 void walk() { | 671 void walk() { |
614 const uword kMaxStep = VirtualMemory::PageSize(); | 672 const uword kMaxStep = VirtualMemory::PageSize(); |
615 | 673 |
616 sample_->SetAt(0, original_pc_); | 674 Append(original_pc_); |
617 | 675 |
618 uword* pc = reinterpret_cast<uword*>(original_pc_); | 676 uword* pc = reinterpret_cast<uword*>(original_pc_); |
619 uword* fp = reinterpret_cast<uword*>(original_fp_); | 677 uword* fp = reinterpret_cast<uword*>(original_fp_); |
620 uword* previous_fp = fp; | 678 uword* previous_fp = fp; |
621 | 679 |
622 uword gap = original_fp_ - original_sp_; | 680 uword gap = original_fp_ - original_sp_; |
623 if (gap >= kMaxStep) { | 681 if (gap >= kMaxStep) { |
624 // Gap between frame pointer and stack pointer is | 682 // Gap between frame pointer and stack pointer is |
625 // too large. | 683 // too large. |
626 return; | 684 return; |
627 } | 685 } |
628 | 686 |
629 if (!ValidFramePointer(fp)) { | 687 if (!ValidFramePointer(fp)) { |
630 return; | 688 return; |
631 } | 689 } |
632 | 690 |
633 for (int i = 0; i < NumberOfFramesToCollect(); i++) { | 691 while (true) { |
634 sample_->SetAt(i, reinterpret_cast<uword>(pc)); | 692 if (!Append(reinterpret_cast<uword>(pc))) { |
| 693 return; |
| 694 } |
635 | 695 |
636 pc = CallerPC(fp); | 696 pc = CallerPC(fp); |
637 previous_fp = fp; | 697 previous_fp = fp; |
638 fp = CallerFP(fp); | 698 fp = CallerFP(fp); |
639 | 699 |
640 if (fp == NULL) { | 700 if (fp == NULL) { |
641 return; | 701 return; |
642 } | 702 } |
643 | 703 |
644 if (fp <= previous_fp) { | 704 if (fp <= previous_fp) { |
645 // Frame pointer did not move to a higher address. | 705 // Frame pointer did not move to a higher address. |
646 return; | 706 return; |
647 } | 707 } |
648 | 708 |
649 gap = fp - previous_fp; | 709 gap = fp - previous_fp; |
650 if (gap >= kMaxStep) { | 710 if (gap >= kMaxStep) { |
651 // Frame pointer step is too large. | 711 // Frame pointer step is too large. |
652 return; | 712 return; |
653 } | 713 } |
654 | 714 |
655 if (!ValidFramePointer(fp)) { | 715 if (!ValidFramePointer(fp)) { |
656 // Frame pointer is outside of isolate stack boundary. | 716 // Frame pointer is outside of isolate stack boundary. |
657 return; | 717 return; |
658 } | 718 } |
659 | 719 |
660 // Move the lower bound up. | 720 // Move the lower bound up. |
661 lower_bound_ = reinterpret_cast<uword>(fp); | 721 lower_bound_ = reinterpret_cast<uword>(fp); |
662 } | 722 } |
663 | |
664 sample_->set_truncated_trace(true); | |
665 } | 723 } |
666 | 724 |
667 private: | 725 private: |
668 uword* CallerPC(uword* fp) const { | 726 uword* CallerPC(uword* fp) const { |
669 ASSERT(fp != NULL); | 727 ASSERT(fp != NULL); |
670 uword* caller_pc_ptr = fp + kSavedCallerPcSlotFromFp; | 728 uword* caller_pc_ptr = fp + kSavedCallerPcSlotFromFp; |
671 // This may actually be uninitialized, by design (see class comment above). | 729 // This may actually be uninitialized, by design (see class comment above). |
672 MSAN_UNPOISON(caller_pc_ptr, kWordSize); | 730 MSAN_UNPOISON(caller_pc_ptr, kWordSize); |
673 ASAN_UNPOISON(caller_pc_ptr, kWordSize); | 731 ASAN_UNPOISON(caller_pc_ptr, kWordSize); |
674 return reinterpret_cast<uword*>(*caller_pc_ptr); | 732 return reinterpret_cast<uword*>(*caller_pc_ptr); |
(...skipping 11 matching lines...) Expand all Loading... |
686 bool ValidFramePointer(uword* fp) const { | 744 bool ValidFramePointer(uword* fp) const { |
687 if (fp == NULL) { | 745 if (fp == NULL) { |
688 return false; | 746 return false; |
689 } | 747 } |
690 uword cursor = reinterpret_cast<uword>(fp); | 748 uword cursor = reinterpret_cast<uword>(fp); |
691 cursor += sizeof(fp); | 749 cursor += sizeof(fp); |
692 bool r = (cursor >= lower_bound_) && (cursor < stack_upper_); | 750 bool r = (cursor >= lower_bound_) && (cursor < stack_upper_); |
693 return r; | 751 return r; |
694 } | 752 } |
695 | 753 |
696 Sample* sample_; | |
697 const uword stack_upper_; | 754 const uword stack_upper_; |
698 const uword original_pc_; | 755 const uword original_pc_; |
699 const uword original_fp_; | 756 const uword original_fp_; |
700 const uword original_sp_; | 757 const uword original_sp_; |
701 uword lower_bound_; | 758 uword lower_bound_; |
702 }; | 759 }; |
703 | 760 |
704 | 761 |
705 static void CopyPCMarkerIfSafe(Sample* sample, uword fp_addr, uword sp_addr) { | 762 static void CopyPCMarkerIfSafe(Sample* sample, uword fp_addr, uword sp_addr) { |
706 ASSERT(sample != NULL); | 763 ASSERT(sample != NULL); |
(...skipping 286 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
993 &stack_lower, | 1050 &stack_lower, |
994 &stack_upper)) { | 1051 &stack_upper)) { |
995 // Could not get stack boundary. | 1052 // Could not get stack boundary. |
996 return; | 1053 return; |
997 } | 1054 } |
998 | 1055 |
999 Sample* sample = SetupSample(isolate, | 1056 Sample* sample = SetupSample(isolate, |
1000 sample_buffer, | 1057 sample_buffer, |
1001 OSThread::GetCurrentThreadId()); | 1058 OSThread::GetCurrentThreadId()); |
1002 sample->SetAllocationCid(cid); | 1059 sample->SetAllocationCid(cid); |
1003 ProfilerNativeStackWalker native_stack_walker(sample, | 1060 ProfilerNativeStackWalker native_stack_walker(isolate, |
| 1061 sample, |
| 1062 sample_buffer, |
1004 stack_lower, | 1063 stack_lower, |
1005 stack_upper, | 1064 stack_upper, |
1006 pc, | 1065 pc, |
1007 fp, | 1066 fp, |
1008 sp); | 1067 sp); |
1009 native_stack_walker.walk(); | 1068 native_stack_walker.walk(); |
1010 } else if (exited_dart_code) { | 1069 } else if (exited_dart_code) { |
1011 Sample* sample = SetupSample(isolate, | 1070 Sample* sample = SetupSample(isolate, |
1012 sample_buffer, | 1071 sample_buffer, |
1013 OSThread::GetCurrentThreadId()); | 1072 OSThread::GetCurrentThreadId()); |
1014 sample->SetAllocationCid(cid); | 1073 sample->SetAllocationCid(cid); |
1015 ProfilerDartExitStackWalker dart_exit_stack_walker(isolate, sample); | 1074 ProfilerDartExitStackWalker dart_exit_stack_walker(isolate, |
| 1075 sample, |
| 1076 sample_buffer); |
1016 dart_exit_stack_walker.walk(); | 1077 dart_exit_stack_walker.walk(); |
1017 } else { | 1078 } else { |
1018 // Fall back. | 1079 // Fall back. |
1019 uintptr_t pc = GetProgramCounter(); | 1080 uintptr_t pc = GetProgramCounter(); |
1020 Sample* sample = SetupSample(isolate, | 1081 Sample* sample = SetupSample(isolate, |
1021 sample_buffer, | 1082 sample_buffer, |
1022 OSThread::GetCurrentThreadId()); | 1083 OSThread::GetCurrentThreadId()); |
1023 sample->SetAllocationCid(cid); | 1084 sample->SetAllocationCid(cid); |
1024 sample->set_vm_tag(VMTag::kEmbedderTagId); | 1085 sample->set_vm_tag(VMTag::kEmbedderTagId); |
1025 sample->SetAt(0, pc); | 1086 sample->SetAt(0, pc); |
(...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1095 | 1156 |
1096 // Setup sample. | 1157 // Setup sample. |
1097 Sample* sample = SetupSample(isolate, | 1158 Sample* sample = SetupSample(isolate, |
1098 sample_buffer, | 1159 sample_buffer, |
1099 OSThread::GetCurrentThreadId()); | 1160 OSThread::GetCurrentThreadId()); |
1100 // Increment counter for vm tag. | 1161 // Increment counter for vm tag. |
1101 VMTagCounters* counters = isolate->vm_tag_counters(); | 1162 VMTagCounters* counters = isolate->vm_tag_counters(); |
1102 ASSERT(counters != NULL); | 1163 ASSERT(counters != NULL); |
1103 counters->Increment(sample->vm_tag()); | 1164 counters->Increment(sample->vm_tag()); |
1104 | 1165 |
1105 ProfilerNativeStackWalker native_stack_walker(sample, | 1166 ProfilerNativeStackWalker native_stack_walker(isolate, |
| 1167 sample, |
| 1168 sample_buffer, |
1106 stack_lower, | 1169 stack_lower, |
1107 stack_upper, | 1170 stack_upper, |
1108 pc, | 1171 pc, |
1109 fp, | 1172 fp, |
1110 sp); | 1173 sp); |
1111 | 1174 |
1112 ProfilerDartExitStackWalker dart_exit_stack_walker(isolate, sample); | 1175 ProfilerDartExitStackWalker dart_exit_stack_walker(isolate, |
| 1176 sample, |
| 1177 sample_buffer); |
1113 | 1178 |
1114 ProfilerDartStackWalker dart_stack_walker(sample, | 1179 ProfilerDartStackWalker dart_stack_walker(isolate, |
| 1180 sample, |
| 1181 sample_buffer, |
1115 stack_lower, | 1182 stack_lower, |
1116 stack_upper, | 1183 stack_upper, |
1117 pc, | 1184 pc, |
1118 fp, | 1185 fp, |
1119 sp); | 1186 sp); |
1120 | 1187 |
1121 // All memory access is done inside CollectSample. | 1188 // All memory access is done inside CollectSample. |
1122 CollectSample(isolate, | 1189 CollectSample(isolate, |
1123 exited_dart_code, | 1190 exited_dart_code, |
1124 in_dart_code, | 1191 in_dart_code, |
(...skipping 15 matching lines...) Expand all Loading... |
1140 | 1207 |
1141 ProcessedSampleBuffer* buffer = new(zone) ProcessedSampleBuffer(); | 1208 ProcessedSampleBuffer* buffer = new(zone) ProcessedSampleBuffer(); |
1142 | 1209 |
1143 const intptr_t length = capacity(); | 1210 const intptr_t length = capacity(); |
1144 for (intptr_t i = 0; i < length; i++) { | 1211 for (intptr_t i = 0; i < length; i++) { |
1145 Sample* sample = At(i); | 1212 Sample* sample = At(i); |
1146 if (sample->ignore_sample()) { | 1213 if (sample->ignore_sample()) { |
1147 // Bad sample. | 1214 // Bad sample. |
1148 continue; | 1215 continue; |
1149 } | 1216 } |
| 1217 if (!sample->head_sample()) { |
| 1218 // An inner sample in a chain of samples. |
| 1219 continue; |
| 1220 } |
1150 if (sample->isolate() != filter->isolate()) { | 1221 if (sample->isolate() != filter->isolate()) { |
1151 // Another isolate. | 1222 // Another isolate. |
1152 continue; | 1223 continue; |
1153 } | 1224 } |
1154 if (sample->timestamp() == 0) { | 1225 if (sample->timestamp() == 0) { |
1155 // Empty. | 1226 // Empty. |
1156 continue; | 1227 continue; |
1157 } | 1228 } |
1158 if (sample->At(0) == 0) { | 1229 if (sample->At(0) == 0) { |
1159 // No frames. | 1230 // No frames. |
(...skipping 21 matching lines...) Expand all Loading... |
1181 processed_sample->set_user_tag(sample->user_tag()); | 1252 processed_sample->set_user_tag(sample->user_tag()); |
1182 if (sample->is_allocation_sample()) { | 1253 if (sample->is_allocation_sample()) { |
1183 processed_sample->set_allocation_cid(sample->allocation_cid()); | 1254 processed_sample->set_allocation_cid(sample->allocation_cid()); |
1184 } | 1255 } |
1185 processed_sample->set_first_frame_executing(!sample->exit_frame_sample()); | 1256 processed_sample->set_first_frame_executing(!sample->exit_frame_sample()); |
1186 | 1257 |
1187 // Copy stack trace from sample(s). | 1258 // Copy stack trace from sample(s). |
1188 bool truncated = false; | 1259 bool truncated = false; |
1189 Sample* current = sample; | 1260 Sample* current = sample; |
1190 while (current != NULL) { | 1261 while (current != NULL) { |
1191 for (intptr_t i = 0; i < FLAG_profile_depth; i++) { | 1262 for (intptr_t i = 0; i < kSampleSize; i++) { |
1192 if (current->At(i) == 0) { | 1263 if (current->At(i) == 0) { |
1193 break; | 1264 break; |
1194 } | 1265 } |
1195 processed_sample->Add(current->At(i)); | 1266 processed_sample->Add(current->At(i)); |
1196 } | 1267 } |
1197 | 1268 |
1198 truncated = truncated || current->truncated_trace(); | 1269 truncated = truncated || current->truncated_trace(); |
1199 current = Next(sample); | 1270 current = Next(current); |
1200 } | 1271 } |
1201 | 1272 |
1202 if (!sample->exit_frame_sample()) { | 1273 if (!sample->exit_frame_sample()) { |
1203 Isolate* isolate = thread->isolate(); | 1274 Isolate* isolate = thread->isolate(); |
1204 Isolate* vm_isolate = Dart::vm_isolate(); | 1275 Isolate* vm_isolate = Dart::vm_isolate(); |
1205 processed_sample->FixupCaller(isolate, | 1276 processed_sample->FixupCaller(isolate, |
1206 vm_isolate, | 1277 vm_isolate, |
1207 sample->pc_marker(), | 1278 sample->pc_marker(), |
1208 sample->GetStackBuffer()); | 1279 sample->GetStackBuffer()); |
1209 } | 1280 } |
1210 | 1281 |
1211 processed_sample->set_truncated(truncated); | 1282 processed_sample->set_truncated(truncated); |
1212 return processed_sample; | 1283 return processed_sample; |
1213 } | 1284 } |
1214 | 1285 |
1215 | 1286 |
1216 Sample* SampleBuffer::Next(Sample* sample) { | 1287 Sample* SampleBuffer::Next(Sample* sample) { |
1217 // TODO(johnmccutchan): Support chaining samples for complete stack traces. | 1288 if (!sample->is_continuation_sample()) |
1218 return NULL; | 1289 return NULL; |
| 1290 Sample* next_sample = At(sample->continuation_index()); |
| 1291 // Sanity check. |
| 1292 ASSERT(sample != next_sample); |
| 1293 // Detect invalid chaining. |
| 1294 if (sample->isolate() != next_sample->isolate()) { |
| 1295 return NULL; |
| 1296 } |
| 1297 if (sample->timestamp() != next_sample->timestamp()) { |
| 1298 return NULL; |
| 1299 } |
| 1300 if (sample->tid() != next_sample->tid()) { |
| 1301 return NULL; |
| 1302 } |
| 1303 return next_sample; |
1219 } | 1304 } |
1220 | 1305 |
1221 | 1306 |
1222 ProcessedSample::ProcessedSample() | 1307 ProcessedSample::ProcessedSample() |
1223 : pcs_(FLAG_profile_depth), | 1308 : pcs_(kSampleSize), |
1224 timestamp_(0), | 1309 timestamp_(0), |
1225 vm_tag_(0), | 1310 vm_tag_(0), |
1226 user_tag_(0), | 1311 user_tag_(0), |
1227 allocation_cid_(-1), | 1312 allocation_cid_(-1), |
1228 truncated_(false) { | 1313 truncated_(false) { |
1229 } | 1314 } |
1230 | 1315 |
1231 | 1316 |
1232 void ProcessedSample::FixupCaller(Isolate* isolate, | 1317 void ProcessedSample::FixupCaller(Isolate* isolate, |
1233 Isolate* vm_isolate, | 1318 Isolate* vm_isolate, |
(...skipping 90 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1324 uword pc) { | 1409 uword pc) { |
1325 return vm_isolate->heap()->CodeContains(pc) | 1410 return vm_isolate->heap()->CodeContains(pc) |
1326 || isolate->heap()->CodeContains(pc); | 1411 || isolate->heap()->CodeContains(pc); |
1327 } | 1412 } |
1328 | 1413 |
1329 | 1414 |
1330 ProcessedSampleBuffer::ProcessedSampleBuffer() { | 1415 ProcessedSampleBuffer::ProcessedSampleBuffer() { |
1331 } | 1416 } |
1332 | 1417 |
1333 } // namespace dart | 1418 } // namespace dart |
OLD | NEW |