OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "platform/address_sanitizer.h" | 5 #include "platform/address_sanitizer.h" |
6 #include "platform/memory_sanitizer.h" | 6 #include "platform/memory_sanitizer.h" |
7 #include "platform/utils.h" | 7 #include "platform/utils.h" |
8 | 8 |
9 #include "vm/allocation.h" | 9 #include "vm/allocation.h" |
10 #include "vm/atomic.h" | 10 #include "vm/atomic.h" |
11 #include "vm/code_patcher.h" | 11 #include "vm/code_patcher.h" |
| 12 #include "vm/instructions.h" |
12 #include "vm/isolate.h" | 13 #include "vm/isolate.h" |
13 #include "vm/json_stream.h" | 14 #include "vm/json_stream.h" |
14 #include "vm/lockers.h" | 15 #include "vm/lockers.h" |
15 #include "vm/native_symbol.h" | 16 #include "vm/native_symbol.h" |
16 #include "vm/object.h" | 17 #include "vm/object.h" |
17 #include "vm/os.h" | 18 #include "vm/os.h" |
18 #include "vm/profiler.h" | 19 #include "vm/profiler.h" |
19 #include "vm/reusable_handles.h" | 20 #include "vm/reusable_handles.h" |
20 #include "vm/signal_handler.h" | 21 #include "vm/signal_handler.h" |
21 #include "vm/simulator.h" | 22 #include "vm/simulator.h" |
22 #include "vm/stack_frame.h" | 23 #include "vm/stack_frame.h" |
23 | 24 |
24 namespace dart { | 25 namespace dart { |
25 | 26 |
26 | 27 |
27 #if defined(TARGET_OS_ANDROID) || defined(HOST_ARCH_ARM64) | 28 #if defined(TARGET_OS_ANDROID) || defined(HOST_ARCH_ARM64) |
28 DEFINE_FLAG(bool, profile, false, "Enable Sampling Profiler"); | 29 DEFINE_FLAG(bool, profile, false, "Enable Sampling Profiler"); |
29 #else | 30 #else |
30 DEFINE_FLAG(bool, profile, true, "Enable Sampling Profiler"); | 31 DEFINE_FLAG(bool, profile, true, "Enable Sampling Profiler"); |
31 #endif | 32 #endif |
32 DEFINE_FLAG(bool, trace_profiled_isolates, false, "Trace profiled isolates."); | 33 DEFINE_FLAG(bool, trace_profiled_isolates, false, "Trace profiled isolates."); |
33 DEFINE_FLAG(bool, trace_profiler, false, "Trace profiler."); | |
34 DEFINE_FLAG(int, profile_period, 1000, | 34 DEFINE_FLAG(int, profile_period, 1000, |
35 "Time between profiler samples in microseconds. Minimum 50."); | 35 "Time between profiler samples in microseconds. Minimum 50."); |
36 DEFINE_FLAG(int, profile_depth, 8, | 36 DEFINE_FLAG(int, profile_depth, 8, |
37 "Maximum number stack frames walked. Minimum 1. Maximum 255."); | 37 "Maximum number stack frames walked. Minimum 1. Maximum 255."); |
38 #if defined(PROFILE_NATIVE_CODE) || defined(USING_SIMULATOR) | 38 #if defined(PROFILE_NATIVE_CODE) || defined(USING_SIMULATOR) |
39 DEFINE_FLAG(bool, profile_vm, true, | 39 DEFINE_FLAG(bool, profile_vm, true, |
40 "Always collect native stack traces."); | 40 "Always collect native stack traces."); |
41 #else | 41 #else |
42 DEFINE_FLAG(bool, profile_vm, false, | 42 DEFINE_FLAG(bool, profile_vm, false, |
43 "Always collect native stack traces."); | 43 "Always collect native stack traces."); |
(...skipping 202 matching lines...)
246 | 246 |
247 Sample* SampleBuffer::ReserveSample() { | 247 Sample* SampleBuffer::ReserveSample() { |
248 ASSERT(samples_ != NULL); | 248 ASSERT(samples_ != NULL); |
249 uintptr_t cursor = AtomicOperations::FetchAndIncrement(&cursor_); | 249 uintptr_t cursor = AtomicOperations::FetchAndIncrement(&cursor_); |
250 // Map back into sample buffer range. | 250 // Map back into sample buffer range. |
251 cursor = cursor % capacity_; | 251 cursor = cursor % capacity_; |
252 return At(cursor); | 252 return At(cursor); |
253 } | 253 } |
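A minimal standalone sketch of the reservation scheme above (an editorial illustration, not part of this CL: std::atomic stands in for AtomicOperations::FetchAndIncrement and the capacity of 8 is hypothetical). Each caller gets a unique, monotonically increasing ticket, and the modulo folds it back into the fixed-size buffer, so the oldest samples are overwritten once the buffer wraps:

  #include <atomic>
  #include <stdint.h>

  static std::atomic<uintptr_t> cursor(0);
  static const uintptr_t kCapacity = 8;  // hypothetical buffer size

  uintptr_t ReserveSlot() {
    uintptr_t ticket = cursor.fetch_add(1);  // unique per caller, never reset
    return ticket % kCapacity;               // fold back into [0, kCapacity)
  }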
254 | 254 |
255 | 255 |
256 static void SetPCMarkerIfSafe(Sample* sample) { | 256 // Attempts to find the true return address when a Dart frame is being set up |
257 ASSERT(sample != NULL); | 257 // or torn down. |
258 | 258 // NOTE: Architecture specific implementations below. |
259 uword* fp = reinterpret_cast<uword*>(sample->fp()); | 259 class ReturnAddressLocator : public ValueObject { |
260 uword* sp = reinterpret_cast<uword*>(sample->sp()); | 260 public: |
261 | 261 ReturnAddressLocator(Sample* sample, const Code& code) |
262 // If FP == SP, the pc marker hasn't been pushed. | 262 : sample_(sample), |
263 if (fp > sp) { | 263 code_(Code::ZoneHandle(code.raw())), |
264 #if defined(TARGET_OS_WINDOWS) | 264 is_optimized_(code.is_optimized()) { |
265 // If the fp is at the beginning of a page, it may be unsafe to access | 265 ASSERT(!code_.IsNull()); |
266 // the pc marker, because we are reading it from a different thread on | 266 ASSERT(code_.ContainsInstructionAt(pc())); |
267 // Windows. The marker is below fp and the previous page may be a guard | 267 } |
268 // page. | 268 |
269 const intptr_t kPageMask = VirtualMemory::PageSize() - 1; | 269 bool is_code_optimized() { |
270 if ((sample->fp() & kPageMask) == 0) { | 270 return is_optimized_; |
| 271 } |
| 272 |
| 273 uword pc() { |
| 274 return sample_->pc(); |
| 275 } |
| 276 |
| 277 // Returns false on failure. |
| 278 bool LocateReturnAddress(uword* return_address); |
| 279 |
| 280 // Returns offset into code object. |
| 281 uword RelativePC() { |
| 282 return pc() - code_.EntryPoint(); |
| 283 } |
| 284 |
| 285 uint8_t* CodePointer(uword offset) { |
| 286 const uword size = code_.Size(); |
| 287 ASSERT(offset < size); |
| 288 uint8_t* code_pointer = reinterpret_cast<uint8_t*>(code_.EntryPoint()); |
| 289 code_pointer += offset; |
| 290 return code_pointer; |
| 291 } |
| 292 |
| 293 uword StackAt(intptr_t i) { |
| 294 ASSERT(i >= 0); |
| 295 ASSERT(i < Sample::kStackBufferSizeInWords); |
| 296 return sample_->GetStackBuffer()[i]; |
| 297 } |
| 298 |
| 299 private: |
| 300 Sample* sample_; |
| 301 const Code& code_; |
| 302 const bool is_optimized_; |
| 303 }; |
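A usage sketch, mirroring how PreprocessVisitor::CheckForMissingDartFrame below drives this class (sample and code are assumed to be the sampled frame and the Code object containing its pc):

  uword return_address = sample->pc_marker();  // best guess so far
  ReturnAddressLocator ral(sample, code);
  if (ral.LocateReturnAddress(&return_address)) {
    // return_address now holds a more trustworthy caller address.
  }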
| 304 |
| 305 |
| 306 #if defined(TARGET_ARCH_IA32) |
| 307 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { |
| 308 ASSERT(return_address != NULL); |
| 309 const uword offset = RelativePC(); |
| 310 const uword size = code_.Size(); |
| 311 if (is_optimized_) { |
| 312 // 0: push ebp |
| 313 // 1: mov ebp, esp |
| 314 // 3: ... |
| 315 if (offset == 0x0) { |
| 316 // Stack layout: |
| 317 // 0 RETURN ADDRESS. |
| 318 *return_address = StackAt(0); |
| 319 return true; |
| 320 } |
| 321 if (offset == 0x1) { |
| 322 // Stack layout: |
| 323 // 0 CALLER FRAME POINTER |
| 324 // 1 RETURN ADDRESS |
| 325 *return_address = StackAt(1); |
| 326 return true; |
| 327 } |
| 328 ReturnPattern rp(pc()); |
| 329 if (rp.IsValid()) { |
| 330 // Stack layout: |
| 331 // 0 RETURN ADDRESS. |
| 332 *return_address = StackAt(0); |
| 333 return true; |
| 334 } |
| 335 return false; |
| 336 } else { |
| 337 // 0x00: mov edi, function |
| 338 // 0x05: incl (inc usage count) <-- this is optional. |
| 339 // 0x08: cmpl (compare usage count) |
| 340 // 0x0f: jump to optimize function |
| 341 // 0x15: push ebp |
| 342 // 0x16: mov ebp, esp |
| 343 // 0x18: ... |
| 344 ASSERT(size >= 0x08); |
| 345 const uword incl_offset = 0x05; |
| 346 const uword incl_length = 0x03; |
| 347 const uint8_t incl_op_code = 0xFF; |
| 348 const bool has_incl = (*CodePointer(incl_offset) == incl_op_code); |
| 349 const uword push_fp_offset = has_incl ? 0x15 : 0x15 - incl_length; |
| 350 if (offset <= push_fp_offset) { |
| 351 // Stack layout: |
| 352 // 0 RETURN ADDRESS. |
| 353 *return_address = StackAt(0); |
| 354 return true; |
| 355 } |
| 356 if (offset == (push_fp_offset + 1)) { |
| 357 // Stack layout: |
| 358 // 0 CALLER FRAME POINTER |
| 359 // 1 RETURN ADDRESS |
| 360 *return_address = StackAt(1); |
| 361 return true; |
| 362 } |
| 363 ReturnPattern rp(pc()); |
| 364 if (rp.IsValid()) { |
| 365 // Stack layout: |
| 366 // 0 RETURN ADDRESS. |
| 367 *return_address = StackAt(0); |
| 368 return true; |
| 369 } |
| 370 return false; |
| 371 } |
| 372 UNREACHABLE(); |
| 373 return false; |
| 374 } |
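A worked example for the unoptimized IA32 prologue above: with the optional incl present, the caller's EBP is pushed at offset 0x15; without it every later instruction slides up by the 3-byte incl, so push_fp_offset becomes 0x15 - 0x03 = 0x12. A sample whose pc offset is at or before push_fp_offset was interrupted before EBP was saved, so the return address is still the top word of the captured stack buffer (StackAt(0)); one byte later the saved EBP sits on top and the return address has moved to StackAt(1).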
| 375 #elif defined(TARGET_ARCH_X64) |
| 376 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { |
| 377 ASSERT(return_address != NULL); |
| 378 const uword offset = RelativePC(); |
| 379 const uword size = code_.Size(); |
| 380 if (is_optimized_) { |
| 381 // 0x00: leaq (load pc marker) |
| 382 // 0x07: movq (load pool pointer) |
| 383 // 0x0c: push rbp |
| 384 // 0x0d: movq rbp, rsp |
| 385 // 0x10: ... |
| 386 const uword push_fp_offset = 0x0c; |
| 387 if (offset <= push_fp_offset) { |
| 388 // Stack layout: |
| 389 // 0 RETURN ADDRESS. |
| 390 *return_address = StackAt(0); |
| 391 return true; |
| 392 } |
| 393 if (offset == (push_fp_offset + 1)) { |
| 394 // Stack layout: |
| 395 // 0 CALLER FRAME POINTER |
| 396 // 1 RETURN ADDRESS |
| 397 *return_address = StackAt(1); |
| 398 return true; |
| 399 } |
| 400 ReturnPattern rp(pc()); |
| 401 if (rp.IsValid()) { |
| 402 // Stack layout: |
| 403 // 0 RETURN ADDRESS. |
| 404 *return_address = StackAt(0); |
| 405 return true; |
| 406 } |
| 407 return false; |
| 408 } else { |
| 409 // 0x00: leaq (load pc marker) |
| 410 // 0x07: movq (load pool pointer) |
| 411 // 0x0c: movq (load function) |
| 412 // 0x13: incl (inc usage count) <-- this is optional. |
| 413 // 0x16: cmpl (compare usage count) |
| 414 // 0x1d: jl + 0x |
| 415 // 0x23: jmp [pool pointer] |
| 416 // 0x27: push rbp |
| 417 // 0x28: movq rbp, rsp |
| 418 // 0x2b: ... |
| 419 ASSERT(size >= 0x16); |
| 420 const uword incl_offset = 0x13; |
| 421 const uword incl_length = 0x03; |
| 422 const uint8_t incl_op_code = 0xFF; |
| 423 const bool has_incl = (*CodePointer(incl_offset) == incl_op_code); |
| 424 const uword push_fp_offset = has_incl ? 0x27 : 0x27 - incl_length; |
| 425 if (offset <= push_fp_offset) { |
| 426 // Stack layout: |
| 427 // 0 RETURN ADDRESS. |
| 428 *return_address = StackAt(0); |
| 429 return true; |
| 430 } |
| 431 if (offset == (push_fp_offset + 1)) { |
| 432 // Stack layout: |
| 433 // 0 CALLER FRAME POINTER |
| 434 // 1 RETURN ADDRESS |
| 435 *return_address = StackAt(1); |
| 436 return true; |
| 437 } |
| 438 ReturnPattern rp(pc()); |
| 439 if (rp.IsValid()) { |
| 440 // Stack layout: |
| 441 // 0 RETURN ADDRESS. |
| 442 *return_address = StackAt(0); |
| 443 return true; |
| 444 } |
| 445 return false; |
| 446 } |
| 447 UNREACHABLE(); |
| 448 return false; |
| 449 } |
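The x64 case follows the same pattern, shifted by the pc-marker and pool-pointer loads: optimized code pushes RBP at offset 0x0c, while unoptimized code pushes it at 0x27, or 0x27 - 0x03 = 0x24 when the optional incl is absent. At or before that offset the return address is StackAt(0); one byte past it the saved RBP has been pushed and the return address is StackAt(1).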
| 450 #elif defined(TARGET_ARCH_ARM) |
| 451 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { |
| 452 ASSERT(return_address != NULL); |
| 453 return false; |
| 454 } |
| 455 #elif defined(TARGET_ARCH_ARM64) |
| 456 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { |
| 457 ASSERT(return_address != NULL); |
| 458 return false; |
| 459 } |
| 460 #elif defined(TARGET_ARCH_MIPS) |
| 461 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { |
| 462 ASSERT(return_address != NULL); |
| 463 return false; |
| 464 } |
| 465 #else |
| 466 #error ReturnAddressLocator implementation missing for this architecture. |
| 467 #endif |
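On ARM, ARM64, and MIPS the locator always reports failure, so CheckForMissingDartFrame below falls back to the raw pc marker on those architectures.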
| 468 |
| 469 |
| 470 PreprocessVisitor::PreprocessVisitor(Isolate* isolate) |
| 471 : SampleVisitor(isolate), |
| 472 vm_isolate_(Dart::vm_isolate()) { |
| 473 } |
| 474 |
| 475 |
| 476 void PreprocessVisitor::VisitSample(Sample* sample) { |
| 477 if (sample->processed()) { |
| 478 // Already processed. |
| 479 return; |
| 480 } |
| 481 // Mark that we've processed this sample. |
| 482 sample->set_processed(true); |
| 483 |
| 484 if (sample->exit_frame_sample()) { |
| 485 // Exit frame sample, no preprocessing required. |
| 486 return; |
| 487 } |
| 488 REUSABLE_CODE_HANDLESCOPE(isolate()); |
| 489 // Lookup code object for leaf frame. |
| 490 Code& code = reused_code_handle.Handle(); |
| 491 code = FindCodeForPC(sample->At(0)); |
| 492 sample->set_leaf_frame_is_dart(!code.IsNull()); |
| 493 if (!code.IsNull() && (code.compile_timestamp() > sample->timestamp())) { |
| 494 // Code compiled after sample. Ignore. |
| 495 return; |
| 496 } |
| 497 if (sample->leaf_frame_is_dart()) { |
| 498 CheckForMissingDartFrame(code, sample); |
| 499 } |
| 500 } |
| 501 |
| 502 |
| 503 void PreprocessVisitor::CheckForMissingDartFrame(const Code& code, |
| 504 Sample* sample) const { |
| 505 // Some stubs (and intrinsics) do not push a frame onto the stack leaving |
| 506 // the frame pointer in the caller. |
| 507 // |
| 508 // PC -> STUB |
| 509 // FP -> DART3 <-+ |
| 510 // DART2 <-| <- TOP FRAME RETURN ADDRESS. |
| 511 // DART1 <-| |
| 512 // ..... |
| 513 // |
| 514 // In this case, traversing the linked stack frames will not collect a PC |
| 515 // inside DART3. The stack will incorrectly be: STUB, DART2, DART1. |
| 516 // In Dart code, after pushing the FP onto the stack, an IP in the current |
| 517 // function is pushed onto the stack as well. This stack slot is called |
| 518 // the PC marker. We can use the PC marker to insert DART3 into the stack |
| 519 // so that it will correctly be: STUB, DART3, DART2, DART1. Note the |
| 520 // inserted PC may not accurately reflect the true return address into DART3. |
| 521 ASSERT(!code.IsNull()); |
| 522 |
| 523 // The pc marker is our current best guess of a return address. |
| 524 uword return_address = sample->pc_marker(); |
| 525 |
| 526 // Attempt to find a better return address. |
| 527 ReturnAddressLocator ral(sample, code); |
| 528 |
| 529 if (!ral.LocateReturnAddress(&return_address)) { |
| 530 ASSERT(return_address == sample->pc_marker()); |
| 531 // Could not find a better return address than the pc_marker. |
| 532 if (code.ContainsInstructionAt(return_address)) { |
| 533 // PC marker is in the same code as pc, no missing frame. |
271 return; | 534 return; |
272 } | 535 } |
273 #endif | 536 if (!ContainedInDartCodeHeaps(return_address)) { |
274 uword* pc_marker_ptr = fp + kPcMarkerSlotFromFp; | 537 // PC marker is not from the Dart heap. Do not insert. |
275 // MSan/ASan are unaware of frames initialized by generated code. | 538 return; |
276 MSAN_UNPOISON(pc_marker_ptr, kWordSize); | 539 } |
277 ASAN_UNPOISON(pc_marker_ptr, kWordSize); | 540 } |
278 sample->set_pc_marker(*pc_marker_ptr); | 541 |
279 } | 542 if (return_address != 0) { |
280 } | 543 sample->InsertCallerForTopFrame(return_address); |
281 | 544 } |
282 | 545 } |
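A note on the fallback path above: when LocateReturnAddress fails, return_address is still the raw pc marker, and it is only inserted when it points outside the leaf code object (otherwise there is no missing frame to repair) and inside one of the two Dart code heaps (otherwise it is not a plausible Dart return address).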
| 546 |
| 547 |
| 548 bool PreprocessVisitor::ContainedInDartCodeHeaps(uword pc) const { |
| 549 return isolate()->heap()->CodeContains(pc) || |
| 550 vm_isolate()->heap()->CodeContains(pc); |
| 551 } |
| 552 |
| 553 |
| 554 RawCode* PreprocessVisitor::FindCodeForPC(uword pc) const { |
| 555 // Check current isolate for pc. |
| 556 if (isolate()->heap()->CodeContains(pc)) { |
| 557 return Code::LookupCode(pc); |
| 558 } |
| 559 // Check VM isolate for pc. |
| 560 if (vm_isolate()->heap()->CodeContains(pc)) { |
| 561 return Code::LookupCodeInVmIsolate(pc); |
| 562 } |
| 563 return Code::null(); |
| 564 } |
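The two-step lookup mirrors the two code heaps: ordinary compiled code lives in the current isolate's heap, while code owned by the VM isolate is shared across isolates, so a leaf pc may legitimately fall in either one.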
| 565 |
| 566 |
| 567 ClearProfileVisitor::ClearProfileVisitor(Isolate* isolate) |
| 568 : SampleVisitor(isolate) { |
| 569 } |
| 570 |
| 571 |
| 572 void ClearProfileVisitor::VisitSample(Sample* sample) { |
| 573 sample->Clear(); |
| 574 } |
| 575 |
| 576 |
283 // Given an exit frame, walk the Dart stack. | 577 // Given an exit frame, walk the Dart stack. |
284 class ProfilerDartExitStackWalker : public ValueObject { | 578 class ProfilerDartExitStackWalker : public ValueObject { |
285 public: | 579 public: |
286 ProfilerDartExitStackWalker(Isolate* isolate, Sample* sample) | 580 ProfilerDartExitStackWalker(Isolate* isolate, Sample* sample) |
287 : sample_(sample), | 581 : sample_(sample), |
288 frame_iterator_(isolate) { | 582 frame_iterator_(isolate) { |
289 ASSERT(sample_ != NULL); | 583 ASSERT(sample_ != NULL); |
290 // Mark that this sample was collected from an exit frame. | 584 // Mark that this sample was collected from an exit frame. |
291 sample_->set_exit_frame_sample(true); | 585 sample_->set_exit_frame_sample(true); |
292 } | 586 } |
(...skipping 275 matching lines...)
568 | 862 |
569 Sample* sample_; | 863 Sample* sample_; |
570 const uword stack_upper_; | 864 const uword stack_upper_; |
571 const uword original_pc_; | 865 const uword original_pc_; |
572 const uword original_fp_; | 866 const uword original_fp_; |
573 const uword original_sp_; | 867 const uword original_sp_; |
574 uword lower_bound_; | 868 uword lower_bound_; |
575 }; | 869 }; |
576 | 870 |
577 | 871 |
| 872 static void CopyPCMarkerIfSafe(Sample* sample) { |
| 873 ASSERT(sample != NULL); |
| 874 |
| 875 uword* fp = reinterpret_cast<uword*>(sample->fp()); |
| 876 uword* sp = reinterpret_cast<uword*>(sample->sp()); |
| 877 |
| 878 // If FP == SP, the pc marker hasn't been pushed. |
| 879 if (fp > sp) { |
| 880 #if defined(TARGET_OS_WINDOWS) |
| 881 COMPILE_ASSERT(kPcMarkerSlotFromFp < 0); |
| 882 // If the fp is at the beginning of a page, it may be unsafe to access |
| 883 // the pc marker, because we are reading it from a different thread on |
| 884 // Windows. The marker is below fp and the previous page may be a guard |
| 885 // page. |
| 886 const intptr_t kPageMask = VirtualMemory::PageSize() - 1; |
| 887 if ((sample->fp() & kPageMask) == 0) { |
| 888 return; |
| 889 } |
| 890 #endif |
| 891 uword* pc_marker_ptr = fp + kPcMarkerSlotFromFp; |
| 892 // MSan/ASan are unaware of frames initialized by generated code. |
| 893 MSAN_UNPOISON(pc_marker_ptr, kWordSize); |
| 894 ASAN_UNPOISON(pc_marker_ptr, kWordSize); |
| 895 sample->set_pc_marker(*pc_marker_ptr); |
| 896 } |
| 897 } |
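A worked example of the guard-page check above, assuming 4 KB pages: kPageMask is then 0xFFF, so a page-aligned fp such as 0x7f0000002000 satisfies (fp & kPageMask) == 0; the pc marker, which sits below fp, would then live on the previous page, which on Windows may be an unmapped guard page of another thread's stack, so the sample keeps its default pc marker instead of risking the read.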
| 898 |
| 899 |
| 900 static void CopyStackBuffer(Sample* sample) { |
| 901 ASSERT(sample != NULL); |
| 902 uword* sp = reinterpret_cast<uword*>(sample->sp()); |
| 903 uword* buffer = sample->GetStackBuffer(); |
| 904 if (sp != NULL) { |
| 905 for (intptr_t i = 0; i < Sample::kStackBufferSizeInWords; i++) { |
| 906 MSAN_UNPOISON(sp, kWordSize); |
| 907 ASAN_UNPOISON(sp, kWordSize); |
| 908 buffer[i] = *sp; |
| 909 sp++; |
| 910 } |
| 911 } |
| 912 } |
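This snapshot of the top-of-stack words is what ReturnAddressLocator::StackAt() reads later, letting the preprocessing pass inspect the interrupted frame's stack top without touching the sampled thread's live stack again.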
| 913 |
| 914 |
578 void Profiler::RecordSampleInterruptCallback( | 915 void Profiler::RecordSampleInterruptCallback( |
579 const InterruptedThreadState& state, | 916 const InterruptedThreadState& state, |
580 void* data) { | 917 void* data) { |
581 Isolate* isolate = reinterpret_cast<Isolate*>(data); | 918 Isolate* isolate = reinterpret_cast<Isolate*>(data); |
582 if ((isolate == NULL) || (Dart::vm_isolate() == NULL)) { | 919 if ((isolate == NULL) || (Dart::vm_isolate() == NULL)) { |
583 // No isolate. | 920 // No isolate. |
584 return; | 921 return; |
585 } | 922 } |
586 | 923 |
587 ASSERT(isolate != Dart::vm_isolate()); | 924 ASSERT(isolate != Dart::vm_isolate()); |
(...skipping 84 matching lines...)
672 } | 1009 } |
673 #endif | 1010 #endif |
674 // Increment counter for vm tag. | 1011 // Increment counter for vm tag. |
675 VMTagCounters* counters = isolate->vm_tag_counters(); | 1012 VMTagCounters* counters = isolate->vm_tag_counters(); |
676 ASSERT(counters != NULL); | 1013 ASSERT(counters != NULL); |
677 counters->Increment(vm_tag); | 1014 counters->Increment(vm_tag); |
678 sample->set_vm_tag(vm_tag); | 1015 sample->set_vm_tag(vm_tag); |
679 sample->set_user_tag(isolate->user_tag()); | 1016 sample->set_user_tag(isolate->user_tag()); |
680 sample->set_sp(sp); | 1017 sample->set_sp(sp); |
681 sample->set_fp(state.fp); | 1018 sample->set_fp(state.fp); |
| 1019 sample->set_lr(state.lr); |
| 1020 CopyStackBuffer(sample); |
682 #if !(defined(TARGET_OS_WINDOWS) && defined(TARGET_ARCH_X64)) | 1021 #if !(defined(TARGET_OS_WINDOWS) && defined(TARGET_ARCH_X64)) |
683 // On Win64 it is never safe to read another thread's stack unless the | 1022 // On Win64 it is never safe to read another thread's stack unless the |
684 // other thread is inside Dart code. | 1023 // other thread is inside Dart code. |
685 SetPCMarkerIfSafe(sample); | 1024 if (vm_tag != VMTag::kDartTagId) { |
| 1025 CopyPCMarkerIfSafe(sample); |
| 1026 } |
686 #endif | 1027 #endif |
687 | 1028 |
688 // Walk the call stack. | 1029 // Walk the call stack. |
689 if (FLAG_profile_vm) { | 1030 if (FLAG_profile_vm) { |
690 // Always walk the native stack collecting both native and Dart frames. | 1031 // Always walk the native stack collecting both native and Dart frames. |
691 ProfilerNativeStackWalker stackWalker(sample, | 1032 ProfilerNativeStackWalker stackWalker(sample, |
692 stack_lower, | 1033 stack_lower, |
693 stack_upper, | 1034 stack_upper, |
694 state.pc, | 1035 state.pc, |
695 state.fp, | 1036 state.fp, |
(...skipping 33 matching lines...)
729 state.pc, | 1070 state.pc, |
730 state.fp, | 1071 state.fp, |
731 sp); | 1072 sp); |
732 stackWalker.walk(); | 1073 stackWalker.walk(); |
733 #endif | 1074 #endif |
734 } | 1075 } |
735 } | 1076 } |
736 } | 1077 } |
737 | 1078 |
738 } // namespace dart | 1079 } // namespace dart |