| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "platform/address_sanitizer.h" | 5 #include "platform/address_sanitizer.h" |
| 6 #include "platform/memory_sanitizer.h" | 6 #include "platform/memory_sanitizer.h" |
| 7 #include "platform/utils.h" | 7 #include "platform/utils.h" |
| 8 | 8 |
| 9 #include "vm/allocation.h" | 9 #include "vm/allocation.h" |
| 10 #include "vm/atomic.h" | 10 #include "vm/atomic.h" |
| 11 #include "vm/code_patcher.h" | 11 #include "vm/code_patcher.h" |
| 12 #include "vm/instructions.h" | 12 #include "vm/instructions.h" |
| 13 #include "vm/isolate.h" | 13 #include "vm/isolate.h" |
| 14 #include "vm/json_stream.h" | 14 #include "vm/json_stream.h" |
| 15 #include "vm/lockers.h" | 15 #include "vm/lockers.h" |
| 16 #include "vm/native_symbol.h" | 16 #include "vm/native_symbol.h" |
| 17 #include "vm/object.h" | 17 #include "vm/object.h" |
| 18 #include "vm/os.h" | 18 #include "vm/os.h" |
| 19 #include "vm/profiler.h" | 19 #include "vm/profiler.h" |
| 20 #include "vm/reusable_handles.h" | 20 #include "vm/reusable_handles.h" |
| 21 #include "vm/signal_handler.h" | 21 #include "vm/signal_handler.h" |
| 22 #include "vm/simulator.h" | 22 #include "vm/simulator.h" |
| 23 #include "vm/stack_frame.h" | 23 #include "vm/stack_frame.h" |
| 24 | 24 |
| 25 namespace dart { | 25 namespace dart { |
| 26 | 26 |
| 27 | 27 |
| 28 DECLARE_FLAG(bool, trace_profiler); |
| 29 |
| 28 DEFINE_FLAG(bool, profile, true, "Enable Sampling Profiler"); | 30 DEFINE_FLAG(bool, profile, true, "Enable Sampling Profiler"); |
| 29 DEFINE_FLAG(bool, trace_profiled_isolates, false, "Trace profiled isolates."); | 31 DEFINE_FLAG(bool, trace_profiled_isolates, false, "Trace profiled isolates."); |
| 32 |
| 30 #if defined(TARGET_OS_ANDROID) || defined(TARGET_ARCH_ARM64) || \ | 33 #if defined(TARGET_OS_ANDROID) || defined(TARGET_ARCH_ARM64) || \ |
| 31 defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_MIPS) | 34 defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_MIPS) |
| 32 DEFINE_FLAG(int, profile_period, 10000, | 35 DEFINE_FLAG(int, profile_period, 10000, |
| 33 "Time between profiler samples in microseconds. Minimum 50."); | 36 "Time between profiler samples in microseconds. Minimum 50."); |
| 34 #else | 37 #else |
| 35 DEFINE_FLAG(int, profile_period, 1000, | 38 DEFINE_FLAG(int, profile_period, 1000, |
| 36 "Time between profiler samples in microseconds. Minimum 50."); | 39 "Time between profiler samples in microseconds. Minimum 50."); |
| 37 #endif | 40 #endif |
| 38 DEFINE_FLAG(int, profile_depth, 8, | 41 DEFINE_FLAG(int, profile_depth, 8, |
| 39 "Maximum number stack frames walked. Minimum 1. Maximum 255."); | 42 "Maximum number stack frames walked. Minimum 1. Maximum 255."); |
| 40 #if defined(USING_SIMULATOR) | 43 #if defined(USING_SIMULATOR) |
| 41 DEFINE_FLAG(bool, profile_vm, true, | 44 DEFINE_FLAG(bool, profile_vm, true, |
| 42 "Always collect native stack traces."); | 45 "Always collect native stack traces."); |
| 43 #else | 46 #else |
| 44 DEFINE_FLAG(bool, profile_vm, true, | 47 DEFINE_FLAG(bool, profile_vm, false, |
| 45 "Always collect native stack traces."); | 48 "Always collect native stack traces."); |
| 46 #endif | 49 #endif |
| 47 | 50 |
| 48 bool Profiler::initialized_ = false; | 51 bool Profiler::initialized_ = false; |
| 49 SampleBuffer* Profiler::sample_buffer_ = NULL; | 52 SampleBuffer* Profiler::sample_buffer_ = NULL; |
| 50 | 53 |
| 51 static intptr_t NumberOfFramesToCollect() { | 54 static intptr_t NumberOfFramesToCollect() { |
| 52 if (FLAG_profile_depth <= 0) { | 55 if (FLAG_profile_depth <= 0) { |
| 53 return 0; | 56 return 0; |
| 54 } | 57 } |
| (...skipping 178 matching lines...) |
| 233 uword* Sample::GetPCArray() const { | 236 uword* Sample::GetPCArray() const { |
| 234 return reinterpret_cast<uword*>( | 237 return reinterpret_cast<uword*>( |
| 235 reinterpret_cast<uintptr_t>(this) + sizeof(*this)); | 238 reinterpret_cast<uintptr_t>(this) + sizeof(*this)); |
| 236 } | 239 } |
| 237 | 240 |
| 238 | 241 |
| 239 SampleBuffer::SampleBuffer(intptr_t capacity) { | 242 SampleBuffer::SampleBuffer(intptr_t capacity) { |
| 240 ASSERT(Sample::instance_size() > 0); | 243 ASSERT(Sample::instance_size() > 0); |
| 241 samples_ = reinterpret_cast<Sample*>( | 244 samples_ = reinterpret_cast<Sample*>( |
| 242 calloc(capacity, Sample::instance_size())); | 245 calloc(capacity, Sample::instance_size())); |
| 246 if (FLAG_trace_profiler) { |
| 247 OS::Print("Profiler holds %" Pd " samples\n", capacity); |
| 248 OS::Print("Profiler sample is %" Pd " bytes\n", Sample::instance_size()); |
| 249 OS::Print("Profiler memory usage = %" Pd " bytes\n", |
| 250 capacity * Sample::instance_size()); |
| 251 } |
| 243 capacity_ = capacity; | 252 capacity_ = capacity; |
| 244 cursor_ = 0; | 253 cursor_ = 0; |
| 245 } | 254 } |
| 246 | 255 |
| 247 | 256 |
| 248 Sample* SampleBuffer::At(intptr_t idx) const { | 257 Sample* SampleBuffer::At(intptr_t idx) const { |
| 249 ASSERT(idx >= 0); | 258 ASSERT(idx >= 0); |
| 250 ASSERT(idx < capacity_); | 259 ASSERT(idx < capacity_); |
| 251 intptr_t offset = idx * Sample::instance_size(); | 260 intptr_t offset = idx * Sample::instance_size(); |
| 252 uint8_t* samples = reinterpret_cast<uint8_t*>(samples_); | 261 uint8_t* samples = reinterpret_cast<uint8_t*>(samples_); |
| 253 return reinterpret_cast<Sample*>(samples + offset); | 262 return reinterpret_cast<Sample*>(samples + offset); |
| 254 } | 263 } |
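Note on the indexing in At(): a Sample is a variable-sized record (a fixed header followed by a PC array whose length is fixed at startup from FLAG_profile_depth), so the buffer cannot use ordinary array indexing and instead computes a byte offset by hand. A minimal sketch of the same layout trick, with all names hypothetical:

#include <cstdint>

struct Header { int64_t timestamp; };   // fixed-size prefix of each record
static intptr_t g_pcs_per_record = 8;   // assumed startup configuration

static intptr_t RecordSize() {
  // Header plus a trailing PC array whose length is only known at runtime.
  return sizeof(Header) + g_pcs_per_record * sizeof(uintptr_t);
}

static Header* RecordAt(uint8_t* base, intptr_t idx) {
  // Manual byte arithmetic replaces base[idx], which would require a
  // compile-time element size.
  return reinterpret_cast<Header*>(base + idx * RecordSize());
}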
| 255 | 264 |
| 256 | 265 |
| 257 Sample* SampleBuffer::ReserveSample() { | 266 Sample* SampleBuffer::ReserveSample() { |
| 258 ASSERT(samples_ != NULL); | 267 ASSERT(samples_ != NULL); |
| 259 uintptr_t cursor = AtomicOperations::FetchAndIncrement(&cursor_); | 268 uintptr_t cursor = AtomicOperations::FetchAndIncrement(&cursor_); |
| 260 // Map back into sample buffer range. | 269 // Map back into sample buffer range. |
| 261 cursor = cursor % capacity_; | 270 cursor = cursor % capacity_; |
| 262 return At(cursor); | 271 return At(cursor); |
| 263 } | 272 } |
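ReserveSample() is the profiler's lock-free ring-buffer claim: a sampler on any thread atomically takes the next cursor value via the VM's AtomicOperations::FetchAndIncrement, and the modulo maps it back into the buffer so the oldest samples are overwritten. A standalone sketch of the same scheme, using std::atomic in place of the VM wrapper (RingBuffer and Slot are illustrative names, not VM types):

#include <atomic>
#include <cstdint>
#include <vector>

struct Slot { uintptr_t pc; };

class RingBuffer {
 public:
  explicit RingBuffer(intptr_t capacity)
      : slots_(capacity), capacity_(capacity), cursor_(0) {}

  Slot* Reserve() {
    // Each caller claims a unique, monotonically increasing ticket...
    uintptr_t ticket = cursor_.fetch_add(1, std::memory_order_relaxed);
    // ...which wraps into [0, capacity), recycling the oldest slots.
    return &slots_[ticket % capacity_];
  }

 private:
  std::vector<Slot> slots_;
  const intptr_t capacity_;
  std::atomic<uintptr_t> cursor_;
};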
| 264 | 273 |
| 265 | |
| 266 // Attempts to find the true return address when a Dart frame is being set up | 274 // Attempts to find the true return address when a Dart frame is being set up |
| 267 // or torn down. | 275 // or torn down. |
| 268 // NOTE: Architecture specific implementations below. | 276 // NOTE: Architecture specific implementations below. |
| 269 class ReturnAddressLocator : public ValueObject { | 277 class ReturnAddressLocator : public ValueObject { |
| 270 public: | 278 public: |
| 271 ReturnAddressLocator(Sample* sample, const Code& code) | 279 ReturnAddressLocator(Sample* sample, const Code& code) |
| 272 : sample_(sample), | 280 : sample_(sample), |
| 273 code_(Code::ZoneHandle(code.raw())), | 281 code_(Code::ZoneHandle(code.raw())) { |
| 274 is_optimized_(code.is_optimized()) { | |
| 275 ASSERT(!code_.IsNull()); | 282 ASSERT(!code_.IsNull()); |
| 276 ASSERT(code_.ContainsInstructionAt(pc())); | 283 ASSERT(code_.ContainsInstructionAt(pc())); |
| 277 } | 284 } |
| 278 | 285 |
| 279 bool is_code_optimized() { | |
| 280 return is_optimized_; | |
| 281 } | |
| 282 | |
| 283 uword pc() { | 286 uword pc() { |
| 284 return sample_->pc(); | 287 return sample_->pc(); |
| 285 } | 288 } |
| 286 | 289 |
| 287 // Returns false on failure. | 290 // Returns false on failure. |
| 288 bool LocateReturnAddress(uword* return_address); | 291 bool LocateReturnAddress(uword* return_address); |
| 289 | 292 |
| 290 // Returns offset into code object. | 293 // Returns offset into code object. |
| 291 uword RelativePC() { | 294 intptr_t RelativePC() { |
| 292 return pc() - code_.EntryPoint(); | 295 ASSERT(pc() >= code_.EntryPoint()); |
| 296 return static_cast<intptr_t>(pc() - code_.EntryPoint()); |
| 293 } | 297 } |
| 294 | 298 |
| 295 uint8_t* CodePointer(uword offset) { | 299 uint8_t* CodePointer(intptr_t offset) { |
| 296 const uword size = code_.Size(); | 300 const intptr_t size = code_.Size(); |
| 297 ASSERT(offset < size); | 301 ASSERT(offset < size); |
| 298 uint8_t* code_pointer = reinterpret_cast<uint8_t*>(code_.EntryPoint()); | 302 uint8_t* code_pointer = reinterpret_cast<uint8_t*>(code_.EntryPoint()); |
| 299 code_pointer += offset; | 303 code_pointer += offset; |
| 300 return code_pointer; | 304 return code_pointer; |
| 301 } | 305 } |
| 302 | 306 |
| 303 uword StackAt(intptr_t i) { | 307 uword StackAt(intptr_t i) { |
| 304 ASSERT(i >= 0); | 308 ASSERT(i >= 0); |
| 305 ASSERT(i < Sample::kStackBufferSizeInWords); | 309 ASSERT(i < Sample::kStackBufferSizeInWords); |
| 306 return sample_->GetStackBuffer()[i]; | 310 return sample_->GetStackBuffer()[i]; |
| 307 } | 311 } |
| 308 | 312 |
| 309 private: | 313 private: |
| 310 Sample* sample_; | 314 Sample* sample_; |
| 311 const Code& code_; | 315 const Code& code_; |
| 312 const bool is_optimized_; | |
| 313 }; | 316 }; |
| 314 | 317 |
| 315 | 318 |
| 316 #if defined(TARGET_ARCH_IA32) | 319 #if defined(TARGET_ARCH_IA32) || defined(TARGET_ARCH_X64) |
| 317 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { | 320 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { |
| 318 ASSERT(return_address != NULL); | 321 ASSERT(return_address != NULL); |
| 319 const uword offset = RelativePC(); | 322 const intptr_t offset = RelativePC(); |
| 320 const uword size = code_.Size(); | 323 ASSERT(offset >= 0); |
| 321 if (is_optimized_) { | 324 const intptr_t size = code_.Size(); |
| 322 // 0: push ebp | 325 ASSERT(offset < size); |
| 323 // 1: mov ebp, esp | 326 const intptr_t prologue_offset = code_.GetPrologueOffset(); |
| 324 // 3: ... | 327 if (offset < prologue_offset) { |
| 325 if (offset == 0x0) { | 328 // Before the prologue, return address is at the top of the stack. |
| 326 // Stack layout: | 329 // TODO(johnmccutchan): Some intrinsics and stubs do not conform to the |
| 327 // 0 RETURN ADDRESS. | 330 // expected stack layout. Use a more robust solution for those code objects. |
| 328 *return_address = StackAt(0); | 331 *return_address = StackAt(0); |
| 329 return true; | 332 return true; |
| 330 } | |
| 331 if (offset == 0x1) { | |
| 332 // Stack layout: | |
| 333 // 0 CALLER FRAME POINTER | |
| 334 // 1 RETURN ADDRESS | |
| 335 *return_address = StackAt(1); | |
| 336 return true; | |
| 337 } | |
| 338 ReturnPattern rp(pc()); | |
| 339 if (rp.IsValid()) { | |
| 340 // Stack layout: | |
| 341 // 0 RETURN ADDRESS. | |
| 342 *return_address = StackAt(0); | |
| 343 return true; | |
| 344 } | |
| 345 return false; | |
| 346 } else { | |
| 347 // 0x00: mov edi, function | |
| 348 // 0x05: incl (inc usage count) <-- this is optional. | |
| 349 // 0x08: cmpl (compare usage count) | |
| 350 // 0x0f: jump to optimize function | |
| 351 // 0x15: push ebp | |
| 352 // 0x16: mov ebp, esp | |
| 353 // 0x18: ... | |
| 354 ASSERT(size >= 0x08); | |
| 355 const uword incl_offset = 0x05; | |
| 356 const uword incl_length = 0x03; | |
| 357 const uint8_t incl_op_code = 0xFF; | |
| 358 const bool has_incl = (*CodePointer(incl_offset) == incl_op_code); | |
| 359 const uword push_fp_offset = has_incl ? 0x15 : 0x15 - incl_length; | |
| 360 if (offset <= push_fp_offset) { | |
| 361 // Stack layout: | |
| 362 // 0 RETURN ADDRESS. | |
| 363 *return_address = StackAt(0); | |
| 364 return true; | |
| 365 } | |
| 366 if (offset == (push_fp_offset + 1)) { | |
| 367 // Stack layout: | |
| 368 // 0 CALLER FRAME POINTER | |
| 369 // 1 RETURN ADDRESS | |
| 370 *return_address = StackAt(1); | |
| 371 return true; | |
| 372 } | |
| 373 ReturnPattern rp(pc()); | |
| 374 if (rp.IsValid()) { | |
| 375 // Stack layout: | |
| 376 // 0 RETURN ADDRESS. | |
| 377 *return_address = StackAt(0); | |
| 378 return true; | |
| 379 } | |
| 380 return false; | |
| 381 } | 333 } |
| 382 UNREACHABLE(); | 334 // Detect if we are: |
| 383 return false; | 335 // push ebp <--- here |
| 384 } | 336 // mov ebp, esp |
| 385 #elif defined(TARGET_ARCH_X64) | 337 // on X64 the register names are different but the sequence is the same. |
| 386 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { | 338 ProloguePattern pp(pc()); |
| 387 ASSERT(return_address != NULL); | 339 if (pp.IsValid()) { |
| 388 const uword offset = RelativePC(); | 340 // Stack layout: |
| 389 const uword size = code_.Size(); | 341 // 0 RETURN ADDRESS. |
| 390 if (is_optimized_) { | 342 *return_address = StackAt(0); |
| 391 // 0x00: leaq (load pc marker) | 343 return true; |
| 392 // 0x07: movq (load pool pointer) | |
| 393 // 0x0c: push rbp | |
| 394 // 0x0d: movq rbp, rsp | |
| 395 // 0x10: ... | |
| 396 const uword push_fp_offset = 0x0c; | |
| 397 if (offset <= push_fp_offset) { | |
| 398 // Stack layout: | |
| 399 // 0 RETURN ADDRESS. | |
| 400 *return_address = StackAt(0); | |
| 401 return true; | |
| 402 } | |
| 403 if (offset == (push_fp_offset + 1)) { | |
| 404 // Stack layout: | |
| 405 // 0 CALLER FRAME POINTER | |
| 406 // 1 RETURN ADDRESS | |
| 407 *return_address = StackAt(1); | |
| 408 return true; | |
| 409 } | |
| 410 ReturnPattern rp(pc()); | |
| 411 if (rp.IsValid()) { | |
| 412 // Stack layout: | |
| 413 // 0 RETURN ADDRESS. | |
| 414 *return_address = StackAt(0); | |
| 415 return true; | |
| 416 } | |
| 417 return false; | |
| 418 } else { | |
| 419 // 0x00: leaq (load pc marker) | |
| 420 // 0x07: movq (load pool pointer) | |
| 421 // 0x0c: movq (load function) | |
| 422 // 0x13: incl (inc usage count) <-- this is optional. | |
| 423 // 0x16: cmpl (compare usage count) | |
| 424 // 0x1d: jl + 0x | |
| 425 // 0x23: jmp [pool pointer] | |
| 426 // 0x27: push rbp | |
| 427 // 0x28: movq rbp, rsp | |
| 428 // 0x2b: ... | |
| 429 ASSERT(size >= 0x16); | |
| 430 const uword incl_offset = 0x13; | |
| 431 const uword incl_length = 0x03; | |
| 432 const uint8_t incl_op_code = 0xFF; | |
| 433 const bool has_incl = (*CodePointer(incl_offset) == incl_op_code); | |
| 434 const uword push_fp_offset = has_incl ? 0x27 : 0x27 - incl_length; | |
| 435 if (offset <= push_fp_offset) { | |
| 436 // Stack layout: | |
| 437 // 0 RETURN ADDRESS. | |
| 438 *return_address = StackAt(0); | |
| 439 return true; | |
| 440 } | |
| 441 if (offset == (push_fp_offset + 1)) { | |
| 442 // Stack layout: | |
| 443 // 0 CALLER FRAME POINTER | |
| 444 // 1 RETURN ADDRESS | |
| 445 *return_address = StackAt(1); | |
| 446 return true; | |
| 447 } | |
| 448 ReturnPattern rp(pc()); | |
| 449 if (rp.IsValid()) { | |
| 450 // Stack layout: | |
| 451 // 0 RETURN ADDRESS. | |
| 452 *return_address = StackAt(0); | |
| 453 return true; | |
| 454 } | |
| 455 return false; | |
| 456 } | 344 } |
| 457 UNREACHABLE(); | 345 // Detect if we are: |
| 346 // push ebp |
| 347 // mov ebp, esp <--- here |
| 348 // on X64 the register names are different but the sequence is the same. |
| 349 SetFramePointerPattern sfpp(pc()); |
| 350 if (sfpp.IsValid()) { |
| 351 // Stack layout: |
| 352 // 0 CALLER FRAME POINTER |
| 353 // 1 RETURN ADDRESS |
| 354 *return_address = StackAt(1); |
| 355 return true; |
| 356 } |
| 357 // Detect if we are: |
| 358 // ret <--- here |
| 359 ReturnPattern rp(pc()); |
| 360 if (rp.IsValid()) { |
| 361 // Stack layout: |
| 362 // 0 RETURN ADDRESS. |
| 363 *return_address = StackAt(0); |
| 364 return true; |
| 365 } |
| 458 return false; | 366 return false; |
| 459 } | 367 } |
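The NEW implementation above replaces the OLD code's hard-coded instruction offsets with pattern matching at the sampled pc: ProloguePattern, SetFramePointerPattern, and ReturnPattern (named in the diff) decide whether the return address sits at stack slot 0 or slot 1. A simplified illustration of that classification for IA32, with hypothetical function names; the byte values are standard x86 encodings, though an assembler may also emit mov ebp, esp in the 8B /r form (8B EC):

#include <cstdint>

// pc is at `push ebp`: the return address is still the top stack slot.
static bool AtPushFramePointer(const uint8_t* pc) {
  return pc[0] == 0x55;  // push ebp
}

// pc is at `mov ebp, esp`: the caller's ebp has just been pushed, so the
// return address is one slot down.
static bool AtSetFramePointer(const uint8_t* pc) {
  return pc[0] == 0x89 && pc[1] == 0xE5;  // mov ebp, esp (89 /r form)
}

// pc is at `ret`: the frame is torn down and the return address is the top
// stack slot again.
static bool AtReturn(const uint8_t* pc) {
  return pc[0] == 0xC3;  // ret
}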
| 460 #elif defined(TARGET_ARCH_ARM) | 368 #elif defined(TARGET_ARCH_ARM) |
| 461 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { | 369 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { |
| 462 ASSERT(return_address != NULL); | 370 ASSERT(return_address != NULL); |
| 463 return false; | 371 return false; |
| 464 } | 372 } |
| 465 #elif defined(TARGET_ARCH_ARM64) | 373 #elif defined(TARGET_ARCH_ARM64) |
| 466 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { | 374 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { |
| 467 ASSERT(return_address != NULL); | 375 ASSERT(return_address != NULL); |
| (...skipping 63 matching lines...) |
| 531 ASSERT(!code.IsNull()); | 439 ASSERT(!code.IsNull()); |
| 532 | 440 |
| 533 // The pc marker is our current best guess of a return address. | 441 // The pc marker is our current best guess of a return address. |
| 534 uword return_address = sample->pc_marker(); | 442 uword return_address = sample->pc_marker(); |
| 535 | 443 |
| 536 // Attempt to find a better return address. | 444 // Attempt to find a better return address. |
| 537 ReturnAddressLocator ral(sample, code); | 445 ReturnAddressLocator ral(sample, code); |
| 538 | 446 |
| 539 if (!ral.LocateReturnAddress(&return_address)) { | 447 if (!ral.LocateReturnAddress(&return_address)) { |
| 540 ASSERT(return_address == sample->pc_marker()); | 448 ASSERT(return_address == sample->pc_marker()); |
| 449 if (code.GetPrologueOffset() == 0) { |
| 450 // Code has the prologue at offset 0. The frame is already set up and |
| 451 // can be trusted. |
| 452 return; |
| 453 } |
| 541 // Could not find a better return address than the pc_marker. | 454 // Could not find a better return address than the pc_marker. |
| 542 if (code.ContainsInstructionAt(return_address)) { | 455 if (code.ContainsInstructionAt(return_address)) { |
| 543 // PC marker is in the same code as pc, no missing frame. | 456 // PC marker is in the same code as pc, no missing frame. |
| 544 return; | 457 return; |
| 545 } | 458 } |
| 546 } | 459 } |
| 547 | 460 |
| 548 if (!ContainedInDartCodeHeaps(return_address)) { | 461 if (!ContainedInDartCodeHeaps(return_address)) { |
| 549 // return address is not from the Dart heap. Do not insert. | 462 // return address is not from the Dart heap. Do not insert. |
| 550 return; | 463 return; |
| (...skipping 34 matching lines...) |
| 585 } | 498 } |
| 586 | 499 |
| 587 | 500 |
| 588 // Given an exit frame, walk the Dart stack. | 501 // Given an exit frame, walk the Dart stack. |
| 589 class ProfilerDartExitStackWalker : public ValueObject { | 502 class ProfilerDartExitStackWalker : public ValueObject { |
| 590 public: | 503 public: |
| 591 ProfilerDartExitStackWalker(Isolate* isolate, Sample* sample) | 504 ProfilerDartExitStackWalker(Isolate* isolate, Sample* sample) |
| 592 : sample_(sample), | 505 : sample_(sample), |
| 593 frame_iterator_(isolate) { | 506 frame_iterator_(isolate) { |
| 594 ASSERT(sample_ != NULL); | 507 ASSERT(sample_ != NULL); |
| 595 // Mark that this sample was collected from an exit frame. | |
| 596 sample_->set_exit_frame_sample(true); | |
| 597 } | 508 } |
| 598 | 509 |
| 599 void walk() { | 510 void walk() { |
| 511 // Mark that this sample was collected from an exit frame. |
| 512 sample_->set_exit_frame_sample(true); |
| 600 intptr_t frame_index = 0; | 513 intptr_t frame_index = 0; |
| 601 StackFrame* frame = frame_iterator_.NextFrame(); | 514 StackFrame* frame = frame_iterator_.NextFrame(); |
| 602 while (frame != NULL) { | 515 while (frame != NULL) { |
| 603 sample_->SetAt(frame_index, frame->pc()); | 516 sample_->SetAt(frame_index, frame->pc()); |
| 604 frame_index++; | 517 frame_index++; |
| 605 if (frame_index >= NumberOfFramesToCollect()) { | 518 if (frame_index >= NumberOfFramesToCollect()) { |
| 606 sample_->set_truncated_trace(true); | 519 sample_->set_truncated_trace(true); |
| 607 break; | 520 break; |
| 608 } | 521 } |
| 609 frame = frame_iterator_.NextFrame(); | 522 frame = frame_iterator_.NextFrame(); |
| (...skipping 20 matching lines...) |
| 630 sample_(sample), | 543 sample_(sample), |
| 631 stack_upper_(stack_upper), | 544 stack_upper_(stack_upper), |
| 632 stack_lower_(stack_lower) { | 545 stack_lower_(stack_lower) { |
| 633 ASSERT(sample_ != NULL); | 546 ASSERT(sample_ != NULL); |
| 634 pc_ = reinterpret_cast<uword*>(pc); | 547 pc_ = reinterpret_cast<uword*>(pc); |
| 635 fp_ = reinterpret_cast<uword*>(fp); | 548 fp_ = reinterpret_cast<uword*>(fp); |
| 636 sp_ = reinterpret_cast<uword*>(sp); | 549 sp_ = reinterpret_cast<uword*>(sp); |
| 637 } | 550 } |
| 638 | 551 |
| 639 void walk() { | 552 void walk() { |
| 553 sample_->set_exit_frame_sample(false); |
| 640 if (!ValidFramePointer()) { | 554 if (!ValidFramePointer()) { |
| 641 sample_->set_ignore_sample(true); | 555 sample_->set_ignore_sample(true); |
| 642 return; | 556 return; |
| 643 } | 557 } |
| 644 ASSERT(ValidFramePointer()); | 558 ASSERT(ValidFramePointer()); |
| 645 uword return_pc = InitialReturnAddress(); | 559 uword return_pc = InitialReturnAddress(); |
| 646 if (StubCode::InInvocationStubForIsolate(isolate_, return_pc)) { | 560 if (StubCode::InInvocationStubForIsolate(isolate_, return_pc)) { |
| 647 // Edge case: we have called out from the Invocation Stub but have not | 561 // Edge case: we have called out from the Invocation Stub but have not |
| 648 // created the stack frame of the callee. Attempt to locate the exit | 562 // created the stack frame of the callee. Attempt to locate the exit |
| 649 // frame before walking the stack. | 563 // frame before walking the stack. |
| (...skipping 508 matching lines...) |
| 1158 exited_dart_code, | 1072 exited_dart_code, |
| 1159 in_dart_code, | 1073 in_dart_code, |
| 1160 sample, | 1074 sample, |
| 1161 &native_stack_walker, | 1075 &native_stack_walker, |
| 1162 &dart_exit_stack_walker, | 1076 &dart_exit_stack_walker, |
| 1163 &dart_stack_walker, | 1077 &dart_stack_walker, |
| 1164 pc); | 1078 pc); |
| 1165 } | 1079 } |
| 1166 | 1080 |
| 1167 } // namespace dart | 1081 } // namespace dart |