Chromium Code Reviews — Index: runtime/vm/profiler.cc |
| diff --git a/runtime/vm/profiler.cc b/runtime/vm/profiler.cc |
| index c4e27e3e37494afc06ee2469ca584b8f248a1e5f..e88224e8d8c85eeba0fcd84efde2e393e756240e 100644 |
| --- a/runtime/vm/profiler.cc |
| +++ b/runtime/vm/profiler.cc |
| @@ -41,7 +41,7 @@ DEFINE_FLAG(int, profile_depth, 8, |
| DEFINE_FLAG(bool, profile_vm, true, |
| "Always collect native stack traces."); |
| #else |
| -DEFINE_FLAG(bool, profile_vm, true, |
| +DEFINE_FLAG(bool, profile_vm, false, |
| "Always collect native stack traces."); |
| #endif |
| @@ -262,7 +262,6 @@ Sample* SampleBuffer::ReserveSample() { |
| return At(cursor); |
| } |
| - |
| // Attempts to find the true return address when a Dart frame is being setup |
| // or torn down. |
| // NOTE: Architecture specific implementations below. |
| @@ -270,16 +269,11 @@ class ReturnAddressLocator : public ValueObject { |
| public: |
| ReturnAddressLocator(Sample* sample, const Code& code) |
| : sample_(sample), |
| - code_(Code::ZoneHandle(code.raw())), |
| - is_optimized_(code.is_optimized()) { |
| + code_(Code::ZoneHandle(code.raw())) { |
| ASSERT(!code_.IsNull()); |
| ASSERT(code_.ContainsInstructionAt(pc())); |
| } |
| - bool is_code_optimized() { |
| - return is_optimized_; |
| - } |
| - |
| uword pc() { |
| return sample_->pc(); |
| } |
| @@ -288,12 +282,13 @@ class ReturnAddressLocator : public ValueObject { |
| bool LocateReturnAddress(uword* return_address); |
| // Returns offset into code object. |
| - uword RelativePC() { |
| - return pc() - code_.EntryPoint(); |
| + intptr_t RelativePC() { |
| + ASSERT(pc() > code_.EntryPoint()); |
| + return static_cast<intptr_t>(pc() - code_.EntryPoint()); |
| } |
| - uint8_t* CodePointer(uword offset) { |
| - const uword size = code_.Size(); |
| + uint8_t* CodePointer(intptr_t offset) { |
| + const intptr_t size = code_.Size(); |
| ASSERT(offset < size); |
| uint8_t* code_pointer = reinterpret_cast<uint8_t*>(code_.EntryPoint()); |
| code_pointer += offset; |
| @@ -309,152 +304,54 @@ class ReturnAddressLocator : public ValueObject { |
| private: |
| Sample* sample_; |
| const Code& code_; |
| - const bool is_optimized_; |
| }; |
| -#if defined(TARGET_ARCH_IA32) |
| +#if defined(TARGET_ARCH_IA32) || defined(TARGET_ARCH_X64) |
| bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { |
| ASSERT(return_address != NULL); |
| - const uword offset = RelativePC(); |
| - const uword size = code_.Size(); |
| - if (is_optimized_) { |
| - // 0: push ebp |
| - // 1: mov ebp, esp |
| - // 3: ... |
| - if (offset == 0x0) { |
| - // Stack layout: |
| - // 0 RETURN ADDRESS. |
| - *return_address = StackAt(0); |
| - return true; |
| - } |
| - if (offset == 0x1) { |
| - // Stack layout: |
| - // 0 CALLER FRAME POINTER |
| - // 1 RETURN ADDRESS |
| - *return_address = StackAt(1); |
| - return true; |
| - } |
| - ReturnPattern rp(pc()); |
| - if (rp.IsValid()) { |
| - // Stack layout: |
| - // 0 RETURN ADDRESS. |
| - *return_address = StackAt(0); |
| - return true; |
| - } |
| - return false; |
| - } else { |
| - // 0x00: mov edi, function |
| - // 0x05: incl (inc usage count) <-- this is optional. |
| - // 0x08: cmpl (compare usage count) |
| - // 0x0f: jump to optimize function |
| - // 0x15: push ebp |
| - // 0x16: mov ebp, esp |
| - // 0x18: ... |
| - ASSERT(size >= 0x08); |
| - const uword incl_offset = 0x05; |
| - const uword incl_length = 0x03; |
| - const uint8_t incl_op_code = 0xFF; |
| - const bool has_incl = (*CodePointer(incl_offset) == incl_op_code); |
| - const uword push_fp_offset = has_incl ? 0x15 : 0x15 - incl_length; |
| - if (offset <= push_fp_offset) { |
| - // Stack layout: |
| - // 0 RETURN ADDRESS. |
| - *return_address = StackAt(0); |
| - return true; |
| - } |
| - if (offset == (push_fp_offset + 1)) { |
| - // Stack layout: |
| - // 0 CALLER FRAME POINTER |
| - // 1 RETURN ADDRESS |
| - *return_address = StackAt(1); |
| - return true; |
| - } |
| - ReturnPattern rp(pc()); |
| - if (rp.IsValid()) { |
| - // Stack layout: |
| - // 0 RETURN ADDRESS. |
| - *return_address = StackAt(0); |
| - return true; |
| - } |
| - return false; |
| + const intptr_t offset = RelativePC(); |
| + ASSERT(offset >= 0); |
| + const intptr_t size = code_.Size(); |
| + ASSERT(offset < size); |
| + const intptr_t prologue_offset = code_.GetPrologueOffset(); |
| + if (offset < prologue_offset) { |
| + // Before the prologue, return address is at the top of the stack. |
| + // TODO(johnmccutchan): Some intrinsics and stubs do not conform to the |
| + // expected stack layout. Use a more robust solution for those code objects. |
| + *return_address = StackAt(0); |
| + return true; |
| } |
| - UNREACHABLE(); |
| - return false; |
| -} |
| -#elif defined(TARGET_ARCH_X64) |
| -bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { |
| - ASSERT(return_address != NULL); |
| - const uword offset = RelativePC(); |
| - const uword size = code_.Size(); |
| - if (is_optimized_) { |
| - // 0x00: leaq (load pc marker) |
| - // 0x07: movq (load pool pointer) |
| - // 0x0c: push rpb |
| - // 0x0d: movq rbp, rsp |
| - // 0x10: ... |
| - const uword push_fp_offset = 0x0c; |
| - if (offset <= push_fp_offset) { |
| - // Stack layout: |
| - // 0 RETURN ADDRESS. |
| - *return_address = StackAt(0); |
| - return true; |
| - } |
| - if (offset == (push_fp_offset + 1)) { |
| - // Stack layout: |
| - // 0 CALLER FRAME POINTER |
| - // 1 RETURN ADDRESS |
| - *return_address = StackAt(1); |
| - return true; |
| - } |
| - ReturnPattern rp(pc()); |
| - if (rp.IsValid()) { |
| - // Stack layout: |
| - // 0 RETURN ADDRESS. |
| - *return_address = StackAt(0); |
| - return true; |
| - } |
| - return false; |
| - } else { |
| - // 0x00: leaq (load pc marker) |
| - // 0x07: movq (load pool pointer) |
| - // 0x0c: movq (load function) |
| - // 0x13: incl (inc usage count) <-- this is optional. |
| - // 0x16: cmpl (compare usage count) |
| - // 0x1d: jl + 0x |
| - // 0x23: jmp [pool pointer] |
| - // 0x27: push rbp |
| - // 0x28: movq rbp, rsp |
| - // 0x2b: ... |
| - ASSERT(size >= 0x16); |
| - const uword incl_offset = 0x13; |
| - const uword incl_length = 0x03; |
| - const uint8_t incl_op_code = 0xFF; |
| - const bool has_incl = (*CodePointer(incl_offset) == incl_op_code); |
| - const uword push_fp_offset = has_incl ? 0x27 : 0x27 - incl_length; |
| - if (offset <= push_fp_offset) { |
| - // Stack layout: |
| - // 0 RETURN ADDRESS. |
| - *return_address = StackAt(0); |
| - return true; |
| - } |
| - if (offset == (push_fp_offset + 1)) { |
| - // Stack layout: |
| - // 0 CALLER FRAME POINTER |
| - // 1 RETURN ADDRESS |
| - *return_address = StackAt(1); |
| - return true; |
| - } |
| - ReturnPattern rp(pc()); |
| - if (rp.IsValid()) { |
| - // Stack layout: |
| - // 0 RETURN ADDRESS. |
| - *return_address = StackAt(0); |
| - return true; |
| - } |
| - return false; |
| + // Detect if we are: |
| + // push ebp <--- here |
|
[inline review thread on the "push ebp" comment line]
srdjan (2015/05/21 16:06:34): Or corresponding code for X64
Cutch (2015/05/22 01:11:14): Added a comment about register names on X64.
|
| + // mov ebp, esp |
| + ProloguePattern pp(pc()); |
| + if (pp.IsValid()) { |
| + // Stack layout: |
| + // 0 RETURN ADDRESS. |
| + *return_address = StackAt(0); |
| + return true; |
| + } |
| + // Detect if we are: |
| + // push ebp |
| + // mov ebp, esp <--- here |
| + SetFramePointerPattern sfpp(pc()); |
| + if (sfpp.IsValid()) { |
| + // Stack layout: |
| + // 0 CALLER FRAME POINTER |
| + // 1 RETURN ADDRESS |
| + *return_address = StackAt(1); |
| + return true; |
| + } |
| + // Detect if we are: |
| + // ret <--- here |
| + ReturnPattern rp(pc()); |
| + if (rp.IsValid()) { |
| + // Stack layout: |
| + // 0 RETURN ADDRESS. |
| + *return_address = StackAt(0); |
| + return true; |
| } |
| - UNREACHABLE(); |
| return false; |
| } |
| #elif defined(TARGET_ARCH_ARM) |