Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "platform/address_sanitizer.h" | 5 #include "platform/address_sanitizer.h" |
| 6 #include "platform/memory_sanitizer.h" | 6 #include "platform/memory_sanitizer.h" |
| 7 #include "platform/utils.h" | 7 #include "platform/utils.h" |
| 8 | 8 |
| 9 #include "vm/allocation.h" | 9 #include "vm/allocation.h" |
| 10 #include "vm/atomic.h" | 10 #include "vm/atomic.h" |
| (...skipping 23 matching lines...) | |
| 34 #else | 34 #else |
| 35 DEFINE_FLAG(int, profile_period, 1000, | 35 DEFINE_FLAG(int, profile_period, 1000, |
| 36 "Time between profiler samples in microseconds. Minimum 50."); | 36 "Time between profiler samples in microseconds. Minimum 50."); |
| 37 #endif | 37 #endif |
| 38 DEFINE_FLAG(int, profile_depth, 8, | 38 DEFINE_FLAG(int, profile_depth, 8, |
| 39 "Maximum number stack frames walked. Minimum 1. Maximum 255."); | 39 "Maximum number stack frames walked. Minimum 1. Maximum 255."); |
| 40 #if defined(USING_SIMULATOR) | 40 #if defined(USING_SIMULATOR) |
| 41 DEFINE_FLAG(bool, profile_vm, true, | 41 DEFINE_FLAG(bool, profile_vm, true, |
| 42 "Always collect native stack traces."); | 42 "Always collect native stack traces."); |
| 43 #else | 43 #else |
| 44 DEFINE_FLAG(bool, profile_vm, true, | 44 DEFINE_FLAG(bool, profile_vm, false, |
| 45 "Always collect native stack traces."); | 45 "Always collect native stack traces."); |
| 46 #endif | 46 #endif |
| 47 | 47 |
| 48 bool Profiler::initialized_ = false; | 48 bool Profiler::initialized_ = false; |
| 49 SampleBuffer* Profiler::sample_buffer_ = NULL; | 49 SampleBuffer* Profiler::sample_buffer_ = NULL; |
| 50 | 50 |
| 51 static intptr_t NumberOfFramesToCollect() { | 51 static intptr_t NumberOfFramesToCollect() { |
| 52 if (FLAG_profile_depth <= 0) { | 52 if (FLAG_profile_depth <= 0) { |
| 53 return 0; | 53 return 0; |
| 54 } | 54 } |
| (...skipping 200 matching lines...) | |
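
The body of `NumberOfFramesToCollect` is elided from this diff. Going only by the documented flag contract ("Minimum 1. Maximum 255.") and the guard visible above, a minimal sketch of the clamping it presumably performs could look like the following; `ClampProfileDepth` is a hypothetical name, not the VM's elided implementation:

```cpp
#include <algorithm>
#include <cstdint>

// Hypothetical sketch: clamp a user-supplied depth flag into the
// documented [1, 255] range. Not the VM's actual code.
static intptr_t ClampProfileDepth(intptr_t flag_value) {
  if (flag_value <= 0) {
    return 0;  // Mirrors the guard visible at the top of the real function.
  }
  return std::min<intptr_t>(flag_value, 255);  // Documented maximum.
}
```
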
| 255 | 255 |
| 256 | 256 |
| 257 Sample* SampleBuffer::ReserveSample() { | 257 Sample* SampleBuffer::ReserveSample() { |
| 258 ASSERT(samples_ != NULL); | 258 ASSERT(samples_ != NULL); |
| 259 uintptr_t cursor = AtomicOperations::FetchAndIncrement(&cursor_); | 259 uintptr_t cursor = AtomicOperations::FetchAndIncrement(&cursor_); |
| 260 // Map back into sample buffer range. | 260 // Map back into sample buffer range. |
| 261 cursor = cursor % capacity_; | 261 cursor = cursor % capacity_; |
| 262 return At(cursor); | 262 return At(cursor); |
| 263 } | 263 } |
| 264 | 264 |
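
`ReserveSample` above is the classic lock-free ring-buffer reservation: a single atomic fetch-and-increment hands each sampler a unique ticket, and the modulo wrap maps that ticket to a slot, so the newest sample silently overwrites the oldest. A self-contained sketch of the same idea using `std::atomic`; the `RingBuffer` and `Slot` names are hypothetical, not the VM's types:

```cpp
#include <atomic>
#include <cstdint>
#include <vector>

struct Slot {
  uintptr_t pc = 0;
};

class RingBuffer {
 public:
  explicit RingBuffer(size_t capacity) : slots_(capacity) {}

  Slot* Reserve() {
    // Each caller gets a unique ticket; wrapping maps it into the buffer,
    // so old entries are overwritten once the buffer is full.
    uint64_t ticket = cursor_.fetch_add(1, std::memory_order_relaxed);
    return &slots_[ticket % slots_.size()];
  }

 private:
  std::atomic<uint64_t> cursor_{0};
  std::vector<Slot> slots_;
};
```

The modulo keeps the capacity free to be any size rather than a power of two, at the cost of a division per reservation; with a 64-bit cursor, wraparound of the ticket itself is not a practical concern.
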
| 265 | |
| 266 // Attempts to find the true return address when a Dart frame is being setup | 265 // Attempts to find the true return address when a Dart frame is being setup |
| 267 // or torn down. | 266 // or torn down. |
| 268 // NOTE: Architecture specific implementations below. | 267 // NOTE: Architecture specific implementations below. |
| 269 class ReturnAddressLocator : public ValueObject { | 268 class ReturnAddressLocator : public ValueObject { |
| 270 public: | 269 public: |
| 271 ReturnAddressLocator(Sample* sample, const Code& code) | 270 ReturnAddressLocator(Sample* sample, const Code& code) |
| 272 : sample_(sample), | 271 : sample_(sample), |
| 273 code_(Code::ZoneHandle(code.raw())), | 272 code_(Code::ZoneHandle(code.raw())) { |
| 274 is_optimized_(code.is_optimized()) { | |
| 275 ASSERT(!code_.IsNull()); | 273 ASSERT(!code_.IsNull()); |
| 276 ASSERT(code_.ContainsInstructionAt(pc())); | 274 ASSERT(code_.ContainsInstructionAt(pc())); |
| 277 } | 275 } |
| 278 | 276 |
| 279 bool is_code_optimized() { | |
| 280 return is_optimized_; | |
| 281 } | |
| 282 | |
| 283 uword pc() { | 277 uword pc() { |
| 284 return sample_->pc(); | 278 return sample_->pc(); |
| 285 } | 279 } |
| 286 | 280 |
| 287 // Returns false on failure. | 281 // Returns false on failure. |
| 288 bool LocateReturnAddress(uword* return_address); | 282 bool LocateReturnAddress(uword* return_address); |
| 289 | 283 |
| 290 // Returns offset into code object. | 284 // Returns offset into code object. |
| 291 uword RelativePC() { | 285 intptr_t RelativePC() { |
| 292 return pc() - code_.EntryPoint(); | 286 ASSERT(pc() > code_.EntryPoint()); |
| 287 return static_cast<intptr_t>(pc() - code_.EntryPoint()); | |
| 293 } | 288 } |
| 294 | 289 |
| 295 uint8_t* CodePointer(uword offset) { | 290 uint8_t* CodePointer(intptr_t offset) { |
| 296 const uword size = code_.Size(); | 291 const intptr_t size = code_.Size(); |
| 297 ASSERT(offset < size); | 292 ASSERT(offset < size); |
| 298 uint8_t* code_pointer = reinterpret_cast<uint8_t*>(code_.EntryPoint()); | 293 uint8_t* code_pointer = reinterpret_cast<uint8_t*>(code_.EntryPoint()); |
| 299 code_pointer += offset; | 294 code_pointer += offset; |
| 300 return code_pointer; | 295 return code_pointer; |
| 301 } | 296 } |
| 302 | 297 |
| 303 uword StackAt(intptr_t i) { | 298 uword StackAt(intptr_t i) { |
| 304 ASSERT(i >= 0); | 299 ASSERT(i >= 0); |
| 305 ASSERT(i < Sample::kStackBufferSizeInWords); | 300 ASSERT(i < Sample::kStackBufferSizeInWords); |
| 306 return sample_->GetStackBuffer()[i]; | 301 return sample_->GetStackBuffer()[i]; |
| 307 } | 302 } |
| 308 | 303 |
| 309 private: | 304 private: |
| 310 Sample* sample_; | 305 Sample* sample_; |
| 311 const Code& code_; | 306 const Code& code_; |
| 312 const bool is_optimized_; | |
| 313 }; | 307 }; |
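
Note that `StackAt` reads from a buffer stored inside the `Sample`, not from the live thread stack: the signal handler must have copied the top-of-stack words at interrupt time, since the stack will be long gone by the time the locator runs. A sketch of that capture step under assumed names (`MiniSample`, `CaptureTopOfStack`); the buffer size of two words is an assumption that merely covers the two patterns the locator checks:

```cpp
#include <cstdint>
#include <cstring>

using uword = uintptr_t;

// Assumed constant: two words cover both layouts the locator inspects
// (return address at slot 0, or caller fp at 0 and return address at 1).
constexpr intptr_t kStackBufferSizeInWords = 2;

struct MiniSample {
  uword pc = 0;
  uword stack_buffer[kStackBufferSizeInWords] = {};
};

// Copy the top-of-stack words at interrupt time so the pattern matcher
// can inspect them later via StackAt() without touching the live stack.
static void CaptureTopOfStack(MiniSample* sample, uword sp) {
  std::memcpy(sample->stack_buffer, reinterpret_cast<const void*>(sp),
              sizeof(sample->stack_buffer));
}
```
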
| 314 | 308 |
| 315 | 309 |
| 316 #if defined(TARGET_ARCH_IA32) | 310 #if defined(TARGET_ARCH_IA32) || defined(TARGET_ARCH_X64) |
| 317 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { | 311 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { |
| 318 ASSERT(return_address != NULL); | 312 ASSERT(return_address != NULL); |
| 319 const uword offset = RelativePC(); | 313 const intptr_t offset = RelativePC(); |
| 320 const uword size = code_.Size(); | 314 ASSERT(offset >= 0); |
| 321 if (is_optimized_) { | 315 const intptr_t size = code_.Size(); |
| 322 // 0: push ebp | 316 ASSERT(offset < size); |
| 323 // 1: mov ebp, esp | 317 const intptr_t prologue_offset = code_.GetPrologueOffset(); |
| 324 // 3: ... | 318 if (offset < prologue_offset) { |
| 325 if (offset == 0x0) { | 319 // Before the prologue, return address is at the top of the stack. |
| 326 // Stack layout: | 320 // TODO(johnmccutchan): Some intrinsics and stubs do not conform to the |
| 327 // 0 RETURN ADDRESS. | 321 // expected stack layout. Use a more robust solution for those code objects. |
| 328 *return_address = StackAt(0); | 322 *return_address = StackAt(0); |
| 329 return true; | 323 return true; |
| 330 } | |
| 331 if (offset == 0x1) { | |
| 332 // Stack layout: | |
| 333 // 0 CALLER FRAME POINTER | |
| 334 // 1 RETURN ADDRESS | |
| 335 *return_address = StackAt(1); | |
| 336 return true; | |
| 337 } | |
| 338 ReturnPattern rp(pc()); | |
| 339 if (rp.IsValid()) { | |
| 340 // Stack layout: | |
| 341 // 0 RETURN ADDRESS. | |
| 342 *return_address = StackAt(0); | |
| 343 return true; | |
| 344 } | |
| 345 return false; | |
| 346 } else { | |
| 347 // 0x00: mov edi, function | |
| 348 // 0x05: incl (inc usage count) <-- this is optional. | |
| 349 // 0x08: cmpl (compare usage count) | |
| 350 // 0x0f: jump to optimize function | |
| 351 // 0x15: push ebp | |
| 352 // 0x16: mov ebp, esp | |
| 353 // 0x18: ... | |
| 354 ASSERT(size >= 0x08); | |
| 355 const uword incl_offset = 0x05; | |
| 356 const uword incl_length = 0x03; | |
| 357 const uint8_t incl_op_code = 0xFF; | |
| 358 const bool has_incl = (*CodePointer(incl_offset) == incl_op_code); | |
| 359 const uword push_fp_offset = has_incl ? 0x15 : 0x15 - incl_length; | |
| 360 if (offset <= push_fp_offset) { | |
| 361 // Stack layout: | |
| 362 // 0 RETURN ADDRESS. | |
| 363 *return_address = StackAt(0); | |
| 364 return true; | |
| 365 } | |
| 366 if (offset == (push_fp_offset + 1)) { | |
| 367 // Stack layout: | |
| 368 // 0 CALLER FRAME POINTER | |
| 369 // 1 RETURN ADDRESS | |
| 370 *return_address = StackAt(1); | |
| 371 return true; | |
| 372 } | |
| 373 ReturnPattern rp(pc()); | |
| 374 if (rp.IsValid()) { | |
| 375 // Stack layout: | |
| 376 // 0 RETURN ADDRESS. | |
| 377 *return_address = StackAt(0); | |
| 378 return true; | |
| 379 } | |
| 380 return false; | |
| 381 } | 324 } |
| 382 UNREACHABLE(); | 325 // Detect if we are: |
| 383 return false; | 326 // push ebp <--- here |
| *srdjan (2015/05/21 16:06:34): Or corresponding code for X64* | |
| *Cutch (2015/05/22 01:11:14): Added a comment about register names on X64.* | |
| 384 } | 327 // mov ebp, esp |
| 385 #elif defined(TARGET_ARCH_X64) | 328 ProloguePattern pp(pc()); |
| 386 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { | 329 if (pp.IsValid()) { |
| 387 ASSERT(return_address != NULL); | 330 // Stack layout: |
| 388 const uword offset = RelativePC(); | 331 // 0 RETURN ADDRESS. |
| 389 const uword size = code_.Size(); | 332 *return_address = StackAt(0); |
| 390 if (is_optimized_) { | 333 return true; |
| 391 // 0x00: leaq (load pc marker) | |
| 392 // 0x07: movq (load pool pointer) | |
| 393 // 0x0c: push rpb | |
| 394 // 0x0d: movq rbp, rsp | |
| 395 // 0x10: ... | |
| 396 const uword push_fp_offset = 0x0c; | |
| 397 if (offset <= push_fp_offset) { | |
| 398 // Stack layout: | |
| 399 // 0 RETURN ADDRESS. | |
| 400 *return_address = StackAt(0); | |
| 401 return true; | |
| 402 } | |
| 403 if (offset == (push_fp_offset + 1)) { | |
| 404 // Stack layout: | |
| 405 // 0 CALLER FRAME POINTER | |
| 406 // 1 RETURN ADDRESS | |
| 407 *return_address = StackAt(1); | |
| 408 return true; | |
| 409 } | |
| 410 ReturnPattern rp(pc()); | |
| 411 if (rp.IsValid()) { | |
| 412 // Stack layout: | |
| 413 // 0 RETURN ADDRESS. | |
| 414 *return_address = StackAt(0); | |
| 415 return true; | |
| 416 } | |
| 417 return false; | |
| 418 } else { | |
| 419 // 0x00: leaq (load pc marker) | |
| 420 // 0x07: movq (load pool pointer) | |
| 421 // 0x0c: movq (load function) | |
| 422 // 0x13: incl (inc usage count) <-- this is optional. | |
| 423 // 0x16: cmpl (compare usage count) | |
| 424 // 0x1d: jl + 0x | |
| 425 // 0x23: jmp [pool pointer] | |
| 426 // 0x27: push rbp | |
| 427 // 0x28: movq rbp, rsp | |
| 428 // 0x2b: ... | |
| 429 ASSERT(size >= 0x16); | |
| 430 const uword incl_offset = 0x13; | |
| 431 const uword incl_length = 0x03; | |
| 432 const uint8_t incl_op_code = 0xFF; | |
| 433 const bool has_incl = (*CodePointer(incl_offset) == incl_op_code); | |
| 434 const uword push_fp_offset = has_incl ? 0x27 : 0x27 - incl_length; | |
| 435 if (offset <= push_fp_offset) { | |
| 436 // Stack layout: | |
| 437 // 0 RETURN ADDRESS. | |
| 438 *return_address = StackAt(0); | |
| 439 return true; | |
| 440 } | |
| 441 if (offset == (push_fp_offset + 1)) { | |
| 442 // Stack layout: | |
| 443 // 0 CALLER FRAME POINTER | |
| 444 // 1 RETURN ADDRESS | |
| 445 *return_address = StackAt(1); | |
| 446 return true; | |
| 447 } | |
| 448 ReturnPattern rp(pc()); | |
| 449 if (rp.IsValid()) { | |
| 450 // Stack layout: | |
| 451 // 0 RETURN ADDRESS. | |
| 452 *return_address = StackAt(0); | |
| 453 return true; | |
| 454 } | |
| 455 return false; | |
| 456 } | 334 } |
| 457 UNREACHABLE(); | 335 // Detect if we are: |
| 336 // push ebp | |
| 337 // mov ebp, esp <--- here | |
| 338 SetFramePointerPattern sfpp(pc()); | |
| 339 if (sfpp.IsValid()) { | |
| 340 // Stack layout: | |
| 341 // 0 CALLER FRAME POINTER | |
| 342 // 1 RETURN ADDRESS | |
| 343 *return_address = StackAt(1); | |
| 344 return true; | |
| 345 } | |
| 346 // Detect if we are: | |
| 347 // ret <--- here | |
| 348 ReturnPattern rp(pc()); | |
| 349 if (rp.IsValid()) { | |
| 350 // Stack layout: | |
| 351 // 0 RETURN ADDRESS. | |
| 352 *return_address = StackAt(0); | |
| 353 return true; | |
| 354 } | |
| 458 return false; | 355 return false; |
| 459 } | 356 } |
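
The rewritten IA32/X64 path above replaces brittle instruction-offset matching (which had to special-case optimized code and the optional usage-counter `incl`) with byte-pattern checks at the sampled pc. The sketch below shows what such matchers boil down to on ia32; it is illustrative only, not the VM's `ProloguePattern`/`SetFramePointerPattern`/`ReturnPattern` classes. On X64, `push rbp` is the same `0x55` byte and `mov rbp, rsp` gains a REX.W prefix (`0x48 0x89 0xE5`):

```cpp
#include <cstdint>

using uword = uintptr_t;

// push ebp -> 0x55: the frame pointer is not saved yet, so the return
// address is still the word on top of the stack (StackAt(0)).
static bool AtPushFramePointer(uword pc) {
  const uint8_t* bytes = reinterpret_cast<const uint8_t*>(pc);
  return bytes[0] == 0x55;
}

// mov ebp, esp -> 0x89 0xE5: the caller frame pointer was just pushed,
// so the return address sits one word below it (StackAt(1)).
static bool AtSetFramePointer(uword pc) {
  const uint8_t* bytes = reinterpret_cast<const uint8_t*>(pc);
  return bytes[0] == 0x89 && bytes[1] == 0xE5;
}

// ret -> 0xC3: the frame is torn down and the return address is back on
// top of the stack (StackAt(0)).
static bool AtReturn(uword pc) {
  const uint8_t* bytes = reinterpret_cast<const uint8_t*>(pc);
  return bytes[0] == 0xC3;
}
```
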
| 460 #elif defined(TARGET_ARCH_ARM) | 357 #elif defined(TARGET_ARCH_ARM) |
| 461 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { | 358 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { |
| 462 ASSERT(return_address != NULL); | 359 ASSERT(return_address != NULL); |
| 463 return false; | 360 return false; |
| 464 } | 361 } |
| 465 #elif defined(TARGET_ARCH_ARM64) | 362 #elif defined(TARGET_ARCH_ARM64) |
| 466 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { | 363 bool ReturnAddressLocator::LocateReturnAddress(uword* return_address) { |
| 467 ASSERT(return_address != NULL); | 364 ASSERT(return_address != NULL); |
| (...skipping 690 matching lines...) | |
| 1158 exited_dart_code, | 1055 exited_dart_code, |
| 1159 in_dart_code, | 1056 in_dart_code, |
| 1160 sample, | 1057 sample, |
| 1161 &native_stack_walker, | 1058 &native_stack_walker, |
| 1162 &dart_exit_stack_walker, | 1059 &dart_exit_stack_walker, |
| 1163 &dart_stack_walker, | 1060 &dart_stack_walker, |
| 1164 pc); | 1061 pc); |
| 1165 } | 1062 } |
| 1166 | 1063 |
| 1167 } // namespace dart | 1064 } // namespace dart |