| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 68 matching lines...) |
| 79 STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256); | 79 STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256); |
| 80 STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256); | 80 STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256); |
| 81 | 81 |
| 82 | 82 |
| 83 // Maximum size in bytes of generated code for a function to be optimized | 83 // Maximum size in bytes of generated code for a function to be optimized |
| 84 // the very first time it is seen on the stack. | 84 // the very first time it is seen on the stack. |
| 85 static const int kMaxSizeEarlyOpt = | 85 static const int kMaxSizeEarlyOpt = |
| 86 5 * FullCodeGenerator::kBackEdgeDistanceUnit; | 86 5 * FullCodeGenerator::kBackEdgeDistanceUnit; |
| 87 | 87 |
| 88 | 88 |
| 89 Atomic32 RuntimeProfiler::state_ = 0; | |
| 90 | |
| 91 // TODO(isolates): Clean up the semaphore when it is no longer required. | |
| 92 static LazySemaphore<0>::type semaphore = LAZY_SEMAPHORE_INITIALIZER; | |
| 93 | |
| 94 #ifdef DEBUG | |
| 95 bool RuntimeProfiler::has_been_globally_set_up_ = false; | |
| 96 #endif | |
| 97 bool RuntimeProfiler::enabled_ = false; | |
| 98 | |
| 99 | |
| 100 RuntimeProfiler::RuntimeProfiler(Isolate* isolate) | 89 RuntimeProfiler::RuntimeProfiler(Isolate* isolate) |
| 101 : isolate_(isolate), | 90 : isolate_(isolate), |
| 102 sampler_threshold_(kSamplerThresholdInit), | 91 sampler_threshold_(kSamplerThresholdInit), |
| 103 sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit), | 92 sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit), |
| 104 sampler_ticks_until_threshold_adjustment_( | 93 sampler_ticks_until_threshold_adjustment_( |
| 105 kSamplerTicksBetweenThresholdAdjustment), | 94 kSamplerTicksBetweenThresholdAdjustment), |
| 106 sampler_window_position_(0), | 95 sampler_window_position_(0), |
| 107 any_ic_changed_(false), | 96 any_ic_changed_(false), |
| 108 code_generated_(false) { | 97 code_generated_(false) { |
| 109 ClearSampleBuffer(); | 98 ClearSampleBuffer(); |
| 110 } | 99 } |
| 111 | 100 |
| 112 | 101 |
| 113 void RuntimeProfiler::GlobalSetUp() { | |
| 114 ASSERT(!has_been_globally_set_up_); | |
| 115 enabled_ = V8::UseCrankshaft() && FLAG_opt; | |
| 116 #ifdef DEBUG | |
| 117 has_been_globally_set_up_ = true; | |
| 118 #endif | |
| 119 } | |
| 120 | |
| 121 | |
| 122 static void GetICCounts(JSFunction* function, | 102 static void GetICCounts(JSFunction* function, |
| 123 int* ic_with_type_info_count, | 103 int* ic_with_type_info_count, |
| 124 int* ic_total_count, | 104 int* ic_total_count, |
| 125 int* percentage) { | 105 int* percentage) { |
| 126 *ic_total_count = 0; | 106 *ic_total_count = 0; |
| 127 *ic_with_type_info_count = 0; | 107 *ic_with_type_info_count = 0; |
| 128 Object* raw_info = | 108 Object* raw_info = |
| 129 function->shared()->code()->type_feedback_info(); | 109 function->shared()->code()->type_feedback_info(); |
| 130 if (raw_info->IsTypeFeedbackInfo()) { | 110 if (raw_info->IsTypeFeedbackInfo()) { |
| 131 TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info); | 111 TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info); |
| (...skipping 247 matching lines...) |
| 379 // this as part of collecting them because this will interfere with | 359 // this as part of collecting them because this will interfere with |
| 380 // the sample lookup in case of recursive functions. | 360 // the sample lookup in case of recursive functions. |
| 381 for (int i = 0; i < sample_count; i++) { | 361 for (int i = 0; i < sample_count; i++) { |
| 382 AddSample(samples[i], kSamplerFrameWeight[i]); | 362 AddSample(samples[i], kSamplerFrameWeight[i]); |
| 383 } | 363 } |
| 384 } | 364 } |
| 385 } | 365 } |
| 386 | 366 |
| 387 | 367 |
| 388 void RuntimeProfiler::SetUp() { | 368 void RuntimeProfiler::SetUp() { |
| 389 ASSERT(has_been_globally_set_up_); | |
| 390 if (!FLAG_watch_ic_patching) { | 369 if (!FLAG_watch_ic_patching) { |
| 391 ClearSampleBuffer(); | 370 ClearSampleBuffer(); |
| 392 } | 371 } |
| 393 } | 372 } |
| 394 | 373 |
| 395 | 374 |
| 396 void RuntimeProfiler::Reset() { | 375 void RuntimeProfiler::Reset() { |
| 397 if (!FLAG_watch_ic_patching) { | 376 if (!FLAG_watch_ic_patching) { |
| 398 sampler_threshold_ = kSamplerThresholdInit; | 377 sampler_threshold_ = kSamplerThresholdInit; |
| 399 sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit; | 378 sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit; |
| (...skipping 22 matching lines...) |
| 422 if (map_word.IsForwardingAddress()) { | 401 if (map_word.IsForwardingAddress()) { |
| 423 sampler_window_[i] = map_word.ToForwardingAddress(); | 402 sampler_window_[i] = map_word.ToForwardingAddress(); |
| 424 } else { | 403 } else { |
| 425 sampler_window_[i] = NULL; | 404 sampler_window_[i] = NULL; |
| 426 } | 405 } |
| 427 } | 406 } |
| 428 } | 407 } |
| 429 } | 408 } |
| 430 | 409 |
| 431 | 410 |
| 432 void RuntimeProfiler::HandleWakeUp(Isolate* isolate) { | |
| 433 // The profiler thread must still be waiting. | |
| 434 ASSERT(NoBarrier_Load(&state_) >= 0); | |
| 435 // In IsolateEnteredJS we have already incremented the counter and | |
| 436 // undid the decrement done by the profiler thread. Increment again | |
| 437 // to get the right count of active isolates. | |
| 438 NoBarrier_AtomicIncrement(&state_, 1); | |
| 439 semaphore.Pointer()->Signal(); | |
| 440 } | |
| 441 | |
| 442 | |
| 443 bool RuntimeProfiler::WaitForSomeIsolateToEnterJS() { | |
| 444 Atomic32 old_state = NoBarrier_CompareAndSwap(&state_, 0, -1); | |
| 445 ASSERT(old_state >= -1); | |
| 446 if (old_state != 0) return false; | |
| 447 semaphore.Pointer()->Wait(); | |
| 448 return true; | |
| 449 } | |
| 450 | |
| 451 | |
| 452 void RuntimeProfiler::StopRuntimeProfilerThreadBeforeShutdown(Thread* thread) { | |
| 453 // Do a fake increment. If the profiler is waiting on the semaphore, | |
| 454 // the returned state is 0, which can be left as an initial state in | |
| 455 // case profiling is restarted later. If the profiler is not | |
| 456 // waiting, the increment will prevent it from waiting, but has to | |
| 457 // be undone after the profiler is stopped. | |
| 458 Atomic32 new_state = NoBarrier_AtomicIncrement(&state_, 1); | |
| 459 ASSERT(new_state >= 0); | |
| 460 if (new_state == 0) { | |
| 461 // The profiler thread is waiting. Wake it up. It must check for | |
| 462 // stop conditions before attempting to wait again. | |
| 463 semaphore.Pointer()->Signal(); | |
| 464 } | |
| 465 thread->Join(); | |
| 466 // The profiler thread is now stopped. Undo the increment in case it | |
| 467 // was not waiting. | |
| 468 if (new_state != 0) { | |
| 469 NoBarrier_AtomicIncrement(&state_, -1); | |
| 470 } | |
| 471 } | |
| 472 | |
| 473 | |
| 474 void RuntimeProfiler::RemoveDeadSamples() { | 411 void RuntimeProfiler::RemoveDeadSamples() { |
| 475 for (int i = 0; i < kSamplerWindowSize; i++) { | 412 for (int i = 0; i < kSamplerWindowSize; i++) { |
| 476 Object* function = sampler_window_[i]; | 413 Object* function = sampler_window_[i]; |
| 477 if (function != NULL && | 414 if (function != NULL && |
| 478 !Marking::MarkBitFrom(HeapObject::cast(function)).Get()) { | 415 !Marking::MarkBitFrom(HeapObject::cast(function)).Get()) { |
| 479 sampler_window_[i] = NULL; | 416 sampler_window_[i] = NULL; |
| 480 } | 417 } |
| 481 } | 418 } |
| 482 } | 419 } |
| 483 | 420 |
| 484 | 421 |
| 485 void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) { | 422 void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) { |
| 486 for (int i = 0; i < kSamplerWindowSize; i++) { | 423 for (int i = 0; i < kSamplerWindowSize; i++) { |
| 487 visitor->VisitPointer(&sampler_window_[i]); | 424 visitor->VisitPointer(&sampler_window_[i]); |
| 488 } | 425 } |
| 489 } | 426 } |
| 490 | 427 |
| 491 | 428 |
| 492 } } // namespace v8::internal | 429 } } // namespace v8::internal |
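
For readers of this diff: the members deleted above (state_, the lazy semaphore, HandleWakeUp, WaitForSomeIsolateToEnterJS, StopRuntimeProfilerThreadBeforeShutdown) implemented a small handshake in which an atomic counter tracks how many isolates are executing JS, with the value -1 marking the profiler thread as parked on the semaphore. Below is a minimal, self-contained sketch of that wake-up protocol; it is an illustration under assumptions, not V8 code: it substitutes C++20 std::atomic and std::binary_semaphore for V8's Atomic32 and LazySemaphore, and the hypothetical EnterJS stands in for the real IsolateEnteredJS hook referenced in the deleted comments.

#include <atomic>
#include <chrono>
#include <iostream>
#include <semaphore>
#include <thread>

// Number of isolates currently in JS, or -1 if the profiler thread is
// parked on the semaphore (mirrors the role of RuntimeProfiler::state_).
static std::atomic<int> state{0};
static std::binary_semaphore wakeup{0};  // signalled when an isolate enters JS

// Profiler side: park until some isolate enters JS (cf. the deleted
// WaitForSomeIsolateToEnterJS).
bool WaitForSomeIsolateToEnterJS() {
  int expected = 0;
  // Only park when no isolate is currently executing JS.
  if (!state.compare_exchange_strong(expected, -1)) return false;
  wakeup.acquire();  // woken by EnterJS()
  return true;
}

// Isolate side: announce that JS execution has started (cf. the
// IsolateEnteredJS / HandleWakeUp pair referenced in the deleted comments).
void EnterJS() {
  // If the profiler had parked (previous value -1), the first increment only
  // cancels the profiler's decrement; increment again to record this isolate
  // and wake the profiler thread.
  if (state.fetch_add(1) < 0) {
    state.fetch_add(1);
    wakeup.release();
  }
}

int main() {
  std::thread profiler([] {
    if (WaitForSomeIsolateToEnterJS()) std::cout << "profiler woke up\n";
  });
  std::this_thread::sleep_for(std::chrono::milliseconds(50));
  EnterJS();  // wakes the parked profiler thread
  profiler.join();
  return 0;
}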