Chromium Code Reviews — Index: src/runtime-profiler.cc |
| diff --git a/src/runtime-profiler.cc b/src/runtime-profiler.cc |
| index eaa6e15603a7e94f5a021d46ae468c64efdbab78..b71b7b2b7af693e7b124c03c959392e20f14b0d9 100644 |
| --- a/src/runtime-profiler.cc |
| +++ b/src/runtime-profiler.cc |
| @@ -46,17 +46,8 @@ namespace internal { |
| // Optimization sampler constants. |
| static const int kSamplerFrameCount = 2; |
| -static const int kSamplerFrameWeight[kSamplerFrameCount] = { 2, 1 }; |
| -static const int kSamplerTicksBetweenThresholdAdjustment = 32; |
| - |
| -static const int kSamplerThresholdInit = 3; |
| -static const int kSamplerThresholdMin = 1; |
| -static const int kSamplerThresholdDelta = 1; |
| - |
| -static const int kSamplerThresholdSizeFactorInit = 3; |
| - |
| -static const int kSizeLimit = 1500; |
|
fschneider
2011/12/12 11:31:21
The reason for looking at the size in some form wa… [comment truncated in this capture]
Jakob Kummerow
2011/12/14 08:42:31
I see the reasoning and agree in principle, but wi… [comment truncated in this capture]
|
| +static const int kProfilerTicksBeforeOptimization = 2; |
| Atomic32 RuntimeProfiler::state_ = 0; |
| @@ -70,15 +61,7 @@ bool RuntimeProfiler::has_been_globally_setup_ = false; |
| bool RuntimeProfiler::enabled_ = false; |
| -RuntimeProfiler::RuntimeProfiler(Isolate* isolate) |
| - : isolate_(isolate), |
| - sampler_threshold_(kSamplerThresholdInit), |
| - sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit), |
| - sampler_ticks_until_threshold_adjustment_( |
| - kSamplerTicksBetweenThresholdAdjustment), |
| - sampler_window_position_(0) { |
| - ClearSampleBuffer(); |
| -} |
| +RuntimeProfiler::RuntimeProfiler(Isolate* isolate) : isolate_(isolate) {} |
| void RuntimeProfiler::GlobalSetup() { |
| @@ -90,13 +73,13 @@ void RuntimeProfiler::GlobalSetup() { |
| } |
| -void RuntimeProfiler::Optimize(JSFunction* function) { |
| +void RuntimeProfiler::Optimize(JSFunction* function, int reason) { |
| ASSERT(function->IsOptimizable()); |
| if (FLAG_trace_opt) { |
| PrintF("[marking "); |
| function->PrintName(); |
| PrintF(" 0x%" V8PRIxPTR, reinterpret_cast<intptr_t>(function->address())); |
| - PrintF(" for recompilation"); |
| + PrintF(" for recompilation, reason: %d", reason); |
|
fschneider
2011/12/13 11:53:01
Print reason in English instead of just a number.
Jakob Kummerow
2011/12/14 08:42:31
Done.
|
| PrintF("]\n"); |
| } |
| @@ -148,43 +131,12 @@ void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) { |
| } |
| -void RuntimeProfiler::ClearSampleBuffer() { |
| - memset(sampler_window_, 0, sizeof(sampler_window_)); |
| - memset(sampler_window_weight_, 0, sizeof(sampler_window_weight_)); |
| -} |
| - |
| - |
| -int RuntimeProfiler::LookupSample(JSFunction* function) { |
| - int weight = 0; |
| - for (int i = 0; i < kSamplerWindowSize; i++) { |
| - Object* sample = sampler_window_[i]; |
| - if (sample != NULL) { |
| - if (function == sample) { |
| - weight += sampler_window_weight_[i]; |
| - } |
| - } |
| - } |
| - return weight; |
| -} |
| - |
| - |
| -void RuntimeProfiler::AddSample(JSFunction* function, int weight) { |
| - ASSERT(IsPowerOf2(kSamplerWindowSize)); |
| - sampler_window_[sampler_window_position_] = function; |
| - sampler_window_weight_[sampler_window_position_] = weight; |
| - sampler_window_position_ = (sampler_window_position_ + 1) & |
| - (kSamplerWindowSize - 1); |
| -} |
| - |
| - |
| void RuntimeProfiler::OptimizeNow() { |
| HandleScope scope(isolate_); |
| // Run through the JavaScript frames and collect them. If we already |
| // have a sample of the function, we mark it for optimizations |
| // (eagerly or lazily). |
| - JSFunction* samples[kSamplerFrameCount]; |
| - int sample_count = 0; |
| int frame_count = 0; |
| for (JavaScriptFrameIterator it(isolate_); |
| frame_count++ < kSamplerFrameCount && !it.done(); |
| @@ -192,21 +144,6 @@ void RuntimeProfiler::OptimizeNow() { |
| JavaScriptFrame* frame = it.frame(); |
| JSFunction* function = JSFunction::cast(frame->function()); |
| - // Adjust threshold each time we have processed |
| - // a certain number of ticks. |
| - if (sampler_ticks_until_threshold_adjustment_ > 0) { |
| - sampler_ticks_until_threshold_adjustment_--; |
| - if (sampler_ticks_until_threshold_adjustment_ <= 0) { |
| - // If the threshold is not already at the minimum |
| - // modify and reset the ticks until next adjustment. |
| - if (sampler_threshold_ > kSamplerThresholdMin) { |
| - sampler_threshold_ -= kSamplerThresholdDelta; |
| - sampler_ticks_until_threshold_adjustment_ = |
| - kSamplerTicksBetweenThresholdAdjustment; |
| - } |
| - } |
| - } |
| - |
| if (function->IsMarkedForLazyRecompilation()) { |
| Code* unoptimized = function->shared()->code(); |
| int nesting = unoptimized->allow_osr_at_loop_nesting_level(); |
| @@ -217,26 +154,32 @@ void RuntimeProfiler::OptimizeNow() { |
| // Do not record non-optimizable functions. |
| if (!function->IsOptimizable()) continue; |
| - samples[sample_count++] = function; |
| - |
| - int function_size = function->shared()->SourceSize(); |
| - int threshold_size_factor = (function_size > kSizeLimit) |
| - ? sampler_threshold_size_factor_ |
| - : 1; |
| - |
| - int threshold = sampler_threshold_ * threshold_size_factor; |
| - if (LookupSample(function) >= threshold) { |
| - Optimize(function); |
| + int ticks = function->profiler_ticks(); |
| + |
| + if (ticks >= kProfilerTicksBeforeOptimization - 1) { |
| + // If this particular function hasn't had any ICs patched for enough |
| + // ticks, optimize it now. |
| + Optimize(function, 1); |
|
fschneider
2011/12/13 11:53:01
Name instead of an integer constant?
Jakob Kummerow
2011/12/14 08:42:31
Done.
|
| + } else if (!any_ic_changed_ && |
| + function->shared()->code()->instruction_size() < 500) { |
| + // If no IC was patched since the last tick and this function is very |
| + // small, optimistically optimize it now. |
| + Optimize(function, 2); |
| + } else if (!code_generated_ && |
| + !any_ic_changed_ && |
| + total_code_generated_ > 0 && |
| + total_code_generated_ < 2000) { |
| + // If no code was generated and no IC was patched since the last tick, |
| + // but a little code has already been generated since last Reset(), |
| + // then type info might already be stable and we can optimize now. |
| + Optimize(function, 3); |
| + } else { |
| + function->set_profiler_ticks(ticks + 1); |
| } |
| } |
| - |
| - // Add the collected functions as samples. It's important not to do |
| - // this as part of collecting them because this will interfere with |
| - // the sample lookup in case of recursive functions. |
| - for (int i = 0; i < sample_count; i++) { |
| - AddSample(samples[i], kSamplerFrameWeight[i]); |
| - } |
| + any_ic_changed_ = false; |
| + code_generated_ = false; |
| } |
| @@ -247,7 +190,6 @@ void RuntimeProfiler::NotifyTick() { |
| void RuntimeProfiler::Setup() { |
| ASSERT(has_been_globally_setup_); |
| - ClearSampleBuffer(); |
| // If the ticker hasn't already started, make sure to do so to get |
| // the ticks for the runtime profiler. |
| if (IsEnabled()) isolate_->logger()->EnsureTickerStarted(); |
| @@ -255,10 +197,8 @@ void RuntimeProfiler::Setup() { |
| void RuntimeProfiler::Reset() { |
| - sampler_threshold_ = kSamplerThresholdInit; |
| - sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit; |
| - sampler_ticks_until_threshold_adjustment_ = |
| - kSamplerTicksBetweenThresholdAdjustment; |
| + isolate_->logger()->ResetTickerInterval(); |
| + total_code_generated_ = 0; |
| } |
| @@ -267,27 +207,6 @@ void RuntimeProfiler::TearDown() { |
| } |
| -int RuntimeProfiler::SamplerWindowSize() { |
| - return kSamplerWindowSize; |
| -} |
| - |
| - |
| -// Update the pointers in the sampler window after a GC. |
| -void RuntimeProfiler::UpdateSamplesAfterScavenge() { |
| - for (int i = 0; i < kSamplerWindowSize; i++) { |
| - Object* function = sampler_window_[i]; |
| - if (function != NULL && isolate_->heap()->InNewSpace(function)) { |
| - MapWord map_word = HeapObject::cast(function)->map_word(); |
| - if (map_word.IsForwardingAddress()) { |
| - sampler_window_[i] = map_word.ToForwardingAddress(); |
| - } else { |
| - sampler_window_[i] = NULL; |
| - } |
| - } |
| - } |
| -} |
| - |
| - |
| void RuntimeProfiler::HandleWakeUp(Isolate* isolate) { |
| // The profiler thread must still be waiting. |
| ASSERT(NoBarrier_Load(&state_) >= 0); |
| @@ -335,24 +254,6 @@ void RuntimeProfiler::StopRuntimeProfilerThreadBeforeShutdown(Thread* thread) { |
| } |
| -void RuntimeProfiler::RemoveDeadSamples() { |
| - for (int i = 0; i < kSamplerWindowSize; i++) { |
| - Object* function = sampler_window_[i]; |
| - if (function != NULL && |
| - !Marking::MarkBitFrom(HeapObject::cast(function)).Get()) { |
| - sampler_window_[i] = NULL; |
| - } |
| - } |
| -} |
| - |
| - |
| -void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) { |
| - for (int i = 0; i < kSamplerWindowSize; i++) { |
| - visitor->VisitPointer(&sampler_window_[i]); |
| - } |
| -} |
| - |
| - |
| bool RuntimeProfilerRateLimiter::SuspendIfNecessary() { |
| if (!RuntimeProfiler::IsSomeIsolateInJS()) { |
| return RuntimeProfiler::WaitForSomeIsolateToEnterJS(); |