Index: src/runtime-profiler.cc
diff --git a/src/runtime-profiler.cc b/src/runtime-profiler.cc
index 69229871ea670d59aec48937ded98abfea1e131b..82ee3c4da9ff1a3ae9f6231442a09b757bcb7096 100644
--- a/src/runtime-profiler.cc
+++ b/src/runtime-profiler.cc
@@ -57,59 +57,6 @@ RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
 }
 
 
-static void GetICCounts(Code* shared_code,
-                        int* ic_with_type_info_count,
-                        int* ic_total_count,
-                        int* percentage) {
-  *ic_total_count = 0;
-  *ic_with_type_info_count = 0;
-  Object* raw_info = shared_code->type_feedback_info();
-  if (raw_info->IsTypeFeedbackInfo()) {
-    TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info);
-    *ic_with_type_info_count = info->ic_with_type_info_count();
-    *ic_total_count = info->ic_total_count();
-  }
-  *percentage = *ic_total_count > 0
-      ? 100 * *ic_with_type_info_count / *ic_total_count
-      : 100;
-}
-
-
-void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
-  ASSERT(function->IsOptimizable());
-
-  if (FLAG_trace_opt && function->PassesFilter(FLAG_hydrogen_filter)) {
-    PrintF("[marking ");
-    function->ShortPrint();
-    PrintF(" for recompilation, reason: %s", reason);
-    if (FLAG_type_info_threshold > 0) {
-      int typeinfo, total, percentage;
-      GetICCounts(function->shared()->code(), &typeinfo, &total, &percentage);
-      PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total, percentage);
-    }
-    PrintF("]\n");
-  }
-
-
-  if (isolate_->concurrent_recompilation_enabled() &&
-      !isolate_->bootstrapper()->IsActive()) {
-    if (isolate_->concurrent_osr_enabled() &&
-        isolate_->optimizing_compiler_thread()->IsQueuedForOSR(function)) {
-      // Do not attempt regular recompilation if we already queued this for OSR.
-      // TODO(yangguo): This is necessary so that we don't install optimized
-      // code on a function that is already optimized, since OSR and regular
-      // recompilation race. This goes away as soon as OSR becomes one-shot.
-      return;
-    }
-    ASSERT(!function->IsInOptimizationQueue());
-    function->MarkForConcurrentOptimization();
-  } else {
-    // The next call to the function will trigger optimization.
-    function->MarkForOptimization();
-  }
-}
-
-
 void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
   // See AlwaysFullCompiler (in compiler.cc) comment on why we need
   // Debug::has_break_points().
@@ -236,13 +183,13 @@ void RuntimeProfiler::OptimizeNow() {
 
     if (ticks >= kProfilerTicksBeforeOptimization) {
       int typeinfo, total, percentage;
-      GetICCounts(shared_code, &typeinfo, &total, &percentage);
+      shared_code->GetICCounts(&typeinfo, &total, &percentage);
      if (percentage >= FLAG_type_info_threshold) {
         // If this particular function hasn't had any ICs patched for enough
         // ticks, optimize it now.
-        Optimize(function, "hot and stable");
+        Compiler::Optimize(function, "hot and stable");
       } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
-        Optimize(function, "not much type info but very hot");
+        Compiler::Optimize(function, "not much type info but very hot");
       } else {
         shared_code->set_profiler_ticks(ticks + 1);
         if (FLAG_trace_opt_verbose) {
@@ -256,7 +203,7 @@ void RuntimeProfiler::OptimizeNow() {
                shared_code->instruction_size() < kMaxSizeEarlyOpt) {
       // If no IC was patched since the last tick and this function is very
       // small, optimistically optimize it now.
-      Optimize(function, "small function");
+      Compiler::Optimize(function, "small function");
     } else {
       shared_code->set_profiler_ticks(ticks + 1);
     }
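
Note: the hunks above only show the runtime-profiler.cc side of this refactoring; the bodies of GetICCounts() and Optimize() move to other files in the change. As a rough sketch of the declarations implied by the rewritten call sites (the receiving headers, surrounding class layout, and exact signatures are assumptions, not shown in this diff):

// Sketch only: inferred from the new call sites
// shared_code->GetICCounts(...) and Compiler::Optimize(...);
// assumed to land in objects.h and compiler.h respectively.

class Code : public HeapObject {
 public:
  // Was the file-static GetICCounts(Code* shared_code, ...);
  // the receiver replaces the old first argument.
  void GetICCounts(int* ic_with_type_info_count, int* ic_total_count,
                   int* percentage);
  // ...
};

class Compiler : public AllStatic {
 public:
  // Was RuntimeProfiler::Optimize(); as a static on Compiler it is
  // callable from outside the profiler as well.
  static void Optimize(JSFunction* function, const char* reason);
  // ...
};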