| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/runtime-profiler.h" | 7 #include "src/runtime-profiler.h" |
| 8 | 8 |
| 9 #include "src/assembler.h" | 9 #include "src/assembler.h" |
| 10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
| (...skipping 92 matching lines...) |
| 103 } | 103 } |
| 104 ASSERT(!function->IsInOptimizationQueue()); | 104 ASSERT(!function->IsInOptimizationQueue()); |
| 105 function->MarkForConcurrentOptimization(); | 105 function->MarkForConcurrentOptimization(); |
| 106 } else { | 106 } else { |
| 107 // The next call to the function will trigger optimization. | 107 // The next call to the function will trigger optimization. |
| 108 function->MarkForOptimization(); | 108 function->MarkForOptimization(); |
| 109 } | 109 } |
| 110 } | 110 } |
| 111 | 111 |
| 112 | 112 |
| 113 void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) { | 113 void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function, |
| 114 int loop_nesting_levels) { |
| 115 SharedFunctionInfo* shared = function->shared(); |
| 114 // See AlwaysFullCompiler (in compiler.cc) comment on why we need | 116 // See AlwaysFullCompiler (in compiler.cc) comment on why we need |
| 115 // Debug::has_break_points(). | 117 // Debug::has_break_points(). |
| 116 if (!FLAG_use_osr || | 118 if (!FLAG_use_osr || |
| 117 isolate_->DebuggerHasBreakPoints() || | 119 isolate_->DebuggerHasBreakPoints() || |
| 118 function->IsBuiltin()) { | 120 function->IsBuiltin()) { |
| 119 return; | 121 return; |
| 120 } | 122 } |
| 121 | 123 |
| 122 SharedFunctionInfo* shared = function->shared(); | |
| 123 // If the code is not optimizable, don't try OSR. | 124 // If the code is not optimizable, don't try OSR. |
| 124 if (!shared->code()->optimizable()) return; | 125 if (!shared->code()->optimizable()) return; |
| 125 | 126 |
| 126 // We are not prepared to do OSR for a function that already has an | 127 // We are not prepared to do OSR for a function that already has an |
| 127 // allocated arguments object. The optimized code would bypass it for | 128 // allocated arguments object. The optimized code would bypass it for |
| 128 // arguments accesses, which is unsound. Don't try OSR. | 129 // arguments accesses, which is unsound. Don't try OSR. |
| 129 if (shared->uses_arguments()) return; | 130 if (shared->uses_arguments()) return; |
| 130 | 131 |
| 131 // We're using on-stack replacement: patch the unoptimized code so that | 132 // We're using on-stack replacement: patch the unoptimized code so that |
| 132 // any back edge in any unoptimized frame will trigger on-stack | 133 // any back edge in any unoptimized frame will trigger on-stack |
| 133 // replacement for that frame. | 134 // replacement for that frame. |
| 134 if (FLAG_trace_osr) { | 135 if (FLAG_trace_osr) { |
| 135 PrintF("[OSR - patching back edges in "); | 136 PrintF("[OSR - patching back edges in "); |
| 136 function->PrintName(); | 137 function->PrintName(); |
| 137 PrintF("]\n"); | 138 PrintF("]\n"); |
| 138 } | 139 } |
| 139 | 140 |
| 140 BackEdgeTable::Patch(isolate_, shared->code()); | 141 for (int i = 0; i < loop_nesting_levels; i++) { |
| 142 BackEdgeTable::Patch(isolate_, shared->code()); |
| 143 } |
| 141 } | 144 } |
| 142 | 145 |
| 143 | 146 |
| 144 void RuntimeProfiler::OptimizeNow() { | 147 void RuntimeProfiler::OptimizeNow() { |
| 145 HandleScope scope(isolate_); | 148 HandleScope scope(isolate_); |
| 146 | 149 |
| 147 if (isolate_->DebuggerHasBreakPoints()) return; | 150 if (isolate_->DebuggerHasBreakPoints()) return; |
| 148 | 151 |
| 149 DisallowHeapAllocation no_gc; | 152 DisallowHeapAllocation no_gc; |
| 150 | 153 |
| (...skipping 17 matching lines...) |
| 168 SharedFunctionInfo* shared_function_info = functions[i]->shared(); | 171 SharedFunctionInfo* shared_function_info = functions[i]->shared(); |
| 169 int ticks = shared_function_info->profiler_ticks(); | 172 int ticks = shared_function_info->profiler_ticks(); |
| 170 if (ticks < Smi::kMaxValue) { | 173 if (ticks < Smi::kMaxValue) { |
| 171 shared_function_info->set_profiler_ticks(ticks + 1); | 174 shared_function_info->set_profiler_ticks(ticks + 1); |
| 172 } | 175 } |
| 173 } | 176 } |
| 174 | 177 |
| 175 if (shared_code->kind() != Code::FUNCTION) continue; | 178 if (shared_code->kind() != Code::FUNCTION) continue; |
| 176 if (function->IsInOptimizationQueue()) continue; | 179 if (function->IsInOptimizationQueue()) continue; |
| 177 | 180 |
| 178 if (FLAG_always_osr && | 181 if (FLAG_always_osr) { |
| 179 shared_code->allow_osr_at_loop_nesting_level() == 0) { | 182 AttemptOnStackReplacement(function, Code::kMaxLoopNestingMarker); |
| 180 // Testing mode: always try an OSR compile for every function. | |
| 181 for (int i = 0; i < Code::kMaxLoopNestingMarker; i++) { | |
| 182 // TODO(titzer): fix AttemptOnStackReplacement to avoid this dumb loop. | |
| 183 shared_code->set_allow_osr_at_loop_nesting_level(i); | |
| 184 AttemptOnStackReplacement(function); | |
| 185 } | |
| 186 // Fall through and do a normal optimized compile as well. | 183 // Fall through and do a normal optimized compile as well. |
| 187 } else if (!frame->is_optimized() && | 184 } else if (!frame->is_optimized() && |
| 188 (function->IsMarkedForOptimization() || | 185 (function->IsMarkedForOptimization() || |
| 189 function->IsMarkedForConcurrentOptimization() || | 186 function->IsMarkedForConcurrentOptimization() || |
| 190 function->IsOptimized())) { | 187 function->IsOptimized())) { |
| 191 // Attempt OSR if we are still running unoptimized code even though the | 188 // Attempt OSR if we are still running unoptimized code even though the |
| 192 // function has long been marked or even already been optimized. | 189 // function has long been marked or even already been optimized. |
| 193 int ticks = shared_code->profiler_ticks(); | 190 int ticks = shared_code->profiler_ticks(); |
| 194 int allowance = kOSRCodeSizeAllowanceBase + | 191 int allowance = kOSRCodeSizeAllowanceBase + |
| 195 ticks * kOSRCodeSizeAllowancePerTick; | 192 ticks * kOSRCodeSizeAllowancePerTick; |
| 196 if (shared_code->CodeSize() > allowance) { | 193 if (shared_code->CodeSize() > allowance) { |
| 197 if (ticks < 255) shared_code->set_profiler_ticks(ticks + 1); | 194 if (ticks < 255) shared_code->set_profiler_ticks(ticks + 1); |
| 198 } else { | 195 } else { |
| 199 int nesting = shared_code->allow_osr_at_loop_nesting_level(); | 196 AttemptOnStackReplacement(function); |
| 200 if (nesting < Code::kMaxLoopNestingMarker) { | |
| 201 int new_nesting = nesting + 1; | |
| 202 shared_code->set_allow_osr_at_loop_nesting_level(new_nesting); | |
| 203 AttemptOnStackReplacement(function); | |
| 204 } | |
| 205 } | 197 } |
| 206 continue; | 198 continue; |
| 207 } | 199 } |
| 208 | 200 |
| 209 // Only record top-level code on top of the execution stack and | 201 // Only record top-level code on top of the execution stack and |
| 210 // avoid optimizing excessively large scripts since top-level code | 202 // avoid optimizing excessively large scripts since top-level code |
| 211 // will be executed only once. | 203 // will be executed only once. |
| 212 const int kMaxToplevelSourceSize = 10 * 1024; | 204 const int kMaxToplevelSourceSize = 10 * 1024; |
| 213 if (shared->is_toplevel() && | 205 if (shared->is_toplevel() && |
| 214 (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) { | 206 (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) { |
| (...skipping 44 matching lines...) |
| 259 Optimize(function, "small function"); | 251 Optimize(function, "small function"); |
| 260 } else { | 252 } else { |
| 261 shared_code->set_profiler_ticks(ticks + 1); | 253 shared_code->set_profiler_ticks(ticks + 1); |
| 262 } | 254 } |
| 263 } | 255 } |
| 264 any_ic_changed_ = false; | 256 any_ic_changed_ = false; |
| 265 } | 257 } |
| 266 | 258 |
| 267 | 259 |
| 268 } } // namespace v8::internal | 260 } } // namespace v8::internal |
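
Reviewer note on the intent of the hunk above: the patch folds the FLAG_always_osr nesting-level loop out of OptimizeNow() and into AttemptOnStackReplacement(), which now takes a loop_nesting_levels count and calls BackEdgeTable::Patch() that many times; the plain AttemptOnStackReplacement(function) call in the regular OSR path presumably relies on a default of 1 declared in runtime-profiler.h, which is outside this diff. Below is a minimal standalone sketch of that call-shape change only; the toy namespace, ToyCode, BackEdgeTable stand-in, and kMaxLoopNestingMarker value here are illustrative stand-ins, not V8 API.

    // Toy model of the refactor: the caller passes how many nesting levels to
    // patch instead of looping and poking allow_osr_at_loop_nesting_level itself.
    #include <cstdio>

    namespace toy {

    constexpr int kMaxLoopNestingMarker = 6;  // stand-in for Code::kMaxLoopNestingMarker

    struct ToyCode {
      int patched = 0;  // counts BackEdgeTable::Patch-style calls
    };

    struct BackEdgeTable {
      static void Patch(ToyCode* code) { code->patched++; }
    };

    // Mirrors the NEW signature: one helper owns the patch loop.
    void AttemptOnStackReplacement(ToyCode* code, int loop_nesting_levels = 1) {
      for (int i = 0; i < loop_nesting_levels; i++) {
        BackEdgeTable::Patch(code);
      }
    }

    }  // namespace toy

    int main() {
      toy::ToyCode code;
      // FLAG_always_osr path: patch every nesting level in a single call.
      toy::AttemptOnStackReplacement(&code, toy::kMaxLoopNestingMarker);
      // Regular OSR path: default of one level, as in the plain call in OptimizeNow().
      toy::AttemptOnStackReplacement(&code);
      std::printf("patched %d times\n", code.patched);  // prints "patched 7 times"
      return 0;
    }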