OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 135 matching lines...)
146 } else { | 146 } else { |
147 // The next call to the function will trigger optimization. | 147 // The next call to the function will trigger optimization. |
148 function->MarkForLazyRecompilation(); | 148 function->MarkForLazyRecompilation(); |
149 } | 149 } |
150 } | 150 } |
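For context: RuntimeProfiler::Optimize is the tick-driven entry point that decides how a hot function gets recompiled. A condensed sketch of the branch this hunk sits in, assuming a FLAG_parallel_recompilation guard and a MarkForParallelRecompilation call in the elided if-arm (both are assumptions inferred from the IsMarkedForParallelRecompilation check later in this file; only the else-arm is quoted from this diff):

    // Sketch under the stated assumptions, not the literal elided code.
    void RuntimeProfiler::Optimize(JSFunction* function) {
      if (FLAG_parallel_recompilation) {           // assumed flag name
        // Queue the function for the background compiler thread.
        function->MarkForParallelRecompilation();  // assumed method name
      } else {
        // The next call to the function will trigger optimization.
        function->MarkForLazyRecompilation();
      }
    }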
151 | 151 |
152 | 152 |
153 void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) { | 153 void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) { |
154 // See AlwaysFullCompiler (in compiler.cc) comment on why we need | 154 // See AlwaysFullCompiler (in compiler.cc) comment on why we need |
155 // Debug::has_break_points(). | 155 // Debug::has_break_points(). |
156 ASSERT(function->IsMarkedForLazyRecompilation() || | |
157 function->IsMarkedForParallelRecompilation() || | |
158 function->IsOptimized()); | |
159 if (!FLAG_use_osr || | 156 if (!FLAG_use_osr || |
160 isolate_->DebuggerHasBreakPoints() || | 157 isolate_->DebuggerHasBreakPoints() || |
161 function->IsBuiltin()) { | 158 function->IsBuiltin()) { |
162 return; | 159 return; |
163 } | 160 } |
164 | 161 |
165 SharedFunctionInfo* shared = function->shared(); | 162 SharedFunctionInfo* shared = function->shared(); |
166 // If the code is not optimizable, don't try OSR. | 163 // If the code is not optimizable, don't try OSR. |
167 if (!shared->code()->optimizable()) return; | 164 if (!shared->code()->optimizable()) return; |
168 | 165 |
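Taken together, the checks above form a single OSR eligibility predicate: OSR must be enabled, the debugger must have no break points (debugging relies on the unoptimized full code), builtins are never OSR'd, and the unoptimized code must be optimizable at all. A hypothetical condensation (the helper name IsOsrCandidate is invented; the four conditions are taken from the hunk):

    // Hypothetical helper; the conditions mirror the hunk above.
    static bool IsOsrCandidate(Isolate* isolate, JSFunction* function) {
      if (!FLAG_use_osr) return false;                      // OSR disabled
      if (isolate->DebuggerHasBreakPoints()) return false;  // keep full code
      if (function->IsBuiltin()) return false;              // never OSR builtins
      return function->shared()->code()->optimizable();
    }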
(...skipping 97 matching lines...)
266 } | 263 } |
267 } | 264 } |
268 } | 265 } |
269 | 266 |
270 SharedFunctionInfo* shared = function->shared(); | 267 SharedFunctionInfo* shared = function->shared(); |
271 Code* shared_code = shared->code(); | 268 Code* shared_code = shared->code(); |
272 | 269 |
273 if (shared_code->kind() != Code::FUNCTION) continue; | 270 if (shared_code->kind() != Code::FUNCTION) continue; |
274 if (function->IsInRecompileQueue()) continue; | 271 if (function->IsInRecompileQueue()) continue; |
275 | 272 |
276 // Attempt OSR if we are still running unoptimized code even though | 273 if (FLAG_always_osr && |
277 // the function has long been marked or even already been optimized. | 274 shared_code->allow_osr_at_loop_nesting_level() == 0) { |
278 if (!frame->is_optimized() && | 275 // Testing mode: always try an OSR compile for every function. |
| 276 for (int i = 0; i < Code::kMaxLoopNestingMarker; i++) { |
| 277 // TODO(titzer): fix AttemptOnStackReplacement to avoid this dumb loop. |
| 278 shared_code->set_allow_osr_at_loop_nesting_level(i); |
| 279 AttemptOnStackReplacement(function); |
| 280 } |
| 281 // Fall through and do a normal optimized compile as well. |
| 282 } else if (!frame->is_optimized() && |
279 (function->IsMarkedForLazyRecompilation() || | 283 (function->IsMarkedForLazyRecompilation() || |
280 function->IsMarkedForParallelRecompilation() || | 284 function->IsMarkedForParallelRecompilation() || |
281 function->IsOptimized())) { | 285 function->IsOptimized())) { |
| 286 // Attempt OSR if we are still running unoptimized code even though |
| 287 // the function has long been marked or even already been optimized. |
282 int ticks = shared_code->profiler_ticks(); | 288 int ticks = shared_code->profiler_ticks(); |
283 int allowance = kOSRCodeSizeAllowanceBase + | 289 int allowance = kOSRCodeSizeAllowanceBase + |
284 ticks * kOSRCodeSizeAllowancePerTick; | 290 ticks * kOSRCodeSizeAllowancePerTick; |
285 if (shared_code->CodeSize() > allowance) { | 291 if (shared_code->CodeSize() > allowance) { |
286 if (ticks < 255) shared_code->set_profiler_ticks(ticks + 1); | 292 if (ticks < 255) shared_code->set_profiler_ticks(ticks + 1); |
287 } else { | 293 } else { |
288 int nesting = shared_code->allow_osr_at_loop_nesting_level(); | 294 int nesting = shared_code->allow_osr_at_loop_nesting_level(); |
289 if (nesting < Code::kMaxLoopNestingMarker) { | 295 if (nesting < Code::kMaxLoopNestingMarker) { |
290 int new_nesting = nesting + 1; | 296 int new_nesting = nesting + 1; |
291 shared_code->set_allow_osr_at_loop_nesting_level(new_nesting); | 297 shared_code->set_allow_osr_at_loop_nesting_level(new_nesting); |
(...skipping 141 matching lines...)
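The else-if branch above implements a size-based back-off: a function only becomes an OSR candidate once its unoptimized code size fits under an allowance that grows linearly with accumulated profiler ticks, so large functions must stay hot for longer before the expensive OSR compile is attempted. The new FLAG_always_osr branch short-circuits this for testing by trying every loop nesting level immediately. A standalone toy model of the allowance growth (the constant values are invented, not V8's real ones):

    #include <cstdio>

    int main() {
      const int kOSRCodeSizeAllowanceBase = 1000;   // assumed value
      const int kOSRCodeSizeAllowancePerTick = 50;  // assumed value
      const int code_size = 2200;                   // example unoptimized size
      for (int ticks = 0; ticks <= 255; ticks++) {
        int allowance = kOSRCodeSizeAllowanceBase +
                        ticks * kOSRCodeSizeAllowancePerTick;
        if (code_size <= allowance) {
          printf("OSR candidate after %d ticks (allowance %d)\n",
                 ticks, allowance);
          return 0;
        }
      }
      printf("never a candidate within 255 ticks\n");
      return 0;
    }

With these invented numbers the function qualifies after 24 ticks, and anything larger than 13750 (the allowance at the 255-tick cap) never qualifies, which is the intended effect: very large functions are effectively excluded from OSR.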
433 | 439 |
434 | 440 |
435 void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) { | 441 void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) { |
436 for (int i = 0; i < kSamplerWindowSize; i++) { | 442 for (int i = 0; i < kSamplerWindowSize; i++) { |
437 visitor->VisitPointer(&sampler_window_[i]); | 443 visitor->VisitPointer(&sampler_window_[i]); |
438 } | 444 } |
439 } | 445 } |
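UpdateSamplesAfterCompact exists because the sampler window stores raw object pointers: after a compacting GC moves objects, each slot has to be handed back to the collector so it can be rewritten to the object's new address. A standalone toy of that visitor pattern (all names here are invented for illustration; V8's real ObjectVisitor interface lives elsewhere in the codebase):

    #include <cstdio>
    #include <unordered_map>

    struct Object { int id; };

    struct ObjectVisitor {
      virtual void VisitPointer(Object** slot) = 0;
      virtual ~ObjectVisitor() {}
    };

    // Patches slots through a forwarding table, the way a compacting
    // collector updates pointers to relocated objects.
    struct ForwardingVisitor : ObjectVisitor {
      std::unordered_map<Object*, Object*> forwarding;  // old -> new address
      void VisitPointer(Object** slot) override {
        auto it = forwarding.find(*slot);
        if (it != forwarding.end()) *slot = it->second;
      }
    };

    int main() {
      Object old_obj{1}, moved_obj{1};
      Object* sampler_window[2] = {&old_obj, &old_obj};  // profiler's raw slots
      ForwardingVisitor v;
      v.forwarding[&old_obj] = &moved_obj;  // "compaction" relocated the object
      for (int i = 0; i < 2; i++) v.VisitPointer(&sampler_window[i]);
      printf("slot 0 patched: %s\n",
             sampler_window[0] == &moved_obj ? "yes" : "no");
    }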
440 | 446 |
441 | 447 |
442 } } // namespace v8::internal | 448 } } // namespace v8::internal |