| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/runtime-profiler.h" | 5 #include "src/runtime-profiler.h" |
| 6 | 6 |
| 7 #include "src/assembler.h" | 7 #include "src/assembler.h" |
| 8 #include "src/ast/scopeinfo.h" | 8 #include "src/ast/scopeinfo.h" |
| 9 #include "src/base/platform/platform.h" | 9 #include "src/base/platform/platform.h" |
| 10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
| (...skipping 125 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 136 } | 136 } |
| 137 | 137 |
| 138 // If the code is not optimizable, don't try OSR. | 138 // If the code is not optimizable, don't try OSR. |
| 139 if (shared->optimization_disabled()) return; | 139 if (shared->optimization_disabled()) return; |
| 140 | 140 |
| 141 // We are not prepared to do OSR for a function that already has an | 141 // We are not prepared to do OSR for a function that already has an |
| 142 // allocated arguments object. The optimized code would bypass it for | 142 // allocated arguments object. The optimized code would bypass it for |
| 143 // arguments accesses, which is unsound. Don't try OSR. | 143 // arguments accesses, which is unsound. Don't try OSR. |
| 144 if (shared->uses_arguments()) return; | 144 if (shared->uses_arguments()) return; |
| 145 | 145 |
| 146 // We're using on-stack replacement: patch the unoptimized code so that | 146 // We're using on-stack replacement: modify unoptimized code so that |
| 147 // any back edge in any unoptimized frame will trigger on-stack | 147 // certain back edges in any unoptimized frame will trigger on-stack |
| 148 // replacement for that frame. | 148 // replacement for that frame. |
| 149 // - Ignition: Store new loop nesting level in BytecodeArray header. |
| 150 // - FullCodegen: Patch back edges up to new level using BackEdgeTable. |
| 149 if (FLAG_trace_osr) { | 151 if (FLAG_trace_osr) { |
| 150 PrintF("[OSR - patching back edges in "); | 152 PrintF("[OSR - arming back edges in "); |
| 151 function->PrintName(); | 153 function->PrintName(); |
| 152 PrintF("]\n"); | 154 PrintF("]\n"); |
| 153 } | 155 } |
| 154 | 156 |
| 155 for (int i = 0; i < loop_nesting_levels; i++) { | 157 if (shared->code()->kind() == Code::FUNCTION) { |
| 156 BackEdgeTable::Patch(isolate_, shared->code()); | 158 DCHECK(BackEdgeTable::Verify(shared->GetIsolate(), shared->code())); |
| 159 for (int i = 0; i < loop_nesting_levels; i++) { |
| 160 BackEdgeTable::Patch(isolate_, shared->code()); |
| 161 } |
| 162 } else if (shared->HasBytecodeArray()) { |
| 163 DCHECK(FLAG_ignition_osr); // Should only happen when enabled. |
| 164 int level = shared->bytecode_array()->osr_loop_nesting_level(); |
| 165 shared->bytecode_array()->set_osr_loop_nesting_level( |
| 166 Min(level + loop_nesting_levels, AbstractCode::kMaxLoopNestingMarker)); |
| 167 } else { |
| 168 UNREACHABLE(); |
| 157 } | 169 } |
| 158 } | 170 } |
| 159 | 171 |
| 160 void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function, | 172 void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function, |
| 161 int frame_count, | 173 int frame_count, |
| 162 bool frame_optimized) { | 174 bool frame_optimized) { |
| 163 SharedFunctionInfo* shared = function->shared(); | 175 SharedFunctionInfo* shared = function->shared(); |
| 164 Code* shared_code = shared->code(); | 176 Code* shared_code = shared->code(); |
| 165 if (shared_code->kind() != Code::FUNCTION) return; | 177 if (shared_code->kind() != Code::FUNCTION) return; |
| 166 if (function->IsInOptimizationQueue()) return; | 178 if (function->IsInOptimizationQueue()) return; |
| 167 | 179 |
| 168 if (FLAG_always_osr) { | 180 if (FLAG_always_osr) { |
| 169 AttemptOnStackReplacement(function, Code::kMaxLoopNestingMarker); | 181 AttemptOnStackReplacement(function, AbstractCode::kMaxLoopNestingMarker); |
| 170 // Fall through and do a normal optimized compile as well. | 182 // Fall through and do a normal optimized compile as well. |
| 171 } else if (!frame_optimized && | 183 } else if (!frame_optimized && |
| 172 (function->IsMarkedForOptimization() || | 184 (function->IsMarkedForOptimization() || |
| 173 function->IsMarkedForConcurrentOptimization() || | 185 function->IsMarkedForConcurrentOptimization() || |
| 174 function->IsOptimized())) { | 186 function->IsOptimized())) { |
| 175 // Attempt OSR if we are still running unoptimized code even though | 187 // Attempt OSR if we are still running unoptimized code even though |
| 176 // the function has long been marked or even already been optimized. | 188 // the function has long been marked or even already been optimized. |
| 177 int ticks = shared_code->profiler_ticks(); | 189 int ticks = shared_code->profiler_ticks(); |
| 178 int64_t allowance = | 190 int64_t allowance = |
| 179 kOSRCodeSizeAllowanceBase + | 191 kOSRCodeSizeAllowanceBase + |
| (...skipping 137 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 317 } else { | 329 } else { |
| 318 MaybeOptimizeFullCodegen(function, frame_count, frame->is_optimized()); | 330 MaybeOptimizeFullCodegen(function, frame_count, frame->is_optimized()); |
| 319 } | 331 } |
| 320 } | 332 } |
| 321 any_ic_changed_ = false; | 333 any_ic_changed_ = false; |
| 322 } | 334 } |
| 323 | 335 |
| 324 | 336 |
| 325 } // namespace internal | 337 } // namespace internal |
| 326 } // namespace v8 | 338 } // namespace v8 |
| OLD | NEW |