OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/runtime-profiler.h" | 5 #include "src/runtime-profiler.h" |
6 | 6 |
7 #include "src/assembler.h" | 7 #include "src/assembler.h" |
8 #include "src/base/platform/platform.h" | 8 #include "src/base/platform/platform.h" |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
120 void RuntimeProfiler::Baseline(JSFunction* function, const char* reason) { | 120 void RuntimeProfiler::Baseline(JSFunction* function, const char* reason) { |
121 TraceRecompile(function, reason, "baseline"); | 121 TraceRecompile(function, reason, "baseline"); |
122 | 122 |
123 // TODO(4280): Fix this to check function is compiled for the interpreter | 123 // TODO(4280): Fix this to check function is compiled for the interpreter |
124 // once we have a standard way to check that. For now function will only | 124 // once we have a standard way to check that. For now function will only |
125 // have a bytecode array if compiled for the interpreter. | 125 // have a bytecode array if compiled for the interpreter. |
126 DCHECK(function->shared()->HasBytecodeArray()); | 126 DCHECK(function->shared()->HasBytecodeArray()); |
127 function->MarkForBaseline(); | 127 function->MarkForBaseline(); |
128 } | 128 } |
129 | 129 |
130 void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function, | 130 void RuntimeProfiler::AttemptOnStackReplacement(JavaScriptFrame* frame, |
131 int loop_nesting_levels) { | 131 int loop_nesting_levels) { |
| 132 JSFunction* function = frame->function(); |
132 SharedFunctionInfo* shared = function->shared(); | 133 SharedFunctionInfo* shared = function->shared(); |
133 if (!FLAG_use_osr || function->shared()->IsBuiltin()) { | 134 if (!FLAG_use_osr || function->shared()->IsBuiltin()) { |
134 return; | 135 return; |
135 } | 136 } |
136 | 137 |
137 // If the code is not optimizable, don't try OSR. | 138 // If the code is not optimizable, don't try OSR. |
138 if (shared->optimization_disabled()) return; | 139 if (shared->optimization_disabled()) return; |
139 | 140 |
140 // We are not prepared to do OSR for a function that already has an | 141 // We are not prepared to do OSR for a function that already has an |
141 // allocated arguments object. The optimized code would bypass it for | 142 // allocated arguments object. The optimized code would bypass it for |
142 // arguments accesses, which is unsound. Don't try OSR. | 143 // arguments accesses, which is unsound. Don't try OSR. |
143 if (shared->uses_arguments()) return; | 144 if (shared->uses_arguments()) return; |
144 | 145 |
145 // We're using on-stack replacement: modify unoptimized code so that | 146 // We're using on-stack replacement: modify unoptimized code so that |
146 // certain back edges in any unoptimized frame will trigger on-stack | 147 // certain back edges in any unoptimized frame will trigger on-stack |
147 // replacement for that frame. | 148 // replacement for that frame. |
148 // - Ignition: Store new loop nesting level in BytecodeArray header. | 149 // - Ignition: Store new loop nesting level in BytecodeArray header. |
149 // - FullCodegen: Patch back edges up to new level using BackEdgeTable. | 150 // - FullCodegen: Patch back edges up to new level using BackEdgeTable. |
150 if (FLAG_trace_osr) { | 151 if (FLAG_trace_osr) { |
151 PrintF("[OSR - arming back edges in "); | 152 PrintF("[OSR - arming back edges in "); |
152 function->PrintName(); | 153 function->PrintName(); |
153 PrintF("]\n"); | 154 PrintF("]\n"); |
154 } | 155 } |
155 | 156 |
156 if (shared->code()->kind() == Code::FUNCTION) { | 157 if (frame->type() == StackFrame::JAVA_SCRIPT) { |
| 158 DCHECK(shared->HasBaselineCode()); |
157 DCHECK(BackEdgeTable::Verify(shared->GetIsolate(), shared->code())); | 159 DCHECK(BackEdgeTable::Verify(shared->GetIsolate(), shared->code())); |
158 for (int i = 0; i < loop_nesting_levels; i++) { | 160 for (int i = 0; i < loop_nesting_levels; i++) { |
159 BackEdgeTable::Patch(isolate_, shared->code()); | 161 BackEdgeTable::Patch(isolate_, shared->code()); |
160 } | 162 } |
161 } else if (shared->HasBytecodeArray()) { | 163 } else if (frame->type() == StackFrame::INTERPRETED) { |
162 DCHECK(FLAG_ignition_osr); // Should only happen when enabled. | 164 DCHECK(shared->HasBytecodeArray()); |
| 165 if (!FLAG_ignition_osr) return; // Only use this when enabled. |
163 int level = shared->bytecode_array()->osr_loop_nesting_level(); | 166 int level = shared->bytecode_array()->osr_loop_nesting_level(); |
164 shared->bytecode_array()->set_osr_loop_nesting_level( | 167 shared->bytecode_array()->set_osr_loop_nesting_level( |
165 Min(level + loop_nesting_levels, AbstractCode::kMaxLoopNestingMarker)); | 168 Min(level + loop_nesting_levels, AbstractCode::kMaxLoopNestingMarker)); |
166 } else { | 169 } else { |
167 UNREACHABLE(); | 170 UNREACHABLE(); |
168 } | 171 } |
169 } | 172 } |
170 | 173 |
171 void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function, | 174 void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function, |
172 int frame_count, | 175 JavaScriptFrame* frame, |
173 bool frame_optimized) { | 176 int frame_count) { |
174 SharedFunctionInfo* shared = function->shared(); | 177 SharedFunctionInfo* shared = function->shared(); |
175 Code* shared_code = shared->code(); | 178 Code* shared_code = shared->code(); |
176 if (shared_code->kind() != Code::FUNCTION) return; | 179 if (shared_code->kind() != Code::FUNCTION) return; |
177 if (function->IsInOptimizationQueue()) return; | 180 if (function->IsInOptimizationQueue()) return; |
178 | 181 |
179 if (FLAG_always_osr) { | 182 if (FLAG_always_osr) { |
180 AttemptOnStackReplacement(function, AbstractCode::kMaxLoopNestingMarker); | 183 AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker); |
181 // Fall through and do a normal optimized compile as well. | 184 // Fall through and do a normal optimized compile as well. |
182 } else if (!frame_optimized && | 185 } else if (!frame->is_optimized() && |
183 (function->IsMarkedForOptimization() || | 186 (function->IsMarkedForOptimization() || |
184 function->IsMarkedForConcurrentOptimization() || | 187 function->IsMarkedForConcurrentOptimization() || |
185 function->IsOptimized())) { | 188 function->IsOptimized())) { |
186 // Attempt OSR if we are still running unoptimized code even though the | 189 // Attempt OSR if we are still running unoptimized code even though the |
187 // function has long been marked or even already been optimized. | 190 // function has long been marked or even already been optimized. |
188 int ticks = shared_code->profiler_ticks(); | 191 int ticks = shared_code->profiler_ticks(); |
189 int64_t allowance = | 192 int64_t allowance = |
190 kOSRCodeSizeAllowanceBase + | 193 kOSRCodeSizeAllowanceBase + |
191 static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTick; | 194 static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTick; |
192 if (shared_code->CodeSize() > allowance && | 195 if (shared_code->CodeSize() > allowance && |
193 ticks < Code::ProfilerTicksField::kMax) { | 196 ticks < Code::ProfilerTicksField::kMax) { |
194 shared_code->set_profiler_ticks(ticks + 1); | 197 shared_code->set_profiler_ticks(ticks + 1); |
195 } else { | 198 } else { |
196 AttemptOnStackReplacement(function); | 199 AttemptOnStackReplacement(frame); |
197 } | 200 } |
198 return; | 201 return; |
199 } | 202 } |
200 | 203 |
201 // Only record top-level code on top of the execution stack and | 204 // Only record top-level code on top of the execution stack and |
202 // avoid optimizing excessively large scripts since top-level code | 205 // avoid optimizing excessively large scripts since top-level code |
203 // will be executed only once. | 206 // will be executed only once. |
204 const int kMaxToplevelSourceSize = 10 * 1024; | 207 const int kMaxToplevelSourceSize = 10 * 1024; |
205 if (shared->is_toplevel() && | 208 if (shared->is_toplevel() && |
206 (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) { | 209 (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) { |
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
258 Optimize(function, "small function"); | 261 Optimize(function, "small function"); |
259 } else { | 262 } else { |
260 shared_code->set_profiler_ticks(ticks + 1); | 263 shared_code->set_profiler_ticks(ticks + 1); |
261 } | 264 } |
262 } else { | 265 } else { |
263 shared_code->set_profiler_ticks(ticks + 1); | 266 shared_code->set_profiler_ticks(ticks + 1); |
264 } | 267 } |
265 } | 268 } |
266 | 269 |
267 void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function, | 270 void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function, |
268 bool frame_optimized) { | 271 JavaScriptFrame* frame) { |
269 if (function->IsInOptimizationQueue()) return; | 272 if (function->IsInOptimizationQueue()) return; |
270 | 273 |
271 SharedFunctionInfo* shared = function->shared(); | 274 SharedFunctionInfo* shared = function->shared(); |
272 int ticks = shared->profiler_ticks(); | 275 int ticks = shared->profiler_ticks(); |
273 | 276 |
274 // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller | 277 // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller |
275 // than kMaxToplevelSourceSize. | 278 // than kMaxToplevelSourceSize. |
276 | 279 |
277 if (FLAG_ignition_osr && FLAG_always_osr) { | 280 if (FLAG_always_osr) { |
278 AttemptOnStackReplacement(function, AbstractCode::kMaxLoopNestingMarker); | 281 AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker); |
279 // Fall through and do a normal baseline compile as well. | 282 // Fall through and do a normal baseline compile as well. |
280 } else if (!frame_optimized && | 283 } else if (!frame->is_optimized() && |
281 (function->IsMarkedForBaseline() || | 284 (function->IsMarkedForBaseline() || |
282 function->IsMarkedForOptimization() || | 285 function->IsMarkedForOptimization() || |
283 function->IsMarkedForConcurrentOptimization() || | 286 function->IsMarkedForConcurrentOptimization() || |
284 function->IsOptimized())) { | 287 function->IsOptimized())) { |
285 // Attempt OSR if we are still running interpreted code even though the | 288 // Attempt OSR if we are still running interpreted code even though the |
286 // function has long been marked or even already been optimized. | 289 // function has long been marked or even already been optimized. |
287 int64_t allowance = | 290 int64_t allowance = |
288 kOSRCodeSizeAllowanceBaseIgnition + | 291 kOSRCodeSizeAllowanceBaseIgnition + |
289 static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition; | 292 static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition; |
290 if (FLAG_ignition_osr && shared->HasBytecodeArray() && | 293 if (shared->bytecode_array()->Size() <= allowance) { |
291 shared->bytecode_array()->Size() <= allowance) { | 294 AttemptOnStackReplacement(frame); |
292 AttemptOnStackReplacement(function); | |
293 } | 295 } |
294 return; | 296 return; |
295 } | 297 } |
296 | 298 |
297 if (shared->optimization_disabled() && | 299 if (shared->optimization_disabled() && |
298 shared->disable_optimization_reason() == kOptimizationDisabledForTest) { | 300 shared->disable_optimization_reason() == kOptimizationDisabledForTest) { |
299 // Don't baseline functions which have been marked by NeverOptimizeFunction | 301 // Don't baseline functions which have been marked by NeverOptimizeFunction |
300 // in a test. | 302 // in a test. |
301 return; | 303 return; |
302 } | 304 } |
303 | 305 |
304 if (ticks >= kProfilerTicksBeforeBaseline) { | 306 if (ticks >= kProfilerTicksBeforeBaseline) { |
305 Baseline(function, "hot enough for baseline"); | 307 Baseline(function, "hot enough for baseline"); |
306 } | 308 } |
307 } | 309 } |
308 | 310 |
309 void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function, | 311 void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function, |
310 bool frame_optimized) { | 312 JavaScriptFrame* frame) { |
311 if (function->IsInOptimizationQueue()) return; | 313 if (function->IsInOptimizationQueue()) return; |
312 | 314 |
313 SharedFunctionInfo* shared = function->shared(); | 315 SharedFunctionInfo* shared = function->shared(); |
314 int ticks = shared->profiler_ticks(); | 316 int ticks = shared->profiler_ticks(); |
315 | 317 |
316 // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller | 318 // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller |
317 // than kMaxToplevelSourceSize. | 319 // than kMaxToplevelSourceSize. |
318 | 320 |
319 if (FLAG_ignition_osr && FLAG_always_osr) { | 321 if (FLAG_always_osr) { |
320 AttemptOnStackReplacement(function, AbstractCode::kMaxLoopNestingMarker); | 322 AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker); |
321 // Fall through and do a normal optimized compile as well. | 323 // Fall through and do a normal optimized compile as well. |
322 } else if (!frame_optimized && | 324 } else if (!frame->is_optimized() && |
323 (function->IsMarkedForBaseline() || | 325 (function->IsMarkedForBaseline() || |
324 function->IsMarkedForOptimization() || | 326 function->IsMarkedForOptimization() || |
325 function->IsMarkedForConcurrentOptimization() || | 327 function->IsMarkedForConcurrentOptimization() || |
326 function->IsOptimized())) { | 328 function->IsOptimized())) { |
327 // Attempt OSR if we are still running interpreted code even though the | 329 // Attempt OSR if we are still running interpreted code even though the |
328 // function has long been marked or even already been optimized. | 330 // function has long been marked or even already been optimized. |
329 int64_t allowance = | 331 int64_t allowance = |
330 kOSRCodeSizeAllowanceBaseIgnition + | 332 kOSRCodeSizeAllowanceBaseIgnition + |
331 static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition; | 333 static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition; |
332 if (FLAG_ignition_osr && shared->HasBytecodeArray() && | 334 if (shared->bytecode_array()->Size() <= allowance) { |
333 shared->bytecode_array()->Size() <= allowance) { | 335 AttemptOnStackReplacement(frame); |
334 AttemptOnStackReplacement(function); | |
335 } | 336 } |
336 return; | 337 return; |
337 } | 338 } |
338 | 339 |
339 if (shared->optimization_disabled()) { | 340 if (shared->optimization_disabled()) { |
340 if (shared->deopt_count() >= FLAG_max_opt_count) { | 341 if (shared->deopt_count() >= FLAG_max_opt_count) { |
341 // If optimization was disabled due to many deoptimizations, | 342 // If optimization was disabled due to many deoptimizations, |
342 // then check if the function is hot and try to reenable optimization. | 343 // then check if the function is hot and try to reenable optimization. |
343 if (ticks >= kProfilerTicksBeforeReenablingOptimization) { | 344 if (ticks >= kProfilerTicksBeforeReenablingOptimization) { |
344 shared->set_profiler_ticks(0); | 345 shared->set_profiler_ticks(0); |
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
398 int ticks = shared_function_info->profiler_ticks(); | 399 int ticks = shared_function_info->profiler_ticks(); |
399 if (ticks < Smi::kMaxValue) { | 400 if (ticks < Smi::kMaxValue) { |
400 shared_function_info->set_profiler_ticks(ticks + 1); | 401 shared_function_info->set_profiler_ticks(ticks + 1); |
401 } | 402 } |
402 } | 403 } |
403 | 404 |
404 Compiler::CompilationTier next_tier = | 405 Compiler::CompilationTier next_tier = |
405 Compiler::NextCompilationTier(function); | 406 Compiler::NextCompilationTier(function); |
406 if (function->shared()->HasBytecodeArray()) { | 407 if (function->shared()->HasBytecodeArray()) { |
407 if (next_tier == Compiler::BASELINE) { | 408 if (next_tier == Compiler::BASELINE) { |
408 MaybeBaselineIgnition(function, frame->is_optimized()); | 409 MaybeBaselineIgnition(function, frame); |
409 } else { | 410 } else { |
410 DCHECK_EQ(next_tier, Compiler::OPTIMIZED); | 411 DCHECK_EQ(next_tier, Compiler::OPTIMIZED); |
411 MaybeOptimizeIgnition(function, frame->is_optimized()); | 412 MaybeOptimizeIgnition(function, frame); |
412 } | 413 } |
413 } else { | 414 } else { |
414 DCHECK_EQ(next_tier, Compiler::OPTIMIZED); | 415 DCHECK_EQ(next_tier, Compiler::OPTIMIZED); |
415 MaybeOptimizeFullCodegen(function, frame_count, frame->is_optimized()); | 416 MaybeOptimizeFullCodegen(function, frame, frame_count); |
416 } | 417 } |
417 } | 418 } |
418 any_ic_changed_ = false; | 419 any_ic_changed_ = false; |
419 } | 420 } |
420 | 421 |
421 | 422 |
422 } // namespace internal | 423 } // namespace internal |
423 } // namespace v8 | 424 } // namespace v8 |
OLD | NEW |