OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/runtime-profiler.h" | 7 #include "src/runtime-profiler.h" |
8 | 8 |
9 #include "src/assembler.h" | 9 #include "src/assembler.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
103 } | 103 } |
104 ASSERT(!function->IsInOptimizationQueue()); | 104 ASSERT(!function->IsInOptimizationQueue()); |
105 function->MarkForConcurrentOptimization(); | 105 function->MarkForConcurrentOptimization(); |
106 } else { | 106 } else { |
107 // The next call to the function will trigger optimization. | 107 // The next call to the function will trigger optimization. |
108 function->MarkForOptimization(); | 108 function->MarkForOptimization(); |
109 } | 109 } |
110 } | 110 } |
111 | 111 |
112 | 112 |
113 void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) { | 113 void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function, |
114 int loop_nesting_levels) { | |
115 SharedFunctionInfo* shared = function->shared(); | |
114 // See AlwaysFullCompiler (in compiler.cc) comment on why we need | 116 // See AlwaysFullCompiler (in compiler.cc) comment on why we need |
115 // Debug::has_break_points(). | 117 // Debug::has_break_points(). |
116 if (!FLAG_use_osr || | 118 if (!FLAG_use_osr || |
117 isolate_->DebuggerHasBreakPoints() || | 119 isolate_->DebuggerHasBreakPoints() || |
118 function->IsBuiltin()) { | 120 function->IsBuiltin()) { |
119 return; | 121 return; |
120 } | 122 } |
121 | 123 |
122 SharedFunctionInfo* shared = function->shared(); | 124 |
Jakob Kummerow
2014/06/23 15:29:23
nit: don't need this empty line (why did it move at all?)
| |
123 // If the code is not optimizable, don't try OSR. | 125 // If the code is not optimizable, don't try OSR. |
124 if (!shared->code()->optimizable()) return; | 126 if (!shared->code()->optimizable()) return; |
125 | 127 |
126 // We are not prepared to do OSR for a function that already has an | 128 // We are not prepared to do OSR for a function that already has an |
127 // allocated arguments object. The optimized code would bypass it for | 129 // allocated arguments object. The optimized code would bypass it for |
128 // arguments accesses, which is unsound. Don't try OSR. | 130 // arguments accesses, which is unsound. Don't try OSR. |
129 if (shared->uses_arguments()) return; | 131 if (shared->uses_arguments()) return; |
130 | 132 |
131 // We're using on-stack replacement: patch the unoptimized code so that | 133 // We're using on-stack replacement: patch the unoptimized code so that |
132 // any back edge in any unoptimized frame will trigger on-stack | 134 // any back edge in any unoptimized frame will trigger on-stack |
133 // replacement for that frame. | 135 // replacement for that frame. |
134 if (FLAG_trace_osr) { | 136 if (FLAG_trace_osr) { |
135 PrintF("[OSR - patching back edges in "); | 137 PrintF("[OSR - patching back edges in "); |
136 function->PrintName(); | 138 function->PrintName(); |
137 PrintF("]\n"); | 139 PrintF("]\n"); |
138 } | 140 } |
139 | 141 |
140 BackEdgeTable::Patch(isolate_, shared->code()); | 142 for (int i = 0; i < loop_nesting_levels; i++) { |
143 BackEdgeTable::Patch(isolate_, shared->code()); | |
144 } | |
141 } | 145 } |
142 | 146 |
143 | 147 |
144 void RuntimeProfiler::OptimizeNow() { | 148 void RuntimeProfiler::OptimizeNow() { |
145 HandleScope scope(isolate_); | 149 HandleScope scope(isolate_); |
146 | 150 |
147 if (isolate_->DebuggerHasBreakPoints()) return; | 151 if (isolate_->DebuggerHasBreakPoints()) return; |
148 | 152 |
149 DisallowHeapAllocation no_gc; | 153 DisallowHeapAllocation no_gc; |
150 | 154 |
(...skipping 17 matching lines...) Expand all Loading... | |
168 SharedFunctionInfo* shared_function_info = functions[i]->shared(); | 172 SharedFunctionInfo* shared_function_info = functions[i]->shared(); |
169 int ticks = shared_function_info->profiler_ticks(); | 173 int ticks = shared_function_info->profiler_ticks(); |
170 if (ticks < Smi::kMaxValue) { | 174 if (ticks < Smi::kMaxValue) { |
171 shared_function_info->set_profiler_ticks(ticks + 1); | 175 shared_function_info->set_profiler_ticks(ticks + 1); |
172 } | 176 } |
173 } | 177 } |
174 | 178 |
175 if (shared_code->kind() != Code::FUNCTION) continue; | 179 if (shared_code->kind() != Code::FUNCTION) continue; |
176 if (function->IsInOptimizationQueue()) continue; | 180 if (function->IsInOptimizationQueue()) continue; |
177 | 181 |
178 if (FLAG_always_osr && | 182 if (FLAG_always_osr) { |
179 shared_code->allow_osr_at_loop_nesting_level() == 0) { | 183 AttemptOnStackReplacement(function, Code::kMaxLoopNestingMarker); |
180 // Testing mode: always try an OSR compile for every function. | |
181 for (int i = 0; i < Code::kMaxLoopNestingMarker; i++) { | |
182 // TODO(titzer): fix AttemptOnStackReplacement to avoid this dumb loop. | |
183 shared_code->set_allow_osr_at_loop_nesting_level(i); | |
184 AttemptOnStackReplacement(function); | |
185 } | |
186 // Fall through and do a normal optimized compile as well. | 184 // Fall through and do a normal optimized compile as well. |
187 } else if (!frame->is_optimized() && | 185 } else if (!frame->is_optimized() && |
188 (function->IsMarkedForOptimization() || | 186 (function->IsMarkedForOptimization() || |
189 function->IsMarkedForConcurrentOptimization() || | 187 function->IsMarkedForConcurrentOptimization() || |
190 function->IsOptimized())) { | 188 function->IsOptimized())) { |
191 // Attempt OSR if we are still running unoptimized code even though the | 189 // Attempt OSR if we are still running unoptimized code even though the |
192 // the function has long been marked or even already been optimized. | 190 // the function has long been marked or even already been optimized. |
193 int ticks = shared_code->profiler_ticks(); | 191 int ticks = shared_code->profiler_ticks(); |
194 int allowance = kOSRCodeSizeAllowanceBase + | 192 int allowance = kOSRCodeSizeAllowanceBase + |
195 ticks * kOSRCodeSizeAllowancePerTick; | 193 ticks * kOSRCodeSizeAllowancePerTick; |
196 if (shared_code->CodeSize() > allowance) { | 194 if (shared_code->CodeSize() > allowance) { |
197 if (ticks < 255) shared_code->set_profiler_ticks(ticks + 1); | 195 if (ticks < 255) shared_code->set_profiler_ticks(ticks + 1); |
198 } else { | 196 } else { |
199 int nesting = shared_code->allow_osr_at_loop_nesting_level(); | 197 AttemptOnStackReplacement(function); |
200 if (nesting < Code::kMaxLoopNestingMarker) { | |
201 int new_nesting = nesting + 1; | |
202 shared_code->set_allow_osr_at_loop_nesting_level(new_nesting); | |
203 AttemptOnStackReplacement(function); | |
204 } | |
205 } | 198 } |
206 continue; | 199 continue; |
207 } | 200 } |
208 | 201 |
209 // Only record top-level code on top of the execution stack and | 202 // Only record top-level code on top of the execution stack and |
210 // avoid optimizing excessively large scripts since top-level code | 203 // avoid optimizing excessively large scripts since top-level code |
211 // will be executed only once. | 204 // will be executed only once. |
212 const int kMaxToplevelSourceSize = 10 * 1024; | 205 const int kMaxToplevelSourceSize = 10 * 1024; |
213 if (shared->is_toplevel() && | 206 if (shared->is_toplevel() && |
214 (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) { | 207 (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) { |
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
259 Optimize(function, "small function"); | 252 Optimize(function, "small function"); |
260 } else { | 253 } else { |
261 shared_code->set_profiler_ticks(ticks + 1); | 254 shared_code->set_profiler_ticks(ticks + 1); |
262 } | 255 } |
263 } | 256 } |
264 any_ic_changed_ = false; | 257 any_ic_changed_ = false; |
265 } | 258 } |
266 | 259 |
267 | 260 |
268 } } // namespace v8::internal | 261 } } // namespace v8::internal |
OLD | NEW |