Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(4)

Side by Side Diff: src/runtime-profiler.cc

Issue 1707693003: [Interpreter] Enable runtime profiler support for Ignition. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Really fix --debug-code Created 4 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/runtime-profiler.h ('k') | src/x64/builtins-x64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/runtime-profiler.h" 5 #include "src/runtime-profiler.h"
6 6
7 #include "src/assembler.h" 7 #include "src/assembler.h"
8 #include "src/ast/scopeinfo.h" 8 #include "src/ast/scopeinfo.h"
9 #include "src/base/platform/platform.h" 9 #include "src/base/platform/platform.h"
10 #include "src/bootstrapper.h" 10 #include "src/bootstrapper.h"
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after
51 RuntimeProfiler::RuntimeProfiler(Isolate* isolate) 51 RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
52 : isolate_(isolate), 52 : isolate_(isolate),
53 any_ic_changed_(false) { 53 any_ic_changed_(false) {
54 } 54 }
55 55
56 56
57 static void GetICCounts(SharedFunctionInfo* shared, 57 static void GetICCounts(SharedFunctionInfo* shared,
58 int* ic_with_type_info_count, int* ic_generic_count, 58 int* ic_with_type_info_count, int* ic_generic_count,
59 int* ic_total_count, int* type_info_percentage, 59 int* ic_total_count, int* type_info_percentage,
60 int* generic_percentage) { 60 int* generic_percentage) {
61 Code* shared_code = shared->code();
62 *ic_total_count = 0; 61 *ic_total_count = 0;
63 *ic_generic_count = 0; 62 *ic_generic_count = 0;
64 *ic_with_type_info_count = 0; 63 *ic_with_type_info_count = 0;
65 Object* raw_info = shared_code->type_feedback_info(); 64 if (shared->code()->kind() == Code::FUNCTION) {
66 if (raw_info->IsTypeFeedbackInfo()) { 65 Code* shared_code = shared->code();
67 TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info); 66 Object* raw_info = shared_code->type_feedback_info();
68 *ic_with_type_info_count = info->ic_with_type_info_count(); 67 if (raw_info->IsTypeFeedbackInfo()) {
69 *ic_generic_count = info->ic_generic_count(); 68 TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info);
70 *ic_total_count = info->ic_total_count(); 69 *ic_with_type_info_count = info->ic_with_type_info_count();
70 *ic_generic_count = info->ic_generic_count();
71 *ic_total_count = info->ic_total_count();
72 }
71 } 73 }
72 74
73 // Harvest vector-ics as well 75 // Harvest vector-ics as well
74 TypeFeedbackVector* vector = shared->feedback_vector(); 76 TypeFeedbackVector* vector = shared->feedback_vector();
75 int with = 0, gen = 0; 77 int with = 0, gen = 0;
76 vector->ComputeCounts(&with, &gen); 78 vector->ComputeCounts(&with, &gen);
77 *ic_with_type_info_count += with; 79 *ic_with_type_info_count += with;
78 *ic_generic_count += gen; 80 *ic_generic_count += gen;
79 81
80 if (*ic_total_count > 0) { 82 if (*ic_total_count > 0) {
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after
129 PrintF("[OSR - patching back edges in "); 131 PrintF("[OSR - patching back edges in ");
130 function->PrintName(); 132 function->PrintName();
131 PrintF("]\n"); 133 PrintF("]\n");
132 } 134 }
133 135
134 for (int i = 0; i < loop_nesting_levels; i++) { 136 for (int i = 0; i < loop_nesting_levels; i++) {
135 BackEdgeTable::Patch(isolate_, shared->code()); 137 BackEdgeTable::Patch(isolate_, shared->code());
136 } 138 }
137 } 139 }
138 140
141 void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function,
142 int frame_count,
143 bool frame_optimized) {
144 SharedFunctionInfo* shared = function->shared();
145 Code* shared_code = shared->code();
146 if (shared_code->kind() != Code::FUNCTION) return;
147 if (function->IsInOptimizationQueue()) return;
139 148
140 void RuntimeProfiler::OptimizeNow() { 149 if (FLAG_always_osr) {
150 AttemptOnStackReplacement(function, Code::kMaxLoopNestingMarker);
151 // Fall through and do a normal optimized compile as well.
152 } else if (!frame_optimized &&
153 (function->IsMarkedForOptimization() ||
154 function->IsMarkedForConcurrentOptimization() ||
155 function->IsOptimized())) {
156 // Attempt OSR if we are still running unoptimized code even though the
157 // the function has long been marked or even already been optimized.
158 int ticks = shared_code->profiler_ticks();
159 int64_t allowance =
160 kOSRCodeSizeAllowanceBase +
161 static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTick;
162 if (shared_code->CodeSize() > allowance &&
163 ticks < Code::ProfilerTicksField::kMax) {
164 shared_code->set_profiler_ticks(ticks + 1);
165 } else {
166 AttemptOnStackReplacement(function);
167 }
168 return;
169 }
170
171 // Only record top-level code on top of the execution stack and
172 // avoid optimizing excessively large scripts since top-level code
173 // will be executed only once.
174 const int kMaxToplevelSourceSize = 10 * 1024;
175 if (shared->is_toplevel() &&
176 (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) {
177 return;
178 }
179
180 // Do not record non-optimizable functions.
181 if (shared->optimization_disabled()) {
182 if (shared->deopt_count() >= FLAG_max_opt_count) {
183 // If optimization was disabled due to many deoptimizations,
184 // then check if the function is hot and try to reenable optimization.
185 int ticks = shared_code->profiler_ticks();
186 if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
187 shared_code->set_profiler_ticks(0);
188 shared->TryReenableOptimization();
189 } else {
190 shared_code->set_profiler_ticks(ticks + 1);
191 }
192 }
193 return;
194 }
195 if (function->IsOptimized()) return;
196
197 int ticks = shared_code->profiler_ticks();
198
199 if (ticks >= kProfilerTicksBeforeOptimization) {
200 int typeinfo, generic, total, type_percentage, generic_percentage;
201 GetICCounts(shared, &typeinfo, &generic, &total, &type_percentage,
202 &generic_percentage);
203 if (type_percentage >= FLAG_type_info_threshold &&
204 generic_percentage <= FLAG_generic_ic_threshold) {
205 // If this particular function hasn't had any ICs patched for enough
206 // ticks, optimize it now.
207 Optimize(function, "hot and stable");
208 } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
209 Optimize(function, "not much type info but very hot");
210 } else {
211 shared_code->set_profiler_ticks(ticks + 1);
212 if (FLAG_trace_opt_verbose) {
213 PrintF("[not yet optimizing ");
214 function->PrintName();
215 PrintF(", not enough type info: %d/%d (%d%%)]\n", typeinfo, total,
216 type_percentage);
217 }
218 }
219 } else if (!any_ic_changed_ &&
220 shared_code->instruction_size() < kMaxSizeEarlyOpt) {
221 // If no IC was patched since the last tick and this function is very
222 // small, optimistically optimize it now.
223 int typeinfo, generic, total, type_percentage, generic_percentage;
224 GetICCounts(shared, &typeinfo, &generic, &total, &type_percentage,
225 &generic_percentage);
226 if (type_percentage >= FLAG_type_info_threshold &&
227 generic_percentage <= FLAG_generic_ic_threshold) {
228 Optimize(function, "small function");
229 } else {
230 shared_code->set_profiler_ticks(ticks + 1);
231 }
232 } else {
233 shared_code->set_profiler_ticks(ticks + 1);
234 }
235 }
236
237 void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function,
238 bool frame_optimized) {
239 if (function->IsInOptimizationQueue()) return;
240
241 SharedFunctionInfo* shared = function->shared();
242 int ticks = shared->profiler_ticks();
243
244 // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
245 // than kMaxToplevelSourceSize.
246 // TODO(rmcilroy): Consider whether we should optimize small functions when
247 // they are first seen on the stack (e.g., kMaxSizeEarlyOpt).
248
249 if (!frame_optimized && (function->IsMarkedForOptimization() ||
250 function->IsMarkedForConcurrentOptimization() ||
251 function->IsOptimized())) {
252 // TODO(rmcilroy): Support OSR in these cases.
253
254 return;
255 }
256
257 // Do not optimize non-optimizable functions.
258 if (shared->optimization_disabled()) {
259 if (shared->deopt_count() >= FLAG_max_opt_count) {
260 // If optimization was disabled due to many deoptimizations,
261 // then check if the function is hot and try to reenable optimization.
262 if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
263 shared->set_profiler_ticks(0);
264 shared->TryReenableOptimization();
265 }
266 }
267 return;
268 }
269
270 if (function->IsOptimized()) return;
271
272 if (ticks >= kProfilerTicksBeforeOptimization) {
273 int typeinfo, generic, total, type_percentage, generic_percentage;
274 GetICCounts(shared, &typeinfo, &generic, &total, &type_percentage,
275 &generic_percentage);
276 if (type_percentage >= FLAG_type_info_threshold &&
277 generic_percentage <= FLAG_generic_ic_threshold) {
278 // If this particular function hasn't had any ICs patched for enough
279 // ticks, optimize it now.
280 Optimize(function, "hot and stable");
281 } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
282 Optimize(function, "not much type info but very hot");
283 } else {
284 if (FLAG_trace_opt_verbose) {
285 PrintF("[not yet optimizing ");
286 function->PrintName();
287 PrintF(", not enough type info: %d/%d (%d%%)]\n", typeinfo, total,
288 type_percentage);
289 }
290 }
291 }
292 }
293
294 void RuntimeProfiler::MarkCandidatesForOptimization() {
141 HandleScope scope(isolate_); 295 HandleScope scope(isolate_);
142 296
143 if (!isolate_->use_crankshaft()) return; 297 if (!isolate_->use_crankshaft()) return;
144 298
145 DisallowHeapAllocation no_gc; 299 DisallowHeapAllocation no_gc;
146 300
147 // Run through the JavaScript frames and collect them. If we already 301 // Run through the JavaScript frames and collect them. If we already
148 // have a sample of the function, we mark it for optimizations 302 // have a sample of the function, we mark it for optimizations
149 // (eagerly or lazily). 303 // (eagerly or lazily).
150 int frame_count = 0; 304 int frame_count = 0;
151 int frame_count_limit = FLAG_frame_count; 305 int frame_count_limit = FLAG_frame_count;
152 for (JavaScriptFrameIterator it(isolate_); 306 for (JavaScriptFrameIterator it(isolate_);
153 frame_count++ < frame_count_limit && !it.done(); 307 frame_count++ < frame_count_limit && !it.done();
154 it.Advance()) { 308 it.Advance()) {
155 JavaScriptFrame* frame = it.frame(); 309 JavaScriptFrame* frame = it.frame();
156 JSFunction* function = frame->function(); 310 JSFunction* function = frame->function();
157 311
158 SharedFunctionInfo* shared = function->shared();
159 Code* shared_code = shared->code();
160
161 List<JSFunction*> functions(4); 312 List<JSFunction*> functions(4);
162 frame->GetFunctions(&functions); 313 frame->GetFunctions(&functions);
163 for (int i = functions.length(); --i >= 0; ) { 314 for (int i = functions.length(); --i >= 0; ) {
164 SharedFunctionInfo* shared_function_info = functions[i]->shared(); 315 SharedFunctionInfo* shared_function_info = functions[i]->shared();
165 int ticks = shared_function_info->profiler_ticks(); 316 int ticks = shared_function_info->profiler_ticks();
166 if (ticks < Smi::kMaxValue) { 317 if (ticks < Smi::kMaxValue) {
167 shared_function_info->set_profiler_ticks(ticks + 1); 318 shared_function_info->set_profiler_ticks(ticks + 1);
168 } 319 }
169 } 320 }
170 321
171 if (shared_code->kind() != Code::FUNCTION) continue; 322 if (FLAG_ignition) {
172 if (function->IsInOptimizationQueue()) continue; 323 MaybeOptimizeIgnition(function, frame->is_optimized());
173
174 if (FLAG_always_osr) {
175 AttemptOnStackReplacement(function, Code::kMaxLoopNestingMarker);
176 // Fall through and do a normal optimized compile as well.
177 } else if (!frame->is_optimized() &&
178 (function->IsMarkedForOptimization() ||
179 function->IsMarkedForConcurrentOptimization() ||
180 function->IsOptimized())) {
181 // Attempt OSR if we are still running unoptimized code even though the
 182       // function has long been marked or even already been optimized.
183 int ticks = shared_code->profiler_ticks();
184 int64_t allowance =
185 kOSRCodeSizeAllowanceBase +
186 static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTick;
187 if (shared_code->CodeSize() > allowance &&
188 ticks < Code::ProfilerTicksField::kMax) {
189 shared_code->set_profiler_ticks(ticks + 1);
190 } else {
191 AttemptOnStackReplacement(function);
192 }
193 continue;
194 }
195
196 // Only record top-level code on top of the execution stack and
197 // avoid optimizing excessively large scripts since top-level code
198 // will be executed only once.
199 const int kMaxToplevelSourceSize = 10 * 1024;
200 if (shared->is_toplevel() &&
201 (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) {
202 continue;
203 }
204
205 // Do not record non-optimizable functions.
206 if (shared->optimization_disabled()) {
207 if (shared->deopt_count() >= FLAG_max_opt_count) {
208 // If optimization was disabled due to many deoptimizations,
209 // then check if the function is hot and try to reenable optimization.
210 int ticks = shared_code->profiler_ticks();
211 if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
212 shared_code->set_profiler_ticks(0);
213 shared->TryReenableOptimization();
214 } else {
215 shared_code->set_profiler_ticks(ticks + 1);
216 }
217 }
218 continue;
219 }
220 if (function->IsOptimized()) continue;
221
222 int ticks = shared_code->profiler_ticks();
223
224 if (ticks >= kProfilerTicksBeforeOptimization) {
225 int typeinfo, generic, total, type_percentage, generic_percentage;
226 GetICCounts(shared, &typeinfo, &generic, &total, &type_percentage,
227 &generic_percentage);
228 if (type_percentage >= FLAG_type_info_threshold &&
229 generic_percentage <= FLAG_generic_ic_threshold) {
230 // If this particular function hasn't had any ICs patched for enough
231 // ticks, optimize it now.
232 Optimize(function, "hot and stable");
233 } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
234 Optimize(function, "not much type info but very hot");
235 } else {
236 shared_code->set_profiler_ticks(ticks + 1);
237 if (FLAG_trace_opt_verbose) {
238 PrintF("[not yet optimizing ");
239 function->PrintName();
240 PrintF(", not enough type info: %d/%d (%d%%)]\n", typeinfo, total,
241 type_percentage);
242 }
243 }
244 } else if (!any_ic_changed_ &&
245 shared_code->instruction_size() < kMaxSizeEarlyOpt) {
246 // If no IC was patched since the last tick and this function is very
247 // small, optimistically optimize it now.
248 int typeinfo, generic, total, type_percentage, generic_percentage;
249 GetICCounts(shared, &typeinfo, &generic, &total, &type_percentage,
250 &generic_percentage);
251 if (type_percentage >= FLAG_type_info_threshold &&
252 generic_percentage <= FLAG_generic_ic_threshold) {
253 Optimize(function, "small function");
254 } else {
255 shared_code->set_profiler_ticks(ticks + 1);
256 }
257 } else { 324 } else {
258 shared_code->set_profiler_ticks(ticks + 1); 325 MaybeOptimizeFullCodegen(function, frame_count, frame->is_optimized());
259 } 326 }
260 } 327 }
261 any_ic_changed_ = false; 328 any_ic_changed_ = false;
262 } 329 }
263 330
264 331
265 } // namespace internal 332 } // namespace internal
266 } // namespace v8 333 } // namespace v8
OLDNEW
« no previous file with comments | « src/runtime-profiler.h ('k') | src/x64/builtins-x64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698