Chromium Code Reviews

Diff: src/runtime-profiler.cc

Issue 2360913003: [interpreter] Compute and use type info percentage (Closed)
Patch Set: Created 4 years, 2 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/runtime-profiler.h"

 #include "src/assembler.h"
 #include "src/base/platform/platform.h"
 #include "src/bootstrapper.h"
 #include "src/code-stubs.h"
(...skipping 37 matching lines...)
 static const int kOSRCodeSizeAllowancePerTick =
     4 * FullCodeGenerator::kCodeSizeMultiplier;
 static const int kOSRCodeSizeAllowancePerTickIgnition =
     4 * interpreter::Interpreter::kCodeSizeMultiplier;

 // Maximum size in bytes of generated code for a function to be optimized
 // the very first time it is seen on the stack.
 static const int kMaxSizeEarlyOpt =
     5 * FullCodeGenerator::kCodeSizeMultiplier;

+#define OPTIMIZATION_REASON_LIST(V)                            \
+  V(DoNotOptimize, "do not optimize")                          \
+  V(HotAndStable, "hot and stable")                            \
+  V(HotEnoughForBaseline, "hot enough for baseline")           \
+  V(HotWithoutMuchTypeInfo, "not much type info but very hot") \
+  V(SmallFunction, "small function")
+
+enum class OptimizationReason : uint8_t {
+#define OPTIMIZATION_REASON_CONSTANTS(Constant, message) k##Constant,
+  OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_CONSTANTS)
+#undef OPTIMIZATION_REASON_CONSTANTS
+};
+
+char const* OptimizationReasonToString(OptimizationReason reason) {
+  static char const* reasons[] = {
+#define OPTIMIZATION_REASON_TEXTS(Constant, message) message,
+      OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_TEXTS)
+#undef OPTIMIZATION_REASON_TEXTS
+  };
+  size_t const index = static_cast<size_t>(reason);
+  DCHECK_LT(index, arraysize(reasons));
+  return reasons[index];
+}
+
+std::ostream& operator<<(std::ostream& os, OptimizationReason reason) {
+  switch (reason) {

    rmcilroy 2016/09/23 08:10:34: Just call OptimizationReasonToString rather than a…
    klaasb 2016/09/23 08:31:39: Done.

+#define OPTIMIZATION_REASON_NAMES(Constant, message) \
+  case OptimizationReason::k##Constant:              \
+    return os << #Constant;
+    OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_NAMES)
+#undef OPTIMIZATION_REASON_NAMES
+  }
+  UNREACHABLE();
+  return os;
+}
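
(rmcilroy's truncated comment asks the streaming operator to reuse OptimizationReasonToString rather than expand a third X-macro, and klaasb's "Done." suggests a later patch set does exactly that. A minimal sketch of that simplification, assuming the declarations above; note it would stream the human-readable message, e.g. "hot and stable", instead of the constant name "HotAndStable":)

    std::ostream& operator<<(std::ostream& os, OptimizationReason reason) {
      // Delegate to the existing lookup table instead of a switch.
      return os << OptimizationReasonToString(reason);
    }
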

 RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
     : isolate_(isolate),
       any_ic_changed_(false) {
 }

 static void GetICCounts(JSFunction* function, int* ic_with_type_info_count,
                         int* ic_generic_count, int* ic_total_count,
                         int* type_info_percentage, int* generic_percentage) {
   *ic_total_count = 0;
   *ic_generic_count = 0;
   *ic_with_type_info_count = 0;
   if (function->code()->kind() == Code::FUNCTION) {
     Code* shared_code = function->shared()->code();
     Object* raw_info = shared_code->type_feedback_info();
     if (raw_info->IsTypeFeedbackInfo()) {
       TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info);
       *ic_with_type_info_count = info->ic_with_type_info_count();
       *ic_generic_count = info->ic_generic_count();
       *ic_total_count = info->ic_total_count();
     }
   }

   // Harvest vector-ics as well
   TypeFeedbackVector* vector = function->feedback_vector();
-  int with = 0, gen = 0;
+  int with = 0, gen = 0, interpreter_total = 0;

    rmcilroy 2016/09/23 08:10:34: nit - type_vector_ic_count
    klaasb 2016/09/23 08:31:40: Done.
   const bool is_interpreted =
       function->shared()->code()->is_interpreter_trampoline_builtin();

-  vector->ComputeCounts(&with, &gen, is_interpreted);
+  vector->ComputeCounts(&with, &gen, &interpreter_total, is_interpreted);
+  if (is_interpreted) {
+    *ic_total_count = interpreter_total;

    rmcilroy 2016/09/23 08:10:34: DCHECK ic_total_count is zero beforehand
    klaasb 2016/09/23 08:31:40: Done.

+  }
   *ic_with_type_info_count += with;
   *ic_generic_count += gen;

   if (*ic_total_count > 0) {
     *type_info_percentage = 100 * *ic_with_type_info_count / *ic_total_count;
     *generic_percentage = 100 * *ic_generic_count / *ic_total_count;
   } else {
     *type_info_percentage = 100;  // Compared against lower bound.
     *generic_percentage = 0;      // Compared against upper bound.
   }
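
(The percentage computation this issue is named after reduces to the small helper below — a hypothetical standalone restatement, not a V8 function, with the pointer-out-parameter style kept from GetICCounts:)

    // Hypothetical standalone version of the percentage logic above.
    void ComputeIcPercentages(int with_type_info, int generic, int total,
                              int* type_info_percentage,
                              int* generic_percentage) {
      if (total > 0) {
        *type_info_percentage = 100 * with_type_info / total;  // e.g. 7/10 -> 70
        *generic_percentage = 100 * generic / total;
      } else {
        // No ICs at all: report 100% typed / 0% generic so the caller's
        // threshold comparisons pass vacuously.
        *type_info_percentage = 100;
        *generic_percentage = 0;
      }
    }
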
(...skipping 11 matching lines...)
       GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
                   &generic_percentage);
       PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total,
              type_percentage);
       PrintF(", generic ICs: %d/%d (%d%%)", generic, total, generic_percentage);
     }
     PrintF("]\n");
   }
 }

-void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
-  TraceRecompile(function, reason, "optimized");
+void RuntimeProfiler::Optimize(JSFunction* function,
+                               OptimizationReason reason) {
+  DCHECK_NE(reason, OptimizationReason::kDoNotOptimize);
+  TraceRecompile(function, OptimizationReasonToString(reason), "optimized");
   function->AttemptConcurrentOptimization();
 }

-void RuntimeProfiler::Baseline(JSFunction* function, const char* reason) {
-  TraceRecompile(function, reason, "baseline");
+void RuntimeProfiler::Baseline(JSFunction* function,
+                               OptimizationReason reason) {
+  DCHECK_NE(reason, OptimizationReason::kDoNotOptimize);
+  TraceRecompile(function, OptimizationReasonToString(reason), "baseline");

   // TODO(4280): Fix this to check function is compiled for the interpreter
   // once we have a standard way to check that. For now function will only
   // have a bytecode array if compiled for the interpreter.
   DCHECK(function->shared()->HasBytecodeArray());
   function->MarkForBaseline();
 }

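(Typed reasons replace raw strings here: a misspelled enum constant is now a compile error rather than a silently shipped typo, and kDoNotOptimize is rejected up front by the DCHECK_NE. A self-contained sketch of the same X-macro pattern outside V8, all names hypothetical:)

    #include <cstddef>
    #include <cstdint>
    #include <iostream>

    #define REASON_LIST(V)                \
      V(DoNotOptimize, "do not optimize") \
      V(HotAndStable, "hot and stable")

    enum class Reason : uint8_t {
    #define REASON_CONSTANT(Constant, message) k##Constant,
      REASON_LIST(REASON_CONSTANT)
    #undef REASON_CONSTANT
    };

    const char* ReasonToString(Reason reason) {
      // The same list macro expands to the parallel message table, so the
      // enum and the strings can never drift out of sync.
      static const char* messages[] = {
    #define REASON_TEXT(Constant, message) message,
          REASON_LIST(REASON_TEXT)
    #undef REASON_TEXT
      };
      return messages[static_cast<std::size_t>(reason)];
    }

    int main() {
      std::cout << ReasonToString(Reason::kHotAndStable) << "\n";  // hot and stable
      // std::cout << ReasonToString(Reason::kHotAndStble);  // would not compile
    }
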
 void RuntimeProfiler::AttemptOnStackReplacement(JavaScriptFrame* frame,
                                                 int loop_nesting_levels) {
(...skipping 98 matching lines...)
   int ticks = shared_code->profiler_ticks();

   if (ticks >= kProfilerTicksBeforeOptimization) {
     int typeinfo, generic, total, type_percentage, generic_percentage;
     GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
                 &generic_percentage);
     if (type_percentage >= FLAG_type_info_threshold &&
         generic_percentage <= FLAG_generic_ic_threshold) {
       // If this particular function hasn't had any ICs patched for enough
       // ticks, optimize it now.
-      Optimize(function, "hot and stable");
+      Optimize(function, OptimizationReason::kHotAndStable);
     } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
-      Optimize(function, "not much type info but very hot");
+      Optimize(function, OptimizationReason::kHotWithoutMuchTypeInfo);
     } else {
       shared_code->set_profiler_ticks(ticks + 1);
       if (FLAG_trace_opt_verbose) {
         PrintF("[not yet optimizing ");
         function->PrintName();
         PrintF(", not enough type info: %d/%d (%d%%)]\n", typeinfo, total,
                type_percentage);
       }
     }
   } else if (!any_ic_changed_ &&
              shared_code->instruction_size() < kMaxSizeEarlyOpt) {
     // If no IC was patched since the last tick and this function is very
     // small, optimistically optimize it now.
     int typeinfo, generic, total, type_percentage, generic_percentage;
     GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
                 &generic_percentage);
     if (type_percentage >= FLAG_type_info_threshold &&
         generic_percentage <= FLAG_generic_ic_threshold) {
-      Optimize(function, "small function");
+      Optimize(function, OptimizationReason::kSmallFunction);
     } else {
       shared_code->set_profiler_ticks(ticks + 1);
     }
   } else {
     shared_code->set_profiler_ticks(ticks + 1);
   }
 }

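(The full-codegen path keeps its "small function" shortcut; distilled, the gate is just the predicate below — a hypothetical standalone restatement, with the flag and constant turned into parameters:)

    // Optimize before the usual tick threshold only when no inline cache
    // has changed since the last tick and the generated code is small
    // (in the file above, kMaxSizeEarlyOpt = 5 * kCodeSizeMultiplier).
    bool QualifiesForEarlyOpt(bool any_ic_changed, int instruction_size,
                              int max_size_early_opt) {
      return !any_ic_changed && instruction_size < max_size_early_opt;
    }
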
 void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function,
                                             JavaScriptFrame* frame) {
   if (function->IsInOptimizationQueue()) return;

+  if (MaybeOSRIgnition(function, frame)) return;
+
   SharedFunctionInfo* shared = function->shared();
   int ticks = shared->profiler_ticks();

-  // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
-  // than kMaxToplevelSourceSize.
-
-  if (FLAG_always_osr) {
-    AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
-    // Fall through and do a normal baseline compile as well.
-  } else if (!frame->is_optimized() &&
-             (function->IsMarkedForBaseline() ||
-              function->IsMarkedForOptimization() ||
-              function->IsMarkedForConcurrentOptimization() ||
-              function->IsOptimized())) {
-    // Attempt OSR if we are still running interpreted code even though
-    // the function has long been marked or even already been optimized.
-    int64_t allowance =
-        kOSRCodeSizeAllowanceBaseIgnition +
-        static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition;
-    if (shared->bytecode_array()->Size() <= allowance) {
-      AttemptOnStackReplacement(frame);
-    }
-    return;
-  }
-
   if (shared->optimization_disabled() &&
       shared->disable_optimization_reason() == kOptimizationDisabledForTest) {
     // Don't baseline functions which have been marked by NeverOptimizeFunction
     // in a test.
     return;
   }

   if (ticks >= kProfilerTicksBeforeBaseline) {
-    Baseline(function, "hot enough for baseline");
+    Baseline(function, OptimizationReason::kHotEnoughForBaseline);
   }
 }

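(Both Ignition entry points now share one shape: bail out if already queued, give OSR first refusal via MaybeOSRIgnition, then apply the tier-specific heuristic. A self-contained sketch of that control flow with hypothetical stand-ins for the real V8 predicates:)

    #include <cstdio>

    // Stand-in for MaybeOSRIgnition: returns true if OSR consumed the tick.
    static bool MaybeOsr(bool osr_wanted) { return osr_wanted; }

    static void OnProfilerTick(bool in_queue, bool osr_wanted, int ticks,
                               int ticks_before_baseline) {
      if (in_queue) return;              // already waiting on a compile job
      if (MaybeOsr(osr_wanted)) return;  // OSR handled; skip tier-up logic
      if (ticks >= ticks_before_baseline) {
        std::printf("mark for baseline\n");  // tier-specific heuristic
      }
    }

    int main() {
      OnProfilerTick(false, true, 10, 4);   // OSR path: nothing printed
      OnProfilerTick(false, false, 10, 4);  // prints "mark for baseline"
      return 0;
    }
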
 void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function,
                                             JavaScriptFrame* frame) {
   if (function->IsInOptimizationQueue()) return;

+  if (MaybeOSRIgnition(function, frame)) return;
+
   SharedFunctionInfo* shared = function->shared();
   int ticks = shared->profiler_ticks();

+  if (shared->optimization_disabled()) {
+    if (shared->deopt_count() >= FLAG_max_opt_count) {
+      // If optimization was disabled due to many deoptimizations,
+      // then check if the function is hot and try to reenable optimization.
+      if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
+        shared->set_profiler_ticks(0);
+        shared->TryReenableOptimization();
+      }
+    }
+    return;
+  }
+
+  if (function->IsOptimized()) return;
+
+  auto reason = OptimizationHeuristicIgnition(function, frame);

    rmcilroy 2016/09/23 08:10:34: Don't use auto unless it improves readability (htt…
    klaasb 2016/09/23 08:31:40: Done.

+
+  if (reason != OptimizationReason::kDoNotOptimize) {
+    Optimize(function, reason);
+  }
+}
+
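(The truncated link in rmcilroy's comment is presumably the style-guide rule on auto; the "Done." implies the next patch set spells out the type, i.e.:)

    OptimizationReason reason = OptimizationHeuristicIgnition(function, frame);
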
+bool RuntimeProfiler::MaybeOSRIgnition(JSFunction* function,
+                                       JavaScriptFrame* frame) {
+  if (!FLAG_ignition_osr) return false;
+
+  SharedFunctionInfo* shared = function->shared();
+  int ticks = shared->profiler_ticks();
+
   // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
   // than kMaxToplevelSourceSize.

   if (FLAG_always_osr) {
     AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
-    // Fall through and do a normal optimized compile as well.
+    // Do a normal baseline/optimized compile as well.
+    return false;

    rmcilroy 2016/09/23 08:10:34: Hmm, not keen on this. Could you just pull out the…
    klaasb 2016/09/23 08:31:40: Done.

   } else if (!frame->is_optimized() &&
-             (function->IsMarkedForBaseline() ||
+             ((function->IsMarkedForBaseline() &&
+               OptimizationHeuristicIgnition(function, frame) !=
+                   OptimizationReason::kDoNotOptimize) ||

    rmcilroy 2016/09/23 08:10:34: Could you pull this out to a separate local for cl…
    klaasb 2016/09/23 08:31:40: Done.

              function->IsMarkedForOptimization() ||
              function->IsMarkedForConcurrentOptimization() ||
              function->IsOptimized())) {
     // Attempt OSR if we are still running interpreted code even though
     // the function has long been marked or even already been optimized.
     int64_t allowance =
         kOSRCodeSizeAllowanceBaseIgnition +
         static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition;
     if (shared->bytecode_array()->Size() <= allowance) {
       AttemptOnStackReplacement(frame);
     }
-    return;
+    return true;
   }
+  return false;
+}

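(The OSR size allowance grows linearly with profiler ticks, so hotter functions earn a larger bytecode budget before OSR is ruled out. An illustration with hypothetical constants — the real values derive from kCodeSizeMultiplier above and kOSRCodeSizeAllowanceBaseIgnition, which is defined outside this hunk:)

    #include <cstdint>
    #include <cstdio>

    // Hypothetical values, for illustration only.
    const int64_t kBaseAllowance = 1024;
    const int64_t kPerTickAllowance = 128;

    int main() {
      const int sample_ticks[] = {0, 5, 10};
      for (int ticks : sample_ticks) {
        // e.g. ticks=10 -> 1024 + 10 * 128 = 2304 bytes of bytecode allowed.
        int64_t allowance = kBaseAllowance + ticks * kPerTickAllowance;
        std::printf("ticks=%d -> OSR size budget=%lld bytes\n", ticks,
                    static_cast<long long>(allowance));
      }
      return 0;
    }
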
-  if (shared->optimization_disabled()) {
-    if (shared->deopt_count() >= FLAG_max_opt_count) {
-      // If optimization was disabled due to many deoptimizations,
-      // then check if the function is hot and try to reenable optimization.
-      if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
-        shared->set_profiler_ticks(0);
-        shared->TryReenableOptimization();
-      }
-    }
-    return;
-  }
-  if (function->IsOptimized()) return;
+OptimizationReason RuntimeProfiler::OptimizationHeuristicIgnition(
+    JSFunction* function, JavaScriptFrame* frame) {
+  SharedFunctionInfo* shared = function->shared();
+  int ticks = shared->profiler_ticks();

   if (ticks >= kProfilerTicksBeforeOptimization) {
     int typeinfo, generic, total, type_percentage, generic_percentage;
     GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
                 &generic_percentage);
     if (type_percentage >= FLAG_type_info_threshold &&
         generic_percentage <= FLAG_generic_ic_threshold) {
       // If this particular function hasn't had any ICs patched for enough
       // ticks, optimize it now.
-      Optimize(function, "hot and stable");
+      return OptimizationReason::kHotAndStable;
     } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
-      Optimize(function, "not much type info but very hot");
+      return OptimizationReason::kHotWithoutMuchTypeInfo;
     } else {
       if (FLAG_trace_opt_verbose) {
         PrintF("[not yet optimizing ");
         function->PrintName();
         PrintF(", not enough type info: %d/%d (%d%%)]\n", typeinfo, total,
                type_percentage);
       }
+      return OptimizationReason::kDoNotOptimize;
     }
   }
   // TODO(rmcilroy): Consider whether we should optimize small functions when
   // they are first seen on the stack (e.g., kMaxSizeEarlyOpt).
+  return OptimizationReason::kDoNotOptimize;
 }

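(OptimizationHeuristicIgnition is now a pure decision function, which makes it easy to restate, and unit-test, in isolation. A hypothetical standalone version with the V8 flags and constants turned into parameters:)

    enum class Decision { kDoNotOptimize, kHotAndStable, kHotWithoutMuchTypeInfo };

    // Mirrors the branch structure above: a function must first be hot
    // enough; then it is optimized if its type feedback is good, or, failing
    // that, once it has stayed hot for long enough anyway.
    Decision Heuristic(int ticks, int type_percentage, int generic_percentage,
                       int ticks_before_opt, int ticks_when_little_type_info,
                       int type_info_threshold, int generic_ic_threshold) {
      if (ticks < ticks_before_opt) return Decision::kDoNotOptimize;
      if (type_percentage >= type_info_threshold &&
          generic_percentage <= generic_ic_threshold) {
        return Decision::kHotAndStable;
      }
      if (ticks >= ticks_when_little_type_info) {
        return Decision::kHotWithoutMuchTypeInfo;
      }
      return Decision::kDoNotOptimize;
    }
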
 void RuntimeProfiler::MarkCandidatesForOptimization() {
   HandleScope scope(isolate_);

   if (!isolate_->use_crankshaft()) return;

   DisallowHeapAllocation no_gc;

   // Run through the JavaScript frames and collect them. If we already
(...skipping 27 matching lines...)
         MaybeOptimizeIgnition(function, frame);
       }
     } else {
       DCHECK_EQ(next_tier, Compiler::OPTIMIZED);
       MaybeOptimizeFullCodegen(function, frame, frame_count);
     }
   }
   any_ic_changed_ = false;
 }

-
 }  // namespace internal
 }  // namespace v8