Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(232)

Side by Side Diff: runtime/vm/compiler.cc

Issue 2481873005: clang-format runtime/vm (Closed)
Patch Set: Merge Created 4 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « runtime/vm/compiler.h ('k') | runtime/vm/compiler_stats.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/compiler.h" 5 #include "vm/compiler.h"
6 6
7 #include "vm/assembler.h" 7 #include "vm/assembler.h"
8 8
9 #include "vm/ast_printer.h" 9 #include "vm/ast_printer.h"
10 #include "vm/block_scheduler.h" 10 #include "vm/block_scheduler.h"
(...skipping 29 matching lines...) Expand all
40 #include "vm/regexp_parser.h" 40 #include "vm/regexp_parser.h"
41 #include "vm/regexp_assembler.h" 41 #include "vm/regexp_assembler.h"
42 #include "vm/symbols.h" 42 #include "vm/symbols.h"
43 #include "vm/tags.h" 43 #include "vm/tags.h"
44 #include "vm/thread_registry.h" 44 #include "vm/thread_registry.h"
45 #include "vm/timeline.h" 45 #include "vm/timeline.h"
46 #include "vm/timer.h" 46 #include "vm/timer.h"
47 47
48 namespace dart { 48 namespace dart {
49 49
50 DEFINE_FLAG(bool, allocation_sinking, true, 50 DEFINE_FLAG(bool,
51 "Attempt to sink temporary allocations to side exits"); 51 allocation_sinking,
52 DEFINE_FLAG(bool, common_subexpression_elimination, true, 52 true,
53 "Do common subexpression elimination."); 53 "Attempt to sink temporary allocations to side exits");
54 DEFINE_FLAG(bool, constant_propagation, true, 54 DEFINE_FLAG(bool,
55 common_subexpression_elimination,
56 true,
57 "Do common subexpression elimination.");
58 DEFINE_FLAG(
59 bool,
60 constant_propagation,
61 true,
55 "Do conditional constant propagation/unreachable code elimination."); 62 "Do conditional constant propagation/unreachable code elimination.");
56 DEFINE_FLAG(int, max_deoptimization_counter_threshold, 16, 63 DEFINE_FLAG(
64 int,
65 max_deoptimization_counter_threshold,
66 16,
57 "How many times we allow deoptimization before we disallow optimization."); 67 "How many times we allow deoptimization before we disallow optimization.");
58 DEFINE_FLAG(bool, loop_invariant_code_motion, true, 68 DEFINE_FLAG(bool,
59 "Do loop invariant code motion."); 69 loop_invariant_code_motion,
70 true,
71 "Do loop invariant code motion.");
60 DEFINE_FLAG(charp, optimization_filter, NULL, "Optimize only named function"); 72 DEFINE_FLAG(charp, optimization_filter, NULL, "Optimize only named function");
61 DEFINE_FLAG(bool, print_flow_graph, false, "Print the IR flow graph."); 73 DEFINE_FLAG(bool, print_flow_graph, false, "Print the IR flow graph.");
62 DEFINE_FLAG(bool, print_flow_graph_optimized, false, 74 DEFINE_FLAG(bool,
63 "Print the IR flow graph when optimizing."); 75 print_flow_graph_optimized,
64 DEFINE_FLAG(bool, print_ic_data_map, false, 76 false,
65 "Print the deopt-id to ICData map in optimizing compiler."); 77 "Print the IR flow graph when optimizing.");
78 DEFINE_FLAG(bool,
79 print_ic_data_map,
80 false,
81 "Print the deopt-id to ICData map in optimizing compiler.");
66 DEFINE_FLAG(bool, print_code_source_map, false, "Print code source map."); 82 DEFINE_FLAG(bool, print_code_source_map, false, "Print code source map.");
67 DEFINE_FLAG(bool, range_analysis, true, "Enable range analysis"); 83 DEFINE_FLAG(bool, range_analysis, true, "Enable range analysis");
68 DEFINE_FLAG(bool, stress_test_background_compilation, false, 84 DEFINE_FLAG(bool,
69 "Keep background compiler running all the time"); 85 stress_test_background_compilation,
70 DEFINE_FLAG(bool, stop_on_excessive_deoptimization, false, 86 false,
71 "Debugging: stops program if deoptimizing same function too often"); 87 "Keep background compiler running all the time");
88 DEFINE_FLAG(bool,
89 stop_on_excessive_deoptimization,
90 false,
91 "Debugging: stops program if deoptimizing same function too often");
72 DEFINE_FLAG(bool, trace_compiler, false, "Trace compiler operations."); 92 DEFINE_FLAG(bool, trace_compiler, false, "Trace compiler operations.");
73 DEFINE_FLAG(bool, trace_failed_optimization_attempts, false, 93 DEFINE_FLAG(bool,
74 "Traces all failed optimization attempts"); 94 trace_failed_optimization_attempts,
75 DEFINE_FLAG(bool, trace_optimizing_compiler, false, 95 false,
76 "Trace only optimizing compiler operations."); 96 "Traces all failed optimization attempts");
97 DEFINE_FLAG(bool,
98 trace_optimizing_compiler,
99 false,
100 "Trace only optimizing compiler operations.");
77 DEFINE_FLAG(bool, trace_bailout, false, "Print bailout from ssa compiler."); 101 DEFINE_FLAG(bool, trace_bailout, false, "Print bailout from ssa compiler.");
78 DEFINE_FLAG(bool, use_inlining, true, "Enable call-site inlining"); 102 DEFINE_FLAG(bool, use_inlining, true, "Enable call-site inlining");
79 DEFINE_FLAG(bool, verify_compiler, false, 103 DEFINE_FLAG(bool,
80 "Enable compiler verification assertions"); 104 verify_compiler,
105 false,
106 "Enable compiler verification assertions");
81 107
82 DECLARE_FLAG(bool, huge_method_cutoff_in_code_size); 108 DECLARE_FLAG(bool, huge_method_cutoff_in_code_size);
83 DECLARE_FLAG(bool, trace_failed_optimization_attempts); 109 DECLARE_FLAG(bool, trace_failed_optimization_attempts);
84 DECLARE_FLAG(bool, trace_irregexp); 110 DECLARE_FLAG(bool, trace_irregexp);
85 111
86 112
87 #ifndef DART_PRECOMPILED_RUNTIME 113 #ifndef DART_PRECOMPILED_RUNTIME
88 114
89 115
90 bool UseKernelFrontEndFor(ParsedFunction* parsed_function) { 116 bool UseKernelFrontEndFor(ParsedFunction* parsed_function) {
(...skipping 13 matching lines...) Expand all
104 130
105 131
106 FlowGraph* DartCompilationPipeline::BuildFlowGraph( 132 FlowGraph* DartCompilationPipeline::BuildFlowGraph(
107 Zone* zone, 133 Zone* zone,
108 ParsedFunction* parsed_function, 134 ParsedFunction* parsed_function,
109 const ZoneGrowableArray<const ICData*>& ic_data_array, 135 const ZoneGrowableArray<const ICData*>& ic_data_array,
110 intptr_t osr_id) { 136 intptr_t osr_id) {
111 if (UseKernelFrontEndFor(parsed_function)) { 137 if (UseKernelFrontEndFor(parsed_function)) {
112 kernel::TreeNode* node = static_cast<kernel::TreeNode*>( 138 kernel::TreeNode* node = static_cast<kernel::TreeNode*>(
113 parsed_function->function().kernel_function()); 139 parsed_function->function().kernel_function());
114 kernel::FlowGraphBuilder builder( 140 kernel::FlowGraphBuilder builder(node, parsed_function, ic_data_array, NULL,
115 node, parsed_function, ic_data_array, NULL, osr_id); 141 osr_id);
116 FlowGraph* graph = builder.BuildGraph(); 142 FlowGraph* graph = builder.BuildGraph();
117 ASSERT(graph != NULL); 143 ASSERT(graph != NULL);
118 return graph; 144 return graph;
119 } 145 }
120 FlowGraphBuilder builder(*parsed_function, 146 FlowGraphBuilder builder(*parsed_function, ic_data_array,
121 ic_data_array,
122 NULL, // NULL = not inlining. 147 NULL, // NULL = not inlining.
123 osr_id); 148 osr_id);
124 149
125 return builder.BuildGraph(); 150 return builder.BuildGraph();
126 } 151 }
127 152
128 153
129 void DartCompilationPipeline::FinalizeCompilation(FlowGraph* flow_graph) { } 154 void DartCompilationPipeline::FinalizeCompilation(FlowGraph* flow_graph) {}
130 155
131 156
132 void IrregexpCompilationPipeline::ParseFunction( 157 void IrregexpCompilationPipeline::ParseFunction(
133 ParsedFunction* parsed_function) { 158 ParsedFunction* parsed_function) {
134 RegExpParser::ParseFunction(parsed_function); 159 RegExpParser::ParseFunction(parsed_function);
135 // Variables are allocated after compilation. 160 // Variables are allocated after compilation.
136 } 161 }
137 162
138 163
139 FlowGraph* IrregexpCompilationPipeline::BuildFlowGraph( 164 FlowGraph* IrregexpCompilationPipeline::BuildFlowGraph(
140 Zone* zone, 165 Zone* zone,
141 ParsedFunction* parsed_function, 166 ParsedFunction* parsed_function,
142 const ZoneGrowableArray<const ICData*>& ic_data_array, 167 const ZoneGrowableArray<const ICData*>& ic_data_array,
143 intptr_t osr_id) { 168 intptr_t osr_id) {
144 // Compile to the dart IR. 169 // Compile to the dart IR.
145 RegExpEngine::CompilationResult result = 170 RegExpEngine::CompilationResult result = RegExpEngine::CompileIR(
146 RegExpEngine::CompileIR(parsed_function->regexp_compile_data(), 171 parsed_function->regexp_compile_data(), parsed_function, ic_data_array);
147 parsed_function,
148 ic_data_array);
149 backtrack_goto_ = result.backtrack_goto; 172 backtrack_goto_ = result.backtrack_goto;
150 173
151 // Allocate variables now that we know the number of locals. 174 // Allocate variables now that we know the number of locals.
152 parsed_function->AllocateIrregexpVariables(result.num_stack_locals); 175 parsed_function->AllocateIrregexpVariables(result.num_stack_locals);
153 176
154 // Build the flow graph. 177 // Build the flow graph.
155 FlowGraphBuilder builder(*parsed_function, 178 FlowGraphBuilder builder(*parsed_function, ic_data_array,
156 ic_data_array,
157 NULL, // NULL = not inlining. 179 NULL, // NULL = not inlining.
158 osr_id); 180 osr_id);
159 181
160 return new(zone) FlowGraph(*parsed_function, 182 return new (zone)
161 result.graph_entry, 183 FlowGraph(*parsed_function, result.graph_entry, result.num_blocks);
162 result.num_blocks);
163 } 184 }
164 185
165 186
166 void IrregexpCompilationPipeline::FinalizeCompilation(FlowGraph* flow_graph) { 187 void IrregexpCompilationPipeline::FinalizeCompilation(FlowGraph* flow_graph) {
167 backtrack_goto_->ComputeOffsetTable(); 188 backtrack_goto_->ComputeOffsetTable();
168 } 189 }
169 190
170 191
171 CompilationPipeline* CompilationPipeline::New(Zone* zone, 192 CompilationPipeline* CompilationPipeline::New(Zone* zone,
172 const Function& function) { 193 const Function& function) {
173 if (function.IsIrregexpFunction()) { 194 if (function.IsIrregexpFunction()) {
174 return new(zone) IrregexpCompilationPipeline(); 195 return new (zone) IrregexpCompilationPipeline();
175 } else { 196 } else {
176 return new(zone) DartCompilationPipeline(); 197 return new (zone) DartCompilationPipeline();
177 } 198 }
178 } 199 }
179 200
180 201
181 // Compile a function. Should call only if the function has not been compiled. 202 // Compile a function. Should call only if the function has not been compiled.
182 // Arg0: function object. 203 // Arg0: function object.
183 DEFINE_RUNTIME_ENTRY(CompileFunction, 1) { 204 DEFINE_RUNTIME_ENTRY(CompileFunction, 1) {
184 const Function& function = Function::CheckedHandle(arguments.ArgAt(0)); 205 const Function& function = Function::CheckedHandle(arguments.ArgAt(0));
185 ASSERT(!function.HasCode()); 206 ASSERT(!function.HasCode());
186 const Error& error = 207 const Error& error =
(...skipping 17 matching lines...) Expand all
204 // so do not optimize the function. 225 // so do not optimize the function.
205 function.set_usage_counter(0); 226 function.set_usage_counter(0);
206 return false; 227 return false;
207 } 228 }
208 } 229 }
209 if (function.deoptimization_counter() >= 230 if (function.deoptimization_counter() >=
210 FLAG_max_deoptimization_counter_threshold) { 231 FLAG_max_deoptimization_counter_threshold) {
211 if (FLAG_trace_failed_optimization_attempts || 232 if (FLAG_trace_failed_optimization_attempts ||
212 FLAG_stop_on_excessive_deoptimization) { 233 FLAG_stop_on_excessive_deoptimization) {
213 THR_Print("Too many deoptimizations: %s\n", 234 THR_Print("Too many deoptimizations: %s\n",
214 function.ToFullyQualifiedCString()); 235 function.ToFullyQualifiedCString());
215 if (FLAG_stop_on_excessive_deoptimization) { 236 if (FLAG_stop_on_excessive_deoptimization) {
216 FATAL("Stop on excessive deoptimization"); 237 FATAL("Stop on excessive deoptimization");
217 } 238 }
218 } 239 }
219 // The function will not be optimized any longer. This situation can occur 240 // The function will not be optimized any longer. This situation can occur
220 // mostly with small optimization counter thresholds. 241 // mostly with small optimization counter thresholds.
221 function.SetIsOptimizable(false); 242 function.SetIsOptimizable(false);
222 function.set_usage_counter(INT_MIN); 243 function.set_usage_counter(INT_MIN);
223 return false; 244 return false;
224 } 245 }
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after
345 return Error::null(); 366 return Error::null();
346 } 367 }
347 // If the class is already marked for parsing return immediately. 368 // If the class is already marked for parsing return immediately.
348 if (cls.is_marked_for_parsing()) { 369 if (cls.is_marked_for_parsing()) {
349 return Error::null(); 370 return Error::null();
350 } 371 }
351 // If the class is a typedef class there is no need to try and 372 // If the class is a typedef class there is no need to try and
352 // compile it. Just finalize it directly. 373 // compile it. Just finalize it directly.
353 if (cls.IsTypedefClass()) { 374 if (cls.IsTypedefClass()) {
354 #if defined(DEBUG) 375 #if defined(DEBUG)
355 const Class& closure_cls = Class::Handle( 376 const Class& closure_cls =
356 Isolate::Current()->object_store()->closure_class()); 377 Class::Handle(Isolate::Current()->object_store()->closure_class());
357 ASSERT(closure_cls.is_finalized()); 378 ASSERT(closure_cls.is_finalized());
358 #endif 379 #endif
359 LongJumpScope jump; 380 LongJumpScope jump;
360 if (setjmp(*jump.Set()) == 0) { 381 if (setjmp(*jump.Set()) == 0) {
361 ClassFinalizer::FinalizeClass(cls); 382 ClassFinalizer::FinalizeClass(cls);
362 return Error::null(); 383 return Error::null();
363 } else { 384 } else {
364 Thread* thread = Thread::Current(); 385 Thread* thread = Thread::Current();
365 Error& error = Error::Handle(thread->zone()); 386 Error& error = Error::Handle(thread->zone());
366 error = thread->sticky_error(); 387 error = thread->sticky_error();
367 thread->clear_sticky_error(); 388 thread->clear_sticky_error();
368 return error.raw(); 389 return error.raw();
369 } 390 }
370 } 391 }
371 392
372 Thread* const thread = Thread::Current(); 393 Thread* const thread = Thread::Current();
373 StackZone zone(thread); 394 StackZone zone(thread);
374 NOT_IN_PRODUCT( 395 #if !defined(PRODUCT)
375 VMTagScope tagScope(thread, VMTag::kCompileClassTagId); 396 VMTagScope tagScope(thread, VMTag::kCompileClassTagId);
376 TimelineDurationScope tds(thread, 397 TimelineDurationScope tds(thread, Timeline::GetCompilerStream(),
377 Timeline::GetCompilerStream(),
378 "CompileClass"); 398 "CompileClass");
379 if (tds.enabled()) { 399 if (tds.enabled()) {
380 tds.SetNumArguments(1); 400 tds.SetNumArguments(1);
381 tds.CopyArgument(0, "class", cls.ToCString()); 401 tds.CopyArgument(0, "class", cls.ToCString());
382 } 402 }
383 ) // !PRODUCT 403 #endif // !defined(PRODUCT)
384 404
385 // We remember all the classes that are being compiled in these lists. This 405 // We remember all the classes that are being compiled in these lists. This
386 // also allows us to reset the marked_for_parsing state in case we see an 406 // also allows us to reset the marked_for_parsing state in case we see an
387 // error. 407 // error.
388 GrowableHandlePtrArray<const Class> parse_list(thread->zone(), 4); 408 GrowableHandlePtrArray<const Class> parse_list(thread->zone(), 4);
389 GrowableHandlePtrArray<const Class> patch_list(thread->zone(), 4); 409 GrowableHandlePtrArray<const Class> patch_list(thread->zone(), 4);
390 410
391 // Parse the class and all the interfaces it implements and super classes. 411 // Parse the class and all the interfaces it implements and super classes.
392 LongJumpScope jump; 412 LongJumpScope jump;
393 if (setjmp(*jump.Set()) == 0) { 413 if (setjmp(*jump.Set()) == 0) {
(...skipping 11 matching lines...) Expand all
405 // exists to the corresponding lists. 425 // exists to the corresponding lists.
406 // NOTE: The parse_list array keeps growing as more classes are added 426 // NOTE: The parse_list array keeps growing as more classes are added
407 // to it by AddRelatedClassesToList. It is not OK to hoist 427 // to it by AddRelatedClassesToList. It is not OK to hoist
408 // parse_list.Length() into a local variable and iterate using the local 428 // parse_list.Length() into a local variable and iterate using the local
409 // variable. 429 // variable.
410 for (intptr_t i = 0; i < parse_list.length(); i++) { 430 for (intptr_t i = 0; i < parse_list.length(); i++) {
411 AddRelatedClassesToList(parse_list.At(i), &parse_list, &patch_list); 431 AddRelatedClassesToList(parse_list.At(i), &parse_list, &patch_list);
412 } 432 }
413 433
414 // Parse all the classes that have been added above. 434 // Parse all the classes that have been added above.
415 for (intptr_t i = (parse_list.length() - 1); i >=0 ; i--) { 435 for (intptr_t i = (parse_list.length() - 1); i >= 0; i--) {
416 const Class& parse_class = parse_list.At(i); 436 const Class& parse_class = parse_list.At(i);
417 ASSERT(!parse_class.IsNull()); 437 ASSERT(!parse_class.IsNull());
418 Parser::ParseClass(parse_class); 438 Parser::ParseClass(parse_class);
419 } 439 }
420 440
421 // Parse all the patch classes that have been added above. 441 // Parse all the patch classes that have been added above.
422 for (intptr_t i = 0; i < patch_list.length(); i++) { 442 for (intptr_t i = 0; i < patch_list.length(); i++) {
423 const Class& parse_class = patch_list.At(i); 443 const Class& parse_class = patch_list.At(i);
424 ASSERT(!parse_class.IsNull()); 444 ASSERT(!parse_class.IsNull());
425 Parser::ParseClass(parse_class); 445 Parser::ParseClass(parse_class);
426 } 446 }
427 447
428 // Finalize these classes. 448 // Finalize these classes.
429 for (intptr_t i = (parse_list.length() - 1); i >=0 ; i--) { 449 for (intptr_t i = (parse_list.length() - 1); i >= 0; i--) {
430 const Class& parse_class = parse_list.At(i); 450 const Class& parse_class = parse_list.At(i);
431 ASSERT(!parse_class.IsNull()); 451 ASSERT(!parse_class.IsNull());
432 ClassFinalizer::FinalizeClass(parse_class); 452 ClassFinalizer::FinalizeClass(parse_class);
433 parse_class.reset_is_marked_for_parsing(); 453 parse_class.reset_is_marked_for_parsing();
434 } 454 }
435 for (intptr_t i = (patch_list.length() - 1); i >=0 ; i--) { 455 for (intptr_t i = (patch_list.length() - 1); i >= 0; i--) {
436 const Class& parse_class = patch_list.At(i); 456 const Class& parse_class = patch_list.At(i);
437 ASSERT(!parse_class.IsNull()); 457 ASSERT(!parse_class.IsNull());
438 ClassFinalizer::FinalizeClass(parse_class); 458 ClassFinalizer::FinalizeClass(parse_class);
439 parse_class.reset_is_marked_for_parsing(); 459 parse_class.reset_is_marked_for_parsing();
440 } 460 }
441 461
442 return Error::null(); 462 return Error::null();
443 } else { 463 } else {
444 // Reset the marked for parsing flags. 464 // Reset the marked for parsing flags.
445 for (intptr_t i = 0; i < parse_list.length(); i++) { 465 for (intptr_t i = 0; i < parse_list.length(); i++) {
(...skipping 21 matching lines...) Expand all
467 class CompileParsedFunctionHelper : public ValueObject { 487 class CompileParsedFunctionHelper : public ValueObject {
468 public: 488 public:
469 CompileParsedFunctionHelper(ParsedFunction* parsed_function, 489 CompileParsedFunctionHelper(ParsedFunction* parsed_function,
470 bool optimized, 490 bool optimized,
471 intptr_t osr_id) 491 intptr_t osr_id)
472 : parsed_function_(parsed_function), 492 : parsed_function_(parsed_function),
473 optimized_(optimized), 493 optimized_(optimized),
474 osr_id_(osr_id), 494 osr_id_(osr_id),
475 thread_(Thread::Current()), 495 thread_(Thread::Current()),
476 loading_invalidation_gen_at_start_( 496 loading_invalidation_gen_at_start_(
477 isolate()->loading_invalidation_gen()) { 497 isolate()->loading_invalidation_gen()) {}
478 }
479 498
480 bool Compile(CompilationPipeline* pipeline); 499 bool Compile(CompilationPipeline* pipeline);
481 500
482 private: 501 private:
483 ParsedFunction* parsed_function() const { return parsed_function_; } 502 ParsedFunction* parsed_function() const { return parsed_function_; }
484 bool optimized() const { return optimized_; } 503 bool optimized() const { return optimized_; }
485 intptr_t osr_id() const { return osr_id_; } 504 intptr_t osr_id() const { return osr_id_; }
486 Thread* thread() const { return thread_; } 505 Thread* thread() const { return thread_; }
487 Isolate* isolate() const { return thread_->isolate(); } 506 Isolate* isolate() const { return thread_->isolate(); }
488 intptr_t loading_invalidation_gen_at_start() const { 507 intptr_t loading_invalidation_gen_at_start() const {
(...skipping 24 matching lines...) Expand all
513 532
514 CSTAT_TIMER_SCOPE(thread(), codefinalizer_timer); 533 CSTAT_TIMER_SCOPE(thread(), codefinalizer_timer);
515 // CreateDeoptInfo uses the object pool and needs to be done before 534 // CreateDeoptInfo uses the object pool and needs to be done before
516 // FinalizeCode. 535 // FinalizeCode.
517 const Array& deopt_info_array = 536 const Array& deopt_info_array =
518 Array::Handle(zone, graph_compiler->CreateDeoptInfo(assembler)); 537 Array::Handle(zone, graph_compiler->CreateDeoptInfo(assembler));
519 INC_STAT(thread(), total_code_size, 538 INC_STAT(thread(), total_code_size,
520 deopt_info_array.Length() * sizeof(uword)); 539 deopt_info_array.Length() * sizeof(uword));
521 // Allocates instruction object. Since this occurs only at safepoint, 540 // Allocates instruction object. Since this occurs only at safepoint,
522 // there can be no concurrent access to the instruction page. 541 // there can be no concurrent access to the instruction page.
523 const Code& code = Code::Handle( 542 const Code& code =
524 Code::FinalizeCode(function, assembler, optimized())); 543 Code::Handle(Code::FinalizeCode(function, assembler, optimized()));
525 code.set_is_optimized(optimized()); 544 code.set_is_optimized(optimized());
526 code.set_owner(function); 545 code.set_owner(function);
527 if (!function.IsOptimizable()) { 546 if (!function.IsOptimizable()) {
528 // A function with huge unoptimized code can become non-optimizable 547 // A function with huge unoptimized code can become non-optimizable
529 // after generating unoptimized code. 548 // after generating unoptimized code.
530 function.set_usage_counter(INT_MIN); 549 function.set_usage_counter(INT_MIN);
531 } 550 }
532 551
533 const Array& intervals = graph_compiler->inlined_code_intervals(); 552 const Array& intervals = graph_compiler->inlined_code_intervals();
534 INC_STAT(thread(), total_code_size, 553 INC_STAT(thread(), total_code_size, intervals.Length() * sizeof(uword));
535 intervals.Length() * sizeof(uword));
536 code.SetInlinedIntervals(intervals); 554 code.SetInlinedIntervals(intervals);
537 555
538 const Array& inlined_id_array = 556 const Array& inlined_id_array =
539 Array::Handle(zone, graph_compiler->InliningIdToFunction()); 557 Array::Handle(zone, graph_compiler->InliningIdToFunction());
540 INC_STAT(thread(), total_code_size, 558 INC_STAT(thread(), total_code_size,
541 inlined_id_array.Length() * sizeof(uword)); 559 inlined_id_array.Length() * sizeof(uword));
542 code.SetInlinedIdToFunction(inlined_id_array); 560 code.SetInlinedIdToFunction(inlined_id_array);
543 561
544 const Array& caller_inlining_id_map_array = 562 const Array& caller_inlining_id_map_array =
545 Array::Handle(zone, graph_compiler->CallerInliningIdMap()); 563 Array::Handle(zone, graph_compiler->CallerInliningIdMap());
546 INC_STAT(thread(), total_code_size, 564 INC_STAT(thread(), total_code_size,
547 caller_inlining_id_map_array.Length() * sizeof(uword)); 565 caller_inlining_id_map_array.Length() * sizeof(uword));
548 code.SetInlinedCallerIdMap(caller_inlining_id_map_array); 566 code.SetInlinedCallerIdMap(caller_inlining_id_map_array);
549 567
550 const Array& inlined_id_to_token_pos = 568 const Array& inlined_id_to_token_pos =
551 Array::Handle(zone, graph_compiler->InliningIdToTokenPos()); 569 Array::Handle(zone, graph_compiler->InliningIdToTokenPos());
552 INC_STAT(thread(), total_code_size, 570 INC_STAT(thread(), total_code_size,
553 inlined_id_to_token_pos.Length() * sizeof(uword)); 571 inlined_id_to_token_pos.Length() * sizeof(uword));
554 code.SetInlinedIdToTokenPos(inlined_id_to_token_pos); 572 code.SetInlinedIdToTokenPos(inlined_id_to_token_pos);
555 573
556 graph_compiler->FinalizePcDescriptors(code); 574 graph_compiler->FinalizePcDescriptors(code);
557 code.set_deopt_info_array(deopt_info_array); 575 code.set_deopt_info_array(deopt_info_array);
558 576
559 graph_compiler->FinalizeStackmaps(code); 577 graph_compiler->FinalizeStackmaps(code);
560 graph_compiler->FinalizeVarDescriptors(code); 578 graph_compiler->FinalizeVarDescriptors(code);
561 graph_compiler->FinalizeExceptionHandlers(code); 579 graph_compiler->FinalizeExceptionHandlers(code);
562 graph_compiler->FinalizeStaticCallTargetsTable(code); 580 graph_compiler->FinalizeStaticCallTargetsTable(code);
563 581
564 NOT_IN_PRODUCT( 582 #if !defined(PRODUCT)
565 // Set the code source map after setting the inlined information because 583 // Set the code source map after setting the inlined information because
566 // we use the inlined information when printing. 584 // we use the inlined information when printing.
567 const CodeSourceMap& code_source_map = 585 const CodeSourceMap& code_source_map = CodeSourceMap::Handle(
568 CodeSourceMap::Handle( 586 zone, graph_compiler->code_source_map_builder()->Finalize());
569 zone,
570 graph_compiler->code_source_map_builder()->Finalize());
571 code.set_code_source_map(code_source_map); 587 code.set_code_source_map(code_source_map);
572 if (FLAG_print_code_source_map) { 588 if (FLAG_print_code_source_map) {
573 CodeSourceMap::Dump(code_source_map, code, function); 589 CodeSourceMap::Dump(code_source_map, code, function);
574 } 590 }
575 ); 591 #endif // !defined(PRODUCT)
592
576 if (optimized()) { 593 if (optimized()) {
577 bool code_was_installed = false; 594 bool code_was_installed = false;
578 // Installs code while at safepoint. 595 // Installs code while at safepoint.
579 if (thread()->IsMutatorThread()) { 596 if (thread()->IsMutatorThread()) {
580 const bool is_osr = osr_id() != Compiler::kNoOSRDeoptId; 597 const bool is_osr = osr_id() != Compiler::kNoOSRDeoptId;
581 function.InstallOptimizedCode(code, is_osr); 598 function.InstallOptimizedCode(code, is_osr);
582 code_was_installed = true; 599 code_was_installed = true;
583 } else { 600 } else {
584 // Background compilation. 601 // Background compilation.
585 // Before installing code check generation counts if the code may 602 // Before installing code check generation counts if the code may
(...skipping 82 matching lines...) Expand 10 before | Expand all | Expand 10 after
668 (*prefixes)[i]->RegisterDependentCode(code); 685 (*prefixes)[i]->RegisterDependentCode(code);
669 } 686 }
670 } 687 }
671 } 688 }
672 689
673 690
674 void CompileParsedFunctionHelper::CheckIfBackgroundCompilerIsBeingStopped() { 691 void CompileParsedFunctionHelper::CheckIfBackgroundCompilerIsBeingStopped() {
675 ASSERT(Compiler::IsBackgroundCompilation()); 692 ASSERT(Compiler::IsBackgroundCompilation());
676 if (!isolate()->background_compiler()->is_running()) { 693 if (!isolate()->background_compiler()->is_running()) {
677 // The background compiler is being stopped. 694 // The background compiler is being stopped.
678 Compiler::AbortBackgroundCompilation(Thread::kNoDeoptId, 695 Compiler::AbortBackgroundCompilation(
679 "Background compilation is being stopped"); 696 Thread::kNoDeoptId, "Background compilation is being stopped");
680 } 697 }
681 } 698 }
682 699
683 700
684 // Return false if bailed out. 701 // Return false if bailed out.
685 // If optimized_result_code is not NULL then it is caller's responsibility 702 // If optimized_result_code is not NULL then it is caller's responsibility
686 // to install code. 703 // to install code.
687 bool CompileParsedFunctionHelper::Compile(CompilationPipeline* pipeline) { 704 bool CompileParsedFunctionHelper::Compile(CompilationPipeline* pipeline) {
688 ASSERT(!FLAG_precompiled_mode); 705 ASSERT(!FLAG_precompiled_mode);
689 const Function& function = parsed_function()->function(); 706 const Function& function = parsed_function()->function();
690 if (optimized() && !function.IsOptimizable()) { 707 if (optimized() && !function.IsOptimizable()) {
691 return false; 708 return false;
692 } 709 }
693 bool is_compiled = false; 710 bool is_compiled = false;
694 Zone* const zone = thread()->zone(); 711 Zone* const zone = thread()->zone();
695 NOT_IN_PRODUCT( 712 NOT_IN_PRODUCT(TimelineStream* compiler_timeline =
696 TimelineStream* compiler_timeline = Timeline::GetCompilerStream()); 713 Timeline::GetCompilerStream());
697 CSTAT_TIMER_SCOPE(thread(), codegen_timer); 714 CSTAT_TIMER_SCOPE(thread(), codegen_timer);
698 HANDLESCOPE(thread()); 715 HANDLESCOPE(thread());
699 716
700 // We may reattempt compilation if the function needs to be assembled using 717 // We may reattempt compilation if the function needs to be assembled using
701 // far branches on ARM and MIPS. In the else branch of the setjmp call, 718 // far branches on ARM and MIPS. In the else branch of the setjmp call,
702 // done is set to false, and use_far_branches is set to true if there is a 719 // done is set to false, and use_far_branches is set to true if there is a
703 // longjmp from the ARM or MIPS assemblers. In all other paths through this 720 // longjmp from the ARM or MIPS assemblers. In all other paths through this
704 // while loop, done is set to true. use_far_branches is always false on ia32 721 // while loop, done is set to true. use_far_branches is always false on ia32
705 // and x64. 722 // and x64.
706 volatile bool done = false; 723 volatile bool done = false;
(...skipping 11 matching lines...) Expand all
718 735
719 // Class hierarchy analysis is registered with the thread in the 736 // Class hierarchy analysis is registered with the thread in the
720 // constructor and unregisters itself upon destruction. 737 // constructor and unregisters itself upon destruction.
721 CHA cha(thread()); 738 CHA cha(thread());
722 739
723 // TimerScope needs an isolate to be properly terminated in case of a 740 // TimerScope needs an isolate to be properly terminated in case of a
724 // LongJump. 741 // LongJump.
725 { 742 {
726 CSTAT_TIMER_SCOPE(thread(), graphbuilder_timer); 743 CSTAT_TIMER_SCOPE(thread(), graphbuilder_timer);
727 ZoneGrowableArray<const ICData*>* ic_data_array = 744 ZoneGrowableArray<const ICData*>* ic_data_array =
728 new(zone) ZoneGrowableArray<const ICData*>(); 745 new (zone) ZoneGrowableArray<const ICData*>();
729 if (optimized()) { 746 if (optimized()) {
730 // Extract type feedback before the graph is built, as the graph 747 // Extract type feedback before the graph is built, as the graph
731 // builder uses it to attach it to nodes. 748 // builder uses it to attach it to nodes.
732 749
733 // In background compilation the deoptimization counter may have 750 // In background compilation the deoptimization counter may have
734 // already reached the limit. 751 // already reached the limit.
735 ASSERT(Compiler::IsBackgroundCompilation() || 752 ASSERT(Compiler::IsBackgroundCompilation() ||
736 (function.deoptimization_counter() < 753 (function.deoptimization_counter() <
737 FLAG_max_deoptimization_counter_threshold)); 754 FLAG_max_deoptimization_counter_threshold));
738 755
739 // 'Freeze' ICData in background compilation so that it does not 756 // 'Freeze' ICData in background compilation so that it does not
740 // change while compiling. 757 // change while compiling.
741 const bool clone_ic_data = Compiler::IsBackgroundCompilation(); 758 const bool clone_ic_data = Compiler::IsBackgroundCompilation();
742 function.RestoreICDataMap(ic_data_array, clone_ic_data); 759 function.RestoreICDataMap(ic_data_array, clone_ic_data);
743 760
744 if (Compiler::IsBackgroundCompilation() && 761 if (Compiler::IsBackgroundCompilation() &&
745 (function.ic_data_array() == Array::null())) { 762 (function.ic_data_array() == Array::null())) {
746 Compiler::AbortBackgroundCompilation(Thread::kNoDeoptId, 763 Compiler::AbortBackgroundCompilation(
747 "RestoreICDataMap: ICData array cleared."); 764 Thread::kNoDeoptId, "RestoreICDataMap: ICData array cleared.");
748 } 765 }
749 if (FLAG_print_ic_data_map) { 766 if (FLAG_print_ic_data_map) {
750 for (intptr_t i = 0; i < ic_data_array->length(); i++) { 767 for (intptr_t i = 0; i < ic_data_array->length(); i++) {
751 if ((*ic_data_array)[i] != NULL) { 768 if ((*ic_data_array)[i] != NULL) {
752 THR_Print("%" Pd " ", i); 769 THR_Print("%" Pd " ", i);
753 FlowGraphPrinter::PrintICData(*(*ic_data_array)[i]); 770 FlowGraphPrinter::PrintICData(*(*ic_data_array)[i]);
754 } 771 }
755 } 772 }
756 } 773 }
757 } 774 }
758 775
759 NOT_IN_PRODUCT(TimelineDurationScope tds(thread(), 776 NOT_IN_PRODUCT(TimelineDurationScope tds(thread(), compiler_timeline,
760 compiler_timeline, 777 "BuildFlowGraph"));
761 "BuildFlowGraph");) 778 flow_graph = pipeline->BuildFlowGraph(zone, parsed_function(),
762 flow_graph = pipeline->BuildFlowGraph(zone, 779 *ic_data_array, osr_id());
763 parsed_function(),
764 *ic_data_array,
765 osr_id());
766 } 780 }
767 781
768 const bool print_flow_graph = 782 const bool print_flow_graph =
769 (FLAG_print_flow_graph || 783 (FLAG_print_flow_graph ||
770 (optimized() && FLAG_print_flow_graph_optimized)) && 784 (optimized() && FLAG_print_flow_graph_optimized)) &&
771 FlowGraphPrinter::ShouldPrint(function); 785 FlowGraphPrinter::ShouldPrint(function);
772 786
773 if (print_flow_graph) { 787 if (print_flow_graph) {
774 if (osr_id() == Compiler::kNoOSRDeoptId) { 788 if (osr_id() == Compiler::kNoOSRDeoptId) {
775 FlowGraphPrinter::PrintGraph("Before Optimizations", flow_graph); 789 FlowGraphPrinter::PrintGraph("Before Optimizations", flow_graph);
776 } else { 790 } else {
777 FlowGraphPrinter::PrintGraph("For OSR", flow_graph); 791 FlowGraphPrinter::PrintGraph("For OSR", flow_graph);
778 } 792 }
779 } 793 }
780 794
781 BlockScheduler block_scheduler(flow_graph); 795 BlockScheduler block_scheduler(flow_graph);
782 const bool reorder_blocks = 796 const bool reorder_blocks =
783 FlowGraph::ShouldReorderBlocks(function, optimized()); 797 FlowGraph::ShouldReorderBlocks(function, optimized());
784 if (reorder_blocks) { 798 if (reorder_blocks) {
785 NOT_IN_PRODUCT(TimelineDurationScope tds( 799 NOT_IN_PRODUCT(TimelineDurationScope tds(
786 thread(), compiler_timeline, "BlockScheduler::AssignEdgeWeights")); 800 thread(), compiler_timeline, "BlockScheduler::AssignEdgeWeights"));
787 block_scheduler.AssignEdgeWeights(); 801 block_scheduler.AssignEdgeWeights();
788 } 802 }
789 803
790 if (optimized()) { 804 if (optimized()) {
791 NOT_IN_PRODUCT(TimelineDurationScope tds(thread(), 805 NOT_IN_PRODUCT(TimelineDurationScope tds(thread(), compiler_timeline,
792 compiler_timeline,
793 "ComputeSSA")); 806 "ComputeSSA"));
794 CSTAT_TIMER_SCOPE(thread(), ssa_timer); 807 CSTAT_TIMER_SCOPE(thread(), ssa_timer);
795 // Transform to SSA (virtual register 0 and no inlining arguments). 808 // Transform to SSA (virtual register 0 and no inlining arguments).
796 flow_graph->ComputeSSA(0, NULL); 809 flow_graph->ComputeSSA(0, NULL);
797 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 810 DEBUG_ASSERT(flow_graph->VerifyUseLists());
798 if (print_flow_graph) { 811 if (print_flow_graph) {
799 FlowGraphPrinter::PrintGraph("After SSA", flow_graph); 812 FlowGraphPrinter::PrintGraph("After SSA", flow_graph);
800 } 813 }
801 } 814 }
802 815
803 // Maps inline_id_to_function[inline_id] -> function. Top scope 816 // Maps inline_id_to_function[inline_id] -> function. Top scope
804 // function has inline_id 0. The map is populated by the inliner. 817 // function has inline_id 0. The map is populated by the inliner.
805 GrowableArray<const Function*> inline_id_to_function; 818 GrowableArray<const Function*> inline_id_to_function;
806 // Token position where inlining occured. 819 // Token position where inlining occured.
807 GrowableArray<TokenPosition> inline_id_to_token_pos; 820 GrowableArray<TokenPosition> inline_id_to_token_pos;
808 // For a given inlining-id(index) specifies the caller's inlining-id. 821 // For a given inlining-id(index) specifies the caller's inlining-id.
809 GrowableArray<intptr_t> caller_inline_id; 822 GrowableArray<intptr_t> caller_inline_id;
810 // Collect all instance fields that are loaded in the graph and 823 // Collect all instance fields that are loaded in the graph and
811 // have non-generic type feedback attached to them that can 824 // have non-generic type feedback attached to them that can
812 // potentially affect optimizations. 825 // potentially affect optimizations.
813 if (optimized()) { 826 if (optimized()) {
814 NOT_IN_PRODUCT(TimelineDurationScope tds(thread(), 827 NOT_IN_PRODUCT(TimelineDurationScope tds(thread(), compiler_timeline,
815 compiler_timeline,
816 "OptimizationPasses")); 828 "OptimizationPasses"));
817 inline_id_to_function.Add(&function); 829 inline_id_to_function.Add(&function);
818 // We do not add the token position now because we don't know the 830 // We do not add the token position now because we don't know the
819 // position of the inlined call until later. A side effect of this 831 // position of the inlined call until later. A side effect of this
820 // is that the length of |inline_id_to_function| is always larger 832 // is that the length of |inline_id_to_function| is always larger
821 // than the length of |inline_id_to_token_pos| by one. 833 // than the length of |inline_id_to_token_pos| by one.
822 // Top scope function has no caller (-1). We do this because we expect 834 // Top scope function has no caller (-1). We do this because we expect
823 // all token positions to be at an inlined call. 835 // all token positions to be at an inlined call.
824 caller_inline_id.Add(-1); 836 caller_inline_id.Add(-1);
825 CSTAT_TIMER_SCOPE(thread(), graphoptimizer_timer); 837 CSTAT_TIMER_SCOPE(thread(), graphoptimizer_timer);
826 838
827 JitOptimizer optimizer(flow_graph); 839 JitOptimizer optimizer(flow_graph);
828 840
829 optimizer.ApplyICData(); 841 optimizer.ApplyICData();
830 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 842 DEBUG_ASSERT(flow_graph->VerifyUseLists());
831 843
832 // Optimize (a << b) & c patterns, merge operations. 844 // Optimize (a << b) & c patterns, merge operations.
833 // Run early in order to have more opportunity to optimize left shifts. 845 // Run early in order to have more opportunity to optimize left shifts.
834 flow_graph->TryOptimizePatterns(); 846 flow_graph->TryOptimizePatterns();
835 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 847 DEBUG_ASSERT(flow_graph->VerifyUseLists());
836 848
837 FlowGraphInliner::SetInliningId(flow_graph, 0); 849 FlowGraphInliner::SetInliningId(flow_graph, 0);
838 850
839 // Inlining (mutates the flow graph) 851 // Inlining (mutates the flow graph)
840 if (FLAG_use_inlining) { 852 if (FLAG_use_inlining) {
841 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), 853 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), compiler_timeline,
842 compiler_timeline,
843 "Inlining")); 854 "Inlining"));
844 CSTAT_TIMER_SCOPE(thread(), graphinliner_timer); 855 CSTAT_TIMER_SCOPE(thread(), graphinliner_timer);
845 // Propagate types to create more inlining opportunities. 856 // Propagate types to create more inlining opportunities.
846 FlowGraphTypePropagator::Propagate(flow_graph); 857 FlowGraphTypePropagator::Propagate(flow_graph);
847 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 858 DEBUG_ASSERT(flow_graph->VerifyUseLists());
848 859
849 // Use propagated class-ids to create more inlining opportunities. 860 // Use propagated class-ids to create more inlining opportunities.
850 optimizer.ApplyClassIds(); 861 optimizer.ApplyClassIds();
851 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 862 DEBUG_ASSERT(flow_graph->VerifyUseLists());
852 863
853 FlowGraphInliner inliner(flow_graph, 864 FlowGraphInliner inliner(flow_graph, &inline_id_to_function,
854 &inline_id_to_function, 865 &inline_id_to_token_pos, &caller_inline_id,
855 &inline_id_to_token_pos,
856 &caller_inline_id,
857 use_speculative_inlining, 866 use_speculative_inlining,
858 /*inlining_black_list=*/ NULL, 867 /*inlining_black_list=*/NULL,
859 /*precompiler=*/ NULL); 868 /*precompiler=*/NULL);
860 inliner.Inline(); 869 inliner.Inline();
861 // Use lists are maintained and validated by the inliner. 870 // Use lists are maintained and validated by the inliner.
862 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 871 DEBUG_ASSERT(flow_graph->VerifyUseLists());
863 } 872 }
864 873
865 // Propagate types and eliminate more type tests. 874 // Propagate types and eliminate more type tests.
866 FlowGraphTypePropagator::Propagate(flow_graph); 875 FlowGraphTypePropagator::Propagate(flow_graph);
867 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 876 DEBUG_ASSERT(flow_graph->VerifyUseLists());
868 877
869 { 878 {
870 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), 879 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), compiler_timeline,
871 compiler_timeline,
872 "ApplyClassIds")); 880 "ApplyClassIds"));
873 // Use propagated class-ids to optimize further. 881 // Use propagated class-ids to optimize further.
874 optimizer.ApplyClassIds(); 882 optimizer.ApplyClassIds();
875 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 883 DEBUG_ASSERT(flow_graph->VerifyUseLists());
876 } 884 }
877 885
878 // Propagate types for potentially newly added instructions by 886 // Propagate types for potentially newly added instructions by
879 // ApplyClassIds(). Must occur before canonicalization. 887 // ApplyClassIds(). Must occur before canonicalization.
880 FlowGraphTypePropagator::Propagate(flow_graph); 888 FlowGraphTypePropagator::Propagate(flow_graph);
881 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 889 DEBUG_ASSERT(flow_graph->VerifyUseLists());
882 890
883 // Do optimizations that depend on the propagated type information. 891 // Do optimizations that depend on the propagated type information.
884 if (flow_graph->Canonicalize()) { 892 if (flow_graph->Canonicalize()) {
885 // Invoke Canonicalize twice in order to fully canonicalize patterns 893 // Invoke Canonicalize twice in order to fully canonicalize patterns
886 // like "if (a & const == 0) { }". 894 // like "if (a & const == 0) { }".
887 flow_graph->Canonicalize(); 895 flow_graph->Canonicalize();
888 } 896 }
889 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 897 DEBUG_ASSERT(flow_graph->VerifyUseLists());
890 898
891 { 899 {
892 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), 900 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), compiler_timeline,
893 compiler_timeline,
894 "BranchSimplifier")); 901 "BranchSimplifier"));
895 BranchSimplifier::Simplify(flow_graph); 902 BranchSimplifier::Simplify(flow_graph);
896 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 903 DEBUG_ASSERT(flow_graph->VerifyUseLists());
897 904
898 IfConverter::Simplify(flow_graph); 905 IfConverter::Simplify(flow_graph);
899 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 906 DEBUG_ASSERT(flow_graph->VerifyUseLists());
900 } 907 }
901 908
902 if (FLAG_constant_propagation) { 909 if (FLAG_constant_propagation) {
903 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), 910 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), compiler_timeline,
904 compiler_timeline,
905 "ConstantPropagation"); 911 "ConstantPropagation");
906 ConstantPropagator::Optimize(flow_graph)); 912 ConstantPropagator::Optimize(flow_graph));
907 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 913 DEBUG_ASSERT(flow_graph->VerifyUseLists());
908 // A canonicalization pass to remove e.g. smi checks on smi constants. 914 // A canonicalization pass to remove e.g. smi checks on smi constants.
909 flow_graph->Canonicalize(); 915 flow_graph->Canonicalize();
910 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 916 DEBUG_ASSERT(flow_graph->VerifyUseLists());
911 // Canonicalization introduced more opportunities for constant 917 // Canonicalization introduced more opportunities for constant
912 // propagation. 918 // propagation.
913 ConstantPropagator::Optimize(flow_graph); 919 ConstantPropagator::Optimize(flow_graph);
914 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 920 DEBUG_ASSERT(flow_graph->VerifyUseLists());
915 } 921 }
916 922
917 // Optimistically convert loop phis that have a single non-smi input 923 // Optimistically convert loop phis that have a single non-smi input
918 // coming from the loop pre-header into smi-phis. 924 // coming from the loop pre-header into smi-phis.
919 if (FLAG_loop_invariant_code_motion) { 925 if (FLAG_loop_invariant_code_motion) {
920 LICM licm(flow_graph); 926 LICM licm(flow_graph);
921 licm.OptimisticallySpecializeSmiPhis(); 927 licm.OptimisticallySpecializeSmiPhis();
922 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 928 DEBUG_ASSERT(flow_graph->VerifyUseLists());
923 } 929 }
924 930
925 // Propagate types and eliminate even more type tests. 931 // Propagate types and eliminate even more type tests.
926 // Recompute types after constant propagation to infer more precise 932 // Recompute types after constant propagation to infer more precise
927 // types for uses that were previously reached by now eliminated phis. 933 // types for uses that were previously reached by now eliminated phis.
928 FlowGraphTypePropagator::Propagate(flow_graph); 934 FlowGraphTypePropagator::Propagate(flow_graph);
929 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 935 DEBUG_ASSERT(flow_graph->VerifyUseLists());
930 936
931 { 937 {
932 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), 938 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), compiler_timeline,
933 compiler_timeline,
934 "SelectRepresentations")); 939 "SelectRepresentations"));
935 // Where beneficial convert Smi operations into Int32 operations. 940 // Where beneficial convert Smi operations into Int32 operations.
936 // Only meanigful for 32bit platforms right now. 941 // Only meanigful for 32bit platforms right now.
937 flow_graph->WidenSmiToInt32(); 942 flow_graph->WidenSmiToInt32();
938 943
939 // Unbox doubles. Performed after constant propagation to minimize 944 // Unbox doubles. Performed after constant propagation to minimize
940 // interference from phis merging double values and tagged 945 // interference from phis merging double values and tagged
941 // values coming from dead paths. 946 // values coming from dead paths.
942 flow_graph->SelectRepresentations(); 947 flow_graph->SelectRepresentations();
943 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 948 DEBUG_ASSERT(flow_graph->VerifyUseLists());
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
976 flow_graph->RemoveRedefinitions(); 981 flow_graph->RemoveRedefinitions();
977 } 982 }
978 983
979 // Optimize (a << b) & c patterns, merge operations. 984 // Optimize (a << b) & c patterns, merge operations.
980 // Run after CSE in order to have more opportunity to merge 985 // Run after CSE in order to have more opportunity to merge
981 // instructions that have same inputs. 986 // instructions that have same inputs.
982 flow_graph->TryOptimizePatterns(); 987 flow_graph->TryOptimizePatterns();
983 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 988 DEBUG_ASSERT(flow_graph->VerifyUseLists());
984 989
985 { 990 {
986 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), 991 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), compiler_timeline,
987 compiler_timeline,
988 "DeadStoreElimination")); 992 "DeadStoreElimination"));
989 DeadStoreElimination::Optimize(flow_graph); 993 DeadStoreElimination::Optimize(flow_graph);
990 } 994 }
991 995
992 if (FLAG_range_analysis) { 996 if (FLAG_range_analysis) {
993 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), 997 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), compiler_timeline,
994 compiler_timeline,
995 "RangeAnalysis")); 998 "RangeAnalysis"));
996 // Propagate types after store-load-forwarding. Some phis may have 999 // Propagate types after store-load-forwarding. Some phis may have
997 // become smi phis that can be processed by range analysis. 1000 // become smi phis that can be processed by range analysis.
998 FlowGraphTypePropagator::Propagate(flow_graph); 1001 FlowGraphTypePropagator::Propagate(flow_graph);
999 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 1002 DEBUG_ASSERT(flow_graph->VerifyUseLists());
1000 1003
1001 // We have to perform range analysis after LICM because it 1004 // We have to perform range analysis after LICM because it
1002 // optimistically moves CheckSmi through phis into loop preheaders 1005 // optimistically moves CheckSmi through phis into loop preheaders
1003 // making some phis smi. 1006 // making some phis smi.
1004 RangeAnalysis range_analysis(flow_graph); 1007 RangeAnalysis range_analysis(flow_graph);
(...skipping 23 matching lines...) Expand all
1028 // Optimize try-blocks. 1031 // Optimize try-blocks.
1029 TryCatchAnalyzer::Optimize(flow_graph); 1032 TryCatchAnalyzer::Optimize(flow_graph);
1030 } 1033 }
1031 1034
1032 // Detach environments from the instructions that can't deoptimize. 1035 // Detach environments from the instructions that can't deoptimize.
1033 // Do it before we attempt to perform allocation sinking to minimize 1036 // Do it before we attempt to perform allocation sinking to minimize
1034 // amount of materializations it has to perform. 1037 // amount of materializations it has to perform.
1035 flow_graph->EliminateEnvironments(); 1038 flow_graph->EliminateEnvironments();
1036 1039
1037 { 1040 {
1038 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), 1041 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), compiler_timeline,
1039 compiler_timeline,
1040 "EliminateDeadPhis")); 1042 "EliminateDeadPhis"));
1041 DeadCodeElimination::EliminateDeadPhis(flow_graph); 1043 DeadCodeElimination::EliminateDeadPhis(flow_graph);
1042 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 1044 DEBUG_ASSERT(flow_graph->VerifyUseLists());
1043 } 1045 }
1044 1046
1045 if (flow_graph->Canonicalize()) { 1047 if (flow_graph->Canonicalize()) {
1046 flow_graph->Canonicalize(); 1048 flow_graph->Canonicalize();
1047 } 1049 }
1048 1050
1049 // Attempt to sink allocations of temporary non-escaping objects to 1051 // Attempt to sink allocations of temporary non-escaping objects to
1050 // the deoptimization path. 1052 // the deoptimization path.
1051 AllocationSinking* sinking = NULL; 1053 AllocationSinking* sinking = NULL;
1052 if (FLAG_allocation_sinking && 1054 if (FLAG_allocation_sinking &&
1053 (flow_graph->graph_entry()->SuccessorCount() == 1)) { 1055 (flow_graph->graph_entry()->SuccessorCount() == 1)) {
1054 NOT_IN_PRODUCT(TimelineDurationScope tds2( 1056 NOT_IN_PRODUCT(TimelineDurationScope tds2(
1055 thread(), compiler_timeline, "AllocationSinking::Optimize")); 1057 thread(), compiler_timeline, "AllocationSinking::Optimize"));
1056 // TODO(fschneider): Support allocation sinking with try-catch. 1058 // TODO(fschneider): Support allocation sinking with try-catch.
1057 sinking = new AllocationSinking(flow_graph); 1059 sinking = new AllocationSinking(flow_graph);
1058 sinking->Optimize(); 1060 sinking->Optimize();
1059 } 1061 }
1060 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 1062 DEBUG_ASSERT(flow_graph->VerifyUseLists());
1061 1063
1062 DeadCodeElimination::EliminateDeadPhis(flow_graph); 1064 DeadCodeElimination::EliminateDeadPhis(flow_graph);
1063 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 1065 DEBUG_ASSERT(flow_graph->VerifyUseLists());
1064 1066
1065 FlowGraphTypePropagator::Propagate(flow_graph); 1067 FlowGraphTypePropagator::Propagate(flow_graph);
1066 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 1068 DEBUG_ASSERT(flow_graph->VerifyUseLists());
1067 1069
1068 { 1070 {
1069 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), 1071 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), compiler_timeline,
1070 compiler_timeline,
1071 "SelectRepresentations")); 1072 "SelectRepresentations"));
1072 // Ensure that all phis inserted by optimization passes have 1073 // Ensure that all phis inserted by optimization passes have
1073 // consistent representations. 1074 // consistent representations.
1074 flow_graph->SelectRepresentations(); 1075 flow_graph->SelectRepresentations();
1075 } 1076 }
1076 1077
1077 if (flow_graph->Canonicalize()) { 1078 if (flow_graph->Canonicalize()) {
1078 // To fully remove redundant boxing (e.g. BoxDouble used only in 1079 // To fully remove redundant boxing (e.g. BoxDouble used only in
1079 // environments and UnboxDouble instructions) instruction we 1080 // environments and UnboxDouble instructions) instruction we
1080 // first need to replace all their uses and then fold them away. 1081 // first need to replace all their uses and then fold them away.
(...skipping 12 matching lines...) Expand all
1093 // referenced only from environments. Register allocator will consider 1094 // referenced only from environments. Register allocator will consider
1094 // them as part of a deoptimization environment. 1095 // them as part of a deoptimization environment.
1095 sinking->DetachMaterializations(); 1096 sinking->DetachMaterializations();
1096 } 1097 }
1097 1098
1098 // Compute and store graph informations (call & instruction counts) 1099 // Compute and store graph informations (call & instruction counts)
1099 // to be later used by the inliner. 1100 // to be later used by the inliner.
1100 FlowGraphInliner::CollectGraphInfo(flow_graph, true); 1101 FlowGraphInliner::CollectGraphInfo(flow_graph, true);
1101 1102
1102 { 1103 {
1103 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), 1104 NOT_IN_PRODUCT(TimelineDurationScope tds2(thread(), compiler_timeline,
1104 compiler_timeline,
1105 "AllocateRegisters")); 1105 "AllocateRegisters"));
1106 // Perform register allocation on the SSA graph. 1106 // Perform register allocation on the SSA graph.
1107 FlowGraphAllocator allocator(*flow_graph); 1107 FlowGraphAllocator allocator(*flow_graph);
1108 allocator.AllocateRegisters(); 1108 allocator.AllocateRegisters();
1109 } 1109 }
1110 1110
1111 if (reorder_blocks) { 1111 if (reorder_blocks) {
1112 NOT_IN_PRODUCT(TimelineDurationScope tds( 1112 NOT_IN_PRODUCT(TimelineDurationScope tds(
1113 thread(), compiler_timeline, "BlockScheduler::ReorderBlocks")); 1113 thread(), compiler_timeline, "BlockScheduler::ReorderBlocks"));
1114 block_scheduler.ReorderBlocks(); 1114 block_scheduler.ReorderBlocks();
1115 } 1115 }
1116 1116
1117 if (print_flow_graph) { 1117 if (print_flow_graph) {
1118 FlowGraphPrinter::PrintGraph("After Optimizations", flow_graph); 1118 FlowGraphPrinter::PrintGraph("After Optimizations", flow_graph);
1119 } 1119 }
1120 } 1120 }
1121 1121
1122 ASSERT(inline_id_to_function.length() == caller_inline_id.length()); 1122 ASSERT(inline_id_to_function.length() == caller_inline_id.length());
1123 Assembler assembler(use_far_branches); 1123 Assembler assembler(use_far_branches);
1124 FlowGraphCompiler graph_compiler(&assembler, flow_graph, 1124 FlowGraphCompiler graph_compiler(
1125 *parsed_function(), optimized(), 1125 &assembler, flow_graph, *parsed_function(), optimized(),
1126 inline_id_to_function, 1126 inline_id_to_function, inline_id_to_token_pos, caller_inline_id);
1127 inline_id_to_token_pos,
1128 caller_inline_id);
1129 { 1127 {
1130 CSTAT_TIMER_SCOPE(thread(), graphcompiler_timer); 1128 CSTAT_TIMER_SCOPE(thread(), graphcompiler_timer);
1131 NOT_IN_PRODUCT(TimelineDurationScope tds(thread(), 1129 NOT_IN_PRODUCT(TimelineDurationScope tds(thread(), compiler_timeline,
1132 compiler_timeline,
1133 "CompileGraph")); 1130 "CompileGraph"));
1134 graph_compiler.CompileGraph(); 1131 graph_compiler.CompileGraph();
1135 pipeline->FinalizeCompilation(flow_graph); 1132 pipeline->FinalizeCompilation(flow_graph);
1136 } 1133 }
1137 { 1134 {
1138 NOT_IN_PRODUCT(TimelineDurationScope tds(thread(), 1135 NOT_IN_PRODUCT(TimelineDurationScope tds(thread(), compiler_timeline,
1139 compiler_timeline,
1140 "FinalizeCompilation")); 1136 "FinalizeCompilation"));
1141 if (thread()->IsMutatorThread()) { 1137 if (thread()->IsMutatorThread()) {
1142 FinalizeCompilation(&assembler, &graph_compiler, flow_graph); 1138 FinalizeCompilation(&assembler, &graph_compiler, flow_graph);
1143 } else { 1139 } else {
1144 // This part of compilation must be at a safepoint. 1140 // This part of compilation must be at a safepoint.
1145 // Stop mutator thread before creating the instruction object and 1141 // Stop mutator thread before creating the instruction object and
1146 // installing code. 1142 // installing code.
1147 // Mutator thread may not run code while we are creating the 1143 // Mutator thread may not run code while we are creating the
1148 // instruction object, since the creation of instruction object 1144 // instruction object, since the creation of instruction object
1149 // changes code page access permissions (makes them temporary not 1145 // changes code page access permissions (makes them temporary not
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
1202 } 1198 }
1203 is_compiled = false; 1199 is_compiled = false;
1204 } 1200 }
1205 // Reset global isolate state. 1201 // Reset global isolate state.
1206 thread()->set_deopt_id(prev_deopt_id); 1202 thread()->set_deopt_id(prev_deopt_id);
1207 } 1203 }
1208 return is_compiled; 1204 return is_compiled;
1209 } 1205 }
1210 1206
1211 1207
1212 DEBUG_ONLY( 1208 #if defined(DEBUG)
1213 // Verifies that the inliner is always in the list of inlined functions. 1209 // Verifies that the inliner is always in the list of inlined functions.
1214 // If this fails run with --trace-inlining-intervals to get more information. 1210 // If this fails run with --trace-inlining-intervals to get more information.
1215 static void CheckInliningIntervals(const Function& function) { 1211 static void CheckInliningIntervals(const Function& function) {
1216 const Code& code = Code::Handle(function.CurrentCode()); 1212 const Code& code = Code::Handle(function.CurrentCode());
1217 const Array& intervals = Array::Handle(code.GetInlinedIntervals()); 1213 const Array& intervals = Array::Handle(code.GetInlinedIntervals());
1218 if (intervals.IsNull() || (intervals.Length() == 0)) return; 1214 if (intervals.IsNull() || (intervals.Length() == 0)) return;
1219 Smi& start = Smi::Handle(); 1215 Smi& start = Smi::Handle();
1220 GrowableArray<Function*> inlined_functions; 1216 GrowableArray<Function*> inlined_functions;
1221 for (intptr_t i = 0; i < intervals.Length(); i += Code::kInlIntNumEntries) { 1217 for (intptr_t i = 0; i < intervals.Length(); i += Code::kInlIntNumEntries) {
1222 start ^= intervals.At(i + Code::kInlIntStart); 1218 start ^= intervals.At(i + Code::kInlIntStart);
1223 ASSERT(!start.IsNull()); 1219 ASSERT(!start.IsNull());
1224 if (start.IsNull()) continue; 1220 if (start.IsNull()) continue;
1225 code.GetInlinedFunctionsAt(start.Value(), &inlined_functions); 1221 code.GetInlinedFunctionsAt(start.Value(), &inlined_functions);
1226 ASSERT(inlined_functions[inlined_functions.length() - 1]->raw() == 1222 ASSERT(inlined_functions[inlined_functions.length() - 1]->raw() ==
1227 function.raw()); 1223 function.raw());
1228 } 1224 }
1229 } 1225 }
1230 ) 1226 #endif // defined(DEBUG)
1231 1227
1232 static RawError* CompileFunctionHelper(CompilationPipeline* pipeline, 1228 static RawError* CompileFunctionHelper(CompilationPipeline* pipeline,
1233 const Function& function, 1229 const Function& function,
1234 bool optimized, 1230 bool optimized,
1235 intptr_t osr_id) { 1231 intptr_t osr_id) {
1236 ASSERT(!FLAG_precompiled_mode); 1232 ASSERT(!FLAG_precompiled_mode);
1237 ASSERT(!optimized || function.was_compiled()); 1233 ASSERT(!optimized || function.was_compiled());
1238 LongJumpScope jump; 1234 LongJumpScope jump;
1239 if (setjmp(*jump.Set()) == 0) { 1235 if (setjmp(*jump.Set()) == 0) {
1240 Thread* const thread = Thread::Current(); 1236 Thread* const thread = Thread::Current();
1241 Isolate* const isolate = thread->isolate(); 1237 Isolate* const isolate = thread->isolate();
1242 StackZone stack_zone(thread); 1238 StackZone stack_zone(thread);
1243 Zone* const zone = stack_zone.GetZone(); 1239 Zone* const zone = stack_zone.GetZone();
1244 const bool trace_compiler = 1240 const bool trace_compiler =
1245 FLAG_trace_compiler || 1241 FLAG_trace_compiler || (FLAG_trace_optimizing_compiler && optimized);
1246 (FLAG_trace_optimizing_compiler && optimized);
1247 Timer per_compile_timer(trace_compiler, "Compilation time"); 1242 Timer per_compile_timer(trace_compiler, "Compilation time");
1248 per_compile_timer.Start(); 1243 per_compile_timer.Start();
1249 1244
1250 ParsedFunction* parsed_function = new(zone) ParsedFunction( 1245 ParsedFunction* parsed_function = new (zone)
1251 thread, Function::ZoneHandle(zone, function.raw())); 1246 ParsedFunction(thread, Function::ZoneHandle(zone, function.raw()));
1252 if (trace_compiler) { 1247 if (trace_compiler) {
1253 const intptr_t token_size = function.end_token_pos().Pos() - 1248 const intptr_t token_size =
1254 function.token_pos().Pos(); 1249 function.end_token_pos().Pos() - function.token_pos().Pos();
1255 THR_Print("Compiling %s%sfunction %s: '%s' @ token %s, size %" Pd "\n", 1250 THR_Print("Compiling %s%sfunction %s: '%s' @ token %s, size %" Pd "\n",
1256 (osr_id == Compiler::kNoOSRDeoptId ? "" : "osr "), 1251 (osr_id == Compiler::kNoOSRDeoptId ? "" : "osr "),
1257 (optimized ? "optimized " : ""), 1252 (optimized ? "optimized " : ""),
1258 (Compiler::IsBackgroundCompilation() ? "(background)" : ""), 1253 (Compiler::IsBackgroundCompilation() ? "(background)" : ""),
1259 function.ToFullyQualifiedCString(), 1254 function.ToFullyQualifiedCString(),
1260 function.token_pos().ToCString(), 1255 function.token_pos().ToCString(), token_size);
1261 token_size);
1262 } 1256 }
1263 INC_STAT(thread, num_functions_compiled, 1); 1257 INC_STAT(thread, num_functions_compiled, 1);
1264 if (optimized) { 1258 if (optimized) {
1265 INC_STAT(thread, num_functions_optimized, 1); 1259 INC_STAT(thread, num_functions_optimized, 1);
1266 } 1260 }
1267 // Makes sure no classes are loaded during parsing in background. 1261 // Makes sure no classes are loaded during parsing in background.
1268 const intptr_t loading_invalidation_gen_at_start = 1262 const intptr_t loading_invalidation_gen_at_start =
1269 isolate->loading_invalidation_gen(); 1263 isolate->loading_invalidation_gen();
1270 { 1264 {
1271 HANDLESCOPE(thread); 1265 HANDLESCOPE(thread);
1272 const int64_t num_tokens_before = STAT_VALUE(thread, num_tokens_consumed); 1266 const int64_t num_tokens_before = STAT_VALUE(thread, num_tokens_consumed);
1273 pipeline->ParseFunction(parsed_function); 1267 pipeline->ParseFunction(parsed_function);
1274 const int64_t num_tokens_after = STAT_VALUE(thread, num_tokens_consumed); 1268 const int64_t num_tokens_after = STAT_VALUE(thread, num_tokens_consumed);
1275 INC_STAT(thread, 1269 INC_STAT(thread, num_func_tokens_compiled,
1276 num_func_tokens_compiled,
1277 num_tokens_after - num_tokens_before); 1270 num_tokens_after - num_tokens_before);
1278 } 1271 }
1279 1272
1280 CompileParsedFunctionHelper helper(parsed_function, optimized, osr_id); 1273 CompileParsedFunctionHelper helper(parsed_function, optimized, osr_id);
1281 1274
1282 if (Compiler::IsBackgroundCompilation()) { 1275 if (Compiler::IsBackgroundCompilation()) {
1283 if (isolate->IsTopLevelParsing() || 1276 if (isolate->IsTopLevelParsing() ||
1284 (loading_invalidation_gen_at_start != 1277 (loading_invalidation_gen_at_start !=
1285 isolate->loading_invalidation_gen())) { 1278 isolate->loading_invalidation_gen())) {
1286 // Loading occured while parsing. We need to abort here because state 1279 // Loading occured while parsing. We need to abort here because state
1287 // changed while compiling. 1280 // changed while compiling.
1288 Compiler::AbortBackgroundCompilation(Thread::kNoDeoptId, 1281 Compiler::AbortBackgroundCompilation(
1282 Thread::kNoDeoptId,
1289 "Invalidated state during parsing because of script loading"); 1283 "Invalidated state during parsing because of script loading");
1290 } 1284 }
1291 } 1285 }
1292 1286
1293 const bool success = helper.Compile(pipeline); 1287 const bool success = helper.Compile(pipeline);
1294 if (success) { 1288 if (success) {
1295 if (!optimized) { 1289 if (!optimized) {
1296 function.set_was_compiled(true); 1290 function.set_was_compiled(true);
1297 } 1291 }
1298 } else { 1292 } else {
1299 if (optimized) { 1293 if (optimized) {
1300 if (Compiler::IsBackgroundCompilation()) { 1294 if (Compiler::IsBackgroundCompilation()) {
1301 // Try again later, background compilation may abort because of 1295 // Try again later, background compilation may abort because of
1302 // state change during compilation. 1296 // state change during compilation.
1303 if (FLAG_trace_compiler) { 1297 if (FLAG_trace_compiler) {
1304 THR_Print("Aborted background compilation: %s\n", 1298 THR_Print("Aborted background compilation: %s\n",
1305 function.ToFullyQualifiedCString()); 1299 function.ToFullyQualifiedCString());
1306 } 1300 }
1307 { 1301 {
1308 // If it was a bailout, then disable optimization. 1302 // If it was a bailout, then disable optimization.
1309 Error& error = Error::Handle(); 1303 Error& error = Error::Handle();
1310 // We got an error during compilation. 1304 // We got an error during compilation.
1311 error = thread->sticky_error(); 1305 error = thread->sticky_error();
1312 thread->clear_sticky_error(); 1306 thread->clear_sticky_error();
1313 if ((error.IsLanguageError() && 1307 if ((error.IsLanguageError() &&
1314 LanguageError::Cast(error).kind() == Report::kBailout) || 1308 LanguageError::Cast(error).kind() == Report::kBailout) ||
1315 error.IsUnhandledException()) { 1309 error.IsUnhandledException()) {
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after
1357 Code::Handle(function.CurrentCode()).Size(), 1351 Code::Handle(function.CurrentCode()).Size(),
1358 per_compile_timer.TotalElapsedTime()); 1352 per_compile_timer.TotalElapsedTime());
1359 } 1353 }
1360 1354
1361 if (FLAG_support_debugger) { 1355 if (FLAG_support_debugger) {
1362 isolate->debugger()->NotifyCompilation(function); 1356 isolate->debugger()->NotifyCompilation(function);
1363 } 1357 }
1364 1358
1365 if (FLAG_disassemble && FlowGraphPrinter::ShouldPrint(function)) { 1359 if (FLAG_disassemble && FlowGraphPrinter::ShouldPrint(function)) {
1366 Disassembler::DisassembleCode(function, optimized); 1360 Disassembler::DisassembleCode(function, optimized);
1367 } else if (FLAG_disassemble_optimized && 1361 } else if (FLAG_disassemble_optimized && optimized &&
1368 optimized &&
1369 FlowGraphPrinter::ShouldPrint(function)) { 1362 FlowGraphPrinter::ShouldPrint(function)) {
1370 Disassembler::DisassembleCode(function, true); 1363 Disassembler::DisassembleCode(function, true);
1371 } 1364 }
1372 1365
1373 DEBUG_ONLY(CheckInliningIntervals(function)); 1366 DEBUG_ONLY(CheckInliningIntervals(function));
1374 return Error::null(); 1367 return Error::null();
1375 } else { 1368 } else {
1376 Thread* const thread = Thread::Current(); 1369 Thread* const thread = Thread::Current();
1377 StackZone stack_zone(thread); 1370 StackZone stack_zone(thread);
1378 Error& error = Error::Handle(); 1371 Error& error = Error::Handle();
1379 // We got an error during compilation or it is a bailout from background 1372 // We got an error during compilation or it is a bailout from background
1380 // compilation (e.g., during parsing with EnsureIsFinalized). 1373 // compilation (e.g., during parsing with EnsureIsFinalized).
1381 error = thread->sticky_error(); 1374 error = thread->sticky_error();
1382 thread->clear_sticky_error(); 1375 thread->clear_sticky_error();
1383 if (error.raw() == Object::background_compilation_error().raw()) { 1376 if (error.raw() == Object::background_compilation_error().raw()) {
1384 // Exit compilation, retry it later. 1377 // Exit compilation, retry it later.
1385 if (FLAG_trace_bailout) { 1378 if (FLAG_trace_bailout) {
1386 THR_Print("Aborted background compilation: %s\n", 1379 THR_Print("Aborted background compilation: %s\n",
1387 function.ToFullyQualifiedCString()); 1380 function.ToFullyQualifiedCString());
1388 } 1381 }
1389 return Error::null(); 1382 return Error::null();
1390 } 1383 }
1391 // Do not attempt to optimize functions that can cause errors. 1384 // Do not attempt to optimize functions that can cause errors.
1392 function.set_is_optimizable(false); 1385 function.set_is_optimizable(false);
1393 return error.raw(); 1386 return error.raw();
1394 } 1387 }
1395 UNREACHABLE(); 1388 UNREACHABLE();
1396 return Error::null(); 1389 return Error::null();
1397 } 1390 }
1398 1391
1399 1392
1400 static RawError* ParseFunctionHelper(CompilationPipeline* pipeline, 1393 static RawError* ParseFunctionHelper(CompilationPipeline* pipeline,
1401 const Function& function, 1394 const Function& function,
1402 bool optimized, 1395 bool optimized,
1403 intptr_t osr_id) { 1396 intptr_t osr_id) {
1404 ASSERT(!FLAG_precompiled_mode); 1397 ASSERT(!FLAG_precompiled_mode);
1405 ASSERT(!optimized || function.was_compiled()); 1398 ASSERT(!optimized || function.was_compiled());
1406 LongJumpScope jump; 1399 LongJumpScope jump;
1407 if (setjmp(*jump.Set()) == 0) { 1400 if (setjmp(*jump.Set()) == 0) {
1408 Thread* const thread = Thread::Current(); 1401 Thread* const thread = Thread::Current();
1409 StackZone stack_zone(thread); 1402 StackZone stack_zone(thread);
1410 Zone* const zone = stack_zone.GetZone(); 1403 Zone* const zone = stack_zone.GetZone();
1411 const bool trace_compiler = 1404 const bool trace_compiler =
1412 FLAG_trace_compiler || 1405 FLAG_trace_compiler || (FLAG_trace_optimizing_compiler && optimized);
1413 (FLAG_trace_optimizing_compiler && optimized);
1414 1406
1415 if (trace_compiler) { 1407 if (trace_compiler) {
1416 const intptr_t token_size = function.end_token_pos().Pos() - 1408 const intptr_t token_size =
1417 function.token_pos().Pos(); 1409 function.end_token_pos().Pos() - function.token_pos().Pos();
1418 THR_Print("Parsing %s%sfunction %s: '%s' @ token %s, size %" Pd "\n", 1410 THR_Print("Parsing %s%sfunction %s: '%s' @ token %s, size %" Pd "\n",
1419 (osr_id == Compiler::kNoOSRDeoptId ? "" : "osr "), 1411 (osr_id == Compiler::kNoOSRDeoptId ? "" : "osr "),
1420 (optimized ? "optimized " : ""), 1412 (optimized ? "optimized " : ""),
1421 (Compiler::IsBackgroundCompilation() ? "(background)" : ""), 1413 (Compiler::IsBackgroundCompilation() ? "(background)" : ""),
1422 function.ToFullyQualifiedCString(), 1414 function.ToFullyQualifiedCString(),
1423 function.token_pos().ToCString(), 1415 function.token_pos().ToCString(), token_size);
1424 token_size);
1425 } 1416 }
1426 ParsedFunction* parsed_function = new(zone) ParsedFunction( 1417 ParsedFunction* parsed_function = new (zone)
1427 thread, Function::ZoneHandle(zone, function.raw())); 1418 ParsedFunction(thread, Function::ZoneHandle(zone, function.raw()));
1428 pipeline->ParseFunction(parsed_function); 1419 pipeline->ParseFunction(parsed_function);
1429 // For now we just walk thru the AST nodes and in DEBUG mode we print 1420 // For now we just walk thru the AST nodes and in DEBUG mode we print
1430 // them otherwise just skip through them, this will be need to be 1421 // them otherwise just skip through them, this will be need to be
1431 // wired to generate the IR format. 1422 // wired to generate the IR format.
1432 #if !defined(PRODUCT) 1423 #if !defined(PRODUCT)
1433 #if defined(DEBUG) 1424 #if defined(DEBUG)
1434 AstPrinter ast_printer(true); 1425 AstPrinter ast_printer(true);
1435 #else 1426 #else
1436 AstPrinter ast_printer(false); 1427 AstPrinter ast_printer(false);
1437 #endif // defined(DEBUG). 1428 #endif // defined(DEBUG).
1438 ast_printer.PrintFunctionNodes(*parsed_function); 1429 ast_printer.PrintFunctionNodes(*parsed_function);
1439 #endif // !defined(PRODUCT). 1430 #endif // !defined(PRODUCT).
1440 return Error::null(); 1431 return Error::null();
1441 } else { 1432 } else {
1442 Thread* const thread = Thread::Current(); 1433 Thread* const thread = Thread::Current();
1443 StackZone stack_zone(thread); 1434 StackZone stack_zone(thread);
1444 Error& error = Error::Handle(); 1435 Error& error = Error::Handle();
1445 // We got an error during compilation or it is a bailout from background 1436 // We got an error during compilation or it is a bailout from background
1446 // compilation (e.g., during parsing with EnsureIsFinalized). 1437 // compilation (e.g., during parsing with EnsureIsFinalized).
1447 error = thread->sticky_error(); 1438 error = thread->sticky_error();
1448 thread->clear_sticky_error(); 1439 thread->clear_sticky_error();
1449 // Unoptimized compilation or precompilation may encounter compile-time 1440 // Unoptimized compilation or precompilation may encounter compile-time
1450 // errors, but regular optimized compilation should not. 1441 // errors, but regular optimized compilation should not.
1451 ASSERT(!optimized); 1442 ASSERT(!optimized);
1452 return error.raw(); 1443 return error.raw();
1453 } 1444 }
1454 UNREACHABLE(); 1445 UNREACHABLE();
1455 return Error::null(); 1446 return Error::null();
1456 } 1447 }
1457 1448
1458 1449
1459 RawError* Compiler::CompileFunction(Thread* thread, 1450 RawError* Compiler::CompileFunction(Thread* thread, const Function& function) {
1460 const Function& function) {
1461 #ifdef DART_PRECOMPILER 1451 #ifdef DART_PRECOMPILER
1462 if (FLAG_precompiled_mode) { 1452 if (FLAG_precompiled_mode) {
1463 return Precompiler::CompileFunction( 1453 return Precompiler::CompileFunction(
1464 /* precompiler = */ NULL, thread, thread->zone(), function); 1454 /* precompiler = */ NULL, thread, thread->zone(), function);
1465 } 1455 }
1466 #endif 1456 #endif
1457
1467 Isolate* isolate = thread->isolate(); 1458 Isolate* isolate = thread->isolate();
1468 NOT_IN_PRODUCT( 1459
1460 #if !defined(PRODUCT)
1469 VMTagScope tagScope(thread, VMTag::kCompileUnoptimizedTagId); 1461 VMTagScope tagScope(thread, VMTag::kCompileUnoptimizedTagId);
1470 TIMELINE_FUNCTION_COMPILATION_DURATION(thread, "CompileFunction", function); 1462 TIMELINE_FUNCTION_COMPILATION_DURATION(thread, "CompileFunction", function);
1471 ) // !PRODUCT 1463 #endif // !defined(PRODUCT)
1472 1464
1473 if (!isolate->compilation_allowed()) { 1465 if (!isolate->compilation_allowed()) {
1474 FATAL3("Precompilation missed function %s (%s, %s)\n", 1466 FATAL3("Precompilation missed function %s (%s, %s)\n",
1475 function.ToLibNamePrefixedQualifiedCString(), 1467 function.ToLibNamePrefixedQualifiedCString(),
1476 function.token_pos().ToCString(), 1468 function.token_pos().ToCString(),
1477 Function::KindToCString(function.kind())); 1469 Function::KindToCString(function.kind()));
1478 } 1470 }
1479 1471
1480 CompilationPipeline* pipeline = 1472 CompilationPipeline* pipeline =
1481 CompilationPipeline::New(thread->zone(), function); 1473 CompilationPipeline::New(thread->zone(), function);
1482 1474
1483 return CompileFunctionHelper(pipeline, 1475 return CompileFunctionHelper(pipeline, function,
1484 function, 1476 /* optimized = */ false, kNoOSRDeoptId);
1485 /* optimized = */ false,
1486 kNoOSRDeoptId);
1487 } 1477 }
1488 1478
1489 1479
1490 RawError* Compiler::ParseFunction(Thread* thread, 1480 RawError* Compiler::ParseFunction(Thread* thread, const Function& function) {
1491 const Function& function) {
1492 Isolate* isolate = thread->isolate(); 1481 Isolate* isolate = thread->isolate();
1493 NOT_IN_PRODUCT( 1482 #if !defined(PRODUCT)
1494 VMTagScope tagScope(thread, VMTag::kCompileUnoptimizedTagId); 1483 VMTagScope tagScope(thread, VMTag::kCompileUnoptimizedTagId);
1495 TIMELINE_FUNCTION_COMPILATION_DURATION(thread, "ParseFunction", function); 1484 TIMELINE_FUNCTION_COMPILATION_DURATION(thread, "ParseFunction", function);
1496 ) // !PRODUCT 1485 #endif // !defined(PRODUCT)
1497 1486
1498 if (!isolate->compilation_allowed()) { 1487 if (!isolate->compilation_allowed()) {
1499 FATAL3("Precompilation missed function %s (%s, %s)\n", 1488 FATAL3("Precompilation missed function %s (%s, %s)\n",
1500 function.ToLibNamePrefixedQualifiedCString(), 1489 function.ToLibNamePrefixedQualifiedCString(),
1501 function.token_pos().ToCString(), 1490 function.token_pos().ToCString(),
1502 Function::KindToCString(function.kind())); 1491 Function::KindToCString(function.kind()));
1503 } 1492 }
1504 1493
1505 CompilationPipeline* pipeline = 1494 CompilationPipeline* pipeline =
1506 CompilationPipeline::New(thread->zone(), function); 1495 CompilationPipeline::New(thread->zone(), function);
1507 1496
1508 return ParseFunctionHelper(pipeline, 1497 return ParseFunctionHelper(pipeline, function,
1509 function, 1498 /* optimized = */ false, kNoOSRDeoptId);
1510 /* optimized = */ false,
1511 kNoOSRDeoptId);
1512 } 1499 }
1513 1500
1514 1501
1515 RawError* Compiler::EnsureUnoptimizedCode(Thread* thread, 1502 RawError* Compiler::EnsureUnoptimizedCode(Thread* thread,
1516 const Function& function) { 1503 const Function& function) {
1517 if (function.unoptimized_code() != Object::null()) { 1504 if (function.unoptimized_code() != Object::null()) {
1518 return Error::null(); 1505 return Error::null();
1519 } 1506 }
1520 Code& original_code = Code::ZoneHandle(thread->zone()); 1507 Code& original_code = Code::ZoneHandle(thread->zone());
1521 if (function.HasCode()) { 1508 if (function.HasCode()) {
1522 original_code = function.CurrentCode(); 1509 original_code = function.CurrentCode();
1523 } 1510 }
1524 CompilationPipeline* pipeline = 1511 CompilationPipeline* pipeline =
1525 CompilationPipeline::New(thread->zone(), function); 1512 CompilationPipeline::New(thread->zone(), function);
1526 const Error& error = Error::Handle( 1513 const Error& error = Error::Handle(
1527 CompileFunctionHelper(pipeline, 1514 CompileFunctionHelper(pipeline, function, false, /* not optimized */
1528 function,
1529 false, /* not optimized */
1530 kNoOSRDeoptId)); 1515 kNoOSRDeoptId));
1531 if (!error.IsNull()) { 1516 if (!error.IsNull()) {
1532 return error.raw(); 1517 return error.raw();
1533 } 1518 }
1534 // Since CompileFunctionHelper replaces the current code, re-attach the 1519 // Since CompileFunctionHelper replaces the current code, re-attach the
1535 // the original code if the function was already compiled. 1520 // the original code if the function was already compiled.
1536 if (!original_code.IsNull() && 1521 if (!original_code.IsNull() &&
1537 (original_code.raw() != function.CurrentCode())) { 1522 (original_code.raw() != function.CurrentCode())) {
1538 function.AttachCode(original_code); 1523 function.AttachCode(original_code);
1539 } 1524 }
1540 ASSERT(function.unoptimized_code() != Object::null()); 1525 ASSERT(function.unoptimized_code() != Object::null());
1541 if (FLAG_trace_compiler) { 1526 if (FLAG_trace_compiler) {
1542 THR_Print("Ensure unoptimized code for %s\n", function.ToCString()); 1527 THR_Print("Ensure unoptimized code for %s\n", function.ToCString());
1543 } 1528 }
1544 return Error::null(); 1529 return Error::null();
1545 } 1530 }
1546 1531
1547 1532
1548 RawError* Compiler::CompileOptimizedFunction(Thread* thread, 1533 RawError* Compiler::CompileOptimizedFunction(Thread* thread,
1549 const Function& function, 1534 const Function& function,
1550 intptr_t osr_id) { 1535 intptr_t osr_id) {
1551 NOT_IN_PRODUCT( 1536 #if !defined(PRODUCT)
1552 VMTagScope tagScope(thread, VMTag::kCompileOptimizedTagId); 1537 VMTagScope tagScope(thread, VMTag::kCompileOptimizedTagId);
1553 const char* event_name; 1538 const char* event_name;
1554 if (osr_id != kNoOSRDeoptId) { 1539 if (osr_id != kNoOSRDeoptId) {
1555 event_name = "CompileFunctionOptimizedOSR"; 1540 event_name = "CompileFunctionOptimizedOSR";
1556 } else if (IsBackgroundCompilation()) { 1541 } else if (IsBackgroundCompilation()) {
1557 event_name = "CompileFunctionOptimizedBackground"; 1542 event_name = "CompileFunctionOptimizedBackground";
1558 } else { 1543 } else {
1559 event_name = "CompileFunctionOptimized"; 1544 event_name = "CompileFunctionOptimized";
1560 } 1545 }
1561 TIMELINE_FUNCTION_COMPILATION_DURATION(thread, event_name, function); 1546 TIMELINE_FUNCTION_COMPILATION_DURATION(thread, event_name, function);
1562 ) // !PRODUCT 1547 #endif // !defined(PRODUCT)
1563 1548
1564 // If we are in the optimizing in the mutator/Dart thread, then 1549 // If we are in the optimizing in the mutator/Dart thread, then
1565 // this is either an OSR compilation or background compilation is 1550 // this is either an OSR compilation or background compilation is
1566 // not currently allowed. 1551 // not currently allowed.
1567 ASSERT(!thread->IsMutatorThread() || 1552 ASSERT(!thread->IsMutatorThread() || (osr_id != kNoOSRDeoptId) ||
1568 (osr_id != kNoOSRDeoptId) ||
1569 !FLAG_background_compilation || BackgroundCompiler::IsDisabled()); 1553 !FLAG_background_compilation || BackgroundCompiler::IsDisabled());
1570 CompilationPipeline* pipeline = 1554 CompilationPipeline* pipeline =
1571 CompilationPipeline::New(thread->zone(), function); 1555 CompilationPipeline::New(thread->zone(), function);
1572 return CompileFunctionHelper(pipeline, 1556 return CompileFunctionHelper(pipeline, function, true, /* optimized */
1573 function,
1574 true, /* optimized */
1575 osr_id); 1557 osr_id);
1576 } 1558 }
1577 1559
1578 1560
1579 // This is only used from unit tests. 1561 // This is only used from unit tests.
1580 RawError* Compiler::CompileParsedFunction( 1562 RawError* Compiler::CompileParsedFunction(ParsedFunction* parsed_function) {
1581 ParsedFunction* parsed_function) {
1582 LongJumpScope jump; 1563 LongJumpScope jump;
1583 if (setjmp(*jump.Set()) == 0) { 1564 if (setjmp(*jump.Set()) == 0) {
1584 // Non-optimized code generator. 1565 // Non-optimized code generator.
1585 DartCompilationPipeline pipeline; 1566 DartCompilationPipeline pipeline;
1586 CompileParsedFunctionHelper helper(parsed_function, false, kNoOSRDeoptId); 1567 CompileParsedFunctionHelper helper(parsed_function, false, kNoOSRDeoptId);
1587 helper.Compile(&pipeline); 1568 helper.Compile(&pipeline);
1588 if (FLAG_disassemble) { 1569 if (FLAG_disassemble) {
1589 Disassembler::DisassembleCode(parsed_function->function(), false); 1570 Disassembler::DisassembleCode(parsed_function->function(), false);
1590 } 1571 }
1591 return Error::null(); 1572 return Error::null();
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after
1636 // Class dynamic lives in the vm isolate. Its array fields cannot be set to 1617 // Class dynamic lives in the vm isolate. Its array fields cannot be set to
1637 // an empty array. 1618 // an empty array.
1638 if (functions.IsNull()) { 1619 if (functions.IsNull()) {
1639 ASSERT(cls.IsDynamicClass()); 1620 ASSERT(cls.IsDynamicClass());
1640 return error.raw(); 1621 return error.raw();
1641 } 1622 }
1642 // Compile all the regular functions. 1623 // Compile all the regular functions.
1643 for (int i = 0; i < functions.Length(); i++) { 1624 for (int i = 0; i < functions.Length(); i++) {
1644 func ^= functions.At(i); 1625 func ^= functions.At(i);
1645 ASSERT(!func.IsNull()); 1626 ASSERT(!func.IsNull());
1646 if (!func.HasCode() && 1627 if (!func.HasCode() && !func.is_abstract() &&
1647 !func.is_abstract() &&
1648 !func.IsRedirectingFactory()) { 1628 !func.IsRedirectingFactory()) {
1649 if ((cls.is_mixin_app_alias() || cls.IsMixinApplication()) && 1629 if ((cls.is_mixin_app_alias() || cls.IsMixinApplication()) &&
1650 func.HasOptionalParameters()) { 1630 func.HasOptionalParameters()) {
1651 // Skipping optional parameters in mixin application. 1631 // Skipping optional parameters in mixin application.
1652 continue; 1632 continue;
1653 } 1633 }
1654 error = CompileFunction(thread, func); 1634 error = CompileFunction(thread, func);
1655 if (!error.IsNull()) { 1635 if (!error.IsNull()) {
1656 return error.raw(); 1636 return error.raw();
1657 } 1637 }
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
1710 LongJumpScope jump; 1690 LongJumpScope jump;
1711 if (setjmp(*jump.Set()) == 0) { 1691 if (setjmp(*jump.Set()) == 0) {
1712 Thread* const thread = Thread::Current(); 1692 Thread* const thread = Thread::Current();
1713 NoOOBMessageScope no_msg_scope(thread); 1693 NoOOBMessageScope no_msg_scope(thread);
1714 NoReloadScope no_reload_scope(thread->isolate(), thread); 1694 NoReloadScope no_reload_scope(thread->isolate(), thread);
1715 // Under lazy compilation initializer has not yet been created, so create 1695 // Under lazy compilation initializer has not yet been created, so create
1716 // it now, but don't bother remembering it because it won't be used again. 1696 // it now, but don't bother remembering it because it won't be used again.
1717 ASSERT(!field.HasPrecompiledInitializer()); 1697 ASSERT(!field.HasPrecompiledInitializer());
1718 Function& initializer = Function::Handle(thread->zone()); 1698 Function& initializer = Function::Handle(thread->zone());
1719 { 1699 {
1720 NOT_IN_PRODUCT( 1700 #if !defined(PRODUCT)
1721 VMTagScope tagScope(thread, VMTag::kCompileUnoptimizedTagId); 1701 VMTagScope tagScope(thread, VMTag::kCompileUnoptimizedTagId);
1722 TimelineDurationScope tds(thread, Timeline::GetCompilerStream(), 1702 TimelineDurationScope tds(thread, Timeline::GetCompilerStream(),
1723 "CompileStaticInitializer"); 1703 "CompileStaticInitializer");
1724 if (tds.enabled()) { 1704 if (tds.enabled()) {
1725 tds.SetNumArguments(1); 1705 tds.SetNumArguments(1);
1726 tds.CopyArgument(0, "field", field.ToCString()); 1706 tds.CopyArgument(0, "field", field.ToCString());
1727 } 1707 }
1728 ) 1708 #endif // !defined(PRODUCT)
1729 1709
1730 StackZone stack_zone(thread); 1710 StackZone stack_zone(thread);
1731 Zone* zone = stack_zone.GetZone(); 1711 Zone* zone = stack_zone.GetZone();
1732 ParsedFunction* parsed_function; 1712 ParsedFunction* parsed_function;
1733 1713
1734 // Create a one-time-use function to evaluate the initializer and invoke 1714 // Create a one-time-use function to evaluate the initializer and invoke
1735 // it immediately. 1715 // it immediately.
1736 if (field.kernel_field() != NULL) { 1716 if (field.kernel_field() != NULL) {
1737 // kImplicitStaticFinalGetter is used for both implicit static getters 1717 // kImplicitStaticFinalGetter is used for both implicit static getters
1738 // and static initializers. The Kernel graph builder will tell the 1718 // and static initializers. The Kernel graph builder will tell the
1739 // difference by pattern matching on the name. 1719 // difference by pattern matching on the name.
1740 const String& name = String::Handle(zone, 1720 const String& name = String::Handle(
1741 Symbols::FromConcat(thread, 1721 zone, Symbols::FromConcat(thread, Symbols::InitPrefix(),
1742 Symbols::InitPrefix(), String::Handle(zone, field.name()))); 1722 String::Handle(zone, field.name())));
1743 const Script& script = Script::Handle(zone, field.Script()); 1723 const Script& script = Script::Handle(zone, field.Script());
1744 Object& owner = Object::Handle(zone, field.Owner()); 1724 Object& owner = Object::Handle(zone, field.Owner());
1745 owner = PatchClass::New(Class::Cast(owner), script); 1725 owner = PatchClass::New(Class::Cast(owner), script);
1746 const Function& function = Function::ZoneHandle(zone, 1726 const Function& function = Function::ZoneHandle(
1747 Function::New(name, 1727 zone, Function::New(name, RawFunction::kImplicitStaticFinalGetter,
1748 RawFunction::kImplicitStaticFinalGetter, 1728 true, // is_static
1749 true, // is_static 1729 false, // is_const
1750 false, // is_const 1730 false, // is_abstract
1751 false, // is_abstract 1731 false, // is_external
1752 false, // is_external 1732 false, // is_native
1753 false, // is_native 1733 owner, TokenPosition::kNoSource));
1754 owner,
1755 TokenPosition::kNoSource));
1756 function.set_kernel_function(field.kernel_field()); 1734 function.set_kernel_function(field.kernel_field());
1757 function.set_result_type(AbstractType::Handle(zone, field.type())); 1735 function.set_result_type(AbstractType::Handle(zone, field.type()));
1758 function.set_is_reflectable(false); 1736 function.set_is_reflectable(false);
1759 function.set_is_debuggable(false); 1737 function.set_is_debuggable(false);
1760 function.set_is_inlinable(false); 1738 function.set_is_inlinable(false);
1761 parsed_function = new(zone) ParsedFunction(thread, function); 1739 parsed_function = new (zone) ParsedFunction(thread, function);
1762 } else { 1740 } else {
1763 parsed_function = Parser::ParseStaticFieldInitializer(field); 1741 parsed_function = Parser::ParseStaticFieldInitializer(field);
1764 parsed_function->AllocateVariables(); 1742 parsed_function->AllocateVariables();
1765 } 1743 }
1766 1744
1767 // Non-optimized code generator. 1745 // Non-optimized code generator.
1768 DartCompilationPipeline pipeline; 1746 DartCompilationPipeline pipeline;
1769 CompileParsedFunctionHelper helper(parsed_function, false, kNoOSRDeoptId); 1747 CompileParsedFunctionHelper helper(parsed_function, false, kNoOSRDeoptId);
1770 helper.Compile(&pipeline); 1748 helper.Compile(&pipeline);
1771 initializer = parsed_function->function().raw(); 1749 initializer = parsed_function->function().raw();
1772 Code::Handle(initializer.unoptimized_code()).set_var_descriptors( 1750 Code::Handle(initializer.unoptimized_code())
1773 Object::empty_var_descriptors()); 1751 .set_var_descriptors(Object::empty_var_descriptors());
1774 } 1752 }
1775 // Invoke the function to evaluate the expression. 1753 // Invoke the function to evaluate the expression.
1776 return DartEntry::InvokeFunction(initializer, Object::empty_array()); 1754 return DartEntry::InvokeFunction(initializer, Object::empty_array());
1777 } else { 1755 } else {
1778 Thread* const thread = Thread::Current(); 1756 Thread* const thread = Thread::Current();
1779 StackZone zone(thread); 1757 StackZone zone(thread);
1780 const Error& error = Error::Handle(thread->zone(), thread->sticky_error()); 1758 const Error& error = Error::Handle(thread->zone(), thread->sticky_error());
1781 thread->clear_sticky_error(); 1759 thread->clear_sticky_error();
1782 return error.raw(); 1760 return error.raw();
1783 } 1761 }
1784 UNREACHABLE(); 1762 UNREACHABLE();
1785 return Object::null(); 1763 return Object::null();
1786 } 1764 }
1787 1765
1788 1766
1789
1790 RawObject* Compiler::ExecuteOnce(SequenceNode* fragment) { 1767 RawObject* Compiler::ExecuteOnce(SequenceNode* fragment) {
1791 #ifdef DART_PRECOMPILER 1768 #ifdef DART_PRECOMPILER
1792 if (FLAG_precompiled_mode) { 1769 if (FLAG_precompiled_mode) {
1793 return Precompiler::ExecuteOnce(fragment); 1770 return Precompiler::ExecuteOnce(fragment);
1794 } 1771 }
1795 #endif 1772 #endif
1796 LongJumpScope jump; 1773 LongJumpScope jump;
1797 if (setjmp(*jump.Set()) == 0) { 1774 if (setjmp(*jump.Set()) == 0) {
1798 Thread* const thread = Thread::Current(); 1775 Thread* const thread = Thread::Current();
1799 1776
(...skipping 12 matching lines...) Expand all
1812 } 1789 }
1813 } 1790 }
1814 1791
1815 // Create a dummy function object for the code generator. 1792 // Create a dummy function object for the code generator.
1816 // The function needs to be associated with a named Class: the interface 1793 // The function needs to be associated with a named Class: the interface
1817 // Function fits the bill. 1794 // Function fits the bill.
1818 const char* kEvalConst = "eval_const"; 1795 const char* kEvalConst = "eval_const";
1819 const Function& func = Function::ZoneHandle(Function::New( 1796 const Function& func = Function::ZoneHandle(Function::New(
1820 String::Handle(Symbols::New(thread, kEvalConst)), 1797 String::Handle(Symbols::New(thread, kEvalConst)),
1821 RawFunction::kRegularFunction, 1798 RawFunction::kRegularFunction,
1822 true, // static function 1799 true, // static function
1823 false, // not const function 1800 false, // not const function
1824 false, // not abstract 1801 false, // not abstract
1825 false, // not external 1802 false, // not external
1826 false, // not native 1803 false, // not native
1827 Class::Handle(Type::Handle(Type::DartFunctionType()).type_class()), 1804 Class::Handle(Type::Handle(Type::DartFunctionType()).type_class()),
1828 fragment->token_pos())); 1805 fragment->token_pos()));
1829 1806
1830 func.set_result_type(Object::dynamic_type()); 1807 func.set_result_type(Object::dynamic_type());
1831 func.set_num_fixed_parameters(0); 1808 func.set_num_fixed_parameters(0);
1832 func.SetNumOptionalParameters(0, true); 1809 func.SetNumOptionalParameters(0, true);
1833 // Manually generated AST, do not recompile. 1810 // Manually generated AST, do not recompile.
1834 func.SetIsOptimizable(false); 1811 func.SetIsOptimizable(false);
1835 func.set_is_debuggable(false); 1812 func.set_is_debuggable(false);
1836 1813
1837 // We compile the function here, even though InvokeFunction() below 1814 // We compile the function here, even though InvokeFunction() below
1838 // would compile func automatically. We are checking fewer invariants 1815 // would compile func automatically. We are checking fewer invariants
1839 // here. 1816 // here.
1840 ParsedFunction* parsed_function = new ParsedFunction(thread, func); 1817 ParsedFunction* parsed_function = new ParsedFunction(thread, func);
1841 parsed_function->SetNodeSequence(fragment); 1818 parsed_function->SetNodeSequence(fragment);
1842 fragment->scope()->AddVariable(parsed_function->EnsureExpressionTemp()); 1819 fragment->scope()->AddVariable(parsed_function->EnsureExpressionTemp());
1843 fragment->scope()->AddVariable( 1820 fragment->scope()->AddVariable(parsed_function->current_context_var());
1844 parsed_function->current_context_var());
1845 parsed_function->AllocateVariables(); 1821 parsed_function->AllocateVariables();
1846 1822
1847 // Non-optimized code generator. 1823 // Non-optimized code generator.
1848 DartCompilationPipeline pipeline; 1824 DartCompilationPipeline pipeline;
1849 CompileParsedFunctionHelper helper(parsed_function, false, kNoOSRDeoptId); 1825 CompileParsedFunctionHelper helper(parsed_function, false, kNoOSRDeoptId);
1850 helper.Compile(&pipeline); 1826 helper.Compile(&pipeline);
1851 Code::Handle(func.unoptimized_code()).set_var_descriptors( 1827 Code::Handle(func.unoptimized_code())
1852 Object::empty_var_descriptors()); 1828 .set_var_descriptors(Object::empty_var_descriptors());
1853 1829
1854 const Object& result = PassiveObject::Handle( 1830 const Object& result = PassiveObject::Handle(
1855 DartEntry::InvokeFunction(func, Object::empty_array())); 1831 DartEntry::InvokeFunction(func, Object::empty_array()));
1856 return result.raw(); 1832 return result.raw();
1857 } else { 1833 } else {
1858 Thread* const thread = Thread::Current(); 1834 Thread* const thread = Thread::Current();
1859 const Object& result = PassiveObject::Handle(thread->sticky_error()); 1835 const Object& result = PassiveObject::Handle(thread->sticky_error());
1860 thread->clear_sticky_error(); 1836 thread->clear_sticky_error();
1861 return result.raw(); 1837 return result.raw();
1862 } 1838 }
1863 UNREACHABLE(); 1839 UNREACHABLE();
1864 return Object::null(); 1840 return Object::null();
1865 } 1841 }
1866 1842
1867 1843
// Aborts an in-progress background (optimizing) compilation by long-jumping
// back to the compiler entry point with the sticky
// Object::background_compilation_error().  |msg| is used only for tracing
// and timeline reporting; |deopt_id| is forwarded to the long jump.
void Compiler::AbortBackgroundCompilation(intptr_t deopt_id, const char* msg) {
  if (FLAG_trace_compiler) {
    THR_Print("ABORT background compilation: %s\n", msg);
  }
#if !defined(PRODUCT)
  // Record the abort (and its reason) as an instant event on the compiler
  // timeline stream so it is visible in profiling/tracing tools.
  TimelineStream* stream = Timeline::GetCompilerStream();
  ASSERT(stream != NULL);
  TimelineEvent* event = stream->StartEvent();
  if (event != NULL) {
    event->Instant("AbortBackgroundCompilation");
    event->SetNumArguments(1);
    event->CopyArgument(0, "reason", msg);
    event->Complete();
  }
#endif  // !defined(PRODUCT)
  ASSERT(Compiler::IsBackgroundCompilation());
  // Unwind out of the compiler; control resumes at the long-jump base with
  // the background-compilation error object.
  Thread::Current()->long_jump_base()->Jump(
      deopt_id, Object::background_compilation_error());
}
1887 1863
1888 1864
1889 // C-heap allocated background compilation queue element. 1865 // C-heap allocated background compilation queue element.
1890 class QueueElement { 1866 class QueueElement {
1891 public: 1867 public:
  // Stores the raw pointer of |function|; GC safety of the raw pointer is
  // presumably maintained via the queue's VisitObjectPointers — confirm with
  // BackgroundCompilationQueue.
  explicit QueueElement(const Function& function)
      : next_(NULL), function_(function.raw()) {}
1896 1870
  // Clears both fields on destruction so no stale pointers linger in freed
  // C-heap memory.
  virtual ~QueueElement() {
    next_ = NULL;
    function_ = Function::null();
  }
1901 1875
1902 RawFunction* Function() const { return function_; } 1876 RawFunction* Function() const { return function_; }
1903 1877
1904 1878
1905 void set_next(QueueElement* elem) { next_ = elem; } 1879 void set_next(QueueElement* elem) { next_ = elem; }
(...skipping 10 matching lines...) Expand all
1916 1890
1917 DISALLOW_COPY_AND_ASSIGN(QueueElement); 1891 DISALLOW_COPY_AND_ASSIGN(QueueElement);
1918 }; 1892 };
1919 1893
1920 1894
1921 // Allocated in C-heap. Handles both input and output of background compilation. 1895 // Allocated in C-heap. Handles both input and output of background compilation.
1922 // It implements a FIFO queue, using Peek, Add, Remove operations. 1896 // It implements a FIFO queue, using Peek, Add, Remove operations.
1923 class BackgroundCompilationQueue { 1897 class BackgroundCompilationQueue {
1924 public: 1898 public:
1925 BackgroundCompilationQueue() : first_(NULL), last_(NULL) {} 1899 BackgroundCompilationQueue() : first_(NULL), last_(NULL) {}
  // Drains the queue via Clear() on destruction (Clear() is expected to
  // delete any remaining elements — its body is defined below).
  virtual ~BackgroundCompilationQueue() { Clear(); }
1929 1901
1930 void VisitObjectPointers(ObjectPointerVisitor* visitor) { 1902 void VisitObjectPointers(ObjectPointerVisitor* visitor) {
1931 ASSERT(visitor != NULL); 1903 ASSERT(visitor != NULL);
1932 QueueElement* p = first_; 1904 QueueElement* p = first_;
1933 while (p != NULL) { 1905 while (p != NULL) {
1934 visitor->VisitPointer(p->function_ptr()); 1906 visitor->VisitPointer(p->function_ptr());
1935 p = p->next(); 1907 p = p->next();
1936 } 1908 }
1937 } 1909 }
1938 1910
1939 bool IsEmpty() const { return first_ == NULL; } 1911 bool IsEmpty() const { return first_ == NULL; }
1940 1912
1941 void Add(QueueElement* value) { 1913 void Add(QueueElement* value) {
1942 ASSERT(value != NULL); 1914 ASSERT(value != NULL);
1943 ASSERT(value->next() == NULL); 1915 ASSERT(value->next() == NULL);
1944 if (first_ == NULL) { 1916 if (first_ == NULL) {
1945 first_ = value; 1917 first_ = value;
1946 ASSERT(last_ == NULL); 1918 ASSERT(last_ == NULL);
1947 } else { 1919 } else {
1948 ASSERT(last_ != NULL); 1920 ASSERT(last_ != NULL);
1949 last_->set_next(value); 1921 last_->set_next(value);
1950 } 1922 }
1951 last_ = value; 1923 last_ = value;
1952 ASSERT(first_ != NULL && last_ != NULL); 1924 ASSERT(first_ != NULL && last_ != NULL);
1953 } 1925 }
1954 1926
  // Returns the head element without removing it; NULL when empty.
  QueueElement* Peek() const { return first_; }
1958 1928
1959 RawFunction* PeekFunction() const { 1929 RawFunction* PeekFunction() const {
1960 QueueElement* e = Peek(); 1930 QueueElement* e = Peek();
1961 if (e == NULL) { 1931 if (e == NULL) {
1962 return Function::null(); 1932 return Function::null();
1963 } else { 1933 } else {
1964 return e->Function(); 1934 return e->Function();
1965 } 1935 }
1966 } 1936 }
1967 1937
(...skipping 28 matching lines...) Expand all
1996 1966
1997 private: 1967 private:
1998 QueueElement* first_; 1968 QueueElement* first_;
1999 QueueElement* last_; 1969 QueueElement* last_;
2000 1970
2001 DISALLOW_COPY_AND_ASSIGN(BackgroundCompilationQueue); 1971 DISALLOW_COPY_AND_ASSIGN(BackgroundCompilationQueue);
2002 }; 1972 };
2003 1973
2004 1974
// The done flag, monitors, and queue are heap-allocated because they must
// outlive this object; per the comment on the destructor they are deleted
// in ::Stop, not here.
BackgroundCompiler::BackgroundCompiler(Isolate* isolate)
    : isolate_(isolate),
      running_(true),
      done_(new bool()),
      queue_monitor_(new Monitor()),
      done_monitor_(new Monitor()),
      function_queue_(new BackgroundCompilationQueue()) {
  // Not done until the compiler thread has signalled completion.
  *done_ = false;
}
2011 1984
2012 1985
// Fields all deleted in ::Stop; here clear them.
BackgroundCompiler::~BackgroundCompiler() {
  // Only null out the pointers — ownership was already released in Stop(),
  // so deleting here would be a double free.
  isolate_ = NULL;
  running_ = false;
  done_ = NULL;
  queue_monitor_ = NULL;
  done_monitor_ = NULL;
  function_queue_ = NULL;
}
2022 1995
2023 1996
2024 void BackgroundCompiler::Run() { 1997 void BackgroundCompiler::Run() {
2025 while (running_) { 1998 while (running_) {
2026 // Maybe something is already in the queue, check first before waiting 1999 // Maybe something is already in the queue, check first before waiting
2027 // to be notified. 2000 // to be notified.
2028 bool result = Thread::EnterIsolateAsHelper(isolate_, Thread::kCompilerTask); 2001 bool result = Thread::EnterIsolateAsHelper(isolate_, Thread::kCompilerTask);
2029 ASSERT(result); 2002 ASSERT(result);
2030 { 2003 {
2031 Thread* thread = Thread::Current(); 2004 Thread* thread = Thread::Current();
2032 StackZone stack_zone(thread); 2005 StackZone stack_zone(thread);
2033 Zone* zone = stack_zone.GetZone(); 2006 Zone* zone = stack_zone.GetZone();
2034 HANDLESCOPE(thread); 2007 HANDLESCOPE(thread);
2035 Function& function = Function::Handle(zone); 2008 Function& function = Function::Handle(zone);
2036 { MonitorLocker ml(queue_monitor_); 2009 {
2010 MonitorLocker ml(queue_monitor_);
2037 function = function_queue()->PeekFunction(); 2011 function = function_queue()->PeekFunction();
2038 } 2012 }
2039 while (running_ && !function.IsNull() && !isolate_->IsTopLevelParsing()) { 2013 while (running_ && !function.IsNull() && !isolate_->IsTopLevelParsing()) {
2040 // Check that we have aggregated and cleared the stats. 2014 // Check that we have aggregated and cleared the stats.
2041 ASSERT(thread->compiler_stats()->IsCleared()); 2015 ASSERT(thread->compiler_stats()->IsCleared());
2042 Compiler::CompileOptimizedFunction(thread, 2016 Compiler::CompileOptimizedFunction(thread, function,
2043 function,
2044 Compiler::kNoOSRDeoptId); 2017 Compiler::kNoOSRDeoptId);
2045 #ifndef PRODUCT 2018 #ifndef PRODUCT
2046 Isolate* isolate = thread->isolate(); 2019 Isolate* isolate = thread->isolate();
2047 isolate->aggregate_compiler_stats()->Add(*thread->compiler_stats()); 2020 isolate->aggregate_compiler_stats()->Add(*thread->compiler_stats());
2048 thread->compiler_stats()->Clear(); 2021 thread->compiler_stats()->Clear();
2049 #endif // PRODUCT 2022 #endif // PRODUCT
2050 2023
2051 QueueElement* qelem = NULL; 2024 QueueElement* qelem = NULL;
2052 { MonitorLocker ml(queue_monitor_); 2025 {
2026 MonitorLocker ml(queue_monitor_);
2053 if (function_queue()->IsEmpty()) { 2027 if (function_queue()->IsEmpty()) {
2054 // We are shutting down, queue was cleared. 2028 // We are shutting down, queue was cleared.
2055 function = Function::null(); 2029 function = Function::null();
2056 } else { 2030 } else {
2057 qelem = function_queue()->Remove(); 2031 qelem = function_queue()->Remove();
2058 const Function& old = Function::Handle(qelem->Function()); 2032 const Function& old = Function::Handle(qelem->Function());
2059 if ((!old.HasOptimizedCode() && old.IsOptimizable()) || 2033 if ((!old.HasOptimizedCode() && old.IsOptimizable()) ||
2060 FLAG_stress_test_background_compilation) { 2034 FLAG_stress_test_background_compilation) {
2061 if (Compiler::CanOptimizeFunction(thread, old)) { 2035 if (Compiler::CanOptimizeFunction(thread, old)) {
2062 QueueElement* repeat_qelem = new QueueElement(old); 2036 QueueElement* repeat_qelem = new QueueElement(old);
2063 function_queue()->Add(repeat_qelem); 2037 function_queue()->Add(repeat_qelem);
2064 } 2038 }
2065 } 2039 }
2066 function = function_queue()->PeekFunction(); 2040 function = function_queue()->PeekFunction();
2067 } 2041 }
2068 } 2042 }
2069 if (qelem != NULL) { 2043 if (qelem != NULL) {
2070 delete qelem; 2044 delete qelem;
2071 } 2045 }
2072 } 2046 }
2073 } 2047 }
2074 Thread::ExitIsolateAsHelper(); 2048 Thread::ExitIsolateAsHelper();
2075 { 2049 {
2076 // Wait to be notified when the work queue is not empty. 2050 // Wait to be notified when the work queue is not empty.
2077 MonitorLocker ml(queue_monitor_); 2051 MonitorLocker ml(queue_monitor_);
2078 while ((function_queue()->IsEmpty() || isolate_->IsTopLevelParsing()) 2052 while ((function_queue()->IsEmpty() || isolate_->IsTopLevelParsing()) &&
2079 && running_) { 2053 running_) {
2080 ml.Wait(); 2054 ml.Wait();
2081 } 2055 }
2082 } 2056 }
2083 } // while running 2057 } // while running
2084 2058
2085 { 2059 {
2086 // Notify that the thread is done. 2060 // Notify that the thread is done.
2087 MonitorLocker ml_done(done_monitor_); 2061 MonitorLocker ml_done(done_monitor_);
2088 *done_ = true; 2062 *done_ = true;
2089 ml_done.Notify(); 2063 ml_done.Notify();
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
2127 Monitor* queue_monitor = task->queue_monitor_; 2101 Monitor* queue_monitor = task->queue_monitor_;
2128 Monitor* done_monitor = task->done_monitor_; 2102 Monitor* done_monitor = task->done_monitor_;
2129 bool* task_done = task->done_; 2103 bool* task_done = task->done_;
2130 // Wake up compiler task and stop it. 2104 // Wake up compiler task and stop it.
2131 { 2105 {
2132 MonitorLocker ml(queue_monitor); 2106 MonitorLocker ml(queue_monitor);
2133 task->running_ = false; 2107 task->running_ = false;
2134 function_queue->Clear(); 2108 function_queue->Clear();
2135 // 'task' will be deleted by thread pool. 2109 // 'task' will be deleted by thread pool.
2136 task = NULL; 2110 task = NULL;
2137 ml.Notify(); // Stop waiting for the queue. 2111 ml.Notify(); // Stop waiting for the queue.
2138 } 2112 }
2139 2113
2140 { 2114 {
2141 MonitorLocker ml_done(done_monitor); 2115 MonitorLocker ml_done(done_monitor);
2142 while (!(*task_done)) { 2116 while (!(*task_done)) {
2143 ml_done.WaitWithSafepointCheck(Thread::Current()); 2117 ml_done.WaitWithSafepointCheck(Thread::Current());
2144 } 2118 }
2145 } 2119 }
2146 delete task_done; 2120 delete task_done;
2147 delete done_monitor; 2121 delete done_monitor;
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after
2183 Isolate* isolate = thread->isolate(); 2157 Isolate* isolate = thread->isolate();
2184 MutexLocker ml(isolate->mutex()); 2158 MutexLocker ml(isolate->mutex());
2185 isolate->enable_background_compiler(); 2159 isolate->enable_background_compiler();
2186 } 2160 }
2187 2161
2188 2162
2189 void BackgroundCompiler::EnsureInit(Thread* thread) { 2163 void BackgroundCompiler::EnsureInit(Thread* thread) {
2190 ASSERT(thread->IsMutatorThread()); 2164 ASSERT(thread->IsMutatorThread());
2191 // Finalize NoSuchMethodError, _Mint; occasionally needed in optimized 2165 // Finalize NoSuchMethodError, _Mint; occasionally needed in optimized
2192 // compilation. 2166 // compilation.
2193 Class& cls = Class::Handle(thread->zone(), 2167 Class& cls = Class::Handle(
2194 Library::LookupCoreClass(Symbols::NoSuchMethodError())); 2168 thread->zone(), Library::LookupCoreClass(Symbols::NoSuchMethodError()));
2195 ASSERT(!cls.IsNull()); 2169 ASSERT(!cls.IsNull());
2196 Error& error = Error::Handle(thread->zone(), 2170 Error& error = Error::Handle(thread->zone(), cls.EnsureIsFinalized(thread));
2197 cls.EnsureIsFinalized(thread));
2198 ASSERT(error.IsNull()); 2171 ASSERT(error.IsNull());
2199 cls = Library::LookupCoreClass(Symbols::_Mint()); 2172 cls = Library::LookupCoreClass(Symbols::_Mint());
2200 ASSERT(!cls.IsNull()); 2173 ASSERT(!cls.IsNull());
2201 error = cls.EnsureIsFinalized(thread); 2174 error = cls.EnsureIsFinalized(thread);
2202 ASSERT(error.IsNull()); 2175 ASSERT(error.IsNull());
2203 2176
2204 bool start_task = false; 2177 bool start_task = false;
2205 Isolate* isolate = thread->isolate(); 2178 Isolate* isolate = thread->isolate();
2206 { 2179 {
2207 MutexLocker ml(isolate->mutex()); 2180 MutexLocker ml(isolate->mutex());
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after
2258 return Error::null(); 2231 return Error::null();
2259 } 2232 }
2260 2233
2261 2234
// Stub: class compilation is not supported in this build configuration
// (this section closes with #endif  // DART_PRECOMPILED_RUNTIME below);
// must never be reached.
RawError* Compiler::CompileClass(const Class& cls) {
  UNREACHABLE();
  return Error::null();
}
2266 2239
2267 2240
// Stub: function compilation must never be invoked in this build
// configuration.
RawError* Compiler::CompileFunction(Thread* thread, const Function& function) {
  UNREACHABLE();
  return Error::null();
}
2273 2245
2274 2246
// Stub: parsing must never be invoked in this build configuration.
RawError* Compiler::ParseFunction(Thread* thread, const Function& function) {
  UNREACHABLE();
  return Error::null();
}
2280 2251
2281 2252
// Stub: unoptimized code can never be (re)generated in this build
// configuration.
RawError* Compiler::EnsureUnoptimizedCode(Thread* thread,
                                          const Function& function) {
  UNREACHABLE();
  return Error::null();
}
2287 2258
2288 2259
// Stub: optimizing compilation (including OSR, hence |osr_id|) must never
// be invoked in this build configuration.
RawError* Compiler::CompileOptimizedFunction(Thread* thread,
                                             const Function& function,
                                             intptr_t osr_id) {
  UNREACHABLE();
  return Error::null();
}
2295 2266
2296 2267
// Stub: compiling an already-parsed function must never be invoked in this
// build configuration.
RawError* Compiler::CompileParsedFunction(ParsedFunction* parsed_function) {
  UNREACHABLE();
  return Error::null();
}
2302 2272
2303 2273
// Stub: local variable descriptors are never computed at runtime in this
// build configuration.
void Compiler::ComputeLocalVarDescriptors(const Code& code) {
  UNREACHABLE();
}
2307 2277
2308 2278
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after
2368 2338
2369 2339
// Stub: the call itself is unreachable; the return value (always disabled)
// only exists to satisfy the signature.
bool BackgroundCompiler::IsDisabled() {
  UNREACHABLE();
  return true;
}
2374 2344
2375 #endif // DART_PRECOMPILED_RUNTIME 2345 #endif // DART_PRECOMPILED_RUNTIME
2376 2346
2377 } // namespace dart 2347 } // namespace dart
OLDNEW
« no previous file with comments | « runtime/vm/compiler.h ('k') | runtime/vm/compiler_stats.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698