OLD | NEW |
---|---|
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/compiler.h" | 5 #include "vm/compiler.h" |
6 | 6 |
7 #include "vm/assembler.h" | 7 #include "vm/assembler.h" |
8 | 8 |
9 #include "vm/ast_printer.h" | 9 #include "vm/ast_printer.h" |
10 #include "vm/block_scheduler.h" | 10 #include "vm/block_scheduler.h" |
(...skipping 12 matching lines...) | |
23 #include "vm/flow_graph_compiler.h" | 23 #include "vm/flow_graph_compiler.h" |
24 #include "vm/flow_graph_inliner.h" | 24 #include "vm/flow_graph_inliner.h" |
25 #include "vm/flow_graph_optimizer.h" | 25 #include "vm/flow_graph_optimizer.h" |
26 #include "vm/flow_graph_type_propagator.h" | 26 #include "vm/flow_graph_type_propagator.h" |
27 #include "vm/il_printer.h" | 27 #include "vm/il_printer.h" |
28 #include "vm/longjump.h" | 28 #include "vm/longjump.h" |
29 #include "vm/object.h" | 29 #include "vm/object.h" |
30 #include "vm/object_store.h" | 30 #include "vm/object_store.h" |
31 #include "vm/os.h" | 31 #include "vm/os.h" |
32 #include "vm/parser.h" | 32 #include "vm/parser.h" |
33 #include "vm/precompiler.h" | |
33 #include "vm/regexp_parser.h" | 34 #include "vm/regexp_parser.h" |
34 #include "vm/regexp_assembler.h" | 35 #include "vm/regexp_assembler.h" |
35 #include "vm/scanner.h" | |
36 #include "vm/symbols.h" | 36 #include "vm/symbols.h" |
37 #include "vm/tags.h" | 37 #include "vm/tags.h" |
38 #include "vm/thread_registry.h" | 38 #include "vm/thread_registry.h" |
39 #include "vm/timer.h" | 39 #include "vm/timer.h" |
40 | 40 |
41 namespace dart { | 41 namespace dart { |
42 | 42 |
43 DEFINE_FLAG(bool, allocation_sinking, true, | 43 DEFINE_FLAG(bool, allocation_sinking, true, |
44 "Attempt to sink temporary allocations to side exits"); | 44 "Attempt to sink temporary allocations to side exits"); |
45 DEFINE_FLAG(bool, common_subexpression_elimination, true, | 45 DEFINE_FLAG(bool, common_subexpression_elimination, true, |
(...skipping 13 matching lines...) | |
59 "Print the deopt-id to ICData map in optimizing compiler."); | 59 "Print the deopt-id to ICData map in optimizing compiler."); |
60 DEFINE_FLAG(bool, range_analysis, true, "Enable range analysis"); | 60 DEFINE_FLAG(bool, range_analysis, true, "Enable range analysis"); |
61 DEFINE_FLAG(bool, reorder_basic_blocks, true, "Enable basic-block reordering."); | 61 DEFINE_FLAG(bool, reorder_basic_blocks, true, "Enable basic-block reordering."); |
62 DEFINE_FLAG(bool, trace_compiler, false, "Trace compiler operations."); | 62 DEFINE_FLAG(bool, trace_compiler, false, "Trace compiler operations."); |
63 DEFINE_FLAG(bool, trace_optimizing_compiler, false, | 63 DEFINE_FLAG(bool, trace_optimizing_compiler, false, |
64 "Trace only optimizing compiler operations."); | 64 "Trace only optimizing compiler operations."); |
65 DEFINE_FLAG(bool, trace_bailout, false, "Print bailout from ssa compiler."); | 65 DEFINE_FLAG(bool, trace_bailout, false, "Print bailout from ssa compiler."); |
66 DEFINE_FLAG(bool, use_inlining, true, "Enable call-site inlining"); | 66 DEFINE_FLAG(bool, use_inlining, true, "Enable call-site inlining"); |
67 DEFINE_FLAG(bool, verify_compiler, false, | 67 DEFINE_FLAG(bool, verify_compiler, false, |
68 "Enable compiler verification assertions"); | 68 "Enable compiler verification assertions"); |
69 DEFINE_FLAG(int, max_speculative_inlining_attempts, 1, | |
70 "Max number of attempts with speculative inlining (precompilation only)"); | |
71 | 69 |
72 DECLARE_FLAG(bool, background_compilation); | 70 DECLARE_FLAG(bool, background_compilation); |
73 DECLARE_FLAG(bool, huge_method_cutoff_in_code_size); | 71 DECLARE_FLAG(bool, huge_method_cutoff_in_code_size); |
74 DECLARE_FLAG(bool, load_deferred_eagerly); | 72 DECLARE_FLAG(bool, load_deferred_eagerly); |
75 DECLARE_FLAG(bool, trace_failed_optimization_attempts); | 73 DECLARE_FLAG(bool, trace_failed_optimization_attempts); |
76 DECLARE_FLAG(bool, trace_inlining_intervals); | 74 DECLARE_FLAG(bool, trace_inlining_intervals); |
77 DECLARE_FLAG(bool, trace_irregexp); | 75 DECLARE_FLAG(bool, trace_irregexp); |
78 DECLARE_FLAG(bool, precompilation); | 76 DECLARE_FLAG(bool, precompilation); |
79 | 77 |
80 | 78 |
81 #ifndef DART_PRECOMPILED_RUNTIME | 79 #ifndef DART_PRECOMPILED_RUNTIME |
82 | 80 |
83 // TODO(zerny): Factor out unoptimizing/optimizing pipelines and remove | 81 void DartCompilationPipeline::ParseFunction(ParsedFunction* parsed_function) { |
84 // separate helpers functions & `optimizing` args. | 82 Parser::ParseFunction(parsed_function); |
85 class CompilationPipeline : public ZoneAllocated { | 83 parsed_function->AllocateVariables(); |
86 public: | 84 } |
87 static CompilationPipeline* New(Zone* zone, const Function& function); | |
88 | |
89 virtual void ParseFunction(ParsedFunction* parsed_function) = 0; | |
90 virtual FlowGraph* BuildFlowGraph( | |
91 Zone* zone, | |
92 ParsedFunction* parsed_function, | |
93 const ZoneGrowableArray<const ICData*>& ic_data_array, | |
94 intptr_t osr_id) = 0; | |
95 virtual void FinalizeCompilation() = 0; | |
96 virtual ~CompilationPipeline() { } | |
97 }; | |
98 | 85 |
99 | 86 |
100 class DartCompilationPipeline : public CompilationPipeline { | 87 FlowGraph* DartCompilationPipeline::BuildFlowGraph( |
101 public: | 88 Zone* zone, |
102 virtual void ParseFunction(ParsedFunction* parsed_function) { | 89 ParsedFunction* parsed_function, |
103 Parser::ParseFunction(parsed_function); | 90 const ZoneGrowableArray<const ICData*>& ic_data_array, |
104 parsed_function->AllocateVariables(); | 91 intptr_t osr_id) { |
105 } | 92 // Build the flow graph. |
93 FlowGraphBuilder builder(*parsed_function, | |
94 ic_data_array, | |
95 NULL, // NULL = not inlining. | |
96 osr_id); | |
106 | 97 |
107 virtual FlowGraph* BuildFlowGraph( | 98 return builder.BuildGraph(); |
108 Zone* zone, | 99 } |
109 ParsedFunction* parsed_function, | |
110 const ZoneGrowableArray<const ICData*>& ic_data_array, | |
111 intptr_t osr_id) { | |
112 // Build the flow graph. | |
113 FlowGraphBuilder builder(*parsed_function, | |
114 ic_data_array, | |
115 NULL, // NULL = not inlining. | |
116 osr_id); | |
117 | |
118 return builder.BuildGraph(); | |
119 } | |
120 | |
121 virtual void FinalizeCompilation() { } | |
122 }; | |
123 | 100 |
124 | 101 |
125 class IrregexpCompilationPipeline : public CompilationPipeline { | 102 void DartCompilationPipeline::FinalizeCompilation() { } |
126 public: | |
127 IrregexpCompilationPipeline() : backtrack_goto_(NULL) { } | |
128 | 103 |
129 virtual void ParseFunction(ParsedFunction* parsed_function) { | |
130 RegExpParser::ParseFunction(parsed_function); | |
131 // Variables are allocated after compilation. | |
132 } | |
133 | 104 |
134 virtual FlowGraph* BuildFlowGraph( | 105 void IrregexpCompilationPipeline::ParseFunction( |
135 Zone* zone, | 106 ParsedFunction* parsed_function) { |
136 ParsedFunction* parsed_function, | 107 RegExpParser::ParseFunction(parsed_function); |
137 const ZoneGrowableArray<const ICData*>& ic_data_array, | 108 // Variables are allocated after compilation. |
138 intptr_t osr_id) { | 109 } |
139 // Compile to the dart IR. | |
140 RegExpEngine::CompilationResult result = | |
141 RegExpEngine::CompileIR(parsed_function->regexp_compile_data(), | |
142 parsed_function, | |
143 ic_data_array); | |
144 backtrack_goto_ = result.backtrack_goto; | |
145 | 110 |
146 // Allocate variables now that we know the number of locals. | 111 FlowGraph* IrregexpCompilationPipeline::BuildFlowGraph( |
147 parsed_function->AllocateIrregexpVariables(result.num_stack_locals); | 112 Zone* zone, |
113 ParsedFunction* parsed_function, | |
114 const ZoneGrowableArray<const ICData*>& ic_data_array, | |
115 intptr_t osr_id) { | |
116 // Compile to the dart IR. | |
117 RegExpEngine::CompilationResult result = | |
118 RegExpEngine::CompileIR(parsed_function->regexp_compile_data(), | |
119 parsed_function, | |
120 ic_data_array); | |
121 backtrack_goto_ = result.backtrack_goto; | |
148 | 122 |
149 // Build the flow graph. | 123 // Allocate variables now that we know the number of locals. |
150 FlowGraphBuilder builder(*parsed_function, | 124 parsed_function->AllocateIrregexpVariables(result.num_stack_locals); |
151 ic_data_array, | |
152 NULL, // NULL = not inlining. | |
153 osr_id); | |
154 | 125 |
155 return new(zone) FlowGraph(*parsed_function, | 126 // Build the flow graph. |
156 result.graph_entry, | 127 FlowGraphBuilder builder(*parsed_function, |
157 result.num_blocks); | 128 ic_data_array, |
158 } | 129 NULL, // NULL = not inlining. |
130 osr_id); | |
159 | 131 |
160 virtual void FinalizeCompilation() { | 132 return new(zone) FlowGraph(*parsed_function, |
161 backtrack_goto_->ComputeOffsetTable(); | 133 result.graph_entry, |
162 } | 134 result.num_blocks); |
135 } | |
163 | 136 |
164 private: | 137 void IrregexpCompilationPipeline::FinalizeCompilation() { |
165 IndirectGotoInstr* backtrack_goto_; | 138 backtrack_goto_->ComputeOffsetTable(); |
166 }; | 139 } |
167 | |
168 | 140 |
169 CompilationPipeline* CompilationPipeline::New(Zone* zone, | 141 CompilationPipeline* CompilationPipeline::New(Zone* zone, |
170 const Function& function) { | 142 const Function& function) { |
171 if (function.IsIrregexpFunction()) { | 143 if (function.IsIrregexpFunction()) { |
172 return new(zone) IrregexpCompilationPipeline(); | 144 return new(zone) IrregexpCompilationPipeline(); |
173 } else { | 145 } else { |
174 return new(zone) DartCompilationPipeline(); | 146 return new(zone) DartCompilationPipeline(); |
175 } | 147 } |
176 } | 148 } |
177 | 149 |
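Note on where the removed pipeline classes went: the NEW column now defines their members out of line (e.g. `DartCompilationPipeline::ParseFunction` above), so the class declarations presumably moved to a header, likely `vm/compiler.h`, which is not part of this diff. A sketch of those presumed declarations follows; the signatures are copied from the OLD class bodies above, while the header location and anything not shown there are assumptions.

```cpp
// Presumed declarations, assumed to have moved to vm/compiler.h (header not
// shown in this diff); member signatures mirror the OLD class bodies above.
class CompilationPipeline : public ZoneAllocated {
 public:
  static CompilationPipeline* New(Zone* zone, const Function& function);

  virtual void ParseFunction(ParsedFunction* parsed_function) = 0;
  virtual FlowGraph* BuildFlowGraph(
      Zone* zone,
      ParsedFunction* parsed_function,
      const ZoneGrowableArray<const ICData*>& ic_data_array,
      intptr_t osr_id) = 0;
  virtual void FinalizeCompilation() = 0;
  virtual ~CompilationPipeline() { }
};


class DartCompilationPipeline : public CompilationPipeline {
 public:
  virtual void ParseFunction(ParsedFunction* parsed_function);
  virtual FlowGraph* BuildFlowGraph(
      Zone* zone,
      ParsedFunction* parsed_function,
      const ZoneGrowableArray<const ICData*>& ic_data_array,
      intptr_t osr_id);
  virtual void FinalizeCompilation();
};


class IrregexpCompilationPipeline : public CompilationPipeline {
 public:
  IrregexpCompilationPipeline() : backtrack_goto_(NULL) { }

  virtual void ParseFunction(ParsedFunction* parsed_function);
  virtual FlowGraph* BuildFlowGraph(
      Zone* zone,
      ParsedFunction* parsed_function,
      const ZoneGrowableArray<const ICData*>& ic_data_array,
      intptr_t osr_id);
  virtual void FinalizeCompilation();

 private:
  IndirectGotoInstr* backtrack_goto_;
};
```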
(...skipping 263 matching lines...) | |
441 const uint32_t prefix_invalidation_gen_at_start_; | 413 const uint32_t prefix_invalidation_gen_at_start_; |
442 | 414 |
443 DISALLOW_COPY_AND_ASSIGN(CompileParsedFunctionHelper); | 415 DISALLOW_COPY_AND_ASSIGN(CompileParsedFunctionHelper); |
444 }; | 416 }; |
445 | 417 |
446 | 418 |
447 void CompileParsedFunctionHelper::FinalizeCompilation( | 419 void CompileParsedFunctionHelper::FinalizeCompilation( |
448 Assembler* assembler, | 420 Assembler* assembler, |
449 FlowGraphCompiler* graph_compiler, | 421 FlowGraphCompiler* graph_compiler, |
450 FlowGraph* flow_graph) { | 422 FlowGraph* flow_graph) { |
423 ASSERT(!FLAG_precompilation); | |
451 const Function& function = parsed_function()->function(); | 424 const Function& function = parsed_function()->function(); |
452 Zone* const zone = thread()->zone(); | 425 Zone* const zone = thread()->zone(); |
453 | 426 |
454 CSTAT_TIMER_SCOPE(thread(), codefinalizer_timer); | 427 CSTAT_TIMER_SCOPE(thread(), codefinalizer_timer); |
455 // CreateDeoptInfo uses the object pool and needs to be done before | 428 // CreateDeoptInfo uses the object pool and needs to be done before |
456 // FinalizeCode. | 429 // FinalizeCode. |
457 const Array& deopt_info_array = | 430 const Array& deopt_info_array = |
458 Array::Handle(zone, graph_compiler->CreateDeoptInfo(assembler)); | 431 Array::Handle(zone, graph_compiler->CreateDeoptInfo(assembler)); |
459 INC_STAT(thread(), total_code_size, | 432 INC_STAT(thread(), total_code_size, |
460 deopt_info_array.Length() * sizeof(uword)); | 433 deopt_info_array.Length() * sizeof(uword)); |
(...skipping 78 matching lines...) | |
539 ASSERT(!is_osr); // OSR is compiled in background. | 512 ASSERT(!is_osr); // OSR is compiled in background. |
540 function.InstallOptimizedCode(code, is_osr); | 513 function.InstallOptimizedCode(code, is_osr); |
541 } | 514 } |
542 if (function.usage_counter() < 0) { | 515 if (function.usage_counter() < 0) { |
543 // Reset to 0 so that it can be recompiled if needed. | 516 // Reset to 0 so that it can be recompiled if needed. |
544 function.set_usage_counter(0); | 517 function.set_usage_counter(0); |
545 } | 518 } |
546 } | 519 } |
547 | 520 |
548 // Register code with the classes it depends on because of CHA and | 521 // Register code with the classes it depends on because of CHA and |
549 // fields it depends on because of store guards, unless we cannot | 522 // fields it depends on because of store guards. |
550 // deopt. | 523 // Deoptimize field dependent code first, before registering |
551 if (!FLAG_precompilation) { | 524 // this yet uninstalled code as dependent on a field. |
552 // Deoptimize field dependent code first, before registering | 525 // TODO(srdjan): Debugging dart2js crashes; |
553 // this yet uninstalled code as dependent on a field. | 526 // FlowGraphOptimizer::VisitStoreInstanceField populates |
554 // TODO(srdjan): Debugging dart2js crashes; | 527 // deoptimize_dependent_code() list, currently disabled. |
555 // FlowGraphOptimizer::VisitStoreInstanceField populates | 528 for (intptr_t i = 0; |
556 // deoptimize_dependent_code() list, currently disabled. | 529 i < flow_graph->deoptimize_dependent_code().length(); |
557 for (intptr_t i = 0; | 530 i++) { |
558 i < flow_graph->deoptimize_dependent_code().length(); | 531 const Field* field = flow_graph->deoptimize_dependent_code()[i]; |
559 i++) { | 532 field->DeoptimizeDependentCode(); |
560 const Field* field = flow_graph->deoptimize_dependent_code()[i]; | 533 } |
561 field->DeoptimizeDependentCode(); | 534 for (intptr_t i = 0; |
562 } | 535 i < thread()->cha()->leaf_classes().length(); |
563 for (intptr_t i = 0; | 536 ++i) { |
564 i < thread()->cha()->leaf_classes().length(); | 537 thread()->cha()->leaf_classes()[i]->RegisterCHACode(code); |
565 ++i) { | 538 } |
566 thread()->cha()->leaf_classes()[i]->RegisterCHACode(code); | 539 for (intptr_t i = 0; |
567 } | 540 i < flow_graph->guarded_fields()->length(); |
568 for (intptr_t i = 0; | 541 i++) { |
569 i < flow_graph->guarded_fields()->length(); | 542 const Field* field = (*flow_graph->guarded_fields())[i]; |
570 i++) { | 543 field->RegisterDependentCode(code); |
571 const Field* field = (*flow_graph->guarded_fields())[i]; | |
572 field->RegisterDependentCode(code); | |
573 } | |
574 } | 544 } |
575 } else { // not optimized. | 545 } else { // not optimized. |
576 if (!FLAG_precompilation && | 546 if (function.ic_data_array() == Array::null()) { |
577 (function.ic_data_array() == Array::null())) { | |
578 function.SaveICDataMap( | 547 function.SaveICDataMap( |
579 graph_compiler->deopt_id_to_ic_data(), | 548 graph_compiler->deopt_id_to_ic_data(), |
580 Array::Handle(zone, graph_compiler->edge_counters_array())); | 549 Array::Handle(zone, graph_compiler->edge_counters_array())); |
581 } | 550 } |
582 function.set_unoptimized_code(code); | 551 function.set_unoptimized_code(code); |
583 function.AttachCode(code); | 552 function.AttachCode(code); |
584 } | 553 } |
585 if (parsed_function()->HasDeferredPrefixes()) { | 554 if (parsed_function()->HasDeferredPrefixes()) { |
586 ASSERT(!FLAG_load_deferred_eagerly); | 555 ASSERT(!FLAG_load_deferred_eagerly); |
587 ZoneGrowableArray<const LibraryPrefix*>* prefixes = | 556 ZoneGrowableArray<const LibraryPrefix*>* prefixes = |
588 parsed_function()->deferred_prefixes(); | 557 parsed_function()->deferred_prefixes(); |
589 for (intptr_t i = 0; i < prefixes->length(); i++) { | 558 for (intptr_t i = 0; i < prefixes->length(); i++) { |
590 (*prefixes)[i]->RegisterDependentCode(code); | 559 (*prefixes)[i]->RegisterDependentCode(code); |
591 } | 560 } |
592 } | 561 } |
593 } | 562 } |
594 | 563 |
595 | 564 |
596 // Return false if bailed out. | 565 // Return false if bailed out. |
597 // If optimized_result_code is not NULL then it is caller's responsibility | 566 // If optimized_result_code is not NULL then it is caller's responsibility |
598 // to install code. | 567 // to install code. |
599 bool CompileParsedFunctionHelper::Compile(CompilationPipeline* pipeline) { | 568 bool CompileParsedFunctionHelper::Compile(CompilationPipeline* pipeline) { |
569 ASSERT(!FLAG_precompilation); | |
600 const Function& function = parsed_function()->function(); | 570 const Function& function = parsed_function()->function(); |
601 if (optimized() && !function.IsOptimizable()) { | 571 if (optimized() && !function.IsOptimizable()) { |
602 return false; | 572 return false; |
603 } | 573 } |
604 bool is_compiled = false; | 574 bool is_compiled = false; |
605 Zone* const zone = thread()->zone(); | 575 Zone* const zone = thread()->zone(); |
606 TimelineStream* compiler_timeline = isolate()->GetCompilerStream(); | 576 TimelineStream* compiler_timeline = isolate()->GetCompilerStream(); |
607 CSTAT_TIMER_SCOPE(thread(), codegen_timer); | 577 CSTAT_TIMER_SCOPE(thread(), codegen_timer); |
608 HANDLESCOPE(thread()); | 578 HANDLESCOPE(thread()); |
609 | 579 |
610 // We may reattempt compilation if the function needs to be assembled using | 580 // We may reattempt compilation if the function needs to be assembled using |
611 // far branches on ARM and MIPS. In the else branch of the setjmp call, | 581 // far branches on ARM and MIPS. In the else branch of the setjmp call, |
612 // done is set to false, and use_far_branches is set to true if there is a | 582 // done is set to false, and use_far_branches is set to true if there is a |
613 // longjmp from the ARM or MIPS assemblers. In all other paths through this | 583 // longjmp from the ARM or MIPS assemblers. In all other paths through this |
614 // while loop, done is set to true. use_far_branches is always false on ia32 | 584 // while loop, done is set to true. use_far_branches is always false on ia32 |
615 // and x64. | 585 // and x64. |
616 bool done = false; | 586 bool done = false; |
617 // volatile because the variable may be clobbered by a longjmp. | 587 // volatile because the variable may be clobbered by a longjmp. |
618 volatile bool use_far_branches = false; | 588 volatile bool use_far_branches = false; |
619 volatile bool use_speculative_inlining = | 589 volatile bool use_speculative_inlining = false; |
rmacnak 2016/02/04 00:36:39: volatile -> const
Florian Schneider 2016/02/05 01:55:52: Done.
620 FLAG_max_speculative_inlining_attempts > 0; | |
621 GrowableArray<intptr_t> inlining_black_list; | |
622 | 590 |
623 while (!done) { | 591 while (!done) { |
624 const intptr_t prev_deopt_id = thread()->deopt_id(); | 592 const intptr_t prev_deopt_id = thread()->deopt_id(); |
625 thread()->set_deopt_id(0); | 593 thread()->set_deopt_id(0); |
626 LongJumpScope jump; | 594 LongJumpScope jump; |
627 const intptr_t val = setjmp(*jump.Set()); | 595 const intptr_t val = setjmp(*jump.Set()); |
628 if (val == 0) { | 596 if (val == 0) { |
629 FlowGraph* flow_graph = NULL; | 597 FlowGraph* flow_graph = NULL; |
630 | 598 |
631 // Class hierarchy analysis is registered with the isolate in the | 599 // Class hierarchy analysis is registered with the isolate in the |
(...skipping 84 matching lines...) | |
716 TimelineDurationScope tds(thread(), | 684 TimelineDurationScope tds(thread(), |
717 compiler_timeline, | 685 compiler_timeline, |
718 "OptimizationPasses"); | 686 "OptimizationPasses"); |
719 inline_id_to_function.Add(&function); | 687 inline_id_to_function.Add(&function); |
720 // Top scope function has no caller (-1). | 688 // Top scope function has no caller (-1). |
721 caller_inline_id.Add(-1); | 689 caller_inline_id.Add(-1); |
722 CSTAT_TIMER_SCOPE(thread(), graphoptimizer_timer); | 690 CSTAT_TIMER_SCOPE(thread(), graphoptimizer_timer); |
723 | 691 |
724 FlowGraphOptimizer optimizer(flow_graph, | 692 FlowGraphOptimizer optimizer(flow_graph, |
725 use_speculative_inlining, | 693 use_speculative_inlining, |
726 &inlining_black_list); | 694 NULL); |
727 if (FLAG_precompilation) { | |
728 optimizer.PopulateWithICData(); | |
729 | |
730 optimizer.ApplyClassIds(); | |
731 DEBUG_ASSERT(flow_graph->VerifyUseLists()); | |
732 | |
733 FlowGraphTypePropagator::Propagate(flow_graph); | |
734 DEBUG_ASSERT(flow_graph->VerifyUseLists()); | |
735 } | |
736 optimizer.ApplyICData(); | 695 optimizer.ApplyICData(); |
737 DEBUG_ASSERT(flow_graph->VerifyUseLists()); | 696 DEBUG_ASSERT(flow_graph->VerifyUseLists()); |
738 | 697 |
739 // Optimize (a << b) & c patterns, merge operations. | 698 // Optimize (a << b) & c patterns, merge operations. |
740 // Run early in order to have more opportunity to optimize left shifts. | 699 // Run early in order to have more opportunity to optimize left shifts. |
741 optimizer.TryOptimizePatterns(); | 700 optimizer.TryOptimizePatterns(); |
742 DEBUG_ASSERT(flow_graph->VerifyUseLists()); | 701 DEBUG_ASSERT(flow_graph->VerifyUseLists()); |
743 | 702 |
744 FlowGraphInliner::SetInliningId(flow_graph, 0); | 703 FlowGraphInliner::SetInliningId(flow_graph, 0); |
745 | 704 |
746 // Inlining (mutates the flow graph) | 705 // Inlining (mutates the flow graph) |
747 if (FLAG_use_inlining) { | 706 if (FLAG_use_inlining) { |
748 TimelineDurationScope tds2(thread(), | 707 TimelineDurationScope tds2(thread(), |
749 compiler_timeline, | 708 compiler_timeline, |
750 "Inlining"); | 709 "Inlining"); |
751 CSTAT_TIMER_SCOPE(thread(), graphinliner_timer); | 710 CSTAT_TIMER_SCOPE(thread(), graphinliner_timer); |
752 // Propagate types to create more inlining opportunities. | 711 // Propagate types to create more inlining opportunities. |
753 FlowGraphTypePropagator::Propagate(flow_graph); | 712 FlowGraphTypePropagator::Propagate(flow_graph); |
754 DEBUG_ASSERT(flow_graph->VerifyUseLists()); | 713 DEBUG_ASSERT(flow_graph->VerifyUseLists()); |
755 | 714 |
756 // Use propagated class-ids to create more inlining opportunities. | 715 // Use propagated class-ids to create more inlining opportunities. |
757 optimizer.ApplyClassIds(); | 716 optimizer.ApplyClassIds(); |
758 DEBUG_ASSERT(flow_graph->VerifyUseLists()); | 717 DEBUG_ASSERT(flow_graph->VerifyUseLists()); |
759 | 718 |
760 FlowGraphInliner inliner(flow_graph, | 719 FlowGraphInliner inliner(flow_graph, |
761 &inline_id_to_function, | 720 &inline_id_to_function, |
762 &caller_inline_id, | 721 &caller_inline_id, |
763 use_speculative_inlining, | 722 use_speculative_inlining, |
764 &inlining_black_list); | 723 NULL); |
765 inliner.Inline(); | 724 inliner.Inline(); |
766 // Use lists are maintained and validated by the inliner. | 725 // Use lists are maintained and validated by the inliner. |
767 DEBUG_ASSERT(flow_graph->VerifyUseLists()); | 726 DEBUG_ASSERT(flow_graph->VerifyUseLists()); |
768 } | 727 } |
769 | 728 |
770 // Propagate types and eliminate more type tests. | 729 // Propagate types and eliminate more type tests. |
771 FlowGraphTypePropagator::Propagate(flow_graph); | 730 FlowGraphTypePropagator::Propagate(flow_graph); |
772 DEBUG_ASSERT(flow_graph->VerifyUseLists()); | 731 DEBUG_ASSERT(flow_graph->VerifyUseLists()); |
773 | 732 |
774 { | 733 { |
(...skipping 303 matching lines...) | |
1078 const Error& error = Error::Handle( | 1037 const Error& error = Error::Handle( |
1079 isolate()->object_store()->sticky_error()); | 1038 isolate()->object_store()->sticky_error()); |
1080 | 1039 |
1081 if (error.raw() == Object::branch_offset_error().raw()) { | 1040 if (error.raw() == Object::branch_offset_error().raw()) { |
1082 // Compilation failed due to an out of range branch offset in the | 1041 // Compilation failed due to an out of range branch offset in the |
1083 // assembler. We try again (done = false) with far branches enabled. | 1042 // assembler. We try again (done = false) with far branches enabled. |
1084 done = false; | 1043 done = false; |
1085 ASSERT(!use_far_branches); | 1044 ASSERT(!use_far_branches); |
1086 use_far_branches = true; | 1045 use_far_branches = true; |
1087 } else if (error.raw() == Object::speculative_inlining_error().raw()) { | 1046 } else if (error.raw() == Object::speculative_inlining_error().raw()) { |
1088 // The return value of setjmp is the deopt id of the check instruction | 1047 // Can only happen with precompilation. |
1089 // that caused the bailout. | 1048 UNREACHABLE(); |
1090 done = false; | |
1091 #if defined(DEBUG) | |
1092 ASSERT(FLAG_precompilation); | |
1093 ASSERT(use_speculative_inlining); | |
1094 for (intptr_t i = 0; i < inlining_black_list.length(); ++i) { | |
1095 ASSERT(inlining_black_list[i] != val); | |
1096 } | |
1097 #endif | |
1098 inlining_black_list.Add(val); | |
1099 const intptr_t max_attempts = FLAG_max_speculative_inlining_attempts; | |
1100 if (inlining_black_list.length() >= max_attempts) { | |
1101 use_speculative_inlining = false; | |
1102 if (FLAG_trace_compiler || FLAG_trace_optimizing_compiler) { | |
1103 THR_Print("Disabled speculative inlining after %" Pd " attempts.\n", | |
1104 inlining_black_list.length()); | |
1105 } | |
1106 } | |
1107 } else { | 1049 } else { |
1108 // If the error isn't due to an out of range branch offset, we don't | 1050 // If the error isn't due to an out of range branch offset, we don't |
1109 // try again (done = true), and indicate that we did not finish | 1051 // try again (done = true), and indicate that we did not finish |
1110 // compiling (is_compiled = false). | 1052 // compiling (is_compiled = false). |
1111 if (FLAG_trace_bailout) { | 1053 if (FLAG_trace_bailout) { |
1112 THR_Print("%s\n", error.ToErrorCString()); | 1054 THR_Print("%s\n", error.ToErrorCString()); |
1113 } | 1055 } |
1114 done = true; | 1056 done = true; |
1115 } | 1057 } |
1116 | 1058 |
1117 // Clear the error if it was not a real error, but just a bailout. | 1059 // Clear the error if it was not a real error, but just a bailout. |
1118 if (error.IsLanguageError() && | 1060 if (error.IsLanguageError() && |
1119 (LanguageError::Cast(error).kind() == Report::kBailout)) { | 1061 (LanguageError::Cast(error).kind() == Report::kBailout)) { |
1120 isolate()->object_store()->clear_sticky_error(); | 1062 isolate()->object_store()->clear_sticky_error(); |
1121 } | 1063 } |
1122 is_compiled = false; | 1064 is_compiled = false; |
1123 } | 1065 } |
1124 // Reset global isolate state. | 1066 // Reset global isolate state. |
1125 thread()->set_deopt_id(prev_deopt_id); | 1067 thread()->set_deopt_id(prev_deopt_id); |
1126 } | 1068 } |
1127 return is_compiled; | 1069 return is_compiled; |
1128 } | 1070 } |
1129 | 1071 |
1130 | 1072 |
1131 static void DisassembleCode(const Function& function, bool optimized) { | 1073 void Compiler::DisassembleCode(const Function& function, bool optimized) { |
1132 const char* function_fullname = function.ToFullyQualifiedCString(); | 1074 const char* function_fullname = function.ToFullyQualifiedCString(); |
1133 THR_Print("Code for %sfunction '%s' {\n", | 1075 THR_Print("Code for %sfunction '%s' {\n", |
1134 optimized ? "optimized " : "", | 1076 optimized ? "optimized " : "", |
1135 function_fullname); | 1077 function_fullname); |
1136 const Code& code = Code::Handle(function.CurrentCode()); | 1078 const Code& code = Code::Handle(function.CurrentCode()); |
1137 code.Disassemble(); | 1079 code.Disassemble(); |
1138 THR_Print("}\n"); | 1080 THR_Print("}\n"); |
1139 | 1081 |
1140 THR_Print("Pointer offsets for function: {\n"); | 1082 THR_Print("Pointer offsets for function: {\n"); |
1141 // Pointer offsets are stored in descending order. | 1083 // Pointer offsets are stored in descending order. |
(...skipping 145 matching lines...) | |
1287 function.raw()); | 1229 function.raw()); |
1288 } | 1230 } |
1289 } | 1231 } |
1290 #endif | 1232 #endif |
1291 | 1233 |
1292 | 1234 |
1293 static RawError* CompileFunctionHelper(CompilationPipeline* pipeline, | 1235 static RawError* CompileFunctionHelper(CompilationPipeline* pipeline, |
1294 const Function& function, | 1236 const Function& function, |
1295 bool optimized, | 1237 bool optimized, |
1296 intptr_t osr_id) { | 1238 intptr_t osr_id) { |
1297 // Check that we optimize if 'FLAG_precompilation' is set to true, | 1239 ASSERT(!FLAG_precompilation); |
1298 // except if the function is marked as not optimizable. | |
1299 ASSERT(!function.IsOptimizable() || | |
1300 !FLAG_precompilation || optimized); | |
1301 ASSERT(!FLAG_precompilation || !function.HasCode()); | |
1302 LongJumpScope jump; | 1240 LongJumpScope jump; |
1303 if (setjmp(*jump.Set()) == 0) { | 1241 if (setjmp(*jump.Set()) == 0) { |
1304 Thread* const thread = Thread::Current(); | 1242 Thread* const thread = Thread::Current(); |
1305 Isolate* const isolate = thread->isolate(); | 1243 Isolate* const isolate = thread->isolate(); |
1306 StackZone stack_zone(thread); | 1244 StackZone stack_zone(thread); |
1307 Zone* const zone = stack_zone.GetZone(); | 1245 Zone* const zone = stack_zone.GetZone(); |
1308 const bool trace_compiler = | 1246 const bool trace_compiler = |
1309 FLAG_trace_compiler || | 1247 FLAG_trace_compiler || |
1310 (FLAG_trace_optimizing_compiler && optimized); | 1248 (FLAG_trace_optimizing_compiler && optimized); |
1311 Timer per_compile_timer(trace_compiler, "Compilation time"); | 1249 Timer per_compile_timer(trace_compiler, "Compilation time"); |
(...skipping 21 matching lines...) | |
1333 pipeline->ParseFunction(parsed_function); | 1271 pipeline->ParseFunction(parsed_function); |
1334 const int64_t num_tokens_after = STAT_VALUE(thread, num_tokens_consumed); | 1272 const int64_t num_tokens_after = STAT_VALUE(thread, num_tokens_consumed); |
1335 INC_STAT(thread, | 1273 INC_STAT(thread, |
1336 num_func_tokens_compiled, | 1274 num_func_tokens_compiled, |
1337 num_tokens_after - num_tokens_before); | 1275 num_tokens_after - num_tokens_before); |
1338 } | 1276 } |
1339 | 1277 |
1340 CompileParsedFunctionHelper helper(parsed_function, optimized, osr_id); | 1278 CompileParsedFunctionHelper helper(parsed_function, optimized, osr_id); |
1341 const bool success = helper.Compile(pipeline); | 1279 const bool success = helper.Compile(pipeline); |
1342 if (!success) { | 1280 if (!success) { |
1343 if (optimized && !FLAG_precompilation) { | 1281 if (optimized) { |
1344 // Optimizer bailed out. Disable optimizations and never try again. | 1282 // Optimizer bailed out. Disable optimizations and never try again. |
1345 if (trace_compiler) { | 1283 if (trace_compiler) { |
1346 THR_Print("--> disabling optimizations for '%s'\n", | 1284 THR_Print("--> disabling optimizations for '%s'\n", |
1347 function.ToFullyQualifiedCString()); | 1285 function.ToFullyQualifiedCString()); |
1348 } else if (FLAG_trace_failed_optimization_attempts) { | 1286 } else if (FLAG_trace_failed_optimization_attempts) { |
1349 THR_Print("Cannot optimize: %s\n", | 1287 THR_Print("Cannot optimize: %s\n", |
1350 function.ToFullyQualifiedCString()); | 1288 function.ToFullyQualifiedCString()); |
1351 } | 1289 } |
1352 function.SetIsOptimizable(false); | 1290 function.SetIsOptimizable(false); |
1353 return Error::null(); | 1291 return Error::null(); |
(...skipping 15 matching lines...) | |
1369 THR_Print("--> '%s' entry: %#" Px " size: %" Pd " time: %" Pd64 " us\n", | 1307 THR_Print("--> '%s' entry: %#" Px " size: %" Pd " time: %" Pd64 " us\n", |
1370 function.ToFullyQualifiedCString(), | 1308 function.ToFullyQualifiedCString(), |
1371 Code::Handle(function.CurrentCode()).EntryPoint(), | 1309 Code::Handle(function.CurrentCode()).EntryPoint(), |
1372 Code::Handle(function.CurrentCode()).Size(), | 1310 Code::Handle(function.CurrentCode()).Size(), |
1373 per_compile_timer.TotalElapsedTime()); | 1311 per_compile_timer.TotalElapsedTime()); |
1374 } | 1312 } |
1375 | 1313 |
1376 isolate->debugger()->NotifyCompilation(function); | 1314 isolate->debugger()->NotifyCompilation(function); |
1377 | 1315 |
1378 if (FLAG_disassemble && FlowGraphPrinter::ShouldPrint(function)) { | 1316 if (FLAG_disassemble && FlowGraphPrinter::ShouldPrint(function)) { |
1379 DisassembleCode(function, optimized); | 1317 Compiler::DisassembleCode(function, optimized); |
1380 } else if (FLAG_disassemble_optimized && | 1318 } else if (FLAG_disassemble_optimized && |
1381 optimized && | 1319 optimized && |
1382 FlowGraphPrinter::ShouldPrint(function)) { | 1320 FlowGraphPrinter::ShouldPrint(function)) { |
1383 // TODO(fschneider): Print unoptimized code along with the optimized code. | 1321 // TODO(fschneider): Print unoptimized code along with the optimized code. |
1384 THR_Print("*** BEGIN CODE\n"); | 1322 THR_Print("*** BEGIN CODE\n"); |
1385 DisassembleCode(function, true); | 1323 Compiler::DisassembleCode(function, true); |
1386 THR_Print("*** END CODE\n"); | 1324 THR_Print("*** END CODE\n"); |
1387 } | 1325 } |
1388 #if defined(DEBUG) | 1326 #if defined(DEBUG) |
1389 CheckInliningIntervals(function); | 1327 CheckInliningIntervals(function); |
1390 #endif | 1328 #endif |
1391 return Error::null(); | 1329 return Error::null(); |
1392 } else { | 1330 } else { |
1393 Thread* const thread = Thread::Current(); | 1331 Thread* const thread = Thread::Current(); |
1394 Isolate* const isolate = thread->isolate(); | 1332 Isolate* const isolate = thread->isolate(); |
1395 StackZone stack_zone(thread); | 1333 StackZone stack_zone(thread); |
1396 Error& error = Error::Handle(); | 1334 Error& error = Error::Handle(); |
1397 // We got an error during compilation. | 1335 // We got an error during compilation. |
1398 error = isolate->object_store()->sticky_error(); | 1336 error = isolate->object_store()->sticky_error(); |
1399 isolate->object_store()->clear_sticky_error(); | 1337 isolate->object_store()->clear_sticky_error(); |
1400 // Unoptimized compilation or precompilation may encounter compile-time | 1338 // Unoptimized compilation may encounter compile-time |
1401 // errors, but regular optimized compilation should not. | 1339 // errors, but regular optimized compilation should not. |
1402 ASSERT(!optimized || FLAG_precompilation); | 1340 ASSERT(!optimized); |
1403 // Do not attempt to optimize functions that can cause errors. | 1341 // Do not attempt to optimize functions that can cause errors. |
1404 function.set_is_optimizable(false); | 1342 function.set_is_optimizable(false); |
1405 return error.raw(); | 1343 return error.raw(); |
1406 } | 1344 } |
1407 UNREACHABLE(); | 1345 UNREACHABLE(); |
1408 return Error::null(); | 1346 return Error::null(); |
1409 } | 1347 } |
1410 | 1348 |
1411 | 1349 |
1412 RawError* Compiler::CompileFunction(Thread* thread, | 1350 RawError* Compiler::CompileFunction(Thread* thread, |
1413 const Function& function) { | 1351 const Function& function) { |
1352 #ifdef DART_PRECOMPILER | |
1353 if (FLAG_precompilation) { | |
1354 return Precompiler::CompileFunction(thread, function); | |
1355 } | |
1356 #endif | |
1414 Isolate* isolate = thread->isolate(); | 1357 Isolate* isolate = thread->isolate(); |
1415 VMTagScope tagScope(thread, VMTag::kCompileUnoptimizedTagId); | 1358 VMTagScope tagScope(thread, VMTag::kCompileUnoptimizedTagId); |
1416 TIMELINE_FUNCTION_COMPILATION_DURATION(thread, "Function", function); | 1359 TIMELINE_FUNCTION_COMPILATION_DURATION(thread, "Function", function); |
1417 | 1360 |
1418 if (!isolate->compilation_allowed()) { | 1361 if (!isolate->compilation_allowed()) { |
1419 FATAL3("Precompilation missed function %s (%s, %s)\n", | 1362 FATAL3("Precompilation missed function %s (%s, %s)\n", |
1420 function.ToLibNamePrefixedQualifiedCString(), | 1363 function.ToLibNamePrefixedQualifiedCString(), |
1421 function.token_pos().ToCString(), | 1364 function.token_pos().ToCString(), |
1422 Function::KindToCString(function.kind())); | 1365 Function::KindToCString(function.kind())); |
1423 } | 1366 } |
1424 | 1367 |
1425 CompilationPipeline* pipeline = | 1368 CompilationPipeline* pipeline = |
1426 CompilationPipeline::New(thread->zone(), function); | 1369 CompilationPipeline::New(thread->zone(), function); |
1427 | 1370 |
1428 const bool optimized = | |
1429 FLAG_precompilation && function.IsOptimizable(); | |
1430 | |
1431 return CompileFunctionHelper(pipeline, | 1371 return CompileFunctionHelper(pipeline, |
1432 function, | 1372 function, |
1433 optimized, | 1373 /* optimized = */ false, |
1434 kNoOSRDeoptId); | 1374 kNoOSRDeoptId); |
1435 } | 1375 } |
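The new DART_PRECOMPILER guard above (and the analogous ones in EvaluateStaticInitializer and ExecuteOnce further down) hands precompilation off to Precompiler entry points declared outside this file. A sketch of the presumed declarations, inferred only from the call sites and return types visible in this diff; the `AllStatic` base and anything beyond these three members are assumptions.

```cpp
// Hypothetical sketch of the Precompiler entry points used by this file.
// Only the three members called from compiler.cc are shown; the real
// declarations live in vm/precompiler.h, which is not part of this diff.
class Precompiler : public AllStatic {
 public:
  // Called from Compiler::CompileFunction when FLAG_precompilation is set.
  static RawError* CompileFunction(Thread* thread, const Function& function);

  // Called from Compiler::EvaluateStaticInitializer under precompilation.
  static RawObject* EvaluateStaticInitializer(const Field& field);

  // Called from Compiler::ExecuteOnce under precompilation.
  static RawObject* ExecuteOnce(SequenceNode* fragment);
};
```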
1436 | 1376 |
1437 | 1377 |
1438 RawError* Compiler::EnsureUnoptimizedCode(Thread* thread, | 1378 RawError* Compiler::EnsureUnoptimizedCode(Thread* thread, |
1439 const Function& function) { | 1379 const Function& function) { |
1440 if (function.unoptimized_code() != Object::null()) { | 1380 if (function.unoptimized_code() != Object::null()) { |
1441 return Error::null(); | 1381 return Error::null(); |
1442 } | 1382 } |
1443 Code& original_code = Code::ZoneHandle(thread->zone()); | 1383 Code& original_code = Code::ZoneHandle(thread->zone()); |
(...skipping 114 matching lines...) | |
1558 return error.raw(); | 1498 return error.raw(); |
1559 } | 1499 } |
1560 func.ClearICDataArray(); | 1500 func.ClearICDataArray(); |
1561 func.ClearCode(); | 1501 func.ClearCode(); |
1562 } | 1502 } |
1563 } | 1503 } |
1564 return error.raw(); | 1504 return error.raw(); |
1565 } | 1505 } |
1566 | 1506 |
1567 | 1507 |
1568 void Compiler::CompileStaticInitializer(const Field& field) { | |
1569 ASSERT(field.is_static()); | |
1570 if (field.HasPrecompiledInitializer()) { | |
1571 // TODO(rmacnak): Investigate why this happens for _enum_names. | |
1572 OS::Print("Warning: Ignoring repeated request for initializer for %s\n", | |
1573 field.ToCString()); | |
1574 return; | |
1575 } | |
1576 Thread* thread = Thread::Current(); | |
1577 StackZone zone(thread); | |
1578 | |
1579 ParsedFunction* parsed_function = Parser::ParseStaticFieldInitializer(field); | |
1580 | |
1581 parsed_function->AllocateVariables(); | |
1582 // Non-optimized code generator. | |
1583 DartCompilationPipeline pipeline; | |
1584 CompileParsedFunctionHelper helper(parsed_function, false, kNoOSRDeoptId); | |
1585 helper.Compile(&pipeline); | |
1586 const Function& initializer = parsed_function->function(); | |
1587 field.SetPrecompiledInitializer(initializer); | |
1588 } | |
1589 | |
1590 | |
1591 RawObject* Compiler::EvaluateStaticInitializer(const Field& field) { | 1508 RawObject* Compiler::EvaluateStaticInitializer(const Field& field) { |
1509 #ifdef DART_PRECOMPILER | |
1510 if (FLAG_precompilation) { | |
1511 return Precompiler::EvaluateStaticInitializer(field); | |
1512 } | |
1513 #endif | |
1592 ASSERT(field.is_static()); | 1514 ASSERT(field.is_static()); |
1593 // The VM sets the field's value to transition_sentinel prior to | 1515 // The VM sets the field's value to transition_sentinel prior to |
1594 // evaluating the initializer value. | 1516 // evaluating the initializer value. |
1595 ASSERT(field.StaticValue() == Object::transition_sentinel().raw()); | 1517 ASSERT(field.StaticValue() == Object::transition_sentinel().raw()); |
1596 LongJumpScope jump; | 1518 LongJumpScope jump; |
1597 if (setjmp(*jump.Set()) == 0) { | 1519 if (setjmp(*jump.Set()) == 0) { |
1598 // Under precompilation, the initializer may have already been compiled, in | 1520 // Under lazy compilation initializer has not yet been created, so create |
1599 // which case use it. Under lazy compilation or early in precompilation, the | 1521 // it now, but don't bother remembering it because it won't be used again. |
1600 // initializer has not yet been created, so create it now, but don't bother | 1522 ASSERT(!field.HasPrecompiledInitializer()); |
1601 // remembering it because it won't be used again. | 1523 Thread* const thread = Thread::Current(); |
1602 Function& initializer = Function::Handle(); | 1524 StackZone zone(thread); |
1603 if (!field.HasPrecompiledInitializer()) { | 1525 ParsedFunction* parsed_function = |
1604 Thread* const thread = Thread::Current(); | 1526 Parser::ParseStaticFieldInitializer(field); |
1605 StackZone zone(thread); | |
1606 ParsedFunction* parsed_function = | |
1607 Parser::ParseStaticFieldInitializer(field); | |
1608 | 1527 |
1609 parsed_function->AllocateVariables(); | 1528 parsed_function->AllocateVariables(); |
1610 // Non-optimized code generator. | 1529 // Non-optimized code generator. |
1611 DartCompilationPipeline pipeline; | 1530 DartCompilationPipeline pipeline; |
1612 CompileParsedFunctionHelper helper(parsed_function, false, kNoOSRDeoptId); | 1531 CompileParsedFunctionHelper helper(parsed_function, false, kNoOSRDeoptId); |
1613 helper.Compile(&pipeline); | 1532 helper.Compile(&pipeline); |
1614 initializer = parsed_function->function().raw(); | 1533 const Function& initializer = |
1615 Code::Handle(initializer.unoptimized_code()).set_var_descriptors( | 1534 Function::Handle(parsed_function->function().raw()); |
1616 Object::empty_var_descriptors()); | 1535 Code::Handle(initializer.unoptimized_code()).set_var_descriptors( |
1617 } else { | 1536 Object::empty_var_descriptors()); |
1618 initializer ^= field.PrecompiledInitializer(); | |
1619 } | |
1620 // Invoke the function to evaluate the expression. | 1537 // Invoke the function to evaluate the expression. |
1621 return DartEntry::InvokeFunction(initializer, Object::empty_array()); | 1538 return DartEntry::InvokeFunction(initializer, Object::empty_array()); |
1622 } else { | 1539 } else { |
1623 Thread* const thread = Thread::Current(); | 1540 Thread* const thread = Thread::Current(); |
1624 Isolate* const isolate = thread->isolate(); | 1541 Isolate* const isolate = thread->isolate(); |
1625 StackZone zone(thread); | 1542 StackZone zone(thread); |
1626 const Error& error = | 1543 const Error& error = |
1627 Error::Handle(thread->zone(), isolate->object_store()->sticky_error()); | 1544 Error::Handle(thread->zone(), isolate->object_store()->sticky_error()); |
1628 isolate->object_store()->clear_sticky_error(); | 1545 isolate->object_store()->clear_sticky_error(); |
1629 return error.raw(); | 1546 return error.raw(); |
1630 } | 1547 } |
1631 UNREACHABLE(); | 1548 UNREACHABLE(); |
1632 return Object::null(); | 1549 return Object::null(); |
1633 } | 1550 } |
1634 | 1551 |
1635 | 1552 |
1636 | 1553 |
1637 RawObject* Compiler::ExecuteOnce(SequenceNode* fragment) { | 1554 RawObject* Compiler::ExecuteOnce(SequenceNode* fragment) { |
1555 #ifdef DART_PRECOMPILER | |
1556 if (FLAG_precompilation) { | |
1557 return Precompiler::ExecuteOnce(fragment); | |
1558 } | |
1559 #endif | |
1638 LongJumpScope jump; | 1560 LongJumpScope jump; |
1639 if (setjmp(*jump.Set()) == 0) { | 1561 if (setjmp(*jump.Set()) == 0) { |
1640 Thread* const thread = Thread::Current(); | 1562 Thread* const thread = Thread::Current(); |
1641 if (FLAG_trace_compiler) { | 1563 if (FLAG_trace_compiler) { |
1642 THR_Print("compiling expression: "); | 1564 THR_Print("compiling expression: "); |
1643 AstPrinter::PrintNode(fragment); | 1565 AstPrinter::PrintNode(fragment); |
1644 } | 1566 } |
1645 | 1567 |
1646 // Create a dummy function object for the code generator. | 1568 // Create a dummy function object for the code generator. |
1647 // The function needs to be associated with a named Class: the interface | 1569 // The function needs to be associated with a named Class: the interface |
(...skipping 350 matching lines...) | |
1998 UNREACHABLE(); | 1920 UNREACHABLE(); |
1999 } | 1921 } |
2000 | 1922 |
2001 | 1923 |
2002 RawError* Compiler::CompileAllFunctions(const Class& cls) { | 1924 RawError* Compiler::CompileAllFunctions(const Class& cls) { |
2003 UNREACHABLE(); | 1925 UNREACHABLE(); |
2004 return Error::null(); | 1926 return Error::null(); |
2005 } | 1927 } |
2006 | 1928 |
2007 | 1929 |
2008 void Compiler::CompileStaticInitializer(const Field& field) { | |
2009 UNREACHABLE(); | |
2010 } | |
2011 | |
2012 | |
2013 RawObject* Compiler::EvaluateStaticInitializer(const Field& field) { | 1930 RawObject* Compiler::EvaluateStaticInitializer(const Field& field) { |
2014 ASSERT(field.HasPrecompiledInitializer()); | 1931 ASSERT(field.HasPrecompiledInitializer()); |
2015 const Function& initializer = | 1932 const Function& initializer = |
2016 Function::Handle(field.PrecompiledInitializer()); | 1933 Function::Handle(field.PrecompiledInitializer()); |
2017 return DartEntry::InvokeFunction(initializer, Object::empty_array()); | 1934 return DartEntry::InvokeFunction(initializer, Object::empty_array()); |
2018 } | 1935 } |
2019 | 1936 |
2020 | 1937 |
2021 | |
2022 RawObject* Compiler::ExecuteOnce(SequenceNode* fragment) { | 1938 RawObject* Compiler::ExecuteOnce(SequenceNode* fragment) { |
2023 UNREACHABLE(); | 1939 UNREACHABLE(); |
2024 return Object::null(); | 1940 return Object::null(); |
2025 } | 1941 } |
2026 | 1942 |
2027 | 1943 |
2028 void BackgroundCompiler::CompileOptimized(const Function& function) { | 1944 void BackgroundCompiler::CompileOptimized(const Function& function) { |
2029 UNREACHABLE(); | 1945 UNREACHABLE(); |
2030 } | 1946 } |
2031 | 1947 |
2032 | 1948 |
2033 void BackgroundCompiler::VisitPointers(ObjectPointerVisitor* visitor) { | 1949 void BackgroundCompiler::VisitPointers(ObjectPointerVisitor* visitor) { |
2034 UNREACHABLE(); | 1950 UNREACHABLE(); |
2035 } | 1951 } |
2036 | 1952 |
2037 | 1953 |
2038 void BackgroundCompiler::Stop(BackgroundCompiler* task) { | 1954 void BackgroundCompiler::Stop(BackgroundCompiler* task) { |
2039 UNREACHABLE(); | 1955 UNREACHABLE(); |
2040 } | 1956 } |
2041 | 1957 |
2042 | 1958 |
2043 void BackgroundCompiler::EnsureInit(Thread* thread) { | 1959 void BackgroundCompiler::EnsureInit(Thread* thread) { |
2044 UNREACHABLE(); | 1960 UNREACHABLE(); |
2045 } | 1961 } |
2046 | 1962 |
2047 #endif // DART_PRECOMPILED_RUNTIME | 1963 #endif // DART_PRECOMPILED_RUNTIME |
2048 | 1964 |
2049 } // namespace dart | 1965 } // namespace dart |