Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(442)

Side by Side Diff: runtime/vm/compiler.cc

Issue 1663163003: Initial split of precompilation code from compiler.cc (Closed) Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: rebased Created 4 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « runtime/vm/compiler.h ('k') | runtime/vm/dart_api_impl.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/compiler.h" 5 #include "vm/compiler.h"
6 6
7 #include "vm/assembler.h" 7 #include "vm/assembler.h"
8 8
9 #include "vm/ast_printer.h" 9 #include "vm/ast_printer.h"
10 #include "vm/block_scheduler.h" 10 #include "vm/block_scheduler.h"
11 #include "vm/branch_optimizer.h" 11 #include "vm/branch_optimizer.h"
12 #include "vm/cha.h" 12 #include "vm/cha.h"
13 #include "vm/code_generator.h" 13 #include "vm/code_generator.h"
14 #include "vm/code_patcher.h" 14 #include "vm/code_patcher.h"
15 #include "vm/constant_propagator.h" 15 #include "vm/constant_propagator.h"
16 #include "vm/dart_entry.h" 16 #include "vm/dart_entry.h"
17 #include "vm/debugger.h" 17 #include "vm/debugger.h"
18 #include "vm/deopt_instructions.h" 18 #include "vm/deopt_instructions.h"
19 #include "vm/disassembler.h"
19 #include "vm/exceptions.h" 20 #include "vm/exceptions.h"
20 #include "vm/flags.h" 21 #include "vm/flags.h"
21 #include "vm/flow_graph.h" 22 #include "vm/flow_graph.h"
22 #include "vm/flow_graph_allocator.h" 23 #include "vm/flow_graph_allocator.h"
23 #include "vm/flow_graph_builder.h" 24 #include "vm/flow_graph_builder.h"
24 #include "vm/flow_graph_compiler.h" 25 #include "vm/flow_graph_compiler.h"
25 #include "vm/flow_graph_inliner.h" 26 #include "vm/flow_graph_inliner.h"
26 #include "vm/flow_graph_optimizer.h" 27 #include "vm/flow_graph_optimizer.h"
27 #include "vm/flow_graph_type_propagator.h" 28 #include "vm/flow_graph_type_propagator.h"
28 #include "vm/il_printer.h" 29 #include "vm/il_printer.h"
29 #include "vm/longjump.h" 30 #include "vm/longjump.h"
30 #include "vm/object.h" 31 #include "vm/object.h"
31 #include "vm/object_store.h" 32 #include "vm/object_store.h"
32 #include "vm/os.h" 33 #include "vm/os.h"
33 #include "vm/parser.h" 34 #include "vm/parser.h"
35 #include "vm/precompiler.h"
34 #include "vm/redundancy_elimination.h" 36 #include "vm/redundancy_elimination.h"
35 #include "vm/regexp_parser.h" 37 #include "vm/regexp_parser.h"
36 #include "vm/regexp_assembler.h" 38 #include "vm/regexp_assembler.h"
37 #include "vm/scanner.h"
38 #include "vm/symbols.h" 39 #include "vm/symbols.h"
39 #include "vm/tags.h" 40 #include "vm/tags.h"
40 #include "vm/thread_registry.h" 41 #include "vm/thread_registry.h"
41 #include "vm/timer.h" 42 #include "vm/timer.h"
42 43
43 namespace dart { 44 namespace dart {
44 45
45 DEFINE_FLAG(bool, allocation_sinking, true, 46 DEFINE_FLAG(bool, allocation_sinking, true,
46 "Attempt to sink temporary allocations to side exits"); 47 "Attempt to sink temporary allocations to side exits");
47 DEFINE_FLAG(bool, common_subexpression_elimination, true, 48 DEFINE_FLAG(bool, common_subexpression_elimination, true,
(...skipping 11 matching lines...) Expand all
59 "Print the deopt-id to ICData map in optimizing compiler."); 60 "Print the deopt-id to ICData map in optimizing compiler.");
60 DEFINE_FLAG(bool, range_analysis, true, "Enable range analysis"); 61 DEFINE_FLAG(bool, range_analysis, true, "Enable range analysis");
61 DEFINE_FLAG(bool, reorder_basic_blocks, true, "Enable basic-block reordering."); 62 DEFINE_FLAG(bool, reorder_basic_blocks, true, "Enable basic-block reordering.");
62 DEFINE_FLAG(bool, trace_compiler, false, "Trace compiler operations."); 63 DEFINE_FLAG(bool, trace_compiler, false, "Trace compiler operations.");
63 DEFINE_FLAG(bool, trace_optimizing_compiler, false, 64 DEFINE_FLAG(bool, trace_optimizing_compiler, false,
64 "Trace only optimizing compiler operations."); 65 "Trace only optimizing compiler operations.");
65 DEFINE_FLAG(bool, trace_bailout, false, "Print bailout from ssa compiler."); 66 DEFINE_FLAG(bool, trace_bailout, false, "Print bailout from ssa compiler.");
66 DEFINE_FLAG(bool, use_inlining, true, "Enable call-site inlining"); 67 DEFINE_FLAG(bool, use_inlining, true, "Enable call-site inlining");
67 DEFINE_FLAG(bool, verify_compiler, false, 68 DEFINE_FLAG(bool, verify_compiler, false,
68 "Enable compiler verification assertions"); 69 "Enable compiler verification assertions");
69 DEFINE_FLAG(int, max_speculative_inlining_attempts, 1,
70 "Max number of attempts with speculative inlining (precompilation only)");
71 70
72 DECLARE_FLAG(bool, background_compilation); 71 DECLARE_FLAG(bool, background_compilation);
73 DECLARE_FLAG(bool, huge_method_cutoff_in_code_size); 72 DECLARE_FLAG(bool, huge_method_cutoff_in_code_size);
74 DECLARE_FLAG(bool, load_deferred_eagerly); 73 DECLARE_FLAG(bool, load_deferred_eagerly);
75 DECLARE_FLAG(bool, trace_failed_optimization_attempts); 74 DECLARE_FLAG(bool, trace_failed_optimization_attempts);
76 DECLARE_FLAG(bool, trace_inlining_intervals);
77 DECLARE_FLAG(bool, trace_irregexp); 75 DECLARE_FLAG(bool, trace_irregexp);
78 DECLARE_FLAG(bool, precompilation); 76 DECLARE_FLAG(bool, precompilation);
79 77
80 78
81 #ifndef DART_PRECOMPILED_RUNTIME 79 #ifndef DART_PRECOMPILED_RUNTIME
82 80
83 // TODO(zerny): Factor out unoptimizing/optimizing pipelines and remove 81 void DartCompilationPipeline::ParseFunction(ParsedFunction* parsed_function) {
84 // separate helpers functions & `optimizing` args. 82 Parser::ParseFunction(parsed_function);
85 class CompilationPipeline : public ZoneAllocated { 83 parsed_function->AllocateVariables();
86 public: 84 }
87 static CompilationPipeline* New(Zone* zone, const Function& function);
88
89 virtual void ParseFunction(ParsedFunction* parsed_function) = 0;
90 virtual FlowGraph* BuildFlowGraph(
91 Zone* zone,
92 ParsedFunction* parsed_function,
93 const ZoneGrowableArray<const ICData*>& ic_data_array,
94 intptr_t osr_id) = 0;
95 virtual void FinalizeCompilation() = 0;
96 virtual ~CompilationPipeline() { }
97 };
98 85
99 86
100 class DartCompilationPipeline : public CompilationPipeline { 87 FlowGraph* DartCompilationPipeline::BuildFlowGraph(
101 public: 88 Zone* zone,
102 virtual void ParseFunction(ParsedFunction* parsed_function) { 89 ParsedFunction* parsed_function,
103 Parser::ParseFunction(parsed_function); 90 const ZoneGrowableArray<const ICData*>& ic_data_array,
104 parsed_function->AllocateVariables(); 91 intptr_t osr_id) {
105 } 92 // Build the flow graph.
93 FlowGraphBuilder builder(*parsed_function,
94 ic_data_array,
95 NULL, // NULL = not inlining.
96 osr_id);
106 97
107 virtual FlowGraph* BuildFlowGraph( 98 return builder.BuildGraph();
108 Zone* zone, 99 }
109 ParsedFunction* parsed_function,
110 const ZoneGrowableArray<const ICData*>& ic_data_array,
111 intptr_t osr_id) {
112 // Build the flow graph.
113 FlowGraphBuilder builder(*parsed_function,
114 ic_data_array,
115 NULL, // NULL = not inlining.
116 osr_id);
117
118 return builder.BuildGraph();
119 }
120
121 virtual void FinalizeCompilation() { }
122 };
123 100
124 101
125 class IrregexpCompilationPipeline : public CompilationPipeline { 102 void DartCompilationPipeline::FinalizeCompilation() { }
126 public:
127 IrregexpCompilationPipeline() : backtrack_goto_(NULL) { }
128 103
129 virtual void ParseFunction(ParsedFunction* parsed_function) {
130 RegExpParser::ParseFunction(parsed_function);
131 // Variables are allocated after compilation.
132 }
133 104
134 virtual FlowGraph* BuildFlowGraph( 105 void IrregexpCompilationPipeline::ParseFunction(
135 Zone* zone, 106 ParsedFunction* parsed_function) {
136 ParsedFunction* parsed_function, 107 RegExpParser::ParseFunction(parsed_function);
137 const ZoneGrowableArray<const ICData*>& ic_data_array, 108 // Variables are allocated after compilation.
138 intptr_t osr_id) { 109 }
139 // Compile to the dart IR.
140 RegExpEngine::CompilationResult result =
141 RegExpEngine::CompileIR(parsed_function->regexp_compile_data(),
142 parsed_function,
143 ic_data_array);
144 backtrack_goto_ = result.backtrack_goto;
145 110
146 // Allocate variables now that we know the number of locals. 111 FlowGraph* IrregexpCompilationPipeline::BuildFlowGraph(
147 parsed_function->AllocateIrregexpVariables(result.num_stack_locals); 112 Zone* zone,
113 ParsedFunction* parsed_function,
114 const ZoneGrowableArray<const ICData*>& ic_data_array,
115 intptr_t osr_id) {
116 // Compile to the dart IR.
117 RegExpEngine::CompilationResult result =
118 RegExpEngine::CompileIR(parsed_function->regexp_compile_data(),
119 parsed_function,
120 ic_data_array);
121 backtrack_goto_ = result.backtrack_goto;
148 122
149 // Build the flow graph. 123 // Allocate variables now that we know the number of locals.
150 FlowGraphBuilder builder(*parsed_function, 124 parsed_function->AllocateIrregexpVariables(result.num_stack_locals);
151 ic_data_array,
152 NULL, // NULL = not inlining.
153 osr_id);
154 125
155 return new(zone) FlowGraph(*parsed_function, 126 // Build the flow graph.
156 result.graph_entry, 127 FlowGraphBuilder builder(*parsed_function,
157 result.num_blocks); 128 ic_data_array,
158 } 129 NULL, // NULL = not inlining.
130 osr_id);
159 131
160 virtual void FinalizeCompilation() { 132 return new(zone) FlowGraph(*parsed_function,
161 backtrack_goto_->ComputeOffsetTable(); 133 result.graph_entry,
162 } 134 result.num_blocks);
135 }
163 136
164 private: 137 void IrregexpCompilationPipeline::FinalizeCompilation() {
165 IndirectGotoInstr* backtrack_goto_; 138 backtrack_goto_->ComputeOffsetTable();
166 }; 139 }
167
168 140
169 CompilationPipeline* CompilationPipeline::New(Zone* zone, 141 CompilationPipeline* CompilationPipeline::New(Zone* zone,
170 const Function& function) { 142 const Function& function) {
171 if (function.IsIrregexpFunction()) { 143 if (function.IsIrregexpFunction()) {
172 return new(zone) IrregexpCompilationPipeline(); 144 return new(zone) IrregexpCompilationPipeline();
173 } else { 145 } else {
174 return new(zone) DartCompilationPipeline(); 146 return new(zone) DartCompilationPipeline();
175 } 147 }
176 } 148 }
177 149
(...skipping 260 matching lines...) Expand 10 before | Expand all | Expand 10 after
438 const uint32_t prefix_invalidation_gen_at_start_; 410 const uint32_t prefix_invalidation_gen_at_start_;
439 411
440 DISALLOW_COPY_AND_ASSIGN(CompileParsedFunctionHelper); 412 DISALLOW_COPY_AND_ASSIGN(CompileParsedFunctionHelper);
441 }; 413 };
442 414
443 415
444 void CompileParsedFunctionHelper::FinalizeCompilation( 416 void CompileParsedFunctionHelper::FinalizeCompilation(
445 Assembler* assembler, 417 Assembler* assembler,
446 FlowGraphCompiler* graph_compiler, 418 FlowGraphCompiler* graph_compiler,
447 FlowGraph* flow_graph) { 419 FlowGraph* flow_graph) {
420 ASSERT(!FLAG_precompilation);
448 const Function& function = parsed_function()->function(); 421 const Function& function = parsed_function()->function();
449 Zone* const zone = thread()->zone(); 422 Zone* const zone = thread()->zone();
450 423
451 CSTAT_TIMER_SCOPE(thread(), codefinalizer_timer); 424 CSTAT_TIMER_SCOPE(thread(), codefinalizer_timer);
452 // CreateDeoptInfo uses the object pool and needs to be done before 425 // CreateDeoptInfo uses the object pool and needs to be done before
453 // FinalizeCode. 426 // FinalizeCode.
454 const Array& deopt_info_array = 427 const Array& deopt_info_array =
455 Array::Handle(zone, graph_compiler->CreateDeoptInfo(assembler)); 428 Array::Handle(zone, graph_compiler->CreateDeoptInfo(assembler));
456 INC_STAT(thread(), total_code_size, 429 INC_STAT(thread(), total_code_size,
457 deopt_info_array.Length() * sizeof(uword)); 430 deopt_info_array.Length() * sizeof(uword));
(...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after
536 ASSERT(!is_osr); // OSR is compiled in background. 509 ASSERT(!is_osr); // OSR is compiled in background.
537 function.InstallOptimizedCode(code, is_osr); 510 function.InstallOptimizedCode(code, is_osr);
538 } 511 }
539 if (function.usage_counter() < 0) { 512 if (function.usage_counter() < 0) {
540 // Reset to 0 so that it can be recompiled if needed. 513 // Reset to 0 so that it can be recompiled if needed.
541 function.set_usage_counter(0); 514 function.set_usage_counter(0);
542 } 515 }
543 } 516 }
544 517
545 // Register code with the classes it depends on because of CHA and 518 // Register code with the classes it depends on because of CHA and
546 // fields it depends on because of store guards, unless we cannot 519 // fields it depends on because of store guards.
547 // deopt. 520 // Deoptimize field dependent code first, before registering
548 if (!FLAG_precompilation) { 521 // this yet uninstalled code as dependent on a field.
549 // Deoptimize field dependent code first, before registering 522 // TODO(srdjan): Debugging dart2js crashes;
550 // this yet uninstalled code as dependent on a field. 523 // FlowGraphOptimizer::VisitStoreInstanceField populates
551 // TODO(srdjan): Debugging dart2js crashes; 524 // deoptimize_dependent_code() list, currently disabled.
552 // FlowGraphOptimizer::VisitStoreInstanceField populates 525 for (intptr_t i = 0;
553 // deoptimize_dependent_code() list, currently disabled. 526 i < flow_graph->deoptimize_dependent_code().length();
554 for (intptr_t i = 0; 527 i++) {
555 i < flow_graph->deoptimize_dependent_code().length(); 528 const Field* field = flow_graph->deoptimize_dependent_code()[i];
556 i++) { 529 field->DeoptimizeDependentCode();
557 const Field* field = flow_graph->deoptimize_dependent_code()[i]; 530 }
558 field->DeoptimizeDependentCode(); 531 for (intptr_t i = 0;
559 } 532 i < thread()->cha()->leaf_classes().length();
560 for (intptr_t i = 0; 533 ++i) {
561 i < thread()->cha()->leaf_classes().length(); 534 thread()->cha()->leaf_classes()[i]->RegisterCHACode(code);
562 ++i) { 535 }
563 thread()->cha()->leaf_classes()[i]->RegisterCHACode(code); 536 for (intptr_t i = 0;
564 } 537 i < flow_graph->guarded_fields()->length();
565 for (intptr_t i = 0; 538 i++) {
566 i < flow_graph->guarded_fields()->length(); 539 const Field* field = (*flow_graph->guarded_fields())[i];
567 i++) { 540 field->RegisterDependentCode(code);
568 const Field* field = (*flow_graph->guarded_fields())[i];
569 field->RegisterDependentCode(code);
570 }
571 } 541 }
572 } else { // not optimized. 542 } else { // not optimized.
573 if (!FLAG_precompilation && 543 if (function.ic_data_array() == Array::null()) {
574 (function.ic_data_array() == Array::null())) {
575 function.SaveICDataMap( 544 function.SaveICDataMap(
576 graph_compiler->deopt_id_to_ic_data(), 545 graph_compiler->deopt_id_to_ic_data(),
577 Array::Handle(zone, graph_compiler->edge_counters_array())); 546 Array::Handle(zone, graph_compiler->edge_counters_array()));
578 } 547 }
579 function.set_unoptimized_code(code); 548 function.set_unoptimized_code(code);
580 function.AttachCode(code); 549 function.AttachCode(code);
581 } 550 }
582 if (parsed_function()->HasDeferredPrefixes()) { 551 if (parsed_function()->HasDeferredPrefixes()) {
583 ASSERT(!FLAG_load_deferred_eagerly); 552 ASSERT(!FLAG_load_deferred_eagerly);
584 ZoneGrowableArray<const LibraryPrefix*>* prefixes = 553 ZoneGrowableArray<const LibraryPrefix*>* prefixes =
585 parsed_function()->deferred_prefixes(); 554 parsed_function()->deferred_prefixes();
586 for (intptr_t i = 0; i < prefixes->length(); i++) { 555 for (intptr_t i = 0; i < prefixes->length(); i++) {
587 (*prefixes)[i]->RegisterDependentCode(code); 556 (*prefixes)[i]->RegisterDependentCode(code);
588 } 557 }
589 } 558 }
590 } 559 }
591 560
592 561
593 // Return false if bailed out. 562 // Return false if bailed out.
594 // If optimized_result_code is not NULL then it is caller's responsibility 563 // If optimized_result_code is not NULL then it is caller's responsibility
595 // to install code. 564 // to install code.
596 bool CompileParsedFunctionHelper::Compile(CompilationPipeline* pipeline) { 565 bool CompileParsedFunctionHelper::Compile(CompilationPipeline* pipeline) {
566 ASSERT(!FLAG_precompilation);
597 const Function& function = parsed_function()->function(); 567 const Function& function = parsed_function()->function();
598 if (optimized() && !function.IsOptimizable()) { 568 if (optimized() && !function.IsOptimizable()) {
599 return false; 569 return false;
600 } 570 }
601 bool is_compiled = false; 571 bool is_compiled = false;
602 Zone* const zone = thread()->zone(); 572 Zone* const zone = thread()->zone();
603 TimelineStream* compiler_timeline = isolate()->GetCompilerStream(); 573 TimelineStream* compiler_timeline = isolate()->GetCompilerStream();
604 CSTAT_TIMER_SCOPE(thread(), codegen_timer); 574 CSTAT_TIMER_SCOPE(thread(), codegen_timer);
605 HANDLESCOPE(thread()); 575 HANDLESCOPE(thread());
606 576
607 // We may reattempt compilation if the function needs to be assembled using 577 // We may reattempt compilation if the function needs to be assembled using
608 // far branches on ARM and MIPS. In the else branch of the setjmp call, 578 // far branches on ARM and MIPS. In the else branch of the setjmp call,
609 // done is set to false, and use_far_branches is set to true if there is a 579 // done is set to false, and use_far_branches is set to true if there is a
610 // longjmp from the ARM or MIPS assemblers. In all other paths through this 580 // longjmp from the ARM or MIPS assemblers. In all other paths through this
611 // while loop, done is set to true. use_far_branches is always false on ia32 581 // while loop, done is set to true. use_far_branches is always false on ia32
612 // and x64. 582 // and x64.
613 bool done = false; 583 bool done = false;
614 // volatile because the variable may be clobbered by a longjmp. 584 // volatile because the variable may be clobbered by a longjmp.
615 volatile bool use_far_branches = false; 585 volatile bool use_far_branches = false;
616 volatile bool use_speculative_inlining = 586 const bool use_speculative_inlining = false;
617 FLAG_max_speculative_inlining_attempts > 0;
618 GrowableArray<intptr_t> inlining_black_list;
619 587
620 while (!done) { 588 while (!done) {
621 const intptr_t prev_deopt_id = thread()->deopt_id(); 589 const intptr_t prev_deopt_id = thread()->deopt_id();
622 thread()->set_deopt_id(0); 590 thread()->set_deopt_id(0);
623 LongJumpScope jump; 591 LongJumpScope jump;
624 const intptr_t val = setjmp(*jump.Set()); 592 const intptr_t val = setjmp(*jump.Set());
625 if (val == 0) { 593 if (val == 0) {
626 FlowGraph* flow_graph = NULL; 594 FlowGraph* flow_graph = NULL;
627 595
628 // Class hierarchy analysis is registered with the isolate in the 596 // Class hierarchy analysis is registered with the isolate in the
(...skipping 88 matching lines...) Expand 10 before | Expand all | Expand 10 after
717 TimelineDurationScope tds(thread(), 685 TimelineDurationScope tds(thread(),
718 compiler_timeline, 686 compiler_timeline,
719 "OptimizationPasses"); 687 "OptimizationPasses");
720 inline_id_to_function.Add(&function); 688 inline_id_to_function.Add(&function);
721 // Top scope function has no caller (-1). 689 // Top scope function has no caller (-1).
722 caller_inline_id.Add(-1); 690 caller_inline_id.Add(-1);
723 CSTAT_TIMER_SCOPE(thread(), graphoptimizer_timer); 691 CSTAT_TIMER_SCOPE(thread(), graphoptimizer_timer);
724 692
725 FlowGraphOptimizer optimizer(flow_graph, 693 FlowGraphOptimizer optimizer(flow_graph,
726 use_speculative_inlining, 694 use_speculative_inlining,
727 &inlining_black_list); 695 NULL);
728 if (FLAG_precompilation) {
729 optimizer.PopulateWithICData();
730
731 optimizer.ApplyClassIds();
732 DEBUG_ASSERT(flow_graph->VerifyUseLists());
733
734 FlowGraphTypePropagator::Propagate(flow_graph);
735 DEBUG_ASSERT(flow_graph->VerifyUseLists());
736 }
737 optimizer.ApplyICData(); 696 optimizer.ApplyICData();
738 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 697 DEBUG_ASSERT(flow_graph->VerifyUseLists());
739 698
740 // Optimize (a << b) & c patterns, merge operations. 699 // Optimize (a << b) & c patterns, merge operations.
741 // Run early in order to have more opportunity to optimize left shifts. 700 // Run early in order to have more opportunity to optimize left shifts.
742 optimizer.TryOptimizePatterns(); 701 optimizer.TryOptimizePatterns();
743 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 702 DEBUG_ASSERT(flow_graph->VerifyUseLists());
744 703
745 FlowGraphInliner::SetInliningId(flow_graph, 0); 704 FlowGraphInliner::SetInliningId(flow_graph, 0);
746 705
747 // Inlining (mutates the flow graph) 706 // Inlining (mutates the flow graph)
748 if (FLAG_use_inlining) { 707 if (FLAG_use_inlining) {
749 TimelineDurationScope tds2(thread(), 708 TimelineDurationScope tds2(thread(),
750 compiler_timeline, 709 compiler_timeline,
751 "Inlining"); 710 "Inlining");
752 CSTAT_TIMER_SCOPE(thread(), graphinliner_timer); 711 CSTAT_TIMER_SCOPE(thread(), graphinliner_timer);
753 // Propagate types to create more inlining opportunities. 712 // Propagate types to create more inlining opportunities.
754 FlowGraphTypePropagator::Propagate(flow_graph); 713 FlowGraphTypePropagator::Propagate(flow_graph);
755 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 714 DEBUG_ASSERT(flow_graph->VerifyUseLists());
756 715
757 // Use propagated class-ids to create more inlining opportunities. 716 // Use propagated class-ids to create more inlining opportunities.
758 optimizer.ApplyClassIds(); 717 optimizer.ApplyClassIds();
759 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 718 DEBUG_ASSERT(flow_graph->VerifyUseLists());
760 719
761 FlowGraphInliner inliner(flow_graph, 720 FlowGraphInliner inliner(flow_graph,
762 &inline_id_to_function, 721 &inline_id_to_function,
763 &caller_inline_id, 722 &caller_inline_id,
764 use_speculative_inlining, 723 use_speculative_inlining,
765 &inlining_black_list); 724 NULL);
766 inliner.Inline(); 725 inliner.Inline();
767 // Use lists are maintained and validated by the inliner. 726 // Use lists are maintained and validated by the inliner.
768 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 727 DEBUG_ASSERT(flow_graph->VerifyUseLists());
769 } 728 }
770 729
771 // Propagate types and eliminate more type tests. 730 // Propagate types and eliminate more type tests.
772 FlowGraphTypePropagator::Propagate(flow_graph); 731 FlowGraphTypePropagator::Propagate(flow_graph);
773 DEBUG_ASSERT(flow_graph->VerifyUseLists()); 732 DEBUG_ASSERT(flow_graph->VerifyUseLists());
774 733
775 { 734 {
(...skipping 302 matching lines...) Expand 10 before | Expand all | Expand 10 after
1078 // We bailed out or we encountered an error. 1037 // We bailed out or we encountered an error.
1079 const Error& error = Error::Handle(thread()->sticky_error()); 1038 const Error& error = Error::Handle(thread()->sticky_error());
1080 1039
1081 if (error.raw() == Object::branch_offset_error().raw()) { 1040 if (error.raw() == Object::branch_offset_error().raw()) {
1082 // Compilation failed due to an out of range branch offset in the 1041 // Compilation failed due to an out of range branch offset in the
1083 // assembler. We try again (done = false) with far branches enabled. 1042 // assembler. We try again (done = false) with far branches enabled.
1084 done = false; 1043 done = false;
1085 ASSERT(!use_far_branches); 1044 ASSERT(!use_far_branches);
1086 use_far_branches = true; 1045 use_far_branches = true;
1087 } else if (error.raw() == Object::speculative_inlining_error().raw()) { 1046 } else if (error.raw() == Object::speculative_inlining_error().raw()) {
1088 // The return value of setjmp is the deopt id of the check instruction 1047 // Can only happen with precompilation.
1089 // that caused the bailout. 1048 UNREACHABLE();
1090 done = false;
1091 #if defined(DEBUG)
1092 ASSERT(FLAG_precompilation);
1093 ASSERT(use_speculative_inlining);
1094 for (intptr_t i = 0; i < inlining_black_list.length(); ++i) {
1095 ASSERT(inlining_black_list[i] != val);
1096 }
1097 #endif
1098 inlining_black_list.Add(val);
1099 const intptr_t max_attempts = FLAG_max_speculative_inlining_attempts;
1100 if (inlining_black_list.length() >= max_attempts) {
1101 use_speculative_inlining = false;
1102 if (FLAG_trace_compiler || FLAG_trace_optimizing_compiler) {
1103 THR_Print("Disabled speculative inlining after %" Pd " attempts.\n",
1104 inlining_black_list.length());
1105 }
1106 }
1107 } else { 1049 } else {
1108 // If the error isn't due to an out of range branch offset, we don't 1050 // If the error isn't due to an out of range branch offset, we don't
1109 // try again (done = true), and indicate that we did not finish 1051 // try again (done = true), and indicate that we did not finish
1110 // compiling (is_compiled = false). 1052 // compiling (is_compiled = false).
1111 if (FLAG_trace_bailout) { 1053 if (FLAG_trace_bailout) {
1112 THR_Print("%s\n", error.ToErrorCString()); 1054 THR_Print("%s\n", error.ToErrorCString());
1113 } 1055 }
1114 done = true; 1056 done = true;
1115 } 1057 }
1116 1058
1117 // Clear the error if it was not a real error, but just a bailout. 1059 // Clear the error if it was not a real error, but just a bailout.
1118 if (error.IsLanguageError() && 1060 if (error.IsLanguageError() &&
1119 (LanguageError::Cast(error).kind() == Report::kBailout)) { 1061 (LanguageError::Cast(error).kind() == Report::kBailout)) {
1120 thread()->clear_sticky_error(); 1062 thread()->clear_sticky_error();
1121 } 1063 }
1122 is_compiled = false; 1064 is_compiled = false;
1123 } 1065 }
1124 // Reset global isolate state. 1066 // Reset global isolate state.
1125 thread()->set_deopt_id(prev_deopt_id); 1067 thread()->set_deopt_id(prev_deopt_id);
1126 } 1068 }
1127 return is_compiled; 1069 return is_compiled;
1128 } 1070 }
1129 1071
1130 1072
1131 static void DisassembleCode(const Function& function, bool optimized) {
1132 const char* function_fullname = function.ToFullyQualifiedCString();
1133 THR_Print("Code for %sfunction '%s' {\n",
1134 optimized ? "optimized " : "",
1135 function_fullname);
1136 const Code& code = Code::Handle(function.CurrentCode());
1137 code.Disassemble();
1138 THR_Print("}\n");
1139
1140 THR_Print("Pointer offsets for function: {\n");
1141 // Pointer offsets are stored in descending order.
1142 Object& obj = Object::Handle();
1143 for (intptr_t i = code.pointer_offsets_length() - 1; i >= 0; i--) {
1144 const uword addr = code.GetPointerOffsetAt(i) + code.EntryPoint();
1145 obj = *reinterpret_cast<RawObject**>(addr);
1146 THR_Print(" %d : %#" Px " '%s'\n",
1147 code.GetPointerOffsetAt(i), addr, obj.ToCString());
1148 }
1149 THR_Print("}\n");
1150
1151 THR_Print("PC Descriptors for function '%s' {\n", function_fullname);
1152 PcDescriptors::PrintHeaderString();
1153 const PcDescriptors& descriptors =
1154 PcDescriptors::Handle(code.pc_descriptors());
1155 THR_Print("%s}\n", descriptors.ToCString());
1156
1157 uword start = Instructions::Handle(code.instructions()).EntryPoint();
1158 const Array& deopt_table = Array::Handle(code.deopt_info_array());
1159 intptr_t deopt_table_length = DeoptTable::GetLength(deopt_table);
1160 if (deopt_table_length > 0) {
1161 THR_Print("DeoptInfo: {\n");
1162 Smi& offset = Smi::Handle();
1163 TypedData& info = TypedData::Handle();
1164 Smi& reason_and_flags = Smi::Handle();
1165 for (intptr_t i = 0; i < deopt_table_length; ++i) {
1166 DeoptTable::GetEntry(deopt_table, i, &offset, &info, &reason_and_flags);
1167 const intptr_t reason =
1168 DeoptTable::ReasonField::decode(reason_and_flags.Value());
1169 ASSERT((0 <= reason) && (reason < ICData::kDeoptNumReasons));
1170 THR_Print("%4" Pd ": 0x%" Px " %s (%s)\n",
1171 i,
1172 start + offset.Value(),
1173 DeoptInfo::ToCString(deopt_table, info),
1174 DeoptReasonToCString(
1175 static_cast<ICData::DeoptReasonId>(reason)));
1176 }
1177 THR_Print("}\n");
1178 }
1179
1180 const ObjectPool& object_pool = ObjectPool::Handle(code.GetObjectPool());
1181 object_pool.DebugPrint();
1182
1183 THR_Print("Stackmaps for function '%s' {\n", function_fullname);
1184 if (code.stackmaps() != Array::null()) {
1185 const Array& stackmap_table = Array::Handle(code.stackmaps());
1186 Stackmap& map = Stackmap::Handle();
1187 for (intptr_t i = 0; i < stackmap_table.Length(); ++i) {
1188 map ^= stackmap_table.At(i);
1189 THR_Print("%s\n", map.ToCString());
1190 }
1191 }
1192 THR_Print("}\n");
1193
1194 THR_Print("Variable Descriptors for function '%s' {\n",
1195 function_fullname);
1196 const LocalVarDescriptors& var_descriptors =
1197 LocalVarDescriptors::Handle(code.GetLocalVarDescriptors());
1198 intptr_t var_desc_length =
1199 var_descriptors.IsNull() ? 0 : var_descriptors.Length();
1200 String& var_name = String::Handle();
1201 for (intptr_t i = 0; i < var_desc_length; i++) {
1202 var_name = var_descriptors.GetName(i);
1203 RawLocalVarDescriptors::VarInfo var_info;
1204 var_descriptors.GetInfo(i, &var_info);
1205 const int8_t kind = var_info.kind();
1206 if (kind == RawLocalVarDescriptors::kSavedCurrentContext) {
1207 THR_Print(" saved current CTX reg offset %d\n", var_info.index());
1208 } else {
1209 if (kind == RawLocalVarDescriptors::kContextLevel) {
1210 THR_Print(" context level %d scope %d", var_info.index(),
1211 var_info.scope_id);
1212 } else if (kind == RawLocalVarDescriptors::kStackVar) {
1213 THR_Print(" stack var '%s' offset %d",
1214 var_name.ToCString(), var_info.index());
1215 } else {
1216 ASSERT(kind == RawLocalVarDescriptors::kContextVar);
1217 THR_Print(" context var '%s' level %d offset %d",
1218 var_name.ToCString(), var_info.scope_id, var_info.index());
1219 }
1220 THR_Print(" (valid %s-%s)\n", var_info.begin_pos.ToCString(),
1221 var_info.end_pos.ToCString());
1222 }
1223 }
1224 THR_Print("}\n");
1225
1226 THR_Print("Exception Handlers for function '%s' {\n", function_fullname);
1227 const ExceptionHandlers& handlers =
1228 ExceptionHandlers::Handle(code.exception_handlers());
1229 THR_Print("%s}\n", handlers.ToCString());
1230
1231 {
1232 THR_Print("Static call target functions {\n");
1233 const Array& table = Array::Handle(code.static_calls_target_table());
1234 Smi& offset = Smi::Handle();
1235 Function& function = Function::Handle();
1236 Code& code = Code::Handle();
1237 for (intptr_t i = 0; i < table.Length();
1238 i += Code::kSCallTableEntryLength) {
1239 offset ^= table.At(i + Code::kSCallTableOffsetEntry);
1240 function ^= table.At(i + Code::kSCallTableFunctionEntry);
1241 code ^= table.At(i + Code::kSCallTableCodeEntry);
1242 if (function.IsNull()) {
1243 Class& cls = Class::Handle();
1244 cls ^= code.owner();
1245 if (cls.IsNull()) {
1246 const String& code_name = String::Handle(code.Name());
1247 THR_Print(" 0x%" Px ": %s, %p\n",
1248 start + offset.Value(),
1249 code_name.ToCString(),
1250 code.raw());
1251 } else {
1252 THR_Print(" 0x%" Px ": allocation stub for %s, %p\n",
1253 start + offset.Value(),
1254 cls.ToCString(),
1255 code.raw());
1256 }
1257 } else {
1258 THR_Print(" 0x%" Px ": %s, %p\n",
1259 start + offset.Value(),
1260 function.ToFullyQualifiedCString(),
1261 code.raw());
1262 }
1263 }
1264 THR_Print("}\n");
1265 }
1266 if (optimized && FLAG_trace_inlining_intervals) {
1267 code.DumpInlinedIntervals();
1268 }
1269 }
1270
1271
#if defined(DEBUG)
// Verifies that the inliner (the function itself) always appears as the
// last entry of the inlined-function list recorded for each inlining
// interval of its current code.
// If this check fails, rerun with --trace-inlining-intervals for details.
static void CheckInliningIntervals(const Function& function) {
  const Code& code = Code::Handle(function.CurrentCode());
  const Array& intervals = Array::Handle(code.GetInlinedIntervals());
  // Nothing to verify when no inlining information was recorded.
  if (intervals.IsNull() || (intervals.Length() == 0)) return;
  Smi& interval_start = Smi::Handle();
  GrowableArray<Function*> inlined_functions;
  for (intptr_t i = 0; i < intervals.Length(); i += Code::kInlIntNumEntries) {
    interval_start ^= intervals.At(i + Code::kInlIntStart);
    ASSERT(!interval_start.IsNull());
    if (interval_start.IsNull()) continue;  // Defensive: skip malformed entry.
    code.GetInlinedFunctionsAt(interval_start.Value(), &inlined_functions);
    // The outermost (inlining) function must be the last element.
    ASSERT(inlined_functions[inlined_functions.length() - 1]->raw() ==
           function.raw());
  }
}
#endif
1291 1092
1292 1093
1293 static RawError* CompileFunctionHelper(CompilationPipeline* pipeline, 1094 static RawError* CompileFunctionHelper(CompilationPipeline* pipeline,
1294 const Function& function, 1095 const Function& function,
1295 bool optimized, 1096 bool optimized,
1296 intptr_t osr_id) { 1097 intptr_t osr_id) {
1297 // Check that we optimize if 'FLAG_precompilation' is set to true, 1098 ASSERT(!FLAG_precompilation);
1298 // except if the function is marked as not optimizable.
1299 ASSERT(!function.IsOptimizable() ||
1300 !FLAG_precompilation || optimized);
1301 ASSERT(!FLAG_precompilation || !function.HasCode());
1302 LongJumpScope jump; 1099 LongJumpScope jump;
1303 if (setjmp(*jump.Set()) == 0) { 1100 if (setjmp(*jump.Set()) == 0) {
1304 Thread* const thread = Thread::Current(); 1101 Thread* const thread = Thread::Current();
1305 Isolate* const isolate = thread->isolate(); 1102 Isolate* const isolate = thread->isolate();
1306 StackZone stack_zone(thread); 1103 StackZone stack_zone(thread);
1307 Zone* const zone = stack_zone.GetZone(); 1104 Zone* const zone = stack_zone.GetZone();
1308 const bool trace_compiler = 1105 const bool trace_compiler =
1309 FLAG_trace_compiler || 1106 FLAG_trace_compiler ||
1310 (FLAG_trace_optimizing_compiler && optimized); 1107 (FLAG_trace_optimizing_compiler && optimized);
1311 Timer per_compile_timer(trace_compiler, "Compilation time"); 1108 Timer per_compile_timer(trace_compiler, "Compilation time");
(...skipping 21 matching lines...) Expand all
1333 pipeline->ParseFunction(parsed_function); 1130 pipeline->ParseFunction(parsed_function);
1334 const int64_t num_tokens_after = STAT_VALUE(thread, num_tokens_consumed); 1131 const int64_t num_tokens_after = STAT_VALUE(thread, num_tokens_consumed);
1335 INC_STAT(thread, 1132 INC_STAT(thread,
1336 num_func_tokens_compiled, 1133 num_func_tokens_compiled,
1337 num_tokens_after - num_tokens_before); 1134 num_tokens_after - num_tokens_before);
1338 } 1135 }
1339 1136
1340 CompileParsedFunctionHelper helper(parsed_function, optimized, osr_id); 1137 CompileParsedFunctionHelper helper(parsed_function, optimized, osr_id);
1341 const bool success = helper.Compile(pipeline); 1138 const bool success = helper.Compile(pipeline);
1342 if (!success) { 1139 if (!success) {
1343 if (optimized && !FLAG_precompilation) { 1140 if (optimized) {
1344 // Optimizer bailed out. Disable optimizations and never try again. 1141 // Optimizer bailed out. Disable optimizations and never try again.
1345 if (trace_compiler) { 1142 if (trace_compiler) {
1346 THR_Print("--> disabling optimizations for '%s'\n", 1143 THR_Print("--> disabling optimizations for '%s'\n",
1347 function.ToFullyQualifiedCString()); 1144 function.ToFullyQualifiedCString());
1348 } else if (FLAG_trace_failed_optimization_attempts) { 1145 } else if (FLAG_trace_failed_optimization_attempts) {
1349 THR_Print("Cannot optimize: %s\n", 1146 THR_Print("Cannot optimize: %s\n",
1350 function.ToFullyQualifiedCString()); 1147 function.ToFullyQualifiedCString());
1351 } 1148 }
1352 function.SetIsOptimizable(false); 1149 function.SetIsOptimizable(false);
1353 return Error::null(); 1150 return Error::null();
(...skipping 17 matching lines...) Expand all
1371 Code::Handle(function.CurrentCode()).EntryPoint(), 1168 Code::Handle(function.CurrentCode()).EntryPoint(),
1372 Code::Handle(function.CurrentCode()).Size(), 1169 Code::Handle(function.CurrentCode()).Size(),
1373 per_compile_timer.TotalElapsedTime()); 1170 per_compile_timer.TotalElapsedTime());
1374 } 1171 }
1375 1172
1376 if (FLAG_support_debugger) { 1173 if (FLAG_support_debugger) {
1377 isolate->debugger()->NotifyCompilation(function); 1174 isolate->debugger()->NotifyCompilation(function);
1378 } 1175 }
1379 1176
1380 if (FLAG_disassemble && FlowGraphPrinter::ShouldPrint(function)) { 1177 if (FLAG_disassemble && FlowGraphPrinter::ShouldPrint(function)) {
1381 DisassembleCode(function, optimized); 1178 Disassembler::DisassembleCode(function, optimized);
1382 } else if (FLAG_disassemble_optimized && 1179 } else if (FLAG_disassemble_optimized &&
1383 optimized && 1180 optimized &&
1384 FlowGraphPrinter::ShouldPrint(function)) { 1181 FlowGraphPrinter::ShouldPrint(function)) {
1385 // TODO(fschneider): Print unoptimized code along with the optimized code. 1182 // TODO(fschneider): Print unoptimized code along with the optimized code.
1386 THR_Print("*** BEGIN CODE\n"); 1183 THR_Print("*** BEGIN CODE\n");
1387 DisassembleCode(function, true); 1184 Disassembler::DisassembleCode(function, true);
1388 THR_Print("*** END CODE\n"); 1185 THR_Print("*** END CODE\n");
1389 } 1186 }
1390 #if defined(DEBUG) 1187 #if defined(DEBUG)
1391 CheckInliningIntervals(function); 1188 CheckInliningIntervals(function);
1392 #endif 1189 #endif
1393 return Error::null(); 1190 return Error::null();
1394 } else { 1191 } else {
1395 Thread* const thread = Thread::Current(); 1192 Thread* const thread = Thread::Current();
1396 StackZone stack_zone(thread); 1193 StackZone stack_zone(thread);
1397 Error& error = Error::Handle(); 1194 Error& error = Error::Handle();
1398 // We got an error during compilation. 1195 // We got an error during compilation.
1399 error = thread->sticky_error(); 1196 error = thread->sticky_error();
1400 thread->clear_sticky_error(); 1197 thread->clear_sticky_error();
1401 // Unoptimized compilation or precompilation may encounter compile-time 1198 // Unoptimized compilation or precompilation may encounter compile-time
1402 // errors, but regular optimized compilation should not. 1199 // errors, but regular optimized compilation should not.
1403 ASSERT(!optimized || FLAG_precompilation); 1200 ASSERT(!optimized);
1404 // Do not attempt to optimize functions that can cause errors. 1201 // Do not attempt to optimize functions that can cause errors.
1405 function.set_is_optimizable(false); 1202 function.set_is_optimizable(false);
1406 return error.raw(); 1203 return error.raw();
1407 } 1204 }
1408 UNREACHABLE(); 1205 UNREACHABLE();
1409 return Error::null(); 1206 return Error::null();
1410 } 1207 }
1411 1208
1412 1209
1413 RawError* Compiler::CompileFunction(Thread* thread, 1210 RawError* Compiler::CompileFunction(Thread* thread,
1414 const Function& function) { 1211 const Function& function) {
1212 #ifdef DART_PRECOMPILER
1213 if (FLAG_precompilation) {
1214 return Precompiler::CompileFunction(thread, function);
1215 }
1216 #endif
1415 Isolate* isolate = thread->isolate(); 1217 Isolate* isolate = thread->isolate();
1416 VMTagScope tagScope(thread, VMTag::kCompileUnoptimizedTagId); 1218 VMTagScope tagScope(thread, VMTag::kCompileUnoptimizedTagId);
1417 TIMELINE_FUNCTION_COMPILATION_DURATION(thread, "Function", function); 1219 TIMELINE_FUNCTION_COMPILATION_DURATION(thread, "Function", function);
1418 1220
1419 if (!isolate->compilation_allowed()) { 1221 if (!isolate->compilation_allowed()) {
1420 FATAL3("Precompilation missed function %s (%s, %s)\n", 1222 FATAL3("Precompilation missed function %s (%s, %s)\n",
1421 function.ToLibNamePrefixedQualifiedCString(), 1223 function.ToLibNamePrefixedQualifiedCString(),
1422 function.token_pos().ToCString(), 1224 function.token_pos().ToCString(),
1423 Function::KindToCString(function.kind())); 1225 Function::KindToCString(function.kind()));
1424 } 1226 }
1425 1227
1426 CompilationPipeline* pipeline = 1228 CompilationPipeline* pipeline =
1427 CompilationPipeline::New(thread->zone(), function); 1229 CompilationPipeline::New(thread->zone(), function);
1428 1230
1429 const bool optimized =
1430 FLAG_precompilation && function.IsOptimizable();
1431
1432 return CompileFunctionHelper(pipeline, 1231 return CompileFunctionHelper(pipeline,
1433 function, 1232 function,
1434 optimized, 1233 /* optimized = */ false,
1435 kNoOSRDeoptId); 1234 kNoOSRDeoptId);
1436 } 1235 }
1437 1236
1438 1237
1439 RawError* Compiler::EnsureUnoptimizedCode(Thread* thread, 1238 RawError* Compiler::EnsureUnoptimizedCode(Thread* thread,
1440 const Function& function) { 1239 const Function& function) {
1441 if (function.unoptimized_code() != Object::null()) { 1240 if (function.unoptimized_code() != Object::null()) {
1442 return Error::null(); 1241 return Error::null();
1443 } 1242 }
1444 Code& original_code = Code::ZoneHandle(thread->zone()); 1243 Code& original_code = Code::ZoneHandle(thread->zone());
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after
1492 // This is only used from unit tests. 1291 // This is only used from unit tests.
1493 RawError* Compiler::CompileParsedFunction( 1292 RawError* Compiler::CompileParsedFunction(
1494 ParsedFunction* parsed_function) { 1293 ParsedFunction* parsed_function) {
1495 LongJumpScope jump; 1294 LongJumpScope jump;
1496 if (setjmp(*jump.Set()) == 0) { 1295 if (setjmp(*jump.Set()) == 0) {
1497 // Non-optimized code generator. 1296 // Non-optimized code generator.
1498 DartCompilationPipeline pipeline; 1297 DartCompilationPipeline pipeline;
1499 CompileParsedFunctionHelper helper(parsed_function, false, kNoOSRDeoptId); 1298 CompileParsedFunctionHelper helper(parsed_function, false, kNoOSRDeoptId);
1500 helper.Compile(&pipeline); 1299 helper.Compile(&pipeline);
1501 if (FLAG_disassemble) { 1300 if (FLAG_disassemble) {
1502 DisassembleCode(parsed_function->function(), false); 1301 Disassembler::DisassembleCode(parsed_function->function(), false);
1503 } 1302 }
1504 return Error::null(); 1303 return Error::null();
1505 } else { 1304 } else {
1506 Error& error = Error::Handle(); 1305 Error& error = Error::Handle();
1507 Thread* thread = Thread::Current(); 1306 Thread* thread = Thread::Current();
1508 // We got an error during compilation. 1307 // We got an error during compilation.
1509 error = thread->sticky_error(); 1308 error = thread->sticky_error();
1510 thread->clear_sticky_error(); 1309 thread->clear_sticky_error();
1511 return error.raw(); 1310 return error.raw();
1512 } 1311 }
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after
1559 return error.raw(); 1358 return error.raw();
1560 } 1359 }
1561 func.ClearICDataArray(); 1360 func.ClearICDataArray();
1562 func.ClearCode(); 1361 func.ClearCode();
1563 } 1362 }
1564 } 1363 }
1565 return error.raw(); 1364 return error.raw();
1566 } 1365 }
1567 1366
1568 1367
1569 void Compiler::CompileStaticInitializer(const Field& field) {
1570 ASSERT(field.is_static());
1571 if (field.HasPrecompiledInitializer()) {
1572 // TODO(rmacnak): Investigate why this happens for _enum_names.
1573 OS::Print("Warning: Ignoring repeated request for initializer for %s\n",
1574 field.ToCString());
1575 return;
1576 }
1577 Thread* thread = Thread::Current();
1578 StackZone zone(thread);
1579
1580 ParsedFunction* parsed_function = Parser::ParseStaticFieldInitializer(field);
1581
1582 parsed_function->AllocateVariables();
1583 // Non-optimized code generator.
1584 DartCompilationPipeline pipeline;
1585 CompileParsedFunctionHelper helper(parsed_function, false, kNoOSRDeoptId);
1586 helper.Compile(&pipeline);
1587 const Function& initializer = parsed_function->function();
1588 field.SetPrecompiledInitializer(initializer);
1589 }
1590
1591
1592 RawObject* Compiler::EvaluateStaticInitializer(const Field& field) { 1368 RawObject* Compiler::EvaluateStaticInitializer(const Field& field) {
1369 #ifdef DART_PRECOMPILER
1370 if (FLAG_precompilation) {
1371 return Precompiler::EvaluateStaticInitializer(field);
1372 }
1373 #endif
1593 ASSERT(field.is_static()); 1374 ASSERT(field.is_static());
1594 // The VM sets the field's value to transiton_sentinel prior to 1375 // The VM sets the field's value to transiton_sentinel prior to
1595 // evaluating the initializer value. 1376 // evaluating the initializer value.
1596 ASSERT(field.StaticValue() == Object::transition_sentinel().raw()); 1377 ASSERT(field.StaticValue() == Object::transition_sentinel().raw());
1597 LongJumpScope jump; 1378 LongJumpScope jump;
1598 if (setjmp(*jump.Set()) == 0) { 1379 if (setjmp(*jump.Set()) == 0) {
1599 // Under precompilation, the initializer may have already been compiled, in 1380 // Under lazy compilation initializer has not yet been created, so create
1600 // which case use it. Under lazy compilation or early in precompilation, the 1381 // it now, but don't bother remembering it because it won't be used again.
1601 // initializer has not yet been created, so create it now, but don't bother 1382 ASSERT(!field.HasPrecompiledInitializer());
1602 // remembering it because it won't be used again. 1383 Thread* const thread = Thread::Current();
1603 Function& initializer = Function::Handle(); 1384 StackZone zone(thread);
1604 if (!field.HasPrecompiledInitializer()) { 1385 ParsedFunction* parsed_function =
1605 Thread* const thread = Thread::Current(); 1386 Parser::ParseStaticFieldInitializer(field);
1606 StackZone zone(thread);
1607 ParsedFunction* parsed_function =
1608 Parser::ParseStaticFieldInitializer(field);
1609 1387
1610 parsed_function->AllocateVariables(); 1388 parsed_function->AllocateVariables();
1611 // Non-optimized code generator. 1389 // Non-optimized code generator.
1612 DartCompilationPipeline pipeline; 1390 DartCompilationPipeline pipeline;
1613 CompileParsedFunctionHelper helper(parsed_function, false, kNoOSRDeoptId); 1391 CompileParsedFunctionHelper helper(parsed_function, false, kNoOSRDeoptId);
1614 helper.Compile(&pipeline); 1392 helper.Compile(&pipeline);
1615 initializer = parsed_function->function().raw(); 1393 const Function& initializer =
1616 Code::Handle(initializer.unoptimized_code()).set_var_descriptors( 1394 Function::Handle(parsed_function->function().raw());
1617 Object::empty_var_descriptors()); 1395 Code::Handle(initializer.unoptimized_code()).set_var_descriptors(
1618 } else { 1396 Object::empty_var_descriptors());
1619 initializer ^= field.PrecompiledInitializer();
1620 }
1621 // Invoke the function to evaluate the expression. 1397 // Invoke the function to evaluate the expression.
1622 return DartEntry::InvokeFunction(initializer, Object::empty_array()); 1398 return DartEntry::InvokeFunction(initializer, Object::empty_array());
1623 } else { 1399 } else {
1624 Thread* const thread = Thread::Current(); 1400 Thread* const thread = Thread::Current();
1625 StackZone zone(thread); 1401 StackZone zone(thread);
1626 const Error& error = Error::Handle(thread->zone(), thread->sticky_error()); 1402 const Error& error = Error::Handle(thread->zone(), thread->sticky_error());
1627 thread->clear_sticky_error(); 1403 thread->clear_sticky_error();
1628 return error.raw(); 1404 return error.raw();
1629 } 1405 }
1630 UNREACHABLE(); 1406 UNREACHABLE();
1631 return Object::null(); 1407 return Object::null();
1632 } 1408 }
1633 1409
1634 1410
1635 1411
1636 RawObject* Compiler::ExecuteOnce(SequenceNode* fragment) { 1412 RawObject* Compiler::ExecuteOnce(SequenceNode* fragment) {
1413 #ifdef DART_PRECOMPILER
1414 if (FLAG_precompilation) {
1415 return Precompiler::ExecuteOnce(fragment);
1416 }
1417 #endif
1637 LongJumpScope jump; 1418 LongJumpScope jump;
1638 if (setjmp(*jump.Set()) == 0) { 1419 if (setjmp(*jump.Set()) == 0) {
1639 Thread* const thread = Thread::Current(); 1420 Thread* const thread = Thread::Current();
1640 if (FLAG_trace_compiler) { 1421 if (FLAG_trace_compiler) {
1641 THR_Print("compiling expression: "); 1422 THR_Print("compiling expression: ");
1642 AstPrinter::PrintNode(fragment); 1423 AstPrinter::PrintNode(fragment);
1643 } 1424 }
1644 1425
1645 // Create a dummy function object for the code generator. 1426 // Create a dummy function object for the code generator.
1646 // The function needs to be associated with a named Class: the interface 1427 // The function needs to be associated with a named Class: the interface
(...skipping 292 matching lines...) Expand 10 before | Expand all | Expand 10 after
1939 } 1720 }
1940 if (start_task) { 1721 if (start_task) {
1941 Dart::thread_pool()->Run(isolate->background_compiler()); 1722 Dart::thread_pool()->Run(isolate->background_compiler());
1942 } 1723 }
1943 } 1724 }
1944 1725
1945 1726
1946 #else // DART_PRECOMPILED_RUNTIME 1727 #else // DART_PRECOMPILED_RUNTIME
1947 1728
1948 1729
1730 CompilationPipeline* CompilationPipeline::New(Zone* zone,
1731 const Function& function) {
1732 UNREACHABLE();
1733 return NULL;
1734 }
1735
1736
1949 DEFINE_RUNTIME_ENTRY(CompileFunction, 1) { 1737 DEFINE_RUNTIME_ENTRY(CompileFunction, 1) {
1950 const Function& function = Function::CheckedHandle(arguments.ArgAt(0)); 1738 const Function& function = Function::CheckedHandle(arguments.ArgAt(0));
1951 FATAL3("Precompilation missed function %s (%" Pd ", %s)\n", 1739 FATAL3("Precompilation missed function %s (%" Pd ", %s)\n",
1952 function.ToLibNamePrefixedQualifiedCString(), 1740 function.ToLibNamePrefixedQualifiedCString(),
1953 function.token_pos().value(), 1741 function.token_pos().value(),
1954 Function::KindToCString(function.kind())); 1742 Function::KindToCString(function.kind()));
1955 } 1743 }
1956 1744
1957 1745
1958 bool Compiler::IsBackgroundCompilation() { 1746 bool Compiler::IsBackgroundCompilation() {
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after
2006 UNREACHABLE(); 1794 UNREACHABLE();
2007 } 1795 }
2008 1796
2009 1797
2010 RawError* Compiler::CompileAllFunctions(const Class& cls) { 1798 RawError* Compiler::CompileAllFunctions(const Class& cls) {
2011 UNREACHABLE(); 1799 UNREACHABLE();
2012 return Error::null(); 1800 return Error::null();
2013 } 1801 }
2014 1802
2015 1803
2016 void Compiler::CompileStaticInitializer(const Field& field) {
2017 UNREACHABLE();
2018 }
2019
2020
2021 RawObject* Compiler::EvaluateStaticInitializer(const Field& field) { 1804 RawObject* Compiler::EvaluateStaticInitializer(const Field& field) {
2022 ASSERT(field.HasPrecompiledInitializer()); 1805 ASSERT(field.HasPrecompiledInitializer());
2023 const Function& initializer = 1806 const Function& initializer =
2024 Function::Handle(field.PrecompiledInitializer()); 1807 Function::Handle(field.PrecompiledInitializer());
2025 return DartEntry::InvokeFunction(initializer, Object::empty_array()); 1808 return DartEntry::InvokeFunction(initializer, Object::empty_array());
2026 } 1809 }
2027 1810
2028 1811
2029
2030 RawObject* Compiler::ExecuteOnce(SequenceNode* fragment) { 1812 RawObject* Compiler::ExecuteOnce(SequenceNode* fragment) {
2031 UNREACHABLE(); 1813 UNREACHABLE();
2032 return Object::null(); 1814 return Object::null();
2033 } 1815 }
2034 1816
2035 1817
2036 void Compiler::AbortBackgroundCompilation(intptr_t deopt_id) { 1818 void Compiler::AbortBackgroundCompilation(intptr_t deopt_id) {
2037 UNREACHABLE(); 1819 UNREACHABLE();
2038 } 1820 }
2039 1821
(...skipping 13 matching lines...) Expand all
2053 } 1835 }
2054 1836
2055 1837
2056 void BackgroundCompiler::EnsureInit(Thread* thread) { 1838 void BackgroundCompiler::EnsureInit(Thread* thread) {
2057 UNREACHABLE(); 1839 UNREACHABLE();
2058 } 1840 }
2059 1841
2060 #endif // DART_PRECOMPILED_RUNTIME 1842 #endif // DART_PRECOMPILED_RUNTIME
2061 1843
2062 } // namespace dart 1844 } // namespace dart
OLDNEW
« no previous file with comments | « runtime/vm/compiler.h ('k') | runtime/vm/dart_api_impl.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698