Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(151)

Side by Side Diff: runtime/vm/flow_graph_inliner.cc

Issue 298913007: Use isolate when allocating Zone objects and handles: focus on FlowGraphOptimizer, next inliner. (Closed) Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: Created 6 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « runtime/vm/flow_graph_compiler.cc ('k') | runtime/vm/flow_graph_optimizer.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/flow_graph_inliner.h" 5 #include "vm/flow_graph_inliner.h"
6 6
7 #include "vm/block_scheduler.h" 7 #include "vm/block_scheduler.h"
8 #include "vm/compiler.h" 8 #include "vm/compiler.h"
9 #include "vm/flags.h" 9 #include "vm/flags.h"
10 #include "vm/flow_graph.h" 10 #include "vm/flow_graph.h"
(...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after
80 while (env != NULL) { 80 while (env != NULL) {
81 if (code.raw() == env->code().raw()) { 81 if (code.raw() == env->code().raw()) {
82 return true; 82 return true;
83 } 83 }
84 env = env->outer(); 84 env = env->outer();
85 } 85 }
86 return false; 86 return false;
87 } 87 }
88 88
89 89
90 // Helper to create a parameter stub from an actual argument.
91 static Definition* CreateParameterStub(intptr_t i,
92 Value* argument,
93 FlowGraph* graph) {
94 ConstantInstr* constant = argument->definition()->AsConstant();
95 if (constant != NULL) {
96 return new ConstantInstr(constant->value());
97 } else {
98 return new ParameterInstr(i, graph->graph_entry());
99 }
100 }
101
102
103 // Helper to get the default value of a formal parameter. 90 // Helper to get the default value of a formal parameter.
104 static ConstantInstr* GetDefaultValue(intptr_t i, 91 static ConstantInstr* GetDefaultValue(intptr_t i,
105 const ParsedFunction& parsed_function) { 92 const ParsedFunction& parsed_function) {
106 return new ConstantInstr(Object::ZoneHandle( 93 return new ConstantInstr(Object::ZoneHandle(
107 parsed_function.default_parameter_values().At(i))); 94 parsed_function.default_parameter_values().At(i)));
108 } 95 }
109 96
110 97
111 // Pair of an argument name and its value. 98 // Pair of an argument name and its value.
112 struct NamedArgument { 99 struct NamedArgument {
(...skipping 326 matching lines...) Expand 10 before | Expand all | Expand 10 after
439 426
440 private: 427 private:
441 bool CheckInlinedDuplicate(const Function& target); 428 bool CheckInlinedDuplicate(const Function& target);
442 bool CheckNonInlinedDuplicate(const Function& target); 429 bool CheckNonInlinedDuplicate(const Function& target);
443 430
444 bool TryInliningPoly(intptr_t receiver_cid, const Function& target); 431 bool TryInliningPoly(intptr_t receiver_cid, const Function& target);
445 bool TryInlineRecognizedMethod(intptr_t receiver_cid, const Function& target); 432 bool TryInlineRecognizedMethod(intptr_t receiver_cid, const Function& target);
446 433
447 TargetEntryInstr* BuildDecisionGraph(); 434 TargetEntryInstr* BuildDecisionGraph();
448 435
436 Isolate* isolate() const;
437
449 CallSiteInliner* const owner_; 438 CallSiteInliner* const owner_;
450 PolymorphicInstanceCallInstr* const call_; 439 PolymorphicInstanceCallInstr* const call_;
451 const intptr_t num_variants_; 440 const intptr_t num_variants_;
452 GrowableArray<CidTarget> variants_; 441 GrowableArray<CidTarget> variants_;
453 442
454 GrowableArray<CidTarget> inlined_variants_; 443 GrowableArray<CidTarget> inlined_variants_;
455 GrowableArray<CidTarget> non_inlined_variants_; 444 GrowableArray<CidTarget> non_inlined_variants_;
456 GrowableArray<BlockEntryInstr*> inlined_entries_; 445 GrowableArray<BlockEntryInstr*> inlined_entries_;
457 InlineExitCollector* exit_collector_; 446 InlineExitCollector* exit_collector_;
458 447
459 const Function& caller_function_; 448 const Function& caller_function_;
460 }; 449 };
461 450
462 451
463 class CallSiteInliner : public ValueObject { 452 class CallSiteInliner : public ValueObject {
464 public: 453 public:
465 explicit CallSiteInliner(FlowGraph* flow_graph) 454 explicit CallSiteInliner(FlowGraph* flow_graph)
466 : caller_graph_(flow_graph), 455 : caller_graph_(flow_graph),
467 inlined_(false), 456 inlined_(false),
468 initial_size_(flow_graph->InstructionCount()), 457 initial_size_(flow_graph->InstructionCount()),
469 inlined_size_(0), 458 inlined_size_(0),
470 inlining_depth_(1), 459 inlining_depth_(1),
471 collected_call_sites_(NULL), 460 collected_call_sites_(NULL),
472 inlining_call_sites_(NULL), 461 inlining_call_sites_(NULL),
473 function_cache_(), 462 function_cache_(),
474 inlined_info_() { } 463 inlined_info_() { }
475 464
476 FlowGraph* caller_graph() const { return caller_graph_; } 465 FlowGraph* caller_graph() const { return caller_graph_; }
477 466
467 Isolate* isolate() const { return caller_graph_->isolate(); }
468
478 // Inlining heuristics based on Cooper et al. 2008. 469 // Inlining heuristics based on Cooper et al. 2008.
479 bool ShouldWeInline(const Function& callee, 470 bool ShouldWeInline(const Function& callee,
480 intptr_t instr_count, 471 intptr_t instr_count,
481 intptr_t call_site_count, 472 intptr_t call_site_count,
482 intptr_t const_arg_count) { 473 intptr_t const_arg_count) {
483 if (inlined_size_ > FLAG_inlining_caller_size_threshold) { 474 if (inlined_size_ > FLAG_inlining_caller_size_threshold) {
484 // Prevent methods becoming humongous and thus slow to compile. 475 // Prevent methods becoming humongous and thus slow to compile.
485 return false; 476 return false;
486 } 477 }
487 if (instr_count > FLAG_inlining_callee_size_threshold) { 478 if (instr_count > FLAG_inlining_callee_size_threshold) {
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after
547 inlining_call_sites_ = NULL; 538 inlining_call_sites_ = NULL;
548 } 539 }
549 540
550 bool inlined() const { return inlined_; } 541 bool inlined() const { return inlined_; }
551 542
552 double GrowthFactor() const { 543 double GrowthFactor() const {
553 return static_cast<double>(inlined_size_) / 544 return static_cast<double>(inlined_size_) /
554 static_cast<double>(initial_size_); 545 static_cast<double>(initial_size_);
555 } 546 }
556 547
548 // Helper to create a parameter stub from an actual argument.
549 Definition* CreateParameterStub(intptr_t i,
550 Value* argument,
551 FlowGraph* graph) {
552 ConstantInstr* constant = argument->definition()->AsConstant();
553 if (constant != NULL) {
554 return new(isolate()) ConstantInstr(constant->value());
555 } else {
556 return new(isolate()) ParameterInstr(i, graph->graph_entry());
557 }
558 }
559
557 bool TryInlining(const Function& function, 560 bool TryInlining(const Function& function,
558 const Array& argument_names, 561 const Array& argument_names,
559 InlinedCallData* call_data) { 562 InlinedCallData* call_data) {
560 TRACE_INLINING(OS::Print(" => %s (deopt count %d)\n", 563 TRACE_INLINING(OS::Print(" => %s (deopt count %d)\n",
561 function.ToCString(), 564 function.ToCString(),
562 function.deoptimization_counter())); 565 function.deoptimization_counter()));
563 566
564 // TODO(fschneider): Enable inlining inside try-blocks. 567 // TODO(fschneider): Enable inlining inside try-blocks.
565 if (call_data->call->GetBlock()->try_index() != 568 if (call_data->call->GetBlock()->try_index() !=
566 CatchClauseNode::kInvalidTryIndex) { 569 CatchClauseNode::kInvalidTryIndex) {
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after
612 // Abort if this is a recursive occurrence. 615 // Abort if this is a recursive occurrence.
613 Definition* call = call_data->call; 616 Definition* call = call_data->call;
614 if (!FLAG_inline_recursive && IsCallRecursive(unoptimized_code, call)) { 617 if (!FLAG_inline_recursive && IsCallRecursive(unoptimized_code, call)) {
615 function.set_is_inlinable(false); 618 function.set_is_inlinable(false);
616 TRACE_INLINING(OS::Print(" Bailout: recursive function\n")); 619 TRACE_INLINING(OS::Print(" Bailout: recursive function\n"));
617 PRINT_INLINING_TREE("Recursive function", 620 PRINT_INLINING_TREE("Recursive function",
618 &call_data->caller, &function, call_data->call); 621 &call_data->caller, &function, call_data->call);
619 return false; 622 return false;
620 } 623 }
621 624
622 Isolate* isolate = Isolate::Current();
623 // Save and clear deopt id. 625 // Save and clear deopt id.
624 const intptr_t prev_deopt_id = isolate->deopt_id(); 626 const intptr_t prev_deopt_id = isolate()->deopt_id();
625 isolate->set_deopt_id(0); 627 isolate()->set_deopt_id(0);
626 // Install bailout jump. 628 // Install bailout jump.
627 LongJumpScope jump; 629 LongJumpScope jump;
628 if (setjmp(*jump.Set()) == 0) { 630 if (setjmp(*jump.Set()) == 0) {
629 // Parse the callee function. 631 // Parse the callee function.
630 bool in_cache; 632 bool in_cache;
631 ParsedFunction* parsed_function; 633 ParsedFunction* parsed_function;
632 { 634 {
633 TimerScope timer(FLAG_compiler_stats, 635 TimerScope timer(FLAG_compiler_stats,
634 &CompilerStats::graphinliner_parse_timer, 636 &CompilerStats::graphinliner_parse_timer,
635 isolate); 637 isolate());
636 parsed_function = GetParsedFunction(function, &in_cache); 638 parsed_function = GetParsedFunction(function, &in_cache);
637 } 639 }
638 640
639 // Load IC data for the callee. 641 // Load IC data for the callee.
640 Array& ic_data_array = Array::Handle(); 642 Array& ic_data_array = Array::Handle();
641 643
642 // IsInlineable above checked HasCode. Creating a Handle for the code 644 // IsInlineable above checked HasCode. Creating a Handle for the code
643 // should have kept GC from detaching, but let's assert just to make sure. 645 // should have kept GC from detaching, but let's assert just to make sure.
644 ASSERT(function.HasCode()); 646 ASSERT(function.HasCode());
645 ic_data_array = unoptimized_code.ExtractTypeFeedbackArray(); 647 ic_data_array = unoptimized_code.ExtractTypeFeedbackArray();
646 648
647 // Build the callee graph. 649 // Build the callee graph.
648 InlineExitCollector* exit_collector = 650 InlineExitCollector* exit_collector =
649 new InlineExitCollector(caller_graph_, call); 651 new(isolate()) InlineExitCollector(caller_graph_, call);
650 FlowGraphBuilder builder(parsed_function, 652 FlowGraphBuilder builder(parsed_function,
651 ic_data_array, 653 ic_data_array,
652 exit_collector, 654 exit_collector,
653 Isolate::kNoDeoptId, 655 Isolate::kNoDeoptId,
654 true); 656 true);
655 builder.SetInitialBlockId(caller_graph_->max_block_id()); 657 builder.SetInitialBlockId(caller_graph_->max_block_id());
656 FlowGraph* callee_graph; 658 FlowGraph* callee_graph;
657 { 659 {
658 TimerScope timer(FLAG_compiler_stats, 660 TimerScope timer(FLAG_compiler_stats,
659 &CompilerStats::graphinliner_build_timer, 661 &CompilerStats::graphinliner_build_timer,
660 isolate); 662 isolate());
661 callee_graph = builder.BuildGraph(); 663 callee_graph = builder.BuildGraph();
662 } 664 }
663 665
664 // The parameter stubs are a copy of the actual arguments providing 666 // The parameter stubs are a copy of the actual arguments providing
665 // concrete information about the values, for example constant values, 667 // concrete information about the values, for example constant values,
666 // without linking between the caller and callee graphs. 668 // without linking between the caller and callee graphs.
667 // TODO(zerny): Put more information in the stubs, eg, type information. 669 // TODO(zerny): Put more information in the stubs, eg, type information.
668 ZoneGrowableArray<Definition*>* param_stubs = 670 ZoneGrowableArray<Definition*>* param_stubs =
669 new ZoneGrowableArray<Definition*>(function.NumParameters()); 671 new(isolate()) ZoneGrowableArray<Definition*>(
672 function.NumParameters());
670 673
671 // Create a parameter stub for each fixed positional parameter. 674 // Create a parameter stub for each fixed positional parameter.
672 for (intptr_t i = 0; i < function.num_fixed_parameters(); ++i) { 675 for (intptr_t i = 0; i < function.num_fixed_parameters(); ++i) {
673 param_stubs->Add(CreateParameterStub(i, (*arguments)[i], callee_graph)); 676 param_stubs->Add(CreateParameterStub(i, (*arguments)[i], callee_graph));
674 } 677 }
675 678
676 // If the callee has optional parameters, rebuild the argument and stub 679 // If the callee has optional parameters, rebuild the argument and stub
677 // arrays so that actual arguments are in one-to-one with the formal 680 // arrays so that actual arguments are in one-to-one with the formal
678 // parameters. 681 // parameters.
679 if (function.HasOptionalParameters()) { 682 if (function.HasOptionalParameters()) {
(...skipping 14 matching lines...) Expand all
694 // After treating optional parameters the actual/formal count must match. 697 // After treating optional parameters the actual/formal count must match.
695 ASSERT(arguments->length() == function.NumParameters()); 698 ASSERT(arguments->length() == function.NumParameters());
696 ASSERT(param_stubs->length() == callee_graph->parameter_count()); 699 ASSERT(param_stubs->length() == callee_graph->parameter_count());
697 700
698 BlockScheduler block_scheduler(callee_graph); 701 BlockScheduler block_scheduler(callee_graph);
699 block_scheduler.AssignEdgeWeights(); 702 block_scheduler.AssignEdgeWeights();
700 703
701 { 704 {
702 TimerScope timer(FLAG_compiler_stats, 705 TimerScope timer(FLAG_compiler_stats,
703 &CompilerStats::graphinliner_ssa_timer, 706 &CompilerStats::graphinliner_ssa_timer,
704 isolate); 707 isolate());
705 // Compute SSA on the callee graph, catching bailouts. 708 // Compute SSA on the callee graph, catching bailouts.
706 callee_graph->ComputeSSA(caller_graph_->max_virtual_register_number(), 709 callee_graph->ComputeSSA(caller_graph_->max_virtual_register_number(),
707 param_stubs); 710 param_stubs);
708 DEBUG_ASSERT(callee_graph->VerifyUseLists()); 711 DEBUG_ASSERT(callee_graph->VerifyUseLists());
709 } 712 }
710 713
711 { 714 {
712 TimerScope timer(FLAG_compiler_stats, 715 TimerScope timer(FLAG_compiler_stats,
713 &CompilerStats::graphinliner_opt_timer, 716 &CompilerStats::graphinliner_opt_timer,
714 isolate); 717 isolate());
715 // TODO(zerny): Do more optimization passes on the callee graph. 718 // TODO(zerny): Do more optimization passes on the callee graph.
716 FlowGraphOptimizer optimizer(callee_graph); 719 FlowGraphOptimizer optimizer(callee_graph);
717 optimizer.ApplyICData(); 720 optimizer.ApplyICData();
718 DEBUG_ASSERT(callee_graph->VerifyUseLists()); 721 DEBUG_ASSERT(callee_graph->VerifyUseLists());
719 722
720 // Optimize (a << b) & c patterns, merge instructions. Must occur before 723 // Optimize (a << b) & c patterns, merge instructions. Must occur before
721 // 'SelectRepresentations' which inserts conversion nodes. 724 // 'SelectRepresentations' which inserts conversion nodes.
722 optimizer.TryOptimizePatterns(); 725 optimizer.TryOptimizePatterns();
723 DEBUG_ASSERT(callee_graph->VerifyUseLists()); 726 DEBUG_ASSERT(callee_graph->VerifyUseLists());
724 } 727 }
(...skipping 21 matching lines...) Expand all
746 function.set_optimized_call_site_count(call_site_count); 749 function.set_optimized_call_site_count(call_site_count);
747 750
748 // Use heuristics do decide if this call should be inlined. 751 // Use heuristics do decide if this call should be inlined.
749 if (!ShouldWeInline(function, size, call_site_count, constants_count)) { 752 if (!ShouldWeInline(function, size, call_site_count, constants_count)) {
750 // If size is larger than all thresholds, don't consider it again. 753 // If size is larger than all thresholds, don't consider it again.
751 if ((size > FLAG_inlining_size_threshold) && 754 if ((size > FLAG_inlining_size_threshold) &&
752 (call_site_count > FLAG_inlining_callee_call_sites_threshold) && 755 (call_site_count > FLAG_inlining_callee_call_sites_threshold) &&
753 (size > FLAG_inlining_constant_arguments_size_threshold)) { 756 (size > FLAG_inlining_constant_arguments_size_threshold)) {
754 function.set_is_inlinable(false); 757 function.set_is_inlinable(false);
755 } 758 }
756 isolate->set_deopt_id(prev_deopt_id); 759 isolate()->set_deopt_id(prev_deopt_id);
757 TRACE_INLINING(OS::Print(" Bailout: heuristics with " 760 TRACE_INLINING(OS::Print(" Bailout: heuristics with "
758 "code size: %" Pd ", " 761 "code size: %" Pd ", "
759 "call sites: %" Pd ", " 762 "call sites: %" Pd ", "
760 "const args: %" Pd "\n", 763 "const args: %" Pd "\n",
761 size, 764 size,
762 call_site_count, 765 call_site_count,
763 constants_count)); 766 constants_count));
764 PRINT_INLINING_TREE("Heuristic fail", 767 PRINT_INLINING_TREE("Heuristic fail",
765 &call_data->caller, &function, call_data->call); 768 &call_data->caller, &function, call_data->call);
766 return false; 769 return false;
(...skipping 13 matching lines...) Expand all
780 } 783 }
781 784
782 // Add the function to the cache. 785 // Add the function to the cache.
783 if (!in_cache) { 786 if (!in_cache) {
784 function_cache_.Add(parsed_function); 787 function_cache_.Add(parsed_function);
785 } 788 }
786 789
787 // Build succeeded so we restore the bailout jump. 790 // Build succeeded so we restore the bailout jump.
788 inlined_ = true; 791 inlined_ = true;
789 inlined_size_ += size; 792 inlined_size_ += size;
790 isolate->set_deopt_id(prev_deopt_id); 793 isolate()->set_deopt_id(prev_deopt_id);
791 794
792 call_data->callee_graph = callee_graph; 795 call_data->callee_graph = callee_graph;
793 call_data->parameter_stubs = param_stubs; 796 call_data->parameter_stubs = param_stubs;
794 call_data->exit_collector = exit_collector; 797 call_data->exit_collector = exit_collector;
795 798
796 // When inlined, we add the guarded fields of the callee to the caller's 799 // When inlined, we add the guarded fields of the callee to the caller's
797 // list of guarded fields. 800 // list of guarded fields.
798 for (intptr_t i = 0; i < callee_graph->guarded_fields()->length(); ++i) { 801 for (intptr_t i = 0; i < callee_graph->guarded_fields()->length(); ++i) {
799 FlowGraph::AddToGuardedFields(caller_graph_->guarded_fields(), 802 FlowGraph::AddToGuardedFields(caller_graph_->guarded_fields(),
800 (*callee_graph->guarded_fields())[i]); 803 (*callee_graph->guarded_fields())[i]);
801 } 804 }
802 805
803 // We allocate a ZoneHandle for the unoptimized code so that it cannot be 806 // We allocate a ZoneHandle for the unoptimized code so that it cannot be
804 // disconnected from its function during the rest of compilation. 807 // disconnected from its function during the rest of compilation.
805 Code::ZoneHandle(unoptimized_code.raw()); 808 Code::ZoneHandle(unoptimized_code.raw());
806 TRACE_INLINING(OS::Print(" Success\n")); 809 TRACE_INLINING(OS::Print(" Success\n"));
807 PRINT_INLINING_TREE(NULL, 810 PRINT_INLINING_TREE(NULL,
808 &call_data->caller, &function, call); 811 &call_data->caller, &function, call);
809 return true; 812 return true;
810 } else { 813 } else {
811 Error& error = Error::Handle(); 814 Error& error = Error::Handle();
812 error = isolate->object_store()->sticky_error(); 815 error = isolate()->object_store()->sticky_error();
813 isolate->object_store()->clear_sticky_error(); 816 isolate()->object_store()->clear_sticky_error();
814 isolate->set_deopt_id(prev_deopt_id); 817 isolate()->set_deopt_id(prev_deopt_id);
815 TRACE_INLINING(OS::Print(" Bailout: %s\n", error.ToErrorCString())); 818 TRACE_INLINING(OS::Print(" Bailout: %s\n", error.ToErrorCString()));
816 PRINT_INLINING_TREE("Bailout", 819 PRINT_INLINING_TREE("Bailout",
817 &call_data->caller, &function, call); 820 &call_data->caller, &function, call);
818 return false; 821 return false;
819 } 822 }
820 } 823 }
821 824
822 void PrintInlinedInfo(const Function& top) { 825 void PrintInlinedInfo(const Function& top) {
823 if (inlined_info_.length() > 0) { 826 if (inlined_info_.length() > 0) {
824 OS::Print("Inlining into: '%s' growth: %f (%" Pd " -> %" Pd ")\n", 827 OS::Print("Inlining into: '%s' growth: %f (%" Pd " -> %" Pd ")\n",
(...skipping 117 matching lines...) Expand 10 before | Expand all | Expand 10 after
942 ParsedFunction* GetParsedFunction(const Function& function, bool* in_cache) { 945 ParsedFunction* GetParsedFunction(const Function& function, bool* in_cache) {
943 // TODO(zerny): Use a hash map for the cache. 946 // TODO(zerny): Use a hash map for the cache.
944 for (intptr_t i = 0; i < function_cache_.length(); ++i) { 947 for (intptr_t i = 0; i < function_cache_.length(); ++i) {
945 ParsedFunction* parsed_function = function_cache_[i]; 948 ParsedFunction* parsed_function = function_cache_[i];
946 if (parsed_function->function().raw() == function.raw()) { 949 if (parsed_function->function().raw() == function.raw()) {
947 *in_cache = true; 950 *in_cache = true;
948 return parsed_function; 951 return parsed_function;
949 } 952 }
950 } 953 }
951 *in_cache = false; 954 *in_cache = false;
952 ParsedFunction* parsed_function = new ParsedFunction(function); 955 ParsedFunction* parsed_function = new(isolate()) ParsedFunction(function);
953 Parser::ParseFunction(parsed_function); 956 Parser::ParseFunction(parsed_function);
954 parsed_function->AllocateVariables(); 957 parsed_function->AllocateVariables();
955 return parsed_function; 958 return parsed_function;
956 } 959 }
957 960
958 // Include special handling for List. factory: inlining it is not helpful 961 // Include special handling for List. factory: inlining it is not helpful
959 // if the incoming argument is a non-constant value. 962 // if the incoming argument is a non-constant value.
960 // TODO(srdjan): Fix inlining of List. factory. 963 // TODO(srdjan): Fix inlining of List. factory.
961 void InlineStaticCalls() { 964 void InlineStaticCalls() {
962 const GrowableArray<CallSites::StaticCallInfo>& call_info = 965 const GrowableArray<CallSites::StaticCallInfo>& call_info =
(...skipping 133 matching lines...) Expand 10 before | Expand all | Expand 10 after
1096 } 1099 }
1097 ASSERT(function.NumOptionalPositionalParameters() == 1100 ASSERT(function.NumOptionalPositionalParameters() ==
1098 (param_count - fixed_param_count)); 1101 (param_count - fixed_param_count));
1099 // For each optional positional parameter without an actual, add its 1102 // For each optional positional parameter without an actual, add its
1100 // default value. 1103 // default value.
1101 for (intptr_t i = arg_count; i < param_count; ++i) { 1104 for (intptr_t i = arg_count; i < param_count; ++i) {
1102 const Object& object = 1105 const Object& object =
1103 Object::ZoneHandle( 1106 Object::ZoneHandle(
1104 parsed_function.default_parameter_values().At( 1107 parsed_function.default_parameter_values().At(
1105 i - fixed_param_count)); 1108 i - fixed_param_count));
1106 ConstantInstr* constant = new ConstantInstr(object); 1109 ConstantInstr* constant = new(isolate()) ConstantInstr(object);
1107 arguments->Add(NULL); 1110 arguments->Add(NULL);
1108 param_stubs->Add(constant); 1111 param_stubs->Add(constant);
1109 } 1112 }
1110 return true; 1113 return true;
1111 } 1114 }
1112 1115
1113 ASSERT(function.HasOptionalNamedParameters()); 1116 ASSERT(function.HasOptionalNamedParameters());
1114 1117
1115 // Passed arguments must match fixed parameters plus named arguments. 1118 // Passed arguments must match fixed parameters plus named arguments.
1116 intptr_t argument_names_count = 1119 intptr_t argument_names_count =
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after
1181 PolymorphicInliner::PolymorphicInliner(CallSiteInliner* owner, 1184 PolymorphicInliner::PolymorphicInliner(CallSiteInliner* owner,
1182 PolymorphicInstanceCallInstr* call, 1185 PolymorphicInstanceCallInstr* call,
1183 const Function& caller_function) 1186 const Function& caller_function)
1184 : owner_(owner), 1187 : owner_(owner),
1185 call_(call), 1188 call_(call),
1186 num_variants_(call->ic_data().NumberOfChecks()), 1189 num_variants_(call->ic_data().NumberOfChecks()),
1187 variants_(num_variants_), 1190 variants_(num_variants_),
1188 inlined_variants_(num_variants_), 1191 inlined_variants_(num_variants_),
1189 non_inlined_variants_(num_variants_), 1192 non_inlined_variants_(num_variants_),
1190 inlined_entries_(num_variants_), 1193 inlined_entries_(num_variants_),
1191 exit_collector_(new InlineExitCollector(owner->caller_graph(), call)), 1194 exit_collector_(new(isolate())
1195 InlineExitCollector(owner->caller_graph(), call)),
1192 caller_function_(caller_function) { 1196 caller_function_(caller_function) {
1193 } 1197 }
1194 1198
1195 1199
1200 Isolate* PolymorphicInliner::isolate() const {
1201 return owner_->caller_graph()->isolate();
1202 }
1203
1204
1196 // Inlined bodies are shared if two different class ids have the same 1205 // Inlined bodies are shared if two different class ids have the same
1197 // inlined target. This sharing is represented by using three different 1206 // inlined target. This sharing is represented by using three different
1198 // types of entries in the inlined_entries_ array: 1207 // types of entries in the inlined_entries_ array:
1199 // 1208 //
1200 // * GraphEntry: the inlined body is not shared. 1209 // * GraphEntry: the inlined body is not shared.
1201 // 1210 //
1202 // * TargetEntry: the inlined body is shared and this is the first variant. 1211 // * TargetEntry: the inlined body is shared and this is the first variant.
1203 // 1212 //
1204 // * JoinEntry: the inlined body is shared and this is a subsequent variant. 1213 // * JoinEntry: the inlined body is shared and this is a subsequent variant.
1205 bool PolymorphicInliner::CheckInlinedDuplicate(const Function& target) { 1214 bool PolymorphicInliner::CheckInlinedDuplicate(const Function& target) {
1206 for (intptr_t i = 0; i < inlined_variants_.length(); ++i) { 1215 for (intptr_t i = 0; i < inlined_variants_.length(); ++i) {
1207 if ((target.raw() == inlined_variants_[i].target->raw()) && 1216 if ((target.raw() == inlined_variants_[i].target->raw()) &&
1208 !MethodRecognizer::PolymorphicTarget(target)) { 1217 !MethodRecognizer::PolymorphicTarget(target)) {
1209 // The call target is shared with a previous inlined variant. Share 1218 // The call target is shared with a previous inlined variant. Share
1210 // the graph. This requires a join block at the entry, and edge-split 1219 // the graph. This requires a join block at the entry, and edge-split
1211 // form requires a target for each branch. 1220 // form requires a target for each branch.
1212 // 1221 //
1213 // Represent the sharing by recording a fresh target for the first 1222 // Represent the sharing by recording a fresh target for the first
1214 // variant and the shared join for all later variants. 1223 // variant and the shared join for all later variants.
1215 if (inlined_entries_[i]->IsGraphEntry()) { 1224 if (inlined_entries_[i]->IsGraphEntry()) {
1216 // Convert the old target entry to a new join entry. 1225 // Convert the old target entry to a new join entry.
1217 TargetEntryInstr* old_target = 1226 TargetEntryInstr* old_target =
1218 inlined_entries_[i]->AsGraphEntry()->normal_entry(); 1227 inlined_entries_[i]->AsGraphEntry()->normal_entry();
1219 // Unuse all inputs in the the old graph entry since it is not part of 1228 // Unuse all inputs in the the old graph entry since it is not part of
1220 // the graph anymore. A new target be created instead. 1229 // the graph anymore. A new target be created instead.
1221 inlined_entries_[i]->AsGraphEntry()->UnuseAllInputs(); 1230 inlined_entries_[i]->AsGraphEntry()->UnuseAllInputs();
1222 1231
1223 JoinEntryInstr* new_join = BranchSimplifier::ToJoinEntry(old_target); 1232 JoinEntryInstr* new_join =
1233 BranchSimplifier::ToJoinEntry(isolate(), old_target);
1224 old_target->ReplaceAsPredecessorWith(new_join); 1234 old_target->ReplaceAsPredecessorWith(new_join);
1225 for (intptr_t j = 0; j < old_target->dominated_blocks().length(); ++j) { 1235 for (intptr_t j = 0; j < old_target->dominated_blocks().length(); ++j) {
1226 BlockEntryInstr* block = old_target->dominated_blocks()[j]; 1236 BlockEntryInstr* block = old_target->dominated_blocks()[j];
1227 new_join->AddDominatedBlock(block); 1237 new_join->AddDominatedBlock(block);
1228 } 1238 }
1229 // Create a new target with the join as unconditional successor. 1239 // Create a new target with the join as unconditional successor.
1230 TargetEntryInstr* new_target = 1240 TargetEntryInstr* new_target =
1231 new TargetEntryInstr(owner_->caller_graph()->allocate_block_id(), 1241 new TargetEntryInstr(owner_->caller_graph()->allocate_block_id(),
1232 old_target->try_index()); 1242 old_target->try_index());
1233 new_target->InheritDeoptTarget(new_join); 1243 new_target->InheritDeoptTarget(isolate(), new_join);
1234 GotoInstr* new_goto = new GotoInstr(new_join); 1244 GotoInstr* new_goto = new(isolate()) GotoInstr(new_join);
1235 new_goto->InheritDeoptTarget(new_join); 1245 new_goto->InheritDeoptTarget(isolate(), new_join);
1236 new_target->LinkTo(new_goto); 1246 new_target->LinkTo(new_goto);
1237 new_target->set_last_instruction(new_goto); 1247 new_target->set_last_instruction(new_goto);
1238 new_join->predecessors_.Add(new_target); 1248 new_join->predecessors_.Add(new_target);
1239 1249
1240 // Record the new target for the first variant. 1250 // Record the new target for the first variant.
1241 inlined_entries_[i] = new_target; 1251 inlined_entries_[i] = new_target;
1242 } 1252 }
1243 ASSERT(inlined_entries_[i]->IsTargetEntry()); 1253 ASSERT(inlined_entries_[i]->IsTargetEntry());
1244 // Record the shared join for this variant. 1254 // Record the shared join for this variant.
1245 BlockEntryInstr* join = 1255 BlockEntryInstr* join =
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after
1289 FlowGraph* callee_graph = call_data.callee_graph; 1299 FlowGraph* callee_graph = call_data.callee_graph;
1290 call_data.exit_collector->PrepareGraphs(callee_graph); 1300 call_data.exit_collector->PrepareGraphs(callee_graph);
1291 inlined_entries_.Add(callee_graph->graph_entry()); 1301 inlined_entries_.Add(callee_graph->graph_entry());
1292 exit_collector_->Union(call_data.exit_collector); 1302 exit_collector_->Union(call_data.exit_collector);
1293 1303
1294 // Replace parameter stubs and constants. Replace the receiver argument 1304 // Replace parameter stubs and constants. Replace the receiver argument
1295 // with a redefinition to prevent code from the inlined body from being 1305 // with a redefinition to prevent code from the inlined body from being
1296 // hoisted above the inlined entry. 1306 // hoisted above the inlined entry.
1297 ASSERT(arguments.length() > 0); 1307 ASSERT(arguments.length() > 0);
1298 Value* actual = arguments[0]; 1308 Value* actual = arguments[0];
1299 RedefinitionInstr* redefinition = new RedefinitionInstr(actual->Copy()); 1309 RedefinitionInstr* redefinition = new(isolate())
1310 RedefinitionInstr(actual->Copy(isolate()));
1300 redefinition->set_ssa_temp_index( 1311 redefinition->set_ssa_temp_index(
1301 owner_->caller_graph()->alloc_ssa_temp_index()); 1312 owner_->caller_graph()->alloc_ssa_temp_index());
1302 redefinition->InsertAfter(callee_graph->graph_entry()->normal_entry()); 1313 redefinition->InsertAfter(callee_graph->graph_entry()->normal_entry());
1303 Definition* stub = (*call_data.parameter_stubs)[0]; 1314 Definition* stub = (*call_data.parameter_stubs)[0];
1304 stub->ReplaceUsesWith(redefinition); 1315 stub->ReplaceUsesWith(redefinition);
1305 1316
1306 for (intptr_t i = 1; i < arguments.length(); ++i) { 1317 for (intptr_t i = 1; i < arguments.length(); ++i) {
1307 actual = arguments[i]; 1318 actual = arguments[i];
1308 if (actual != NULL) { 1319 if (actual != NULL) {
1309 stub = (*call_data.parameter_stubs)[i]; 1320 stub = (*call_data.parameter_stubs)[i];
(...skipping 27 matching lines...) Expand all
1337 bool PolymorphicInliner::TryInlineRecognizedMethod(intptr_t receiver_cid, 1348 bool PolymorphicInliner::TryInlineRecognizedMethod(intptr_t receiver_cid,
1338 const Function& target) { 1349 const Function& target) {
1339 FlowGraphOptimizer optimizer(owner_->caller_graph()); 1350 FlowGraphOptimizer optimizer(owner_->caller_graph());
1340 TargetEntryInstr* entry; 1351 TargetEntryInstr* entry;
1341 Definition* last; 1352 Definition* last;
1342 // Replace the receiver argument with a redefinition to prevent code from 1353 // Replace the receiver argument with a redefinition to prevent code from
1343 // the inlined body from being hoisted above the inlined entry. 1354 // the inlined body from being hoisted above the inlined entry.
1344 GrowableArray<Definition*> arguments(call_->ArgumentCount()); 1355 GrowableArray<Definition*> arguments(call_->ArgumentCount());
1345 Definition* receiver = call_->ArgumentAt(0); 1356 Definition* receiver = call_->ArgumentAt(0);
1346 RedefinitionInstr* redefinition = 1357 RedefinitionInstr* redefinition =
1347 new RedefinitionInstr(new Value(receiver)); 1358 new(isolate()) RedefinitionInstr(new(isolate()) Value(receiver));
1348 redefinition->set_ssa_temp_index( 1359 redefinition->set_ssa_temp_index(
1349 owner_->caller_graph()->alloc_ssa_temp_index()); 1360 owner_->caller_graph()->alloc_ssa_temp_index());
1350 if (optimizer.TryInlineRecognizedMethod(receiver_cid, 1361 if (optimizer.TryInlineRecognizedMethod(receiver_cid,
1351 target, 1362 target,
1352 call_, 1363 call_,
1353 redefinition, 1364 redefinition,
1354 call_->instance_call()->token_pos(), 1365 call_->instance_call()->token_pos(),
1355 *call_->instance_call()->ic_data(), 1366 *call_->instance_call()->ic_data(),
1356 &entry, &last)) { 1367 &entry, &last)) {
1357 // Create a graph fragment. 1368 // Create a graph fragment.
1358 redefinition->InsertAfter(entry); 1369 redefinition->InsertAfter(entry);
1359 InlineExitCollector* exit_collector = 1370 InlineExitCollector* exit_collector =
1360 new InlineExitCollector(owner_->caller_graph(), call_); 1371 new(isolate()) InlineExitCollector(owner_->caller_graph(), call_);
1361 1372
1362 ReturnInstr* result = 1373 ReturnInstr* result =
1363 new ReturnInstr(call_->instance_call()->token_pos(), 1374 new(isolate()) ReturnInstr(call_->instance_call()->token_pos(),
1364 new Value(last)); 1375 new(isolate()) Value(last));
1365 owner_->caller_graph()->AppendTo( 1376 owner_->caller_graph()->AppendTo(
1366 last, 1377 last,
1367 result, 1378 result,
1368 call_->env(), // Return can become deoptimization target. 1379 call_->env(), // Return can become deoptimization target.
1369 FlowGraph::kEffect); 1380 FlowGraph::kEffect);
1370 entry->set_last_instruction(result); 1381 entry->set_last_instruction(result);
1371 exit_collector->AddExit(result); 1382 exit_collector->AddExit(result);
1372 GraphEntryInstr* graph_entry = 1383 GraphEntryInstr* graph_entry =
1373 new GraphEntryInstr(NULL, // No parsed function. 1384 new(isolate()) GraphEntryInstr(NULL, // No parsed function.
1374 entry, 1385 entry,
1375 Isolate::kNoDeoptId); // No OSR id. 1386 Isolate::kNoDeoptId); // No OSR id.
1376 // Update polymorphic inliner state. 1387 // Update polymorphic inliner state.
1377 inlined_entries_.Add(graph_entry); 1388 inlined_entries_.Add(graph_entry);
1378 exit_collector_->Union(exit_collector); 1389 exit_collector_->Union(exit_collector);
1379 return true; 1390 return true;
1380 } 1391 }
1381 return false; 1392 return false;
1382 } 1393 }
1383 1394
1384 1395
1385 // Build a DAG to dispatch to the inlined function bodies. Load the class 1396 // Build a DAG to dispatch to the inlined function bodies. Load the class
1386 // id of the receiver and make explicit comparisons for each inlined body, 1397 // id of the receiver and make explicit comparisons for each inlined body,
1387 // in frequency order. If all variants are inlined, the entry to the last 1398 // in frequency order. If all variants are inlined, the entry to the last
1388 // inlined body is guarded by a CheckClassId instruction which can deopt. 1399 // inlined body is guarded by a CheckClassId instruction which can deopt.
1389 // If not all variants are inlined, we add a PolymorphicInstanceCall 1400 // If not all variants are inlined, we add a PolymorphicInstanceCall
1390 // instruction to handle the non-inlined variants. 1401 // instruction to handle the non-inlined variants.
1391 TargetEntryInstr* PolymorphicInliner::BuildDecisionGraph() { 1402 TargetEntryInstr* PolymorphicInliner::BuildDecisionGraph() {
1392 // Start with a fresh target entry. 1403 // Start with a fresh target entry.
1393 TargetEntryInstr* entry = 1404 TargetEntryInstr* entry =
1394 new TargetEntryInstr(owner_->caller_graph()->allocate_block_id(), 1405 new(isolate()) TargetEntryInstr(
1395 call_->GetBlock()->try_index()); 1406 owner_->caller_graph()->allocate_block_id(),
1396 entry->InheritDeoptTarget(call_); 1407 call_->GetBlock()->try_index());
1408 entry->InheritDeoptTarget(isolate(), call_);
1397 1409
1398 // This function uses a cursor (a pointer to the 'current' instruction) to 1410 // This function uses a cursor (a pointer to the 'current' instruction) to
1399 // build the graph. The next instruction will be inserted after the 1411 // build the graph. The next instruction will be inserted after the
1400 // cursor. 1412 // cursor.
1401 TargetEntryInstr* current_block = entry; 1413 TargetEntryInstr* current_block = entry;
1402 Instruction* cursor = entry; 1414 Instruction* cursor = entry;
1403 1415
1404 Definition* receiver = call_->ArgumentAt(0); 1416 Definition* receiver = call_->ArgumentAt(0);
1405 // There are at least two variants including non-inlined ones, so we have 1417 // There are at least two variants including non-inlined ones, so we have
1406 // at least one branch on the class id. 1418 // at least one branch on the class id.
1407 LoadClassIdInstr* load_cid = new LoadClassIdInstr(new Value(receiver)); 1419 LoadClassIdInstr* load_cid =
1420 new(isolate()) LoadClassIdInstr(new(isolate()) Value(receiver));
1408 load_cid->set_ssa_temp_index(owner_->caller_graph()->alloc_ssa_temp_index()); 1421 load_cid->set_ssa_temp_index(owner_->caller_graph()->alloc_ssa_temp_index());
1409 cursor = AppendInstruction(cursor, load_cid); 1422 cursor = AppendInstruction(cursor, load_cid);
1410 for (intptr_t i = 0; i < inlined_variants_.length(); ++i) { 1423 for (intptr_t i = 0; i < inlined_variants_.length(); ++i) {
1411 // 1. Guard the body with a class id check. 1424 // 1. Guard the body with a class id check.
1412 if ((i == (inlined_variants_.length() - 1)) && 1425 if ((i == (inlined_variants_.length() - 1)) &&
1413 non_inlined_variants_.is_empty()) { 1426 non_inlined_variants_.is_empty()) {
1414 // If it is the last variant use a check class or check smi 1427 // If it is the last variant use a check class or check smi
1415 // instruction which can deoptimize, followed unconditionally by the 1428 // instruction which can deoptimize, followed unconditionally by the
1416 // body. Check a redefinition of the receiver, to prevent the check 1429 // body. Check a redefinition of the receiver, to prevent the check
1417 // from being hoisted. 1430 // from being hoisted.
1418 RedefinitionInstr* redefinition = 1431 RedefinitionInstr* redefinition =
1419 new RedefinitionInstr(new Value(receiver)); 1432 new(isolate()) RedefinitionInstr(new(isolate()) Value(receiver));
1420 redefinition->set_ssa_temp_index( 1433 redefinition->set_ssa_temp_index(
1421 owner_->caller_graph()->alloc_ssa_temp_index()); 1434 owner_->caller_graph()->alloc_ssa_temp_index());
1422 cursor = AppendInstruction(cursor, redefinition); 1435 cursor = AppendInstruction(cursor, redefinition);
1423 if (inlined_variants_[i].cid == kSmiCid) { 1436 if (inlined_variants_[i].cid == kSmiCid) {
1424 CheckSmiInstr* check_smi = 1437 CheckSmiInstr* check_smi =
1425 new CheckSmiInstr(new Value(redefinition), 1438 new CheckSmiInstr(new Value(redefinition),
1426 call_->deopt_id(), 1439 call_->deopt_id(),
1427 call_->token_pos()); 1440 call_->token_pos());
1428 check_smi->InheritDeoptTarget(call_); 1441 check_smi->InheritDeoptTarget(isolate(), call_);
1429 cursor = AppendInstruction(cursor, check_smi); 1442 cursor = AppendInstruction(cursor, check_smi);
1430 } else { 1443 } else {
1431 const ICData& old_checks = call_->ic_data(); 1444 const ICData& old_checks = call_->ic_data();
1432 const ICData& new_checks = ICData::ZoneHandle( 1445 const ICData& new_checks = ICData::ZoneHandle(
1433 ICData::New(Function::Handle(old_checks.owner()), 1446 ICData::New(Function::Handle(old_checks.owner()),
1434 String::Handle(old_checks.target_name()), 1447 String::Handle(old_checks.target_name()),
1435 Array::Handle(old_checks.arguments_descriptor()), 1448 Array::Handle(old_checks.arguments_descriptor()),
1436 old_checks.deopt_id(), 1449 old_checks.deopt_id(),
1437 1)); // Number of args tested. 1450 1)); // Number of args tested.
1438 new_checks.AddReceiverCheck(inlined_variants_[i].cid, 1451 new_checks.AddReceiverCheck(inlined_variants_[i].cid,
1439 *inlined_variants_[i].target); 1452 *inlined_variants_[i].target);
1440 CheckClassInstr* check_class = 1453 CheckClassInstr* check_class =
1441 new CheckClassInstr(new Value(redefinition), 1454 new CheckClassInstr(new Value(redefinition),
1442 call_->deopt_id(), 1455 call_->deopt_id(),
1443 new_checks, 1456 new_checks,
1444 call_->token_pos()); 1457 call_->token_pos());
1445 check_class->InheritDeoptTarget(call_); 1458 check_class->InheritDeoptTarget(isolate(), call_);
1446 cursor = AppendInstruction(cursor, check_class); 1459 cursor = AppendInstruction(cursor, check_class);
1447 } 1460 }
1448 // The next instruction is the first instruction of the inlined body. 1461 // The next instruction is the first instruction of the inlined body.
1449 // Handle the two possible cases (unshared and shared subsequent 1462 // Handle the two possible cases (unshared and shared subsequent
1450 // predecessors) separately. 1463 // predecessors) separately.
1451 BlockEntryInstr* callee_entry = inlined_entries_[i]; 1464 BlockEntryInstr* callee_entry = inlined_entries_[i];
1452 if (callee_entry->IsGraphEntry()) { 1465 if (callee_entry->IsGraphEntry()) {
1453 // Unshared. Graft the normal entry on after the check class 1466 // Unshared. Graft the normal entry on after the check class
1454 // instruction. 1467 // instruction.
1455 TargetEntryInstr* target = 1468 TargetEntryInstr* target =
(...skipping 12 matching lines...) Expand all
1468 BlockEntryInstr* block = target->dominated_blocks()[j]; 1481 BlockEntryInstr* block = target->dominated_blocks()[j];
1469 current_block->AddDominatedBlock(block); 1482 current_block->AddDominatedBlock(block);
1470 } 1483 }
1471 } else if (callee_entry->IsJoinEntry()) { 1484 } else if (callee_entry->IsJoinEntry()) {
1472 // Shared inlined body and this is a subsequent entry. We have 1485 // Shared inlined body and this is a subsequent entry. We have
1473 // already constructed a join and set its dominator. Add a jump to 1486 // already constructed a join and set its dominator. Add a jump to
1474 // the join. 1487 // the join.
1475 JoinEntryInstr* join = callee_entry->AsJoinEntry(); 1488 JoinEntryInstr* join = callee_entry->AsJoinEntry();
1476 ASSERT(join->dominator() != NULL); 1489 ASSERT(join->dominator() != NULL);
1477 GotoInstr* goto_join = new GotoInstr(join); 1490 GotoInstr* goto_join = new GotoInstr(join);
1478 goto_join->InheritDeoptTarget(join); 1491 goto_join->InheritDeoptTarget(isolate(), join);
1479 cursor->LinkTo(goto_join); 1492 cursor->LinkTo(goto_join);
1480 current_block->set_last_instruction(goto_join); 1493 current_block->set_last_instruction(goto_join);
1481 } else { 1494 } else {
1482 // There is no possibility of a TargetEntry (the first entry to a 1495 // There is no possibility of a TargetEntry (the first entry to a
1483 // shared inlined body) because this is the last inlined entry. 1496 // shared inlined body) because this is the last inlined entry.
1484 UNREACHABLE(); 1497 UNREACHABLE();
1485 } 1498 }
1486 cursor = NULL; 1499 cursor = NULL;
1487 } else { 1500 } else {
1488 // For all variants except the last, use a branch on the loaded class 1501 // For all variants except the last, use a branch on the loaded class
1489 // id. 1502 // id.
1490 const Smi& cid = Smi::ZoneHandle(Smi::New(inlined_variants_[i].cid)); 1503 const Smi& cid = Smi::ZoneHandle(Smi::New(inlined_variants_[i].cid));
1491 ConstantInstr* cid_constant = new ConstantInstr(cid); 1504 ConstantInstr* cid_constant = new ConstantInstr(cid);
1492 cid_constant->set_ssa_temp_index( 1505 cid_constant->set_ssa_temp_index(
1493 owner_->caller_graph()->alloc_ssa_temp_index()); 1506 owner_->caller_graph()->alloc_ssa_temp_index());
1494 StrictCompareInstr* compare = 1507 StrictCompareInstr* compare =
1495 new StrictCompareInstr(call_->instance_call()->token_pos(), 1508 new StrictCompareInstr(call_->instance_call()->token_pos(),
1496 Token::kEQ_STRICT, 1509 Token::kEQ_STRICT,
1497 new Value(load_cid), 1510 new Value(load_cid),
1498 new Value(cid_constant), 1511 new Value(cid_constant),
1499 false); // No number check. 1512 false); // No number check.
1500 BranchInstr* branch = new BranchInstr(compare); 1513 BranchInstr* branch = new BranchInstr(compare);
1501 branch->InheritDeoptTarget(call_); 1514 branch->InheritDeoptTarget(isolate(), call_);
1502 AppendInstruction(AppendInstruction(cursor, cid_constant), branch); 1515 AppendInstruction(AppendInstruction(cursor, cid_constant), branch);
1503 current_block->set_last_instruction(branch); 1516 current_block->set_last_instruction(branch);
1504 cursor = NULL; 1517 cursor = NULL;
1505 1518
1506 // 2. Handle a match by linking to the inlined body. There are three 1519 // 2. Handle a match by linking to the inlined body. There are three
1507 // cases (unshared, shared first predecessor, and shared subsequent 1520 // cases (unshared, shared first predecessor, and shared subsequent
1508 // predecessors). 1521 // predecessors).
1509 BlockEntryInstr* callee_entry = inlined_entries_[i]; 1522 BlockEntryInstr* callee_entry = inlined_entries_[i];
1510 TargetEntryInstr* true_target = NULL; 1523 TargetEntryInstr* true_target = NULL;
1511 if (callee_entry->IsGraphEntry()) { 1524 if (callee_entry->IsGraphEntry()) {
(...skipping 11 matching lines...) Expand all
1523 } else { 1536 } else {
1524 // Shared inlined body and this is a subsequent entry. We have 1537 // Shared inlined body and this is a subsequent entry. We have
1525 // already constructed a join. We need a fresh target that jumps to 1538 // already constructed a join. We need a fresh target that jumps to
1526 // the join. 1539 // the join.
1527 JoinEntryInstr* join = callee_entry->AsJoinEntry(); 1540 JoinEntryInstr* join = callee_entry->AsJoinEntry();
1528 ASSERT(join != NULL); 1541 ASSERT(join != NULL);
1529 ASSERT(join->dominator() != NULL); 1542 ASSERT(join->dominator() != NULL);
1530 true_target = 1543 true_target =
1531 new TargetEntryInstr(owner_->caller_graph()->allocate_block_id(), 1544 new TargetEntryInstr(owner_->caller_graph()->allocate_block_id(),
1532 call_->GetBlock()->try_index()); 1545 call_->GetBlock()->try_index());
1533 true_target->InheritDeoptTarget(join); 1546 true_target->InheritDeoptTarget(isolate(), join);
1534 GotoInstr* goto_join = new GotoInstr(join); 1547 GotoInstr* goto_join = new GotoInstr(join);
1535 goto_join->InheritDeoptTarget(join); 1548 goto_join->InheritDeoptTarget(isolate(), join);
1536 true_target->LinkTo(goto_join); 1549 true_target->LinkTo(goto_join);
1537 true_target->set_last_instruction(goto_join); 1550 true_target->set_last_instruction(goto_join);
1538 } 1551 }
1539 *branch->true_successor_address() = true_target; 1552 *branch->true_successor_address() = true_target;
1540 current_block->AddDominatedBlock(true_target); 1553 current_block->AddDominatedBlock(true_target);
1541 1554
1542 // 3. Prepare to handle a match failure on the next iteration or the 1555 // 3. Prepare to handle a match failure on the next iteration or the
1543 // fall-through code below for non-inlined variants. 1556 // fall-through code below for non-inlined variants.
1544 TargetEntryInstr* false_target = 1557 TargetEntryInstr* false_target =
1545 new TargetEntryInstr(owner_->caller_graph()->allocate_block_id(), 1558 new TargetEntryInstr(owner_->caller_graph()->allocate_block_id(),
1546 call_->GetBlock()->try_index()); 1559 call_->GetBlock()->try_index());
1547 false_target->InheritDeoptTarget(call_); 1560 false_target->InheritDeoptTarget(isolate(), call_);
1548 *branch->false_successor_address() = false_target; 1561 *branch->false_successor_address() = false_target;
1549 current_block->AddDominatedBlock(false_target); 1562 current_block->AddDominatedBlock(false_target);
1550 cursor = current_block = false_target; 1563 cursor = current_block = false_target;
1551 } 1564 }
1552 } 1565 }
1553 1566
1554 // Handle any non-inlined variants. 1567 // Handle any non-inlined variants.
1555 if (!non_inlined_variants_.is_empty()) { 1568 if (!non_inlined_variants_.is_empty()) {
1556 // Move push arguments of the call. 1569 // Move push arguments of the call.
1557 for (intptr_t i = 0; i < call_->ArgumentCount(); ++i) { 1570 for (intptr_t i = 0; i < call_->ArgumentCount(); ++i) {
(...skipping 14 matching lines...) Expand all
1572 new_checks.AddReceiverCheck(non_inlined_variants_[i].cid, 1585 new_checks.AddReceiverCheck(non_inlined_variants_[i].cid,
1573 *non_inlined_variants_[i].target, 1586 *non_inlined_variants_[i].target,
1574 non_inlined_variants_[i].count); 1587 non_inlined_variants_[i].count);
1575 } 1588 }
1576 PolymorphicInstanceCallInstr* fallback_call = 1589 PolymorphicInstanceCallInstr* fallback_call =
1577 new PolymorphicInstanceCallInstr(call_->instance_call(), 1590 new PolymorphicInstanceCallInstr(call_->instance_call(),
1578 new_checks, 1591 new_checks,
1579 true); // With checks. 1592 true); // With checks.
1580 fallback_call->set_ssa_temp_index( 1593 fallback_call->set_ssa_temp_index(
1581 owner_->caller_graph()->alloc_ssa_temp_index()); 1594 owner_->caller_graph()->alloc_ssa_temp_index());
1582 fallback_call->InheritDeoptTarget(call_); 1595 fallback_call->InheritDeoptTarget(isolate(), call_);
1583 ReturnInstr* fallback_return = 1596 ReturnInstr* fallback_return =
1584 new ReturnInstr(call_->instance_call()->token_pos(), 1597 new ReturnInstr(call_->instance_call()->token_pos(),
1585 new Value(fallback_call)); 1598 new Value(fallback_call));
1586 fallback_return->InheritDeoptTargetAfter(call_); 1599 fallback_return->InheritDeoptTargetAfter(isolate(), call_);
1587 AppendInstruction(AppendInstruction(cursor, fallback_call), 1600 AppendInstruction(AppendInstruction(cursor, fallback_call),
1588 fallback_return); 1601 fallback_return);
1589 exit_collector_->AddExit(fallback_return); 1602 exit_collector_->AddExit(fallback_return);
1590 cursor = NULL; 1603 cursor = NULL;
1591 } else { 1604 } else {
1592 // Remove push arguments of the call. 1605 // Remove push arguments of the call.
1593 for (intptr_t i = 0; i < call_->ArgumentCount(); ++i) { 1606 for (intptr_t i = 0; i < call_->ArgumentCount(); ++i) {
1594 PushArgumentInstr* push = call_->PushArgumentAt(i); 1607 PushArgumentInstr* push = call_->PushArgumentAt(i);
1595 push->ReplaceUsesWith(push->value()->definition()); 1608 push->ReplaceUsesWith(push->value()->definition());
1596 push->RemoveFromGraph(); 1609 push->RemoveFromGraph();
(...skipping 107 matching lines...) Expand 10 before | Expand all | Expand 10 after
1704 OS::Print("After Inlining of %s\n", flow_graph_-> 1717 OS::Print("After Inlining of %s\n", flow_graph_->
1705 parsed_function().function().ToFullyQualifiedCString()); 1718 parsed_function().function().ToFullyQualifiedCString());
1706 FlowGraphPrinter printer(*flow_graph_); 1719 FlowGraphPrinter printer(*flow_graph_);
1707 printer.PrintBlocks(); 1720 printer.PrintBlocks();
1708 } 1721 }
1709 } 1722 }
1710 } 1723 }
1711 } 1724 }
1712 1725
1713 } // namespace dart 1726 } // namespace dart
OLDNEW
« no previous file with comments | « runtime/vm/flow_graph_compiler.cc ('k') | runtime/vm/flow_graph_optimizer.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698