Chromium Code Reviews

Diff: runtime/vm/flow_graph_compiler_arm64.cc ('-' marks lines removed by this CL, '+' marks lines added)

Issue 1343373003: Revert "VM: New calling convention for generated code." (Closed) Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Created 5 years, 3 months ago
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/globals.h"  // Needed here to get TARGET_ARCH_ARM64.
#if defined(TARGET_ARCH_ARM64)

#include "vm/flow_graph_compiler.h"

#include "vm/ast_printer.h"
(...skipping 79 matching lines...)

  // Emit all kMaterializeObject instructions describing objects to be
  // materialized on the deoptimization as a prefix to the deoptimization info.
  EmitMaterializations(deopt_env_, builder);

  // The real frame starts here.
  builder->MarkFrameStart();

  Zone* zone = compiler->zone();

+  // Current PP, FP, and PC.
  builder->AddPp(current->function(), slot_ix++);
  builder->AddPcMarker(Function::Handle(zone), slot_ix++);
  builder->AddCallerFp(slot_ix++);
  builder->AddReturnAddress(current->function(), deopt_id(), slot_ix++);

  // Emit all values that are needed for materialization as a part of the
  // expression stack for the bottom-most frame. This guarantees that GC
  // will be able to find them during materialization.
  slot_ix = builder->EmitMaterializationArguments(slot_ix);

  // For the innermost environment, set outgoing arguments and the locals.
  for (intptr_t i = current->Length() - 1;
       i >= current->fixed_parameter_count();
       i--) {
    builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++);
  }

  Environment* previous = current;
  current = current->outer();
  while (current != NULL) {
+    // PP, FP, and PC.
    builder->AddPp(current->function(), slot_ix++);
    builder->AddPcMarker(previous->function(), slot_ix++);
    builder->AddCallerFp(slot_ix++);

    // For any outer environment the deopt id is that of the call instruction
    // which is recorded in the outer environment.
    builder->AddReturnAddress(
        current->function(),
        Isolate::ToDeoptAfter(current->deopt_id()),
        slot_ix++);
(...skipping 15 matching lines...)
                       slot_ix++);
    }

    // Iterate on the outer environment.
    previous = current;
    current = current->outer();
  }
  // The previous pointer is now the outermost environment.
  ASSERT(previous != NULL);

-  // Add slots for the outermost environment.
+  // For the outermost environment, set caller PC, caller PP, and caller FP.
  builder->AddCallerPp(slot_ix++);
+  // PC marker.
  builder->AddPcMarker(previous->function(), slot_ix++);
  builder->AddCallerFp(slot_ix++);
  builder->AddCallerPc(slot_ix++);

  // For the outermost environment, set the incoming arguments.
  for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) {
    builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i), slot_ix++);
  }

  return builder->CreateDeoptInfo(deopt_table);
}
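The slot layout built by this function is easier to see outside the builder calls. Below is a small standalone C++ sketch, illustration only and not part of this CL, that simply lists the order in which slots are emitted: the materialized-object prefix, the innermost frame, one representative outer frame, and the caller slots of the outermost frame (the labels are descriptive, not VM types).

    // Illustration only: prints the deopt-info slot order emitted above.
    #include <cstddef>
    #include <cstdio>
    #include <vector>

    int main() {
      const std::vector<const char*> slots = {
          "materialized objects (prefix, before the frame start)",
          // Innermost frame.
          "inner: PP",
          "inner: PC marker",
          "inner: caller FP",
          "inner: return address (deopt id)",
          "inner: materialization arguments (start of the expression stack)",
          "inner: expression stack and locals, top to bottom",
          // Repeated once per outer environment.
          "outer: PP",
          "outer: PC marker",
          "outer: caller FP",
          "outer: return address (deopt-after id of the call)",
          "outer: outgoing arguments and locals",
          // Outermost environment only.
          "outermost: caller PP",
          "outermost: PC marker",
          "outermost: caller FP",
          "outermost: caller PC",
          "outermost: incoming (fixed) arguments",
      };
      for (std::size_t i = 0; i < slots.size(); ++i) {
        std::printf("slot %zu: %s\n", i, slots[i]);
      }
      return 0;
    }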


void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler,
                                             intptr_t stub_ix) {
  // Calls do not need stubs, they share a deoptimization trampoline.
  ASSERT(reason() != ICData::kDeoptAtCall);
  Assembler* assem = compiler->assembler();
#define __ assem->
  __ Comment("%s", Name());
  __ Bind(entry_label());
  if (FLAG_trap_on_deoptimization) {
    __ brk(0);
  }

  ASSERT(deopt_env() != NULL);
-  __ Push(CODE_REG);
+
  __ BranchLink(*StubCode::Deoptimize_entry());
  set_pc_offset(assem->CodeSize());
#undef __
}


#define __ assembler()->


// Fall through if bool_register contains null.
(...skipping 707 matching lines...)
      __ LoadFieldFromOffset(R7, R4, ArgumentsDescriptor::count_offset());
      __ SmiUntag(R7);
      // Check that R8 equals R7, i.e. no named arguments passed.
      __ CompareRegisters(R8, R7);
      __ b(&all_arguments_processed, EQ);
    }
  }

  __ Bind(&wrong_num_arguments);
  if (function.IsClosureFunction()) {
-    __ LeaveDartFrame(kKeepCalleePP);  // The arguments are still on the stack.
+    ASSERT(assembler()->constant_pool_allowed());
+    __ LeaveDartFrame();  // The arguments are still on the stack.
+    // Do not use caller's pool ptr in branch.
+    ASSERT(!assembler()->constant_pool_allowed());
    __ BranchPatchable(*StubCode::CallClosureNoSuchMethod_entry());
+    __ set_constant_pool_allowed(true);
    // The noSuchMethod call may return to the caller, but not here.
  } else if (check_correct_named_args) {
    __ Stop("Wrong arguments");
  }

  __ Bind(&all_arguments_processed);
  // Nullify originally passed arguments only after they have been copied and
  // checked, otherwise noSuchMethod would not see their original values.
  // This step can be skipped in case we decide that formal parameters are
  // implicitly final, since garbage collecting the unmodified value is not
(...skipping 48 matching lines...)
      (!is_optimizing() || may_reoptimize())) {
    const Register function_reg = R6;
    new_pp = R13;
    // The pool pointer is not setup before entering the Dart frame.
    // Temporarily setup pool pointer for this dart function.
    __ LoadPoolPointer(new_pp);

    // Load function object using the callee's pool pointer.
    __ LoadFunctionFromCalleePool(function_reg, function, new_pp);

+    // Patch point is after the eventually inlined function object.
+    entry_patch_pc_offset_ = assembler()->CodeSize();
+
    __ LoadFieldFromOffset(
        R7, function_reg, Function::usage_counter_offset(), kWord);
    // Reoptimization of an optimized function is triggered by counting in
    // IC stubs, but not at the entry of the function.
    if (!is_optimizing()) {
      __ add(R7, R7, Operand(1));
      __ StoreFieldToOffset(
          R7, function_reg, Function::usage_counter_offset(), kWord);
    }
    __ CompareImmediate(R7, GetOptimizationThreshold());
    ASSERT(function_reg == R6);
    Label dont_optimize;
    __ b(&dont_optimize, LT);
-    __ Branch(*StubCode::OptimizeFunction_entry(), new_pp);
+    __ Branch(*StubCode::OptimizeFunction_entry());
    __ Bind(&dont_optimize);
+  } else if (!flow_graph().IsCompiledForOsr()) {
+    entry_patch_pc_offset_ = assembler()->CodeSize();
  }
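The usage-counter block that closes just above is the optimization trigger: unoptimized code bumps the counter on entry, and the generated code branches to the OptimizeFunction stub once the threshold is reached. A minimal sketch of the same decision in plain C++ (hypothetical names, illustration only; the real code reads Function::usage_counter_offset() through the assembler):

    // Illustration only: mirrors the counter check emitted above.
    #include <cstdint>

    struct FakeFunction {
      int32_t usage_counter;  // Stands in for Function's usage counter field.
    };

    // Returns true when an optimized version should be requested now.
    bool ShouldOptimize(FakeFunction* function,
                        bool is_optimizing,
                        int32_t optimization_threshold) {
      int32_t counter = function->usage_counter;
      if (!is_optimizing) {
        // Unoptimized code increments at function entry; optimized code
        // relies on IC stubs to increment the counter instead.
        counter += 1;
        function->usage_counter = counter;
      }
      // The generated code skips the OptimizeFunction stub while the counter
      // is still below the threshold (the LT branch above).
      return counter >= optimization_threshold;
    }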
  __ Comment("Enter frame");
  if (flow_graph().IsCompiledForOsr()) {
    intptr_t extra_slots = StackSize()
        - flow_graph().num_stack_locals()
        - flow_graph().num_copied_params();
    ASSERT(extra_slots >= 0);
    __ EnterOsrFrame(extra_slots * kWordSize, new_pp);
  } else {
    ASSERT(StackSize() >= 0);
(...skipping 41 matching lines...)
      Label correct_num_arguments, wrong_num_arguments;
      __ LoadFieldFromOffset(R0, R4, ArgumentsDescriptor::count_offset());
      __ CompareImmediate(R0, Smi::RawValue(num_fixed_params));
      __ b(&wrong_num_arguments, NE);
      __ LoadFieldFromOffset(R1, R4,
                             ArgumentsDescriptor::positional_count_offset());
      __ CompareRegisters(R0, R1);
      __ b(&correct_num_arguments, EQ);
      __ Bind(&wrong_num_arguments);
      if (function.IsClosureFunction()) {
-        __ LeaveDartFrame(kKeepCalleePP);  // Arguments are still on the stack.
+        ASSERT(assembler()->constant_pool_allowed());
+        __ LeaveDartFrame();  // The arguments are still on the stack.
+        // Do not use caller's pool ptr in branch.
+        ASSERT(!assembler()->constant_pool_allowed());
        __ BranchPatchable(*StubCode::CallClosureNoSuchMethod_entry());
+        __ set_constant_pool_allowed(true);
        // The noSuchMethod call may return to the caller, but not here.
      } else {
        __ Stop("Wrong number of arguments");
      }
      __ Bind(&correct_num_arguments);
    }
  } else if (!flow_graph().IsCompiledForOsr()) {
    CopyParameters();
  }

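For an optimized function without optional parameters, the block above only has to verify that the caller passed exactly the fixed number of arguments and no named ones. A rough plain-C++ equivalent (hypothetical ArgsDescriptor struct, illustration only; the real code reads Smi fields of the descriptor held in R4):

    // Illustration only: the shape of the argument-count check above.
    struct ArgsDescriptor {
      int count;             // Total number of arguments passed.
      int positional_count;  // How many of them were passed positionally.
    };

    // Returns true when the call matches a function taking exactly
    // num_fixed_params positional parameters and no named arguments.
    bool ArgumentCountMatches(const ArgsDescriptor& desc, int num_fixed_params) {
      if (desc.count != num_fixed_params) return false;  // wrong_num_arguments
      return desc.positional_count == desc.count;        // no named arguments
    }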
(...skipping 33 matching lines...)
      }
    }
  }

  VisitBlocks();

  __ brk(0);
  ASSERT(assembler()->constant_pool_allowed());
  GenerateDeferredCode();

+  // Emit function patching code. This will be swapped with the first 3
+  // instructions at entry point.
+  patch_code_pc_offset_ = assembler()->CodeSize();
+  __ BranchPatchable(*StubCode::FixCallersTarget_entry());
+
  if (is_optimizing()) {
    lazy_deopt_pc_offset_ = assembler()->CodeSize();
    __ BranchPatchable(*StubCode::DeoptimizeLazy_entry());
  }
}


void FlowGraphCompiler::GenerateCall(intptr_t token_pos,
                                     const StubEntry& stub_entry,
                                     RawPcDescriptors::Kind kind,
(...skipping 689 matching lines...)
void ParallelMoveResolver::RestoreFpuScratch(FpuRegister reg) {
  __ PopDouble(reg);
}


#undef __

}  // namespace dart

#endif  // defined TARGET_ARCH_ARM64
