Chromium Code Reviews

Unified Diff: runtime/vm/flow_graph_compiler_arm.cc

Issue 1247783002: Make array allocation stub shared between isolates. (Closed)
Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Created 5 years, 5 months ago
 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
 #include "vm/globals.h"  // Needed here to get TARGET_ARCH_ARM.
 #if defined(TARGET_ARCH_ARM)
 
 #include "vm/flow_graph_compiler.h"
 
 #include "vm/ast_printer.h"
(...skipping 176 matching lines...)
   Assembler* assem = compiler->assembler();
 #define __ assem->
   __ Comment("%s", Name());
   __ Bind(entry_label());
   if (FLAG_trap_on_deoptimization) {
     __ bkpt(0);
   }
 
   ASSERT(deopt_env() != NULL);
 
-  StubCode* stub_code = compiler->isolate()->stub_code();
   // LR may be live. It will be clobbered by BranchLink, so cache it in IP.
   // It will be restored at the top of the deoptimization stub, specifically in
   // GenerateDeoptimizationSequence in stub_code_arm.cc.
   __ mov(IP, Operand(LR));
-  __ BranchLink(&stub_code->DeoptimizeLabel());
+  __ BranchLink(&StubCode::DeoptimizeLabel());
koda 2015/07/23 00:58:01: True already before your change, but even more obv…
Florian Schneider 2015/07/23 10:43:22: I would also prefer a const-reference. I'll see if…
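Both comments point at the same API shape: with the stubs now process-global, the static accessor can return a const reference instead of handing out a mutable label from per-isolate state. A minimal C++ sketch of that shape, assuming nothing beyond the names StubCode, ExternalLabel, and DeoptimizeLabel that appear in the patch; the members and the placeholder address below are illustrative only:

#include <cstdint>

// Hedged sketch, not VM source: a process-global stub label whose accessor
// returns a const reference. Call sites may take its address, as in
// BranchLink(&StubCode::DeoptimizeLabel()), but cannot mutate the label.
class ExternalLabel {
 public:
  explicit ExternalLabel(std::uintptr_t address) : address_(address) {}
  std::uintptr_t address() const { return address_; }

 private:
  std::uintptr_t address_;  // Entry point of the shared stub.
};

class StubCode {
 public:
  // Const reference, per the reviewers' preference.
  static const ExternalLabel& DeoptimizeLabel() { return deoptimize_label_; }

 private:
  static ExternalLabel deoptimize_label_;  // One copy for the whole process.
};

ExternalLabel StubCode::deoptimize_label_(0);  // Placeholder address.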
   set_pc_offset(assem->CodeSize());
 #undef __
 }
 
 
 #define __ assembler()->
 
 
 // Fall through if bool_register contains null.
 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register,
(...skipping 16 matching lines...)
                                            TypeTestStubKind test_kind,
                                            Register instance_reg,
                                            Register type_arguments_reg,
                                            Register temp_reg,
                                            Label* is_instance_lbl,
                                            Label* is_not_instance_lbl) {
   ASSERT(instance_reg == R0);
   ASSERT(temp_reg == kNoRegister);  // Unused on ARM.
   const SubtypeTestCache& type_test_cache =
       SubtypeTestCache::ZoneHandle(SubtypeTestCache::New());
-  StubCode* stub_code = isolate()->stub_code();
   __ LoadUniqueObject(R2, type_test_cache);
   if (test_kind == kTestTypeOneArg) {
     ASSERT(type_arguments_reg == kNoRegister);
     __ LoadImmediate(R1, reinterpret_cast<intptr_t>(Object::null()));
-    __ BranchLink(&stub_code->Subtype1TestCacheLabel());
+    __ BranchLink(&StubCode::Subtype1TestCacheLabel());
   } else if (test_kind == kTestTypeTwoArgs) {
     ASSERT(type_arguments_reg == kNoRegister);
     __ LoadImmediate(R1, reinterpret_cast<intptr_t>(Object::null()));
-    __ BranchLink(&stub_code->Subtype2TestCacheLabel());
+    __ BranchLink(&StubCode::Subtype2TestCacheLabel());
   } else if (test_kind == kTestTypeThreeArgs) {
     ASSERT(type_arguments_reg == R1);
-    __ BranchLink(&stub_code->Subtype3TestCacheLabel());
+    __ BranchLink(&StubCode::Subtype3TestCacheLabel());
   } else {
     UNREACHABLE();
   }
   // Result is in R1: null -> not found, otherwise Bool::True or Bool::False.
   GenerateBoolToJump(R1, is_instance_lbl, is_not_instance_lbl);
   return type_test_cache.raw();
 }
 
 
 // Jumps to labels 'is_instance' or 'is_not_instance' respectively, if
(...skipping 654 matching lines...)
       __ SmiUntag(R7);
       // Check that R9 equals R7, i.e. no named arguments passed.
       __ cmp(R9, Operand(R7));
       __ b(&all_arguments_processed, EQ);
     }
   }
 
   __ Bind(&wrong_num_arguments);
   if (function.IsClosureFunction()) {
     __ LeaveDartFrame();  // The arguments are still on the stack.
-    __ Branch(&isolate()->stub_code()->CallClosureNoSuchMethodLabel());
+    __ Branch(&StubCode::CallClosureNoSuchMethodLabel());
     // The noSuchMethod call may return to the caller, but not here.
   } else if (check_correct_named_args) {
     __ Stop("Wrong arguments");
   }
 
   __ Bind(&all_arguments_processed);
   // Nullify originally passed arguments only after they have been copied and
   // checked, otherwise noSuchMethod would not see their original values.
   // This step can be skipped in case we decide that formal parameters are
   // implicitly final, since garbage collecting the unmodified value is not
(...skipping 39 matching lines...)
   __ Ret();
 }
 
 
 void FlowGraphCompiler::EmitFrameEntry() {
   const Function& function = parsed_function().function();
   if (CanOptimizeFunction() &&
       function.IsOptimizable() &&
       (!is_optimizing() || may_reoptimize())) {
     const Register function_reg = R6;
-    StubCode* stub_code = isolate()->stub_code();
 
     // The pool pointer is not setup before entering the Dart frame.
     // Preserve PP of caller.
     __ mov(R7, Operand(PP));
     // Temporarily setup pool pointer for this dart function.
     __ LoadPoolPointer();
     // Load function object from object pool.
     __ LoadObject(function_reg, function);  // Uses PP.
     // Restore PP of caller.
     __ mov(PP, Operand(R7));
 
     // Patch point is after the eventually inlined function object.
     entry_patch_pc_offset_ = assembler()->CodeSize();
 
     __ ldr(R7, FieldAddress(function_reg,
                             Function::usage_counter_offset()));
     // Reoptimization of an optimized function is triggered by counting in
     // IC stubs, but not at the entry of the function.
     if (!is_optimizing()) {
       __ add(R7, R7, Operand(1));
       __ str(R7, FieldAddress(function_reg,
                               Function::usage_counter_offset()));
     }
     __ CompareImmediate(R7, GetOptimizationThreshold());
     ASSERT(function_reg == R6);
-    __ Branch(&stub_code->OptimizeFunctionLabel(), GE);
+    __ Branch(&StubCode::OptimizeFunctionLabel(), GE);
   } else if (!flow_graph().IsCompiledForOsr()) {
     entry_patch_pc_offset_ = assembler()->CodeSize();
   }
   __ Comment("Enter frame");
   if (flow_graph().IsCompiledForOsr()) {
     intptr_t extra_slots = StackSize()
         - flow_graph().num_stack_locals()
         - flow_graph().num_copied_params();
     ASSERT(extra_slots >= 0);
     __ EnterOsrFrame(extra_slots * kWordSize);
(...skipping 16 matching lines...)
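The EmitFrameEntry hunk above is easier to read as pseudocode: on entry, load Function::usage_counter, increment it only in unoptimized code (reoptimization is counted in IC stubs instead, as the comment notes), and branch to the now-static OptimizeFunction stub once the counter reaches the threshold. A hedged C++ sketch of that policy; the helper and its parameters are invented for illustration and do not exist in the VM:

#include <cstdint>

// Illustrative sketch of the counter policy EmitFrameEntry encodes in ARM
// assembly above; not VM source.
bool ShouldCallOptimizeStub(std::int32_t* usage_counter,
                            bool is_optimizing,
                            std::int32_t threshold) {
  if (!is_optimizing) {
    ++*usage_counter;  // Only unoptimized entries bump the counter.
  }
  // Optimized code that may reoptimize still compares against the
  // threshold; a GE result branches to StubCode::OptimizeFunctionLabel().
  return *usage_counter >= threshold;
}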
 
   TryIntrinsify();
 
   EmitFrameEntry();
 
   const Function& function = parsed_function().function();
 
   const int num_fixed_params = function.num_fixed_parameters();
   const int num_copied_params = parsed_function().num_copied_params();
   const int num_locals = parsed_function().num_stack_locals();
-  StubCode* stub_code = isolate()->stub_code();
 
   // We check the number of passed arguments when we have to copy them due to
   // the presence of optional parameters.
   // No such checking code is generated if only fixed parameters are declared,
   // unless we are in debug mode or unless we are compiling a closure.
   if (num_copied_params == 0) {
 #ifdef DEBUG
     ASSERT(!parsed_function().function().HasOptionalParameters());
     const bool check_arguments = !flow_graph().IsCompiledForOsr();
 #else
     const bool check_arguments =
         function.IsClosureFunction() && !flow_graph().IsCompiledForOsr();
 #endif
     if (check_arguments) {
       __ Comment("Check argument count");
       // Check that exactly num_fixed arguments are passed in.
       Label correct_num_arguments, wrong_num_arguments;
       __ ldr(R0, FieldAddress(R4, ArgumentsDescriptor::count_offset()));
       __ CompareImmediate(R0, Smi::RawValue(num_fixed_params));
       __ b(&wrong_num_arguments, NE);
       __ ldr(R1, FieldAddress(R4,
                               ArgumentsDescriptor::positional_count_offset()));
       __ cmp(R0, Operand(R1));
       __ b(&correct_num_arguments, EQ);
       __ Bind(&wrong_num_arguments);
       if (function.IsClosureFunction()) {
         __ LeaveDartFrame();  // The arguments are still on the stack.
-        __ Branch(&isolate()->stub_code()->CallClosureNoSuchMethodLabel());
+        __ Branch(&StubCode::CallClosureNoSuchMethodLabel());
         // The noSuchMethod call may return to the caller, but not here.
       } else {
         __ Stop("Wrong number of arguments");
       }
       __ Bind(&correct_num_arguments);
     }
   } else if (!flow_graph().IsCompiledForOsr()) {
     CopyParameters();
   }
 
(...skipping 34 matching lines...)
     }
   }
 
   VisitBlocks();
 
   __ bkpt(0);
   GenerateDeferredCode();
   // Emit function patching code. This will be swapped with the first 3
   // instructions at entry point.
   patch_code_pc_offset_ = assembler()->CodeSize();
-  __ BranchPatchable(&stub_code->FixCallersTargetLabel());
+  __ BranchPatchable(&StubCode::FixCallersTargetLabel());
 
   if (is_optimizing()) {
     lazy_deopt_pc_offset_ = assembler()->CodeSize();
-    __ Branch(&stub_code->DeoptimizeLazyLabel());
+    __ Branch(&StubCode::DeoptimizeLazyLabel());
   }
 }
 
 
 void FlowGraphCompiler::GenerateCall(intptr_t token_pos,
                                      const ExternalLabel* label,
                                      RawPcDescriptors::Kind kind,
                                      LocationSummary* locs) {
   __ BranchLinkPatchable(label);
   AddCurrentDescriptor(kind, Isolate::kNoDeoptId, token_pos);
(...skipping 141 matching lines...)
   ASSERT(!arguments_descriptor.IsNull() && (arguments_descriptor.Length() > 0));
   const MegamorphicCache& cache =
       MegamorphicCache::ZoneHandle(table->Lookup(name, arguments_descriptor));
   const Register receiverR = R0;
   const Register cacheR = R1;
   const Register targetR = R1;
   __ LoadFromOffset(kWord, receiverR, SP, (argument_count - 1) * kWordSize);
   __ LoadObject(cacheR, cache);
 
   if (FLAG_use_megamorphic_stub) {
-    StubCode* stub_code = isolate()->stub_code();
-    __ BranchLink(&stub_code->MegamorphicLookupLabel());
+    __ BranchLink(&StubCode::MegamorphicLookupLabel());
   } else {
     StubCode::EmitMegamorphicLookup(assembler(), receiverR, cacheR, targetR);
   }
   __ LoadObject(R5, ic_data);
   __ LoadObject(R4, arguments_descriptor);
   __ blx(targetR);
   AddCurrentDescriptor(RawPcDescriptors::kOther,
                        Isolate::kNoDeoptId, token_pos);
   RecordSafepoint(locs);
   const intptr_t deopt_id_after = Isolate::ToDeoptAfter(deopt_id);
   if (is_optimizing()) {
     AddDeoptIndexAtCall(deopt_id_after, token_pos);
   } else {
     // Add deoptimization continuation point after the call and before the
     // arguments are removed.
     AddCurrentDescriptor(RawPcDescriptors::kDeopt,
                          deopt_id_after, token_pos);
   }
   __ Drop(argument_count);
 }
 
 
 void FlowGraphCompiler::EmitUnoptimizedStaticCall(
     intptr_t argument_count,
     intptr_t deopt_id,
     intptr_t token_pos,
     LocationSummary* locs,
     const ICData& ic_data) {
-  StubCode* stub_code = isolate()->stub_code();
   const uword label_address =
-      stub_code->UnoptimizedStaticCallEntryPoint(ic_data.NumArgsTested());
+      StubCode::UnoptimizedStaticCallEntryPoint(ic_data.NumArgsTested());
   ExternalLabel target_label(label_address);
   __ LoadObject(R5, ic_data);
   GenerateDartCall(deopt_id,
                    token_pos,
                    &target_label,
                    RawPcDescriptors::kUnoptStaticCall,
                    locs);
   __ Drop(argument_count);
 }
 
 
 void FlowGraphCompiler::EmitOptimizedStaticCall(
     const Function& function,
     const Array& arguments_descriptor,
     intptr_t argument_count,
     intptr_t deopt_id,
     intptr_t token_pos,
     LocationSummary* locs) {
-  StubCode* stub_code = isolate()->stub_code();
   __ LoadObject(R4, arguments_descriptor);
   // Do not use the code from the function, but let the code be patched so that
   // we can record the outgoing edges to other code.
   GenerateDartCall(deopt_id,
                    token_pos,
-                   &stub_code->CallStaticFunctionLabel(),
+                   &StubCode::CallStaticFunctionLabel(),
                    RawPcDescriptors::kOther,
                    locs);
   AddStaticCallTarget(function);
   __ Drop(argument_count);
 }
 
 
 Condition FlowGraphCompiler::EmitEqualityRegConstCompare(
     Register reg,
     const Object& obj,
     bool needs_number_check,
     intptr_t token_pos) {
   if (needs_number_check) {
-    StubCode* stub_code = isolate()->stub_code();
     ASSERT(!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint());
     __ Push(reg);
     __ PushObject(obj);
     if (is_optimizing()) {
       __ BranchLinkPatchable(
-          &stub_code->OptimizedIdenticalWithNumberCheckLabel());
+          &StubCode::OptimizedIdenticalWithNumberCheckLabel());
     } else {
       __ BranchLinkPatchable(
-          &stub_code->UnoptimizedIdenticalWithNumberCheckLabel());
+          &StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
     }
     if (token_pos != Scanner::kNoSourcePos) {
       AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
                            Isolate::kNoDeoptId,
                            token_pos);
     }
     // Stub returns result in flags (result of a cmp, we need Z computed).
     __ Drop(1);  // Discard constant.
     __ Pop(reg);  // Restore 'reg'.
   } else {
     __ CompareObject(reg, obj);
   }
   return EQ;
 }
 
 
 Condition FlowGraphCompiler::EmitEqualityRegRegCompare(Register left,
                                                        Register right,
                                                        bool needs_number_check,
                                                        intptr_t token_pos) {
   if (needs_number_check) {
-    StubCode* stub_code = isolate()->stub_code();
     __ Push(left);
     __ Push(right);
     if (is_optimizing()) {
       __ BranchLinkPatchable(
-          &stub_code->OptimizedIdenticalWithNumberCheckLabel());
+          &StubCode::OptimizedIdenticalWithNumberCheckLabel());
     } else {
       __ BranchLinkPatchable(
-          &stub_code->UnoptimizedIdenticalWithNumberCheckLabel());
+          &StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
     }
     if (token_pos != Scanner::kNoSourcePos) {
       AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
                            Isolate::kNoDeoptId,
                            token_pos);
     }
     // Stub returns result in flags (result of a cmp, we need Z computed).
     __ Pop(right);
     __ Pop(left);
   } else {
(...skipping 98 matching lines...)
                                         Label* failed,
                                         Label* match_found,
                                         intptr_t deopt_id,
                                         intptr_t token_index,
                                         LocationSummary* locs) {
   ASSERT(is_optimizing());
   __ Comment("EmitTestAndCall");
   const Array& arguments_descriptor =
       Array::ZoneHandle(ArgumentsDescriptor::New(argument_count,
                                                  argument_names));
-  StubCode* stub_code = isolate()->stub_code();
 
   // Load receiver into R0.
   __ LoadFromOffset(kWord, R0, SP, (argument_count - 1) * kWordSize);
   __ LoadObject(R4, arguments_descriptor);
 
   const bool kFirstCheckIsSmi = ic_data.GetReceiverClassIdAt(0) == kSmiCid;
   const intptr_t kNumChecks = ic_data.NumberOfChecks();
 
   ASSERT(!ic_data.IsNull() && (kNumChecks > 0));
 
   Label after_smi_test;
   __ tst(R0, Operand(kSmiTagMask));
   if (kFirstCheckIsSmi) {
     // Jump if receiver is not Smi.
     if (kNumChecks == 1) {
       __ b(failed, NE);
     } else {
       __ b(&after_smi_test, NE);
     }
     // Do not use the code from the function, but let the code be patched so
     // that we can record the outgoing edges to other code.
     GenerateDartCall(deopt_id,
                      token_index,
-                     &stub_code->CallStaticFunctionLabel(),
+                     &StubCode::CallStaticFunctionLabel(),
                      RawPcDescriptors::kOther,
                      locs);
     const Function& function = Function::Handle(ic_data.GetTargetAt(0));
     AddStaticCallTarget(function);
     __ Drop(argument_count);
     if (kNumChecks > 1) {
       __ b(match_found);
     }
   } else {
     // Receiver is Smi, but Smi is not a valid class therefore fail.
(...skipping 20 matching lines...)
     __ CompareImmediate(R2, sorted[i].cid);
     if (kIsLastCheck) {
       __ b(failed, NE);
     } else {
       __ b(&next_test, NE);
     }
     // Do not use the code from the function, but let the code be patched so
     // that we can record the outgoing edges to other code.
     GenerateDartCall(deopt_id,
                      token_index,
-                     &stub_code->CallStaticFunctionLabel(),
+                     &StubCode::CallStaticFunctionLabel(),
                      RawPcDescriptors::kOther,
                      locs);
     const Function& function = *sorted[i].target;
     AddStaticCallTarget(function);
     __ Drop(argument_count);
     if (!kIsLastCheck) {
       __ b(match_found);
     }
     __ Bind(&next_test);
   }
(...skipping 296 matching lines...)
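The EmitTestAndCall hunks above generate a linear class-id dispatch: handle a possible Smi receiver first, then compare the receiver's class id against each remaining cid, calling the statically known target on a match and branching to `failed` when nothing matches. A hedged sketch of that control flow; CidTarget, `targets`, and the function-pointer call are illustrative stand-ins, not VM API:

#include <cstddef>
#include <cstdint>

// Illustrative stand-in for one (class id, target) pair from the ICData.
struct CidTarget {
  std::intptr_t cid;
  void (*target)();  // Statically known target for this class id.
};

// Hedged sketch of the dispatch shape EmitTestAndCall emits as ARM code.
// Returns true if a target was called (the match_found path), false for
// the failed path.
bool TestAndCall(std::intptr_t receiver_cid,
                 const CidTarget* targets,
                 std::size_t n) {
  for (std::size_t i = 0; i < n; ++i) {
    if (receiver_cid == targets[i].cid) {
      targets[i].target();  // Via CallStaticFunctionLabel, patched later.
      return true;
    }
  }
  return false;
}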
   DRegister dreg = EvenDRegisterOf(reg);
   __ vldrd(dreg, Address(SP, kDoubleSize, Address::PostIndex));
 }
 
 
 #undef __
 
 }  // namespace dart
 
 #endif  // defined TARGET_ARCH_ARM
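Taken together, the change in this file is mechanical: every per-isolate lookup of a stub label becomes a call on the static StubCode class, which is what lets the allocation and other stubs be shared between isolates. A before/after excerpt of the call-site pattern, drawn from the DeoptimizeLabel site above:

// Before: stub labels hang off the current isolate's StubCode instance.
StubCode* stub_code = compiler->isolate()->stub_code();
__ BranchLink(&stub_code->DeoptimizeLabel());

// After: stub labels are static, one copy shared by all isolates.
__ BranchLink(&StubCode::DeoptimizeLabel());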
