Chromium Code Reviews

Unified Diff: runtime/vm/flow_graph_compiler_mips.cc

Issue 1247783002: Make array allocation stub shared between isolates. (Closed) Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Created 5 years, 5 months ago
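
The recurring change in this patch replaces per-isolate stub lookups (isolate()->stub_code()->FooLabel()) with VM-global static accessors (StubCode::FooLabel()), so that a single set of stubs, including the array allocation stub, can be shared between isolates. A minimal C++ sketch of that refactoring pattern, using hypothetical stand-in types rather than the real VM classes:

#include <cstdint>

// Hypothetical stand-ins for the real VM types (Isolate, StubCode,
// ExternalLabel); only the shape of the change is real.
struct ExternalLabel {
  uintptr_t entry_point;
};

// Before: stubs were per-isolate state, so every call site first had to
// fetch the current isolate's copy.
class StubCodeInstance {
 public:
  const ExternalLabel& DeoptimizeLabel() const { return deoptimize_; }
 private:
  ExternalLabel deoptimize_;
};

class Isolate {
 public:
  StubCodeInstance* stub_code() { return &stub_code_; }
 private:
  StubCodeInstance stub_code_;
};

// After: one set of stubs per VM, reachable through static accessors, so
// the generated code can be shared between isolates.
class StubCode {
 public:
  static const ExternalLabel& DeoptimizeLabel() { return deoptimize_; }
 private:
  static ExternalLabel deoptimize_;
};
ExternalLabel StubCode::deoptimize_;
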
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/globals.h"  // Needed here to get TARGET_ARCH_MIPS.
#if defined(TARGET_ARCH_MIPS)

#include "vm/flow_graph_compiler.h"

#include "vm/ast_printer.h"
(...skipping 171 matching lines...)
  Assembler* assem = compiler->assembler();
#define __ assem->
  __ Comment("%s", Name());
  __ Bind(entry_label());
  if (FLAG_trap_on_deoptimization) {
    __ break_(0);
  }

  ASSERT(deopt_env() != NULL);

-  StubCode* stub_code = compiler->isolate()->stub_code();
-  __ BranchLink(&stub_code->DeoptimizeLabel());
+  __ BranchLink(&StubCode::DeoptimizeLabel());
  set_pc_offset(assem->CodeSize());
#undef __
}


#define __ assembler()->


// Fall through if bool_register contains null.
void FlowGraphCompiler::GenerateBoolToJump(Register bool_register,
(...skipping 15 matching lines...)
    Register instance_reg,
    Register type_arguments_reg,
    Register temp_reg,
    Label* is_instance_lbl,
    Label* is_not_instance_lbl) {
  __ Comment("CallSubtypeTestStub");
  ASSERT(instance_reg == A0);
  ASSERT(temp_reg == kNoRegister);  // Unused on MIPS.
  const SubtypeTestCache& type_test_cache =
      SubtypeTestCache::ZoneHandle(SubtypeTestCache::New());
-  StubCode* stub_code = isolate()->stub_code();
  __ LoadUniqueObject(A2, type_test_cache);
  if (test_kind == kTestTypeOneArg) {
    ASSERT(type_arguments_reg == kNoRegister);
    __ LoadImmediate(A1, reinterpret_cast<int32_t>(Object::null()));
-    __ BranchLink(&stub_code->Subtype1TestCacheLabel());
+    __ BranchLink(&StubCode::Subtype1TestCacheLabel());
  } else if (test_kind == kTestTypeTwoArgs) {
    ASSERT(type_arguments_reg == kNoRegister);
    __ LoadImmediate(A1, reinterpret_cast<int32_t>(Object::null()));
-    __ BranchLink(&stub_code->Subtype2TestCacheLabel());
+    __ BranchLink(&StubCode::Subtype2TestCacheLabel());
  } else if (test_kind == kTestTypeThreeArgs) {
    ASSERT(type_arguments_reg == A1);
-    __ BranchLink(&stub_code->Subtype3TestCacheLabel());
+    __ BranchLink(&StubCode::Subtype3TestCacheLabel());
  } else {
    UNREACHABLE();
  }
  // Result is in V0: null -> not found, otherwise Bool::True or Bool::False.
  GenerateBoolToJump(V0, is_instance_lbl, is_not_instance_lbl);
  return type_test_cache.raw();
}
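
The comment above fixes the stub's result contract: V0 holds null on a cache miss, otherwise Bool::True() or Bool::False(). As a sketch, the branch logic that GenerateBoolToJump emits behaves like the following; the enum and callback shape are illustrative, since the real code branches between assembler labels:

// Hypothetical mirror of the V0 contract; names are stand-ins.
enum class CacheProbe { kNotFound, kTrue, kFalse };

void BoolToJump(CacheProbe probe,
                void (*is_instance)(),
                void (*is_not_instance)()) {
  switch (probe) {
    case CacheProbe::kTrue:     is_instance();     break;  // jump taken
    case CacheProbe::kFalse:    is_not_instance(); break;  // jump taken
    case CacheProbe::kNotFound: break;  // null in V0: fall through to slow path
  }
}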


// Jumps to labels 'is_instance' or 'is_not_instance' respectively, if
(...skipping 671 matching lines...)
      __ lw(T1, FieldAddress(S4, ArgumentsDescriptor::count_offset()));
      __ SmiUntag(T1);
      // Check that T2 equals T1, i.e. no named arguments passed.
      __ beq(T2, T1, &all_arguments_processed);
    }
  }

  __ Bind(&wrong_num_arguments);
  if (function.IsClosureFunction()) {
    __ LeaveDartFrame();  // The arguments are still on the stack.
-    __ Branch(&isolate()->stub_code()->CallClosureNoSuchMethodLabel());
+    __ Branch(&StubCode::CallClosureNoSuchMethodLabel());
    // The noSuchMethod call may return to the caller, but not here.
  } else if (check_correct_named_args) {
    __ Stop("Wrong arguments");
  }

  __ Bind(&all_arguments_processed);
  // Nullify originally passed arguments only after they have been copied and
  // checked, otherwise noSuchMethod would not see their original values.
  // This step can be skipped in case we decide that formal parameters are
  // implicitly final, since garbage collecting the unmodified value is not
(...skipping 42 matching lines...)
  __ Ret();
}


void FlowGraphCompiler::EmitFrameEntry() {
  const Function& function = parsed_function().function();
  if (CanOptimizeFunction() &&
      function.IsOptimizable() &&
      (!is_optimizing() || may_reoptimize())) {
    const Register function_reg = T0;
-    StubCode* stub_code = isolate()->stub_code();

    __ GetNextPC(T2, TMP);

    // Calculate offset of pool pointer from the PC.
    const intptr_t object_pool_pc_dist =
        Instructions::HeaderSize() - Instructions::object_pool_offset() +
        assembler()->CodeSize() - 1 * Instr::kInstrSize;

    // Preserve PP of caller.
    __ mov(T1, PP);
(...skipping 14 matching lines...)
      __ addiu(T1, T1, Immediate(1));
      __ sw(T1, FieldAddress(function_reg, Function::usage_counter_offset()));
    }

    // Skip Branch if T1 is less than the threshold.
    Label dont_branch;
    __ BranchSignedLess(
        T1, Immediate(GetOptimizationThreshold()), &dont_branch);

    ASSERT(function_reg == T0);
-    __ Branch(&stub_code->OptimizeFunctionLabel());
+    __ Branch(&StubCode::OptimizeFunctionLabel());

    __ Bind(&dont_branch);

  } else if (!flow_graph().IsCompiledForOsr()) {
    entry_patch_pc_offset_ = assembler()->CodeSize();
  }
  __ Comment("Enter frame");
  if (flow_graph().IsCompiledForOsr()) {
    intptr_t extra_slots = StackSize()
        - flow_graph().num_stack_locals()
(...skipping 19 matching lines...)
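
Restated as plain C++, the prologue check emitted above (load the usage counter, conditionally bump it, compare against the threshold, tail-branch to the optimizing stub) behaves roughly as follows. The field name and the assumption that only unoptimized code increments the counter (the guard sits in the elided lines) are stand-ins:

#include <cstdint>

// Illustrative only: the emitted MIPS code does this with T0/T1 and a
// tail-branch to StubCode::OptimizeFunctionLabel(), not a C++ call.
struct FunctionData {
  int32_t usage_counter = 0;  // Function::usage_counter_offset() in the VM
};

void FrameEntryCheck(FunctionData* f, int32_t threshold, bool is_optimizing) {
  if (!is_optimizing) {   // assumption: only unoptimized code bumps the count
    f->usage_counter++;
  }
  if (f->usage_counter >= threshold) {
    // Branch to the (re)optimization stub instead of entering the frame.
  }
}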

  TryIntrinsify();

  EmitFrameEntry();

  const Function& function = parsed_function().function();

  const int num_fixed_params = function.num_fixed_parameters();
  const int num_copied_params = parsed_function().num_copied_params();
  const int num_locals = parsed_function().num_stack_locals();
-  StubCode* stub_code = isolate()->stub_code();

  // We check the number of passed arguments when we have to copy them due to
  // the presence of optional parameters.
  // No such checking code is generated if only fixed parameters are declared,
  // unless we are in debug mode or unless we are compiling a closure.
  if (num_copied_params == 0) {
#ifdef DEBUG
    ASSERT(!parsed_function().function().HasOptionalParameters());
    const bool check_arguments = !flow_graph().IsCompiledForOsr();
#else
    const bool check_arguments =
        function.IsClosureFunction() && !flow_graph().IsCompiledForOsr();
#endif
    if (check_arguments) {
      __ Comment("Check argument count");
      // Check that exactly num_fixed arguments are passed in.
      Label correct_num_arguments, wrong_num_arguments;
      __ lw(T0, FieldAddress(S4, ArgumentsDescriptor::count_offset()));
      __ BranchNotEqual(T0, Immediate(Smi::RawValue(num_fixed_params)),
                        &wrong_num_arguments);

      __ lw(T1, FieldAddress(S4,
          ArgumentsDescriptor::positional_count_offset()));
      __ beq(T0, T1, &correct_num_arguments);
      __ Bind(&wrong_num_arguments);
      if (function.IsClosureFunction()) {
        __ LeaveDartFrame();  // The arguments are still on the stack.
-        __ Branch(&isolate()->stub_code()->CallClosureNoSuchMethodLabel());
+        __ Branch(&StubCode::CallClosureNoSuchMethodLabel());
        // The noSuchMethod call may return to the caller, but not here.
      } else {
        __ Stop("Wrong number of arguments");
      }
      __ Bind(&correct_num_arguments);
    }
  } else if (!flow_graph().IsCompiledForOsr()) {
    CopyParameters();
  }

(...skipping 34 matching lines...)
    }
  }

  VisitBlocks();

  __ break_(0);
  GenerateDeferredCode();
  // Emit function patching code. This will be swapped with the first 5 bytes
  // at entry point.
  patch_code_pc_offset_ = assembler()->CodeSize();
-  __ BranchPatchable(&stub_code->FixCallersTargetLabel());
+  __ BranchPatchable(&StubCode::FixCallersTargetLabel());

  if (is_optimizing()) {
    lazy_deopt_pc_offset_ = assembler()->CodeSize();
-    __ Branch(&stub_code->DeoptimizeLazyLabel());
+    __ Branch(&StubCode::DeoptimizeLazyLabel());
  }
}


void FlowGraphCompiler::GenerateCall(intptr_t token_pos,
                                     const ExternalLabel* label,
                                     RawPcDescriptors::Kind kind,
                                     LocationSummary* locs) {
  __ BranchLinkPatchable(label);
  AddCurrentDescriptor(kind, Isolate::kNoDeoptId, token_pos);
(...skipping 124 matching lines...)
  const MegamorphicCache& cache =
      MegamorphicCache::ZoneHandle(table->Lookup(name, arguments_descriptor));
  __ Comment("MegamorphicInstanceCall");
  const Register receiverR = T0;
  const Register cacheR = T1;
  const Register targetR = T1;
  __ lw(receiverR, Address(SP, (argument_count - 1) * kWordSize));
  __ LoadObject(cacheR, cache);

  if (FLAG_use_megamorphic_stub) {
-    StubCode* stub_code = isolate()->stub_code();
-    __ BranchLink(&stub_code->MegamorphicLookupLabel());
+    __ BranchLink(&StubCode::MegamorphicLookupLabel());
  } else {
    StubCode::EmitMegamorphicLookup(assembler(), receiverR, cacheR, targetR);
  }
  __ LoadObject(S5, ic_data);
  __ LoadObject(S4, arguments_descriptor);
  __ jalr(targetR);
  AddCurrentDescriptor(RawPcDescriptors::kOther,
                       Isolate::kNoDeoptId, token_pos);
  RecordSafepoint(locs);
  const intptr_t deopt_id_after = Isolate::ToDeoptAfter(deopt_id);
  if (is_optimizing()) {
    AddDeoptIndexAtCall(deopt_id_after, token_pos);
  } else {
    // Add deoptimization continuation point after the call and before the
    // arguments are removed.
    AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos);
  }
  __ Drop(argument_count);
}
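
For context, a megamorphic cache maps a receiver's class id to a call target; the MegamorphicLookup stub, or the inline StubCode::EmitMegamorphicLookup expansion, probes it before the jalr above. A sketch of that general shape, with a hypothetical fixed-capacity layout (the VM's real cache is a growable heap object with a dedicated miss handler):

#include <cstdint>

typedef void (*EntryPoint)();

// Hypothetical open-addressed table keyed by class id.
struct MegamorphicCacheSketch {
  static const intptr_t kCapacity = 64;  // must stay a power of two
  intptr_t cids[kCapacity];
  EntryPoint targets[kCapacity];
};

EntryPoint Probe(const MegamorphicCacheSketch& cache, intptr_t cid) {
  intptr_t i = cid & (MegamorphicCacheSketch::kCapacity - 1);
  while (cache.cids[i] != cid) {
    i = (i + 1) & (MegamorphicCacheSketch::kCapacity - 1);  // linear probe
  }
  return cache.targets[i];  // then: load S5/S4 metadata and jalr the target
}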


void FlowGraphCompiler::EmitUnoptimizedStaticCall(
    intptr_t argument_count,
    intptr_t deopt_id,
    intptr_t token_pos,
    LocationSummary* locs,
    const ICData& ic_data) {
-  StubCode* stub_code = isolate()->stub_code();
  const uword label_address =
-      stub_code->UnoptimizedStaticCallEntryPoint(ic_data.NumArgsTested());
+      StubCode::UnoptimizedStaticCallEntryPoint(ic_data.NumArgsTested());
  ExternalLabel target_label(label_address);
  __ LoadObject(S5, ic_data);
  GenerateDartCall(deopt_id,
                   token_pos,
                   &target_label,
                   RawPcDescriptors::kUnoptStaticCall,
                   locs);
  __ Drop(argument_count);
}


void FlowGraphCompiler::EmitOptimizedStaticCall(
    const Function& function,
    const Array& arguments_descriptor,
    intptr_t argument_count,
    intptr_t deopt_id,
    intptr_t token_pos,
    LocationSummary* locs) {
-  StubCode* stub_code = isolate()->stub_code();
  __ Comment("StaticCall");
  __ LoadObject(S4, arguments_descriptor);
  // Do not use the code from the function, but let the code be patched so that
  // we can record the outgoing edges to other code.
  GenerateDartCall(deopt_id,
                   token_pos,
-                   &stub_code->CallStaticFunctionLabel(),
+                   &StubCode::CallStaticFunctionLabel(),
                   RawPcDescriptors::kOther,
                   locs);
  AddStaticCallTarget(function);
  __ Drop(argument_count);
}


Condition FlowGraphCompiler::EmitEqualityRegConstCompare(
    Register reg,
    const Object& obj,
    bool needs_number_check,
    intptr_t token_pos) {
  __ Comment("EqualityRegConstCompare");
  ASSERT(!needs_number_check ||
         (!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint()));
  if (needs_number_check) {
-    StubCode* stub_code = isolate()->stub_code();
    ASSERT(!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint());
    __ addiu(SP, SP, Immediate(-2 * kWordSize));
    __ sw(reg, Address(SP, 1 * kWordSize));
    __ LoadObject(TMP, obj);
    __ sw(TMP, Address(SP, 0 * kWordSize));
    if (is_optimizing()) {
      __ BranchLinkPatchable(
-          &stub_code->OptimizedIdenticalWithNumberCheckLabel());
+          &StubCode::OptimizedIdenticalWithNumberCheckLabel());
    } else {
      __ BranchLinkPatchable(
-          &stub_code->UnoptimizedIdenticalWithNumberCheckLabel());
+          &StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
    }
    if (token_pos != Scanner::kNoSourcePos) {
      AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
                           Isolate::kNoDeoptId,
                           token_pos);
    }
    __ Comment("EqualityRegConstCompare return");
    // Stub returns result in CMPRES1 (if it is 0, then reg and obj are equal).
    __ lw(reg, Address(SP, 1 * kWordSize));  // Restore 'reg'.
    __ addiu(SP, SP, Immediate(2 * kWordSize));  // Discard constant.
    return Condition(CMPRES1, ZR, EQ);
  } else {
    int16_t imm = 0;
    const Register obj_reg = __ LoadConditionOperand(CMPRES1, obj, &imm);
    return Condition(reg, obj_reg, EQ, imm);
  }
}
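
The IdenticalWithNumberCheck stubs exist because Dart's identical() compares numbers by value: two distinct boxed doubles with the same bit pattern are identical, so the raw register compare in the else branch is only safe when the constant is provably not a Mint, Double, or Bigint (hence the ASSERT above). A simplified sketch with stand-in types:

#include <cstring>

// Illustrative semantics only; the real stub compares tagged VM objects and
// reports the result through CMPRES1 (0 means equal), as the comment notes.
struct Box {
  enum Kind { kOther, kDouble } kind;
  double double_value;  // meaningful only when kind == kDouble
};

bool IdenticalWithNumberCheck(const Box* a, const Box* b) {
  if (a == b) return true;  // the same object is always identical
  if (a->kind == Box::kDouble && b->kind == Box::kDouble) {
    // Compare bit patterns, so identical(-0.0, 0.0) stays false,
    // matching Dart's identical() on doubles.
    return std::memcmp(&a->double_value, &b->double_value,
                       sizeof(double)) == 0;
  }
  return false;  // distinct non-number objects are never identical
}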


Condition FlowGraphCompiler::EmitEqualityRegRegCompare(Register left,
                                                       Register right,
                                                       bool needs_number_check,
                                                       intptr_t token_pos) {
  __ Comment("EqualityRegRegCompare");
  if (needs_number_check) {
-    StubCode* stub_code = isolate()->stub_code();
    __ addiu(SP, SP, Immediate(-2 * kWordSize));
    __ sw(left, Address(SP, 1 * kWordSize));
    __ sw(right, Address(SP, 0 * kWordSize));
    if (is_optimizing()) {
      __ BranchLinkPatchable(
-          &stub_code->OptimizedIdenticalWithNumberCheckLabel());
+          &StubCode::OptimizedIdenticalWithNumberCheckLabel());
    } else {
      __ BranchLinkPatchable(
-          &stub_code->UnoptimizedIdenticalWithNumberCheckLabel());
+          &StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
    }
    if (token_pos != Scanner::kNoSourcePos) {
      AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
                           Isolate::kNoDeoptId,
                           token_pos);
    }
    __ Comment("EqualityRegRegCompare return");
    // Stub returns result in CMPRES1 (if it is 0, then left and right are
    // equal).
    __ lw(right, Address(SP, 0 * kWordSize));
(...skipping 111 matching lines...)
    Label* failed,
    Label* match_found,
    intptr_t deopt_id,
    intptr_t token_index,
    LocationSummary* locs) {
  ASSERT(is_optimizing());
  __ Comment("EmitTestAndCall");
  const Array& arguments_descriptor =
      Array::ZoneHandle(ArgumentsDescriptor::New(argument_count,
                                                 argument_names));
-  StubCode* stub_code = isolate()->stub_code();

  // Load receiver into T0.
  __ LoadFromOffset(T0, SP, (argument_count - 1) * kWordSize);
  __ LoadObject(S4, arguments_descriptor);

  const bool kFirstCheckIsSmi = ic_data.GetReceiverClassIdAt(0) == kSmiCid;
  const intptr_t kNumChecks = ic_data.NumberOfChecks();

  ASSERT(!ic_data.IsNull() && (kNumChecks > 0));

  Label after_smi_test;
  __ andi(CMPRES1, T0, Immediate(kSmiTagMask));
  if (kFirstCheckIsSmi) {
    // Jump if receiver is not Smi.
    if (kNumChecks == 1) {
      __ bne(CMPRES1, ZR, failed);
    } else {
      __ bne(CMPRES1, ZR, &after_smi_test);
    }
    // Do not use the code from the function, but let the code be patched so
    // that we can record the outgoing edges to other code.
    GenerateDartCall(deopt_id,
                     token_index,
-                     &stub_code->CallStaticFunctionLabel(),
+                     &StubCode::CallStaticFunctionLabel(),
                     RawPcDescriptors::kOther,
                     locs);
    const Function& function = Function::Handle(ic_data.GetTargetAt(0));
    AddStaticCallTarget(function);
    __ Drop(argument_count);
    if (kNumChecks > 1) {
      __ b(match_found);
    }
  } else {
    // Receiver is Smi, but Smi is not a valid class therefore fail.
(...skipping 19 matching lines...)
    Label next_test;
    if (kIsLastCheck) {
      __ BranchNotEqual(T2, Immediate(sorted[i].cid), failed);
    } else {
      __ BranchNotEqual(T2, Immediate(sorted[i].cid), &next_test);
    }
    // Do not use the code from the function, but let the code be patched so
    // that we can record the outgoing edges to other code.
    GenerateDartCall(deopt_id,
                     token_index,
-                     &stub_code->CallStaticFunctionLabel(),
+                     &StubCode::CallStaticFunctionLabel(),
                     RawPcDescriptors::kOther,
                     locs);
    const Function& function = *sorted[i].target;
    AddStaticCallTarget(function);
    __ Drop(argument_count);
    if (!kIsLastCheck) {
      __ b(match_found);
    }
    __ Bind(&next_test);
  }
(...skipping 238 matching lines...)
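
Restated as hypothetical C++, the dispatch EmitTestAndCall lays out is: a Smi tag test first (Smis carry no class-id field to load), then a chain of class-id compares over the sorted ICData targets, falling through to the 'failed' label on the last miss:

#include <cstdint>
#include <vector>

typedef void (*Target)();

struct CidTarget {
  intptr_t cid;   // receiver class id to match
  Target target;  // statically-called target (a patched call in the real code)
};

// Names and the callback shape are illustrative; the emitted code branches
// between labels rather than calling through function pointers.
void TestAndCall(bool receiver_is_smi, intptr_t receiver_cid,
                 Target smi_target,  // null when the first check is not Smi
                 const std::vector<CidTarget>& sorted, Target failed) {
  if (receiver_is_smi) {
    if (smi_target != nullptr) { smi_target(); return; }
    failed();  // Smi is not a valid class here, so fail
    return;
  }
  for (const CidTarget& c : sorted) {  // remaining checks, in sorted order
    if (receiver_cid == c.cid) { c.target(); return; }
  }
  failed();  // the last check falls through to 'failed'
}
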
  __ AddImmediate(SP, kDoubleSize);
}


#undef __


}  // namespace dart

#endif  // defined TARGET_ARCH_MIPS
