Chromium Code Reviews

Unified Diff: runtime/vm/flow_graph_compiler_arm64.cc

Issue 365983002: Make isolate specific stub code accessors instance methods instead (Closed) Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: Created 6 years, 5 months ago
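
The change itself is mechanical: each isolate-specific stub accessor on StubCode becomes an instance method, and every call site first fetches the isolate's StubCode object (isolate()->stub_code() in the compiler, compiler->isolate()->stub_code() in the deopt stubs). A minimal sketch of the resulting API shape, with simplified, illustrative class bodies (the real declarations live in runtime/vm/stub_code.h and runtime/vm/isolate.h and cover many more stubs):

// Illustrative sketch only; class bodies are simplified stand-ins.
class ExternalLabel {};  // stand-in for the stub entry-point wrapper

class StubCode {
 public:
  // Before this CL (static, process-global accessor):
  //   static const ExternalLabel& DeoptimizeLabel();
  // After this CL (instance accessor, one StubCode per isolate):
  const ExternalLabel& DeoptimizeLabel() const { return deoptimize_; }

 private:
  ExternalLabel deoptimize_;
};

class Isolate {
 public:
  StubCode* stub_code() const { return stub_code_; }  // isolate-owned stubs

 private:
  StubCode* stub_code_;
};

// Call sites therefore change from
//   __ BranchLink(&StubCode::DeoptimizeLabel(), PP);
// to
//   StubCode* stub_code = isolate()->stub_code();
//   __ BranchLink(&stub_code->DeoptimizeLabel(), PP);

Routing the lookup through the isolate removes the hidden global, so each isolate can own its own stub code, as the diff below shows.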
--- a/runtime/vm/flow_graph_compiler_arm64.cc
+++ b/runtime/vm/flow_graph_compiler_arm64.cc
@@ -1,10 +1,10 @@
 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
 #include "vm/globals.h"  // Needed here to get TARGET_ARCH_ARM64.
 #if defined(TARGET_ARCH_ARM64)
 
 #include "vm/flow_graph_compiler.h"
 
 #include "vm/ast_printer.h"
(...skipping 145 matching lines...)
@@ -156,21 +156,22 @@
   Assembler* assem = compiler->assembler();
 #define __ assem->
   __ Comment("Deopt stub for id %" Pd "", deopt_id());
   __ Bind(entry_label());
   if (FLAG_trap_on_deoptimization) {
     __ brk(0);
   }
 
   ASSERT(deopt_env() != NULL);
 
-  __ BranchLink(&StubCode::DeoptimizeLabel(), PP);
+  StubCode* stub_code = compiler->isolate()->stub_code();
+  __ BranchLink(&stub_code->DeoptimizeLabel(), PP);
   set_pc_offset(assem->CodeSize());
 #undef __
 }
 
 
 #define __ assembler()->
 
 
 // Fall through if bool_register contains null.
 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register,
(...skipping 15 matching lines...)
@@ -192,32 +193,33 @@
     TypeTestStubKind test_kind,
     Register instance_reg,
     Register type_arguments_reg,
     Register temp_reg,
     Label* is_instance_lbl,
     Label* is_not_instance_lbl) {
   ASSERT(instance_reg == R0);
   ASSERT(temp_reg == kNoRegister);  // Unused on ARM.
   const SubtypeTestCache& type_test_cache =
       SubtypeTestCache::ZoneHandle(SubtypeTestCache::New());
+  StubCode* stub_code = isolate()->stub_code();
   __ LoadObject(R2, type_test_cache, PP);
   if (test_kind == kTestTypeOneArg) {
     ASSERT(type_arguments_reg == kNoRegister);
     __ LoadObject(R1, Object::null_object(), PP);
-    __ BranchLink(&StubCode::Subtype1TestCacheLabel(), PP);
+    __ BranchLink(&stub_code->Subtype1TestCacheLabel(), PP);
   } else if (test_kind == kTestTypeTwoArgs) {
     ASSERT(type_arguments_reg == kNoRegister);
     __ LoadObject(R1, Object::null_object(), PP);
-    __ BranchLink(&StubCode::Subtype2TestCacheLabel(), PP);
+    __ BranchLink(&stub_code->Subtype2TestCacheLabel(), PP);
   } else if (test_kind == kTestTypeThreeArgs) {
     ASSERT(type_arguments_reg == R1);
-    __ BranchLink(&StubCode::Subtype3TestCacheLabel(), PP);
+    __ BranchLink(&stub_code->Subtype3TestCacheLabel(), PP);
   } else {
     UNREACHABLE();
   }
   // Result is in R1: null -> not found, otherwise Bool::True or Bool::False.
   GenerateBoolToJump(R1, is_instance_lbl, is_not_instance_lbl);
   return type_test_cache.raw();
 }
 
 
 // Jumps to labels 'is_instance' or 'is_not_instance' respectively, if
(...skipping 662 matching lines...)
@@ -886,27 +888,28 @@
       __ SmiUntag(R7);
       // Check that R8 equals R7, i.e. no named arguments passed.
       __ CompareRegisters(R8, R7);
       __ b(&all_arguments_processed, EQ);
     }
   }
 
   __ Bind(&wrong_num_arguments);
   if (function.IsClosureFunction()) {
     // Invoke noSuchMethod function passing "call" as the original name.
+    StubCode* stub_code = isolate()->stub_code();
     const int kNumArgsChecked = 1;
     const ICData& ic_data = ICData::ZoneHandle(
         ICData::New(function, Symbols::Call(), Object::empty_array(),
                     Isolate::kNoDeoptId, kNumArgsChecked));
     __ LoadObject(R5, ic_data, PP);
     __ LeaveDartFrame();  // The arguments are still on the stack.
-    __ BranchPatchable(&StubCode::CallNoSuchMethodFunctionLabel());
+    __ BranchPatchable(&stub_code->CallNoSuchMethodFunctionLabel());
     // The noSuchMethod call may return to the caller, but not here.
     __ brk(0);
   } else if (check_correct_named_args) {
     __ Stop("Wrong arguments");
   }
 
   __ Bind(&all_arguments_processed);
   // Nullify originally passed arguments only after they have been copied and
   // checked, otherwise noSuchMethod would not see their original values.
   // This step can be skipped in case we decide that formal parameters are
(...skipping 41 matching lines...)
@@ -954,48 +957,49 @@
 }
 
 
 void FlowGraphCompiler::EmitFrameEntry() {
   const Function& function = parsed_function().function();
   Register new_pp = kNoPP;
   if (CanOptimizeFunction() &&
       function.IsOptimizable() &&
       (!is_optimizing() || may_reoptimize())) {
     const Register function_reg = R6;
+    StubCode* stub_code = isolate()->stub_code();
     new_pp = R13;
 
     // Set up pool pointer in new_pp.
     __ LoadPoolPointer(new_pp);
 
     // Load function object using the callee's pool pointer.
     __ LoadObject(function_reg, function, new_pp);
 
     // Patch point is after the eventually inlined function object.
     entry_patch_pc_offset_ = assembler()->CodeSize();
 
     intptr_t threshold = FLAG_optimization_counter_threshold;
     __ LoadFieldFromOffset(
         R7, function_reg, Function::usage_counter_offset(), new_pp, kWord);
     if (is_optimizing()) {
       // Reoptimization of an optimized function is triggered by counting in
       // IC stubs, but not at the entry of the function.
       threshold = FLAG_reoptimization_counter_threshold;
     } else {
       __ add(R7, R7, Operand(1));
       __ StoreFieldToOffset(
           R7, function_reg, Function::usage_counter_offset(), new_pp, kWord);
     }
     __ CompareImmediate(R7, threshold, new_pp);
     ASSERT(function_reg == R6);
     Label dont_optimize;
     __ b(&dont_optimize, LT);
-    __ Branch(&StubCode::OptimizeFunctionLabel(), new_pp);
+    __ Branch(&stub_code->OptimizeFunctionLabel(), new_pp);
     __ Bind(&dont_optimize);
   } else if (!flow_graph().IsCompiledForOsr()) {
     // We have to load the PP here too because a load of an external label
     // may be patched at the AddCurrentDescriptor below.
     new_pp = R13;
 
     // Set up pool pointer in new_pp.
     __ LoadPoolPointer(new_pp);
 
     entry_patch_pc_offset_ = assembler()->CodeSize();
(...skipping 24 matching lines...)
@@ -1026,20 +1030,21 @@
 
   TryIntrinsify();
 
   EmitFrameEntry();
 
   const Function& function = parsed_function().function();
 
   const int num_fixed_params = function.num_fixed_parameters();
   const int num_copied_params = parsed_function().num_copied_params();
   const int num_locals = parsed_function().num_stack_locals();
+  StubCode* stub_code = isolate()->stub_code();
 
   // We check the number of passed arguments when we have to copy them due to
   // the presence of optional parameters.
   // No such checking code is generated if only fixed parameters are declared,
   // unless we are in debug mode or unless we are compiling a closure.
   if (num_copied_params == 0) {
 #ifdef DEBUG
     ASSERT(!parsed_function().function().HasOptionalParameters());
     const bool check_arguments = !flow_graph().IsCompiledForOsr();
 #else
(...skipping 18 matching lines...)
@@ -1064,21 +1069,21 @@
         const String& name =
             String::Handle(function.IsClosureFunction()
                 ? Symbols::Call().raw()
                 : function.name());
         const int kNumArgsChecked = 1;
         const ICData& ic_data = ICData::ZoneHandle(
             ICData::New(function, name, Object::empty_array(),
                         Isolate::kNoDeoptId, kNumArgsChecked));
         __ LoadObject(R5, ic_data, PP);
         __ LeaveDartFrame();  // The arguments are still on the stack.
-        __ BranchPatchable(&StubCode::CallNoSuchMethodFunctionLabel());
+        __ BranchPatchable(&stub_code->CallNoSuchMethodFunctionLabel());
         // The noSuchMethod call may return to the caller, but not here.
         __ brk(0);
       } else {
         __ Stop("Wrong number of arguments");
       }
       __ Bind(&correct_num_arguments);
     }
   } else if (!flow_graph().IsCompiledForOsr()) {
     CopyParameters();
   }
(...skipping 11 matching lines...)
@@ -1096,25 +1101,25 @@
   }
 
   VisitBlocks();
 
   __ brk(0);
   GenerateDeferredCode();
 
   // Emit function patching code. This will be swapped with the first 3
   // instructions at entry point.
   patch_code_pc_offset_ = assembler()->CodeSize();
-  __ BranchPatchable(&StubCode::FixCallersTargetLabel());
+  __ BranchPatchable(&stub_code->FixCallersTargetLabel());
 
   if (is_optimizing()) {
     lazy_deopt_pc_offset_ = assembler()->CodeSize();
-    __ BranchPatchable(&StubCode::DeoptimizeLazyLabel());
+    __ BranchPatchable(&stub_code->DeoptimizeLazyLabel());
   }
 }
 
 
 void FlowGraphCompiler::GenerateCall(intptr_t token_pos,
                                      const ExternalLabel* label,
                                      PcDescriptors::Kind kind,
                                      LocationSummary* locs) {
   __ BranchLinkPatchable(label);
   AddCurrentDescriptor(kind, Isolate::kNoDeoptId, token_pos);
(...skipping 169 matching lines...)
@@ -1290,106 +1295,110 @@
 }
 
 
 void FlowGraphCompiler::EmitUnoptimizedStaticCall(
     intptr_t argument_count,
     intptr_t deopt_id,
     intptr_t token_pos,
     LocationSummary* locs,
     const ICData& ic_data) {
   uword label_address = 0;
+  StubCode* stub_code = isolate()->stub_code();
   if (ic_data.NumArgsTested() == 0) {
-    label_address = StubCode::ZeroArgsUnoptimizedStaticCallEntryPoint();
+    label_address = stub_code->ZeroArgsUnoptimizedStaticCallEntryPoint();
   } else if (ic_data.NumArgsTested() == 2) {
-    label_address = StubCode::TwoArgsUnoptimizedStaticCallEntryPoint();
+    label_address = stub_code->TwoArgsUnoptimizedStaticCallEntryPoint();
   } else {
     UNIMPLEMENTED();
   }
   ExternalLabel target_label(label_address);
   __ LoadImmediate(R4, 0, kNoPP);
   __ LoadObject(R5, ic_data, PP);
   GenerateDartCall(deopt_id,
                    token_pos,
                    &target_label,
                    PcDescriptors::kUnoptStaticCall,
                    locs);
 #if defined(DEBUG)
   __ LoadImmediate(R5, kInvalidObjectPointer, kNoPP);
 #endif
   __ Drop(argument_count);
 }
 
 
 void FlowGraphCompiler::EmitOptimizedStaticCall(
     const Function& function,
     const Array& arguments_descriptor,
     intptr_t argument_count,
     intptr_t deopt_id,
     intptr_t token_pos,
     LocationSummary* locs) {
+  StubCode* stub_code = isolate()->stub_code();
   __ LoadObject(R4, arguments_descriptor, PP);
   // Do not use the code from the function, but let the code be patched so that
   // we can record the outgoing edges to other code.
   GenerateDartCall(deopt_id,
                    token_pos,
-                   &StubCode::CallStaticFunctionLabel(),
+                   &stub_code->CallStaticFunctionLabel(),
                    PcDescriptors::kOptStaticCall,
                    locs);
   AddStaticCallTarget(function);
   __ Drop(argument_count);
 }
 
 
 void FlowGraphCompiler::EmitEqualityRegConstCompare(Register reg,
                                                     const Object& obj,
                                                     bool needs_number_check,
                                                     intptr_t token_pos) {
   if (needs_number_check) {
+    StubCode* stub_code = isolate()->stub_code();
     ASSERT(!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint());
     __ Push(reg);
     __ PushObject(obj, PP);
     if (is_optimizing()) {
       __ BranchLinkPatchable(
-          &StubCode::OptimizedIdenticalWithNumberCheckLabel());
+          &stub_code->OptimizedIdenticalWithNumberCheckLabel());
     } else {
       __ BranchLinkPatchable(
-          &StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
+          &stub_code->UnoptimizedIdenticalWithNumberCheckLabel());
     }
     if (token_pos != Scanner::kNoSourcePos) {
       AddCurrentDescriptor(PcDescriptors::kRuntimeCall,
                            Isolate::kNoDeoptId,
                            token_pos);
     }
     __ Drop(1);  // Discard constant.
     __ Pop(reg);  // Restore 'reg'.
     return;
   }
 
   __ CompareObject(reg, obj, PP);
 }
 
 
 void FlowGraphCompiler::EmitEqualityRegRegCompare(Register left,
                                                   Register right,
                                                   bool needs_number_check,
                                                   intptr_t token_pos) {
   if (needs_number_check) {
+    StubCode* stub_code = isolate()->stub_code();
     __ Push(left);
     __ Push(right);
     if (is_optimizing()) {
       __ BranchLinkPatchable(
-          &StubCode::OptimizedIdenticalWithNumberCheckLabel());
+          &stub_code->OptimizedIdenticalWithNumberCheckLabel());
     } else {
       __ LoadImmediate(R4, 0, kNoPP);
       __ LoadImmediate(R5, 0, kNoPP);
       __ BranchLinkPatchable(
-          &StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
+          &stub_code->UnoptimizedIdenticalWithNumberCheckLabel());
     }
     if (token_pos != Scanner::kNoSourcePos) {
       AddCurrentDescriptor(PcDescriptors::kRuntimeCall,
                            Isolate::kNoDeoptId,
                            token_pos);
     }
 #if defined(DEBUG)
     if (!is_optimizing()) {
       // Do this *after* adding the pc descriptor!
       __ LoadImmediate(R4, kInvalidObjectPointer, kNoPP);
(...skipping 72 matching lines...)
@@ -1468,35 +1477,37 @@
   ASSERT(!ic_data.IsNull() && (ic_data.NumberOfChecks() > 0));
   Label match_found;
   const intptr_t len = ic_data.NumberOfChecks();
   GrowableArray<CidTarget> sorted(len);
   SortICDataByCount(ic_data, &sorted);
   ASSERT(class_id_reg != R4);
   ASSERT(len > 0);  // Why bother otherwise.
   const Array& arguments_descriptor =
       Array::ZoneHandle(ArgumentsDescriptor::New(argument_count,
                                                  argument_names));
+  StubCode* stub_code = isolate()->stub_code();
+
   __ LoadObject(R4, arguments_descriptor, PP);
   for (intptr_t i = 0; i < len; i++) {
     const bool is_last_check = (i == (len - 1));
     Label next_test;
     __ CompareImmediate(class_id_reg, sorted[i].cid, PP);
     if (is_last_check) {
       __ b(deopt, NE);
     } else {
       __ b(&next_test, NE);
     }
     // Do not use the code from the function, but let the code be patched so
     // that we can record the outgoing edges to other code.
     GenerateDartCall(deopt_id,
                      token_index,
-                     &StubCode::CallStaticFunctionLabel(),
+                     &stub_code->CallStaticFunctionLabel(),
                      PcDescriptors::kOptStaticCall,
                      locs);
     const Function& function = *sorted[i].target;
     AddStaticCallTarget(function);
     __ Drop(argument_count);
     if (!is_last_check) {
       __ b(&match_found);
     }
     __ Bind(&next_test);
   }
(...skipping 223 matching lines...)
@@ -1726,10 +1737,10 @@
 void ParallelMoveResolver::RestoreFpuScratch(FpuRegister reg) {
   UNIMPLEMENTED();
 }
 
 
 #undef __
 
 }  // namespace dart
 
 #endif  // defined TARGET_ARCH_ARM64
