Chromium Code Reviews

Diff: runtime/vm/flow_graph_compiler_arm.cc

Issue 1644793002: Replace intptr_t with TokenDescriptor (Closed)
Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Created 4 years, 10 months ago
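The change threads a TokenDescriptor value type through the compiler entry points below in place of a raw intptr_t token position, so sentinel comparisons such as token_pos >= 0 become named queries (IsReal(), IsClassifying()). As orientation only, here is a minimal sketch of the kind of wrapper the diff assumes; the real class is declared elsewhere in the CL, and the classifying bounds below are placeholders rather than the VM's actual constants.

#include <stdint.h>

// Illustrative sketch, not the CL's declaration: constructed from an integer
// position and queried the way the diff below uses it.
class TokenDescriptor {
 public:
  explicit TokenDescriptor(intptr_t value) : value_(value) {}

  // Real source positions are non-negative; replaces raw `token_pos >= 0`.
  bool IsReal() const { return value_ >= 0; }

  // Special negative markers previously tested via Token::IsClassifying().
  bool IsClassifying() const {
    return (value_ <= kFirstClassifyingPos) && (value_ >= kLastClassifyingPos);
  }

  intptr_t value() const { return value_; }

 private:
  static const intptr_t kFirstClassifyingPos = -2;  // placeholder bound
  static const intptr_t kLastClassifyingPos = -7;   // placeholder bound

  intptr_t value_;
};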
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/globals.h"  // Needed here to get TARGET_ARCH_ARM.
#if defined(TARGET_ARCH_ARM)

#include "vm/flow_graph_compiler.h"

#include "vm/ast_printer.h"
(...skipping 248 matching lines...)
}


// Jumps to labels 'is_instance' or 'is_not_instance' respectively, if
// type test is conclusive, otherwise fallthrough if a type test could not
// be completed.
// R0: instance being type checked (preserved).
// Clobbers R2.
RawSubtypeTestCache*
FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest(
-    intptr_t token_pos,
+    TokenDescriptor token_pos,
    const AbstractType& type,
    Label* is_instance_lbl,
    Label* is_not_instance_lbl) {
  __ Comment("InstantiatedTypeWithArgumentsTest");
  ASSERT(type.IsInstantiated());
  const Class& type_class = Class::ZoneHandle(zone(), type.type_class());
  ASSERT(type.IsFunctionType() || (type_class.NumTypeArguments() > 0));
  const Register kInstanceReg = R0;
  Error& bound_error = Error::Handle(zone());
  const Type& int_type = Type::Handle(zone(), Type::IntType());
(...skipping 69 matching lines...)
  __ b(is_not_equal_lbl);
}


// Testing against an instantiated type with no arguments, without
// SubtypeTestCache.
// R0: instance being type checked (preserved).
// Clobbers R2, R3.
// Returns true if there is a fallthrough.
bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest(
-    intptr_t token_pos,
+    TokenDescriptor token_pos,
    const AbstractType& type,
    Label* is_instance_lbl,
    Label* is_not_instance_lbl) {
  __ Comment("InstantiatedTypeNoArgumentsTest");
  ASSERT(type.IsInstantiated());
  if (type.IsFunctionType()) {
    // Fallthrough.
    return true;
  }
  const Class& type_class = Class::Handle(zone(), type.type_class());
(...skipping 49 matching lines...)
// Uses SubtypeTestCache to store instance class and result.
// R0: instance to test.
// Clobbers R1-R4,R9.
// Immediate class test already done.
// TODO(srdjan): Implement a quicker subtype check, as type test
// arrays can grow too high, but they may be useful when optimizing
// code (type-feedback).
RawSubtypeTestCache* FlowGraphCompiler::GenerateSubtype1TestCacheLookup(
-    intptr_t token_pos,
+    TokenDescriptor token_pos,
    const Class& type_class,
    Label* is_instance_lbl,
    Label* is_not_instance_lbl) {
  __ Comment("Subtype1TestCacheLookup");
  const Register kInstanceReg = R0;
  __ LoadClass(R1, kInstanceReg, R2);
  // R1: instance class.
  // Check immediate superclass equality.
  __ ldr(R2, FieldAddress(R1, Class::super_type_offset()));
  __ ldr(R2, FieldAddress(R2, Type::type_class_offset()));
  __ CompareObject(R2, type_class);
  __ b(is_instance_lbl, EQ);

  const Register kTypeArgumentsReg = kNoRegister;
  const Register kTempReg = kNoRegister;
  return GenerateCallSubtypeTestStub(kTestTypeOneArg,
                                     kInstanceReg,
                                     kTypeArgumentsReg,
                                     kTempReg,
                                     is_instance_lbl,
                                     is_not_instance_lbl);
}


// Generates inlined check if 'type' is a type parameter or type itself
// R0: instance (preserved).
RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
-    intptr_t token_pos,
+    TokenDescriptor token_pos,
    const AbstractType& type,
    Label* is_instance_lbl,
    Label* is_not_instance_lbl) {
  __ Comment("UninstantiatedTypeTest");
  ASSERT(!type.IsInstantiated());
  // Skip check if destination is a dynamic type.
  if (type.IsTypeParameter()) {
    const TypeParameter& type_param = TypeParameter::Cast(type);
    // Load instantiator type arguments on stack.
    __ ldr(R1, Address(SP, 0));  // Get instantiator type arguments.
(...skipping 62 matching lines...)
// Inputs:
// - R0: instance being type checked (preserved).
// - R1: optional instantiator type arguments (preserved).
// Clobbers R2, R3.
// Returns:
// - preserved instance in R0 and optional instantiator type arguments in R1.
// Note that this inlined code must be followed by the runtime_call code, as it
// may fall through to it. Otherwise, this inline code will jump to the label
// is_instance or to the label is_not_instance.
RawSubtypeTestCache* FlowGraphCompiler::GenerateInlineInstanceof(
-    intptr_t token_pos,
+    TokenDescriptor token_pos,
    const AbstractType& type,
    Label* is_instance_lbl,
    Label* is_not_instance_lbl) {
  __ Comment("InlineInstanceof");
  if (type.IsVoidType()) {
    // A non-null value is returned from a void function, which will result in a
    // type error. A null value is handled prior to executing this inline code.
    return SubtypeTestCache::null();
  }
  if (type.IsInstantiated()) {
(...skipping 32 matching lines...)
// If instanceof type test cannot be performed successfully at compile time and
// therefore eliminated, optimize it by adding inlined tests for:
// - NULL -> return false.
// - Smi -> compile time subtype check (only if dst class is not parameterized).
// - Class equality (only if class is not parameterized).
// Inputs:
// - R0: object.
// - R1: instantiator type arguments or raw_null.
// Returns:
// - true or false in R0.
-void FlowGraphCompiler::GenerateInstanceOf(intptr_t token_pos,
+void FlowGraphCompiler::GenerateInstanceOf(TokenDescriptor token_pos,
                                           intptr_t deopt_id,
                                           const AbstractType& type,
                                           bool negate_result,
                                           LocationSummary* locs) {
  ASSERT(type.IsFinalized() && !type.IsMalformed() && !type.IsMalbounded());

  // Preserve instantiator type arguments (R1).
  __ Push(R1);

  Label is_instance, is_not_instance;
(...skipping 59 matching lines...)
// - NULL -> return NULL.
// - Smi -> compile time subtype check (only if dst class is not parameterized).
// - Class equality (only if class is not parameterized).
// Inputs:
// - R0: instance being type checked.
// - R1: instantiator type arguments or raw_null.
// Returns:
// - object in R0 for successful assignable check (or throws TypeError).
// Performance notes: positive checks must be quick, negative checks can be slow
// as they throw an exception.
-void FlowGraphCompiler::GenerateAssertAssignable(intptr_t token_pos,
+void FlowGraphCompiler::GenerateAssertAssignable(TokenDescriptor token_pos,
                                                 intptr_t deopt_id,
                                                 const AbstractType& dst_type,
                                                 const String& dst_name,
                                                 LocationSummary* locs) {
-  ASSERT(!Token::IsClassifying(token_pos));
+  ASSERT(!TokenDescriptor(token_pos).IsClassifying());
  ASSERT(!dst_type.IsNull());
  ASSERT(dst_type.IsFinalized());
  // Assignable check is skipped in FlowGraphBuilder, not here.
  ASSERT(dst_type.IsMalformedOrMalbounded() ||
         (!dst_type.IsDynamicType() && !dst_type.IsObjectType()));
  // Preserve instantiator type arguments (R1).
  __ Push(R1);
  // A null object is always assignable and is returned as result.
  Label is_assignable, runtime_call;
  __ CompareObject(R0, Object::null_object());
(...skipping 442 matching lines...)
         i < CallPattern::DeoptCallPatternLengthInInstructions();
         ++i) {
      __ nop();
    }
    lazy_deopt_pc_offset_ = assembler()->CodeSize();
    __ Branch(*StubCode::DeoptimizeLazy_entry());
  }
}


-void FlowGraphCompiler::GenerateCall(intptr_t token_pos,
+void FlowGraphCompiler::GenerateCall(TokenDescriptor token_pos,
                                     const StubEntry& stub_entry,
                                     RawPcDescriptors::Kind kind,
                                     LocationSummary* locs) {
  __ BranchLinkPatchable(stub_entry);
  AddCurrentDescriptor(kind, Thread::kNoDeoptId, token_pos);
  RecordSafepoint(locs);
}


void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
-                                         intptr_t token_pos,
+                                         TokenDescriptor token_pos,
                                         const StubEntry& stub_entry,
                                         RawPcDescriptors::Kind kind,
                                         LocationSummary* locs) {
  __ BranchLinkPatchable(stub_entry);
  AddCurrentDescriptor(kind, deopt_id, token_pos);
  RecordSafepoint(locs);
  // Marks either the continuation point in unoptimized code or the
  // deoptimization point in optimized code, after call.
  const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id);
  if (is_optimizing()) {
    AddDeoptIndexAtCall(deopt_id_after, token_pos);
  } else {
    // Add deoptimization continuation point after the call and before the
    // arguments are removed.
    AddCurrentDescriptor(RawPcDescriptors::kDeopt,
                         deopt_id_after, token_pos);
  }
}


-void FlowGraphCompiler::GenerateRuntimeCall(intptr_t token_pos,
+void FlowGraphCompiler::GenerateRuntimeCall(TokenDescriptor token_pos,
                                            intptr_t deopt_id,
                                            const RuntimeEntry& entry,
                                            intptr_t argument_count,
                                            LocationSummary* locs) {
  __ CallRuntime(entry, argument_count);
  AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id, token_pos);
  RecordSafepoint(locs);
  if (deopt_id != Thread::kNoDeoptId) {
    // Marks either the continuation point in unoptimized code or the
    // deoptimization point in optimized code, after call.
(...skipping 33 matching lines...)
  assembler_->set_use_far_branches(old_use_far_branches);
#endif  // DEBUG
}


void FlowGraphCompiler::EmitOptimizedInstanceCall(
    const StubEntry& stub_entry,
    const ICData& ic_data,
    intptr_t argument_count,
    intptr_t deopt_id,
-    intptr_t token_pos,
+    TokenDescriptor token_pos,
    LocationSummary* locs) {
  ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0);
  // Each ICData propagated from unoptimized to optimized code contains the
  // function that corresponds to the Dart function of that IC call. Due
  // to inlining in optimized code, that function may not correspond to the
  // top-level function (parsed_function().function()) which could be
  // reoptimized and which counter needs to be incremented.
  // Pass the function explicitly, it is used in IC stub.

  __ LoadObject(R8, parsed_function().function());
  __ LoadUniqueObject(R9, ic_data);
  GenerateDartCall(deopt_id,
                   token_pos,
                   stub_entry,
                   RawPcDescriptors::kIcCall,
                   locs);
  __ Drop(argument_count);
}


void FlowGraphCompiler::EmitInstanceCall(const StubEntry& stub_entry,
                                         const ICData& ic_data,
                                         intptr_t argument_count,
                                         intptr_t deopt_id,
-                                         intptr_t token_pos,
+                                         TokenDescriptor token_pos,
                                         LocationSummary* locs) {
  ASSERT(Array::Handle(zone(), ic_data.arguments_descriptor()).Length() > 0);
  __ LoadUniqueObject(R9, ic_data);
  GenerateDartCall(deopt_id,
                   token_pos,
                   stub_entry,
                   RawPcDescriptors::kIcCall,
                   locs);
  __ Drop(argument_count);
}


void FlowGraphCompiler::EmitMegamorphicInstanceCall(
    const ICData& ic_data,
    intptr_t argument_count,
    intptr_t deopt_id,
-    intptr_t token_pos,
+    TokenDescriptor token_pos,
    LocationSummary* locs,
    intptr_t try_index) {
  const String& name = String::Handle(zone(), ic_data.target_name());
  const Array& arguments_descriptor =
      Array::ZoneHandle(zone(), ic_data.arguments_descriptor());
  ASSERT(!arguments_descriptor.IsNull() && (arguments_descriptor.Length() > 0));
  const MegamorphicCache& cache = MegamorphicCache::ZoneHandle(zone(),
      MegamorphicCacheTable::Lookup(isolate(), name, arguments_descriptor));

  __ Comment("MegamorphicCall");
(...skipping 32 matching lines...)
                         deopt_id_after, token_pos);
  }
  __ Drop(argument_count);
}


void FlowGraphCompiler::EmitSwitchableInstanceCall(
    const ICData& ic_data,
    intptr_t argument_count,
    intptr_t deopt_id,
-    intptr_t token_pos,
+    TokenDescriptor token_pos,
    LocationSummary* locs) {
  __ Comment("SwitchableCall");
  __ LoadFromOffset(kWord, R0, SP, (argument_count - 1) * kWordSize);
  if (ic_data.NumArgsTested() == 1) {
    __ LoadUniqueObject(R9, ic_data);
    __ BranchLinkPatchable(*StubCode::ICLookup_entry());
  } else {
    const String& name = String::Handle(zone(), ic_data.target_name());
    const Array& arguments_descriptor =
        Array::ZoneHandle(zone(), ic_data.arguments_descriptor());
(...skipping 18 matching lines...)
    AddCurrentDescriptor(RawPcDescriptors::kDeopt,
                         deopt_id_after, token_pos);
  }
  __ Drop(argument_count);
}


void FlowGraphCompiler::EmitUnoptimizedStaticCall(
    intptr_t argument_count,
    intptr_t deopt_id,
-    intptr_t token_pos,
+    TokenDescriptor token_pos,
    LocationSummary* locs,
    const ICData& ic_data) {
  const StubEntry* stub_entry =
      StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested());
  __ LoadObject(R9, ic_data);
  GenerateDartCall(deopt_id,
                   token_pos,
                   *stub_entry,
                   RawPcDescriptors::kUnoptStaticCall,
                   locs);
  __ Drop(argument_count);
}


void FlowGraphCompiler::EmitOptimizedStaticCall(
    const Function& function,
    const Array& arguments_descriptor,
    intptr_t argument_count,
    intptr_t deopt_id,
-    intptr_t token_pos,
+    TokenDescriptor token_pos,
    LocationSummary* locs) {
  __ LoadObject(R4, arguments_descriptor);
  // Do not use the code from the function, but let the code be patched so that
  // we can record the outgoing edges to other code.
  GenerateDartCall(deopt_id,
                   token_pos,
                   *StubCode::CallStaticFunction_entry(),
                   RawPcDescriptors::kOther,
                   locs);
  AddStaticCallTarget(function);
  __ Drop(argument_count);
}


Condition FlowGraphCompiler::EmitEqualityRegConstCompare(
    Register reg,
    const Object& obj,
    bool needs_number_check,
-    intptr_t token_pos) {
+    TokenDescriptor token_pos) {
  if (needs_number_check) {
    ASSERT(!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint());
    __ Push(reg);
    __ PushObject(obj);
    if (is_optimizing()) {
      __ BranchLinkPatchable(
          *StubCode::OptimizedIdenticalWithNumberCheck_entry());
    } else {
      __ BranchLinkPatchable(
          *StubCode::UnoptimizedIdenticalWithNumberCheck_entry());
    }
-    if (token_pos >= 0) {
+    if (token_pos.IsReal()) {
      AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
                           Thread::kNoDeoptId,
                           token_pos);
    }
    // Stub returns result in flags (result of a cmp, we need Z computed).
    __ Drop(1);  // Discard constant.
    __ Pop(reg);  // Restore 'reg'.
  } else {
    __ CompareObject(reg, obj);
  }
  return EQ;
}


-Condition FlowGraphCompiler::EmitEqualityRegRegCompare(Register left,
-                                                       Register right,
-                                                       bool needs_number_check,
-                                                       intptr_t token_pos) {
+Condition FlowGraphCompiler::EmitEqualityRegRegCompare(
+    Register left,
+    Register right,
+    bool needs_number_check,
+    TokenDescriptor token_pos) {
  if (needs_number_check) {
    __ Push(left);
    __ Push(right);
    if (is_optimizing()) {
      __ BranchLinkPatchable(
          *StubCode::OptimizedIdenticalWithNumberCheck_entry());
    } else {
      __ BranchLinkPatchable(
          *StubCode::UnoptimizedIdenticalWithNumberCheck_entry());
    }
-    if (token_pos >= 0) {
+    if (token_pos.IsReal()) {
      AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
                           Thread::kNoDeoptId,
                           token_pos);
    }
    // Stub returns result in flags (result of a cmp, we need Z computed).
    __ Pop(right);
    __ Pop(left);
  } else {
    __ cmp(left, Operand(right));
  }
(...skipping 93 matching lines...)
}
#endif


void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
                                        intptr_t argument_count,
                                        const Array& argument_names,
                                        Label* failed,
                                        Label* match_found,
                                        intptr_t deopt_id,
-                                        intptr_t token_index,
+                                        TokenDescriptor token_index,
                                        LocationSummary* locs) {
  ASSERT(is_optimizing());
  __ Comment("EmitTestAndCall");
  const Array& arguments_descriptor =
      Array::ZoneHandle(zone(), ArgumentsDescriptor::New(argument_count,
                                                         argument_names));

  // Load receiver into R0.
  __ LoadFromOffset(kWord, R0, SP, (argument_count - 1) * kWordSize);
  __ LoadObject(R4, arguments_descriptor);
(...skipping 368 matching lines...)
  DRegister dreg = EvenDRegisterOf(reg);
  __ vldrd(dreg, Address(SP, kDoubleSize, Address::PostIndex));
}


#undef __

}  // namespace dart

#endif  // defined TARGET_ARCH_ARM
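Beyond the parameter types, the visible changes in this file are the IsClassifying() assert in GenerateAssertAssignable and the guards in the two equality-compare emitters, where the raw sentinel test token_pos >= 0 becomes token_pos.IsReal(). Under the sketch at the top of this page those queries match the comparisons they replace; a small check of that sketch (placeholder constants, not part of the CL):

#include <assert.h>

int main() {
  assert(TokenDescriptor(42).IsReal());          // ordinary source position
  assert(!TokenDescriptor(42).IsClassifying());
  assert(!TokenDescriptor(-1).IsReal());         // "no position" sentinel
  assert(TokenDescriptor(-2).IsClassifying());   // placeholder classifying marker
  return 0;
}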