Chromium Code Reviews

Side by Side Diff: runtime/vm/flow_graph_compiler_ia32.cc

Issue 1644793002: Replace intptr_t with TokenDescriptor (Closed)
Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Created 4 years, 10 months ago
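
The change in this file is mechanical: every compiler entry point that used to take a source position as a bare intptr_t (where negative values double as sentinels) now takes a small TokenDescriptor value type, and raw comparisons such as token_pos >= 0 become named queries such as token_pos.IsReal(). The wrapper itself is defined elsewhere in the VM sources; the sketch below is only an illustration of the shape such a type can take, under the assumption that it wraps a single intptr_t. The sentinel constants and the value() accessor are hypothetical; IsReal() and IsClassifying() mirror the calls visible in the hunks further down.

// Illustrative sketch only; member names other than IsReal() and
// IsClassifying() are hypothetical, not the actual VM definition.
#include <cstdint>

class TokenDescriptor {
 public:
  explicit TokenDescriptor(intptr_t value) : value_(value) { }

  // A "real" position points into the source text; sentinels are negative.
  bool IsReal() const { return value_ >= 0; }

  // Classifying positions are reserved negative values used to tag
  // synthesized code that has no single source location.
  bool IsClassifying() const {
    return (value_ <= kFirstClassifyingPos) && (value_ >= kLastClassifyingPos);
  }

  intptr_t value() const { return value_; }

  bool operator==(const TokenDescriptor& other) const {
    return value_ == other.value_;
  }

 private:
  // Hypothetical sentinel layout: -1 for "no source position", plus a small
  // negative range reserved for classifying positions.
  static constexpr intptr_t kNoSourcePos = -1;
  static constexpr intptr_t kFirstClassifyingPos = -2;
  static constexpr intptr_t kLastClassifyingPos = -16;

  intptr_t value_;
};

With such a wrapper, call sites read as questions about the position rather than comparisons against magic numbers, which is the pattern the hunks below adopt.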
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_IA32. 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_IA32.
6 #if defined(TARGET_ARCH_IA32) 6 #if defined(TARGET_ARCH_IA32)
7 7
8 #include "vm/flow_graph_compiler.h" 8 #include "vm/flow_graph_compiler.h"
9 9
10 #include "vm/ast_printer.h" 10 #include "vm/ast_printer.h"
(...skipping 252 matching lines...)
263 } 263 }
264 264
265 265
266 // Jumps to labels 'is_instance' or 'is_not_instance' respectively, if 266 // Jumps to labels 'is_instance' or 'is_not_instance' respectively, if
267 // type test is conclusive, otherwise fallthrough if a type test could not 267 // type test is conclusive, otherwise fallthrough if a type test could not
268 // be completed. 268 // be completed.
269 // EAX: instance (must survive). 269 // EAX: instance (must survive).
270 // Clobbers ECX, EDI. 270 // Clobbers ECX, EDI.
271 RawSubtypeTestCache* 271 RawSubtypeTestCache*
272 FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest( 272 FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest(
273 intptr_t token_pos, 273 TokenDescriptor token_pos,
274 const AbstractType& type, 274 const AbstractType& type,
275 Label* is_instance_lbl, 275 Label* is_instance_lbl,
276 Label* is_not_instance_lbl) { 276 Label* is_not_instance_lbl) {
277 __ Comment("InstantiatedTypeWithArgumentsTest"); 277 __ Comment("InstantiatedTypeWithArgumentsTest");
278 ASSERT(type.IsInstantiated()); 278 ASSERT(type.IsInstantiated());
279 const Class& type_class = Class::ZoneHandle(zone(), type.type_class()); 279 const Class& type_class = Class::ZoneHandle(zone(), type.type_class());
280 ASSERT(type.IsFunctionType() || (type_class.NumTypeArguments() > 0)); 280 ASSERT(type.IsFunctionType() || (type_class.NumTypeArguments() > 0));
281 const Register kInstanceReg = EAX; 281 const Register kInstanceReg = EAX;
282 Error& bound_error = Error::Handle(zone()); 282 Error& bound_error = Error::Handle(zone());
283 const Type& int_type = Type::Handle(zone(), Type::IntType()); 283 const Type& int_type = Type::Handle(zone(), Type::IntType());
(...skipping 68 matching lines...)
352 __ jmp(is_not_equal_lbl); 352 __ jmp(is_not_equal_lbl);
353 } 353 }
354 354
355 355
356 // Testing against an instantiated type with no arguments, without 356 // Testing against an instantiated type with no arguments, without
357 // SubtypeTestCache. 357 // SubtypeTestCache.
358 // EAX: instance to test against (preserved). 358 // EAX: instance to test against (preserved).
359 // Clobbers ECX, EDI. 359 // Clobbers ECX, EDI.
360 // Returns true if there is a fallthrough. 360 // Returns true if there is a fallthrough.
361 bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest( 361 bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest(
362 intptr_t token_pos, 362 TokenDescriptor token_pos,
363 const AbstractType& type, 363 const AbstractType& type,
364 Label* is_instance_lbl, 364 Label* is_instance_lbl,
365 Label* is_not_instance_lbl) { 365 Label* is_not_instance_lbl) {
366 __ Comment("InstantiatedTypeNoArgumentsTest"); 366 __ Comment("InstantiatedTypeNoArgumentsTest");
367 ASSERT(type.IsInstantiated()); 367 ASSERT(type.IsInstantiated());
368 if (type.IsFunctionType()) { 368 if (type.IsFunctionType()) {
369 // Fallthrough. 369 // Fallthrough.
370 return true; 370 return true;
371 } 371 }
372 const Class& type_class = Class::Handle(zone(), type.type_class()); 372 const Class& type_class = Class::Handle(zone(), type.type_class());
(...skipping 49 matching lines...)
422 422
423 423
424 // Uses SubtypeTestCache to store instance class and result. 424 // Uses SubtypeTestCache to store instance class and result.
425 // EAX: instance to test. 425 // EAX: instance to test.
426 // Clobbers EDI, ECX. 426 // Clobbers EDI, ECX.
427 // Immediate class test already done. 427 // Immediate class test already done.
428 // TODO(srdjan): Implement a quicker subtype check, as type test 428 // TODO(srdjan): Implement a quicker subtype check, as type test
429 // arrays can grow too high, but they may be useful when optimizing 429 // arrays can grow too high, but they may be useful when optimizing
430 // code (type-feedback). 430 // code (type-feedback).
431 RawSubtypeTestCache* FlowGraphCompiler::GenerateSubtype1TestCacheLookup( 431 RawSubtypeTestCache* FlowGraphCompiler::GenerateSubtype1TestCacheLookup(
432 intptr_t token_pos, 432 TokenDescriptor token_pos,
433 const Class& type_class, 433 const Class& type_class,
434 Label* is_instance_lbl, 434 Label* is_instance_lbl,
435 Label* is_not_instance_lbl) { 435 Label* is_not_instance_lbl) {
436 __ Comment("Subtype1TestCacheLookup"); 436 __ Comment("Subtype1TestCacheLookup");
437 const Register kInstanceReg = EAX; 437 const Register kInstanceReg = EAX;
438 __ LoadClass(ECX, kInstanceReg, EDI); 438 __ LoadClass(ECX, kInstanceReg, EDI);
439 // ECX: instance class. 439 // ECX: instance class.
440 // Check immediate superclass equality. 440 // Check immediate superclass equality.
441 __ movl(EDI, FieldAddress(ECX, Class::super_type_offset())); 441 __ movl(EDI, FieldAddress(ECX, Class::super_type_offset()));
442 __ movl(EDI, FieldAddress(EDI, Type::type_class_offset())); 442 __ movl(EDI, FieldAddress(EDI, Type::type_class_offset()));
443 __ CompareObject(EDI, type_class); 443 __ CompareObject(EDI, type_class);
444 __ j(EQUAL, is_instance_lbl); 444 __ j(EQUAL, is_instance_lbl);
445 445
446 const Register kTypeArgumentsReg = kNoRegister; 446 const Register kTypeArgumentsReg = kNoRegister;
447 const Register kTempReg = EDI; 447 const Register kTempReg = EDI;
448 return GenerateCallSubtypeTestStub(kTestTypeOneArg, 448 return GenerateCallSubtypeTestStub(kTestTypeOneArg,
449 kInstanceReg, 449 kInstanceReg,
450 kTypeArgumentsReg, 450 kTypeArgumentsReg,
451 kTempReg, 451 kTempReg,
452 is_instance_lbl, 452 is_instance_lbl,
453 is_not_instance_lbl); 453 is_not_instance_lbl);
454 } 454 }
455 455
456 456
457 // Generates inlined check if 'type' is a type parameter or type itself 457 // Generates inlined check if 'type' is a type parameter or type itself
458 // EAX: instance (preserved). 458 // EAX: instance (preserved).
459 // Clobbers EDX, EDI, ECX. 459 // Clobbers EDX, EDI, ECX.
460 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest( 460 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
461 intptr_t token_pos, 461 TokenDescriptor token_pos,
462 const AbstractType& type, 462 const AbstractType& type,
463 Label* is_instance_lbl, 463 Label* is_instance_lbl,
464 Label* is_not_instance_lbl) { 464 Label* is_not_instance_lbl) {
465 __ Comment("UninstantiatedTypeTest"); 465 __ Comment("UninstantiatedTypeTest");
466 ASSERT(!type.IsInstantiated()); 466 ASSERT(!type.IsInstantiated());
467 // Skip check if destination is a dynamic type. 467 // Skip check if destination is a dynamic type.
468 const Immediate& raw_null = 468 const Immediate& raw_null =
469 Immediate(reinterpret_cast<intptr_t>(Object::null())); 469 Immediate(reinterpret_cast<intptr_t>(Object::null()));
470 if (type.IsTypeParameter()) { 470 if (type.IsTypeParameter()) {
471 const TypeParameter& type_param = TypeParameter::Cast(type); 471 const TypeParameter& type_param = TypeParameter::Cast(type);
(...skipping 64 matching lines...)
536 // Inputs: 536 // Inputs:
537 // - EAX: instance to test against (preserved). 537 // - EAX: instance to test against (preserved).
538 // - EDX: optional instantiator type arguments (preserved). 538 // - EDX: optional instantiator type arguments (preserved).
539 // Clobbers ECX, EDI. 539 // Clobbers ECX, EDI.
540 // Returns: 540 // Returns:
541 // - preserved instance in EAX and optional instantiator type arguments in EDX. 541 // - preserved instance in EAX and optional instantiator type arguments in EDX.
542 // Note that this inlined code must be followed by the runtime_call code, as it 542 // Note that this inlined code must be followed by the runtime_call code, as it
543 // may fall through to it. Otherwise, this inline code will jump to the label 543 // may fall through to it. Otherwise, this inline code will jump to the label
544 // is_instance or to the label is_not_instance. 544 // is_instance or to the label is_not_instance.
545 RawSubtypeTestCache* FlowGraphCompiler::GenerateInlineInstanceof( 545 RawSubtypeTestCache* FlowGraphCompiler::GenerateInlineInstanceof(
546 intptr_t token_pos, 546 TokenDescriptor token_pos,
547 const AbstractType& type, 547 const AbstractType& type,
548 Label* is_instance_lbl, 548 Label* is_instance_lbl,
549 Label* is_not_instance_lbl) { 549 Label* is_not_instance_lbl) {
550 __ Comment("InlineInstanceof"); 550 __ Comment("InlineInstanceof");
551 if (type.IsVoidType()) { 551 if (type.IsVoidType()) {
552 // A non-null value is returned from a void function, which will result in a 552 // A non-null value is returned from a void function, which will result in a
553 // type error. A null value is handled prior to executing this inline code. 553 // type error. A null value is handled prior to executing this inline code.
554 return SubtypeTestCache::null(); 554 return SubtypeTestCache::null();
555 } 555 }
556 if (type.IsInstantiated()) { 556 if (type.IsInstantiated()) {
(...skipping 33 matching lines...)
590 // therefore eliminated, optimize it by adding inlined tests for: 590 // therefore eliminated, optimize it by adding inlined tests for:
591 // - NULL -> return false. 591 // - NULL -> return false.
592 // - Smi -> compile time subtype check (only if dst class is not parameterized). 592 // - Smi -> compile time subtype check (only if dst class is not parameterized).
593 // - Class equality (only if class is not parameterized). 593 // - Class equality (only if class is not parameterized).
594 // Inputs: 594 // Inputs:
595 // - EAX: object. 595 // - EAX: object.
596 // - EDX: instantiator type arguments or raw_null. 596 // - EDX: instantiator type arguments or raw_null.
597 // Clobbers EDX. 597 // Clobbers EDX.
598 // Returns: 598 // Returns:
599 // - true or false in EAX. 599 // - true or false in EAX.
600 void FlowGraphCompiler::GenerateInstanceOf(intptr_t token_pos, 600 void FlowGraphCompiler::GenerateInstanceOf(TokenDescriptor token_pos,
601 intptr_t deopt_id, 601 intptr_t deopt_id,
602 const AbstractType& type, 602 const AbstractType& type,
603 bool negate_result, 603 bool negate_result,
604 LocationSummary* locs) { 604 LocationSummary* locs) {
605 ASSERT(type.IsFinalized() && !type.IsMalformedOrMalbounded()); 605 ASSERT(type.IsFinalized() && !type.IsMalformedOrMalbounded());
606 606
607 const Immediate& raw_null = 607 const Immediate& raw_null =
608 Immediate(reinterpret_cast<intptr_t>(Object::null())); 608 Immediate(reinterpret_cast<intptr_t>(Object::null()));
609 Label is_instance, is_not_instance; 609 Label is_instance, is_not_instance;
610 __ pushl(EDX); // Store instantiator type arguments. 610 __ pushl(EDX); // Store instantiator type arguments.
(...skipping 61 matching lines...)
672 // - NULL -> return NULL. 672 // - NULL -> return NULL.
673 // - Smi -> compile time subtype check (only if dst class is not parameterized). 673 // - Smi -> compile time subtype check (only if dst class is not parameterized).
674 // - Class equality (only if class is not parameterized). 674 // - Class equality (only if class is not parameterized).
675 // Inputs: 675 // Inputs:
676 // - EAX: object. 676 // - EAX: object.
677 // - EDX: instantiator type arguments or raw_null. 677 // - EDX: instantiator type arguments or raw_null.
678 // Returns: 678 // Returns:
679 // - object in EAX for successful assignable check (or throws TypeError). 679 // - object in EAX for successful assignable check (or throws TypeError).
680 // Performance notes: positive checks must be quick, negative checks can be slow 680 // Performance notes: positive checks must be quick, negative checks can be slow
681 // as they throw an exception. 681 // as they throw an exception.
682 void FlowGraphCompiler::GenerateAssertAssignable(intptr_t token_pos, 682 void FlowGraphCompiler::GenerateAssertAssignable(TokenDescriptor token_pos,
683 intptr_t deopt_id, 683 intptr_t deopt_id,
684 const AbstractType& dst_type, 684 const AbstractType& dst_type,
685 const String& dst_name, 685 const String& dst_name,
686 LocationSummary* locs) { 686 LocationSummary* locs) {
687 ASSERT(!Token::IsClassifying(token_pos)); 687 ASSERT(!TokenDescriptor(token_pos).IsClassifying());
688 ASSERT(!dst_type.IsNull()); 688 ASSERT(!dst_type.IsNull());
689 ASSERT(dst_type.IsFinalized()); 689 ASSERT(dst_type.IsFinalized());
690 // Assignable check is skipped in FlowGraphBuilder, not here. 690 // Assignable check is skipped in FlowGraphBuilder, not here.
691 ASSERT(dst_type.IsMalformedOrMalbounded() || 691 ASSERT(dst_type.IsMalformedOrMalbounded() ||
692 (!dst_type.IsDynamicType() && !dst_type.IsObjectType())); 692 (!dst_type.IsDynamicType() && !dst_type.IsObjectType()));
693 __ pushl(EDX); // Store instantiator type arguments. 693 __ pushl(EDX); // Store instantiator type arguments.
694 // A null object is always assignable and is returned as result. 694 // A null object is always assignable and is returned as result.
695 const Immediate& raw_null = 695 const Immediate& raw_null =
696 Immediate(reinterpret_cast<intptr_t>(Object::null())); 696 Immediate(reinterpret_cast<intptr_t>(Object::null()));
697 Label is_assignable, runtime_call; 697 Label is_assignable, runtime_call;
(...skipping 442 matching lines...)
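
One detail in the GenerateAssertAssignable hunk just above: the old free-function check Token::IsClassifying(token_pos) on a raw intptr_t becomes a query on the descriptor itself. Since the parameter is already a TokenDescriptor here, the extra TokenDescriptor(token_pos) wrapping presumably goes through the copy constructor and could be written as a direct call. The sketch below shows the shape of the new check, assuming the illustrative wrapper from the note at the top of this page and a stand-in ASSERT macro.

#include <cassert>
#define ASSERT(expr) assert(expr)  // Stand-in for the VM's ASSERT macro.

// Sketch: GenerateAssertAssignable must never be handed a classifying
// (synthetic) position; the position now answers that question itself.
void CheckRealAssignablePosition(TokenDescriptor token_pos) {
  // Old style:  ASSERT(!Token::IsClassifying(raw_token_pos));
  ASSERT(!token_pos.IsClassifying());
}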
1140 if (is_optimizing() && Compiler::allow_recompilation()) { 1140 if (is_optimizing() && Compiler::allow_recompilation()) {
1141 // Leave enough space for patching in case of lazy deoptimization from 1141 // Leave enough space for patching in case of lazy deoptimization from
1142 // deferred code. 1142 // deferred code.
1143 __ nop(CallPattern::pattern_length_in_bytes()); 1143 __ nop(CallPattern::pattern_length_in_bytes());
1144 lazy_deopt_pc_offset_ = assembler()->CodeSize(); 1144 lazy_deopt_pc_offset_ = assembler()->CodeSize();
1145 __ Jmp(*StubCode::DeoptimizeLazy_entry()); 1145 __ Jmp(*StubCode::DeoptimizeLazy_entry());
1146 } 1146 }
1147 } 1147 }
1148 1148
1149 1149
1150 void FlowGraphCompiler::GenerateCall(intptr_t token_pos, 1150 void FlowGraphCompiler::GenerateCall(TokenDescriptor token_pos,
1151 const StubEntry& stub_entry, 1151 const StubEntry& stub_entry,
1152 RawPcDescriptors::Kind kind, 1152 RawPcDescriptors::Kind kind,
1153 LocationSummary* locs) { 1153 LocationSummary* locs) {
1154 __ Call(stub_entry); 1154 __ Call(stub_entry);
1155 AddCurrentDescriptor(kind, Thread::kNoDeoptId, token_pos); 1155 AddCurrentDescriptor(kind, Thread::kNoDeoptId, token_pos);
1156 RecordSafepoint(locs); 1156 RecordSafepoint(locs);
1157 } 1157 }
1158 1158
1159 1159
1160 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id, 1160 void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
1161 intptr_t token_pos, 1161 TokenDescriptor token_pos,
1162 const StubEntry& stub_entry, 1162 const StubEntry& stub_entry,
1163 RawPcDescriptors::Kind kind, 1163 RawPcDescriptors::Kind kind,
1164 LocationSummary* locs) { 1164 LocationSummary* locs) {
1165 __ Call(stub_entry); 1165 __ Call(stub_entry);
1166 AddCurrentDescriptor(kind, deopt_id, token_pos); 1166 AddCurrentDescriptor(kind, deopt_id, token_pos);
1167 RecordSafepoint(locs); 1167 RecordSafepoint(locs);
1168 // Marks either the continuation point in unoptimized code or the 1168 // Marks either the continuation point in unoptimized code or the
1169 // deoptimization point in optimized code, after call. 1169 // deoptimization point in optimized code, after call.
1170 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id); 1170 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id);
1171 if (is_optimizing()) { 1171 if (is_optimizing()) {
1172 AddDeoptIndexAtCall(deopt_id_after, token_pos); 1172 AddDeoptIndexAtCall(deopt_id_after, token_pos);
1173 } else { 1173 } else {
1174 // Add deoptimization continuation point after the call and before the 1174 // Add deoptimization continuation point after the call and before the
1175 // arguments are removed. 1175 // arguments are removed.
1176 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); 1176 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos);
1177 } 1177 }
1178 } 1178 }
1179 1179
1180 1180
1181 void FlowGraphCompiler::GenerateRuntimeCall(intptr_t token_pos, 1181 void FlowGraphCompiler::GenerateRuntimeCall(TokenDescriptor token_pos,
1182 intptr_t deopt_id, 1182 intptr_t deopt_id,
1183 const RuntimeEntry& entry, 1183 const RuntimeEntry& entry,
1184 intptr_t argument_count, 1184 intptr_t argument_count,
1185 LocationSummary* locs) { 1185 LocationSummary* locs) {
1186 __ CallRuntime(entry, argument_count); 1186 __ CallRuntime(entry, argument_count);
1187 AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id, token_pos); 1187 AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id, token_pos);
1188 RecordSafepoint(locs); 1188 RecordSafepoint(locs);
1189 if (deopt_id != Thread::kNoDeoptId) { 1189 if (deopt_id != Thread::kNoDeoptId) {
1190 // Marks either the continuation point in unoptimized code or the 1190 // Marks either the continuation point in unoptimized code or the
1191 // deoptimization point in optimized code, after call. 1191 // deoptimization point in optimized code, after call.
1192 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id); 1192 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id);
1193 if (is_optimizing()) { 1193 if (is_optimizing()) {
1194 AddDeoptIndexAtCall(deopt_id_after, token_pos); 1194 AddDeoptIndexAtCall(deopt_id_after, token_pos);
1195 } else { 1195 } else {
1196 // Add deoptimization continuation point after the call and before the 1196 // Add deoptimization continuation point after the call and before the
1197 // arguments are removed. 1197 // arguments are removed.
1198 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); 1198 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos);
1199 } 1199 }
1200 } 1200 }
1201 } 1201 }
1202 1202
1203 1203
1204 void FlowGraphCompiler::EmitUnoptimizedStaticCall( 1204 void FlowGraphCompiler::EmitUnoptimizedStaticCall(
1205 intptr_t argument_count, 1205 intptr_t argument_count,
1206 intptr_t deopt_id, 1206 intptr_t deopt_id,
1207 intptr_t token_pos, 1207 TokenDescriptor token_pos,
1208 LocationSummary* locs, 1208 LocationSummary* locs,
1209 const ICData& ic_data) { 1209 const ICData& ic_data) {
1210 const StubEntry& stub_entry = 1210 const StubEntry& stub_entry =
1211 *StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested()); 1211 *StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested());
1212 __ LoadObject(ECX, ic_data); 1212 __ LoadObject(ECX, ic_data);
1213 GenerateDartCall(deopt_id, 1213 GenerateDartCall(deopt_id,
1214 token_pos, 1214 token_pos,
1215 stub_entry, 1215 stub_entry,
1216 RawPcDescriptors::kUnoptStaticCall, 1216 RawPcDescriptors::kUnoptStaticCall,
1217 locs); 1217 locs);
(...skipping 12 matching lines...)
1230 __ LoadObject(EAX, edge_counters_array_); 1230 __ LoadObject(EAX, edge_counters_array_);
1231 __ IncrementSmiField(FieldAddress(EAX, Array::element_offset(edge_id)), 1); 1231 __ IncrementSmiField(FieldAddress(EAX, Array::element_offset(edge_id)), 1);
1232 } 1232 }
1233 1233
1234 1234
1235 void FlowGraphCompiler::EmitOptimizedInstanceCall( 1235 void FlowGraphCompiler::EmitOptimizedInstanceCall(
1236 const StubEntry& stub_entry, 1236 const StubEntry& stub_entry,
1237 const ICData& ic_data, 1237 const ICData& ic_data,
1238 intptr_t argument_count, 1238 intptr_t argument_count,
1239 intptr_t deopt_id, 1239 intptr_t deopt_id,
1240 intptr_t token_pos, 1240 TokenDescriptor token_pos,
1241 LocationSummary* locs) { 1241 LocationSummary* locs) {
1242 ASSERT(Array::Handle(ic_data.arguments_descriptor()).Length() > 0); 1242 ASSERT(Array::Handle(ic_data.arguments_descriptor()).Length() > 0);
1243 // Each ICData propagated from unoptimized to optimized code contains the 1243 // Each ICData propagated from unoptimized to optimized code contains the
1244 // function that corresponds to the Dart function of that IC call. Due 1244 // function that corresponds to the Dart function of that IC call. Due
1245 // to inlining in optimized code, that function may not correspond to the 1245 // to inlining in optimized code, that function may not correspond to the
1246 // top-level function (parsed_function().function()) which could be 1246 // top-level function (parsed_function().function()) which could be
1247 // reoptimized and which counter needs to be incremented. 1247 // reoptimized and which counter needs to be incremented.
1248 // Pass the function explicitly, it is used in IC stub. 1248 // Pass the function explicitly, it is used in IC stub.
1249 __ LoadObject(EBX, parsed_function().function()); 1249 __ LoadObject(EBX, parsed_function().function());
1250 __ LoadObject(ECX, ic_data); 1250 __ LoadObject(ECX, ic_data);
1251 GenerateDartCall(deopt_id, 1251 GenerateDartCall(deopt_id,
1252 token_pos, 1252 token_pos,
1253 stub_entry, 1253 stub_entry,
1254 RawPcDescriptors::kIcCall, 1254 RawPcDescriptors::kIcCall,
1255 locs); 1255 locs);
1256 __ Drop(argument_count); 1256 __ Drop(argument_count);
1257 } 1257 }
1258 1258
1259 1259
1260 void FlowGraphCompiler::EmitInstanceCall(const StubEntry& stub_entry, 1260 void FlowGraphCompiler::EmitInstanceCall(const StubEntry& stub_entry,
1261 const ICData& ic_data, 1261 const ICData& ic_data,
1262 intptr_t argument_count, 1262 intptr_t argument_count,
1263 intptr_t deopt_id, 1263 intptr_t deopt_id,
1264 intptr_t token_pos, 1264 TokenDescriptor token_pos,
1265 LocationSummary* locs) { 1265 LocationSummary* locs) {
1266 ASSERT(Array::Handle(ic_data.arguments_descriptor()).Length() > 0); 1266 ASSERT(Array::Handle(ic_data.arguments_descriptor()).Length() > 0);
1267 __ LoadObject(ECX, ic_data); 1267 __ LoadObject(ECX, ic_data);
1268 GenerateDartCall(deopt_id, 1268 GenerateDartCall(deopt_id,
1269 token_pos, 1269 token_pos,
1270 stub_entry, 1270 stub_entry,
1271 RawPcDescriptors::kIcCall, 1271 RawPcDescriptors::kIcCall,
1272 locs); 1272 locs);
1273 __ Drop(argument_count); 1273 __ Drop(argument_count);
1274 } 1274 }
1275 1275
1276 1276
1277 void FlowGraphCompiler::EmitMegamorphicInstanceCall( 1277 void FlowGraphCompiler::EmitMegamorphicInstanceCall(
1278 const ICData& ic_data, 1278 const ICData& ic_data,
1279 intptr_t argument_count, 1279 intptr_t argument_count,
1280 intptr_t deopt_id, 1280 intptr_t deopt_id,
1281 intptr_t token_pos, 1281 TokenDescriptor token_pos,
1282 LocationSummary* locs, 1282 LocationSummary* locs,
1283 intptr_t try_index) { 1283 intptr_t try_index) {
1284 const String& name = String::Handle(zone(), ic_data.target_name()); 1284 const String& name = String::Handle(zone(), ic_data.target_name());
1285 const Array& arguments_descriptor = 1285 const Array& arguments_descriptor =
1286 Array::ZoneHandle(zone(), ic_data.arguments_descriptor()); 1286 Array::ZoneHandle(zone(), ic_data.arguments_descriptor());
1287 ASSERT(!arguments_descriptor.IsNull() && (arguments_descriptor.Length() > 0)); 1287 ASSERT(!arguments_descriptor.IsNull() && (arguments_descriptor.Length() > 0));
1288 const MegamorphicCache& cache = MegamorphicCache::ZoneHandle(zone(), 1288 const MegamorphicCache& cache = MegamorphicCache::ZoneHandle(zone(),
1289 MegamorphicCacheTable::Lookup(isolate(), name, arguments_descriptor)); 1289 MegamorphicCacheTable::Lookup(isolate(), name, arguments_descriptor));
1290 1290
1291 __ Comment("MegamorphicCall"); 1291 __ Comment("MegamorphicCall");
(...skipping 20 matching lines...)
1312 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); 1312 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos);
1313 } 1313 }
1314 __ Drop(argument_count); 1314 __ Drop(argument_count);
1315 } 1315 }
1316 1316
1317 1317
1318 void FlowGraphCompiler::EmitSwitchableInstanceCall( 1318 void FlowGraphCompiler::EmitSwitchableInstanceCall(
1319 const ICData& ic_data, 1319 const ICData& ic_data,
1320 intptr_t argument_count, 1320 intptr_t argument_count,
1321 intptr_t deopt_id, 1321 intptr_t deopt_id,
1322 intptr_t token_pos, 1322 TokenDescriptor token_pos,
1323 LocationSummary* locs) { 1323 LocationSummary* locs) {
1324 // Only generated with precompilation. 1324 // Only generated with precompilation.
1325 UNREACHABLE(); 1325 UNREACHABLE();
1326 } 1326 }
1327 1327
1328 1328
1329 void FlowGraphCompiler::EmitOptimizedStaticCall( 1329 void FlowGraphCompiler::EmitOptimizedStaticCall(
1330 const Function& function, 1330 const Function& function,
1331 const Array& arguments_descriptor, 1331 const Array& arguments_descriptor,
1332 intptr_t argument_count, 1332 intptr_t argument_count,
1333 intptr_t deopt_id, 1333 intptr_t deopt_id,
1334 intptr_t token_pos, 1334 TokenDescriptor token_pos,
1335 LocationSummary* locs) { 1335 LocationSummary* locs) {
1336 __ LoadObject(EDX, arguments_descriptor); 1336 __ LoadObject(EDX, arguments_descriptor);
1337 // Do not use the code from the function, but let the code be patched so that 1337 // Do not use the code from the function, but let the code be patched so that
1338 // we can record the outgoing edges to other code. 1338 // we can record the outgoing edges to other code.
1339 GenerateDartCall(deopt_id, 1339 GenerateDartCall(deopt_id,
1340 token_pos, 1340 token_pos,
1341 *StubCode::CallStaticFunction_entry(), 1341 *StubCode::CallStaticFunction_entry(),
1342 RawPcDescriptors::kOther, 1342 RawPcDescriptors::kOther,
1343 locs); 1343 locs);
1344 AddStaticCallTarget(function); 1344 AddStaticCallTarget(function);
1345 __ Drop(argument_count); 1345 __ Drop(argument_count);
1346 } 1346 }
1347 1347
1348 1348
1349 Condition FlowGraphCompiler::EmitEqualityRegConstCompare( 1349 Condition FlowGraphCompiler::EmitEqualityRegConstCompare(
1350 Register reg, 1350 Register reg,
1351 const Object& obj, 1351 const Object& obj,
1352 bool needs_number_check, 1352 bool needs_number_check,
1353 intptr_t token_pos) { 1353 TokenDescriptor token_pos) {
1354 ASSERT(!needs_number_check || 1354 ASSERT(!needs_number_check ||
1355 (!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint())); 1355 (!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint()));
1356 1356
1357 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) { 1357 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) {
1358 ASSERT(!needs_number_check); 1358 ASSERT(!needs_number_check);
1359 __ testl(reg, reg); 1359 __ testl(reg, reg);
1360 return EQUAL; 1360 return EQUAL;
1361 } 1361 }
1362 1362
1363 if (needs_number_check) { 1363 if (needs_number_check) {
1364 __ pushl(reg); 1364 __ pushl(reg);
1365 __ PushObject(obj); 1365 __ PushObject(obj);
1366 if (is_optimizing()) { 1366 if (is_optimizing()) {
1367 __ Call(*StubCode::OptimizedIdenticalWithNumberCheck_entry()); 1367 __ Call(*StubCode::OptimizedIdenticalWithNumberCheck_entry());
1368 } else { 1368 } else {
1369 __ Call(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry()); 1369 __ Call(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry());
1370 } 1370 }
1371 if (token_pos >= 0) { 1371 if (token_pos.IsReal()) {
1372 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, 1372 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
1373 Thread::kNoDeoptId, 1373 Thread::kNoDeoptId,
1374 token_pos); 1374 token_pos);
1375 } 1375 }
1376 // Stub returns result in flags (result of a cmpl, we need ZF computed). 1376 // Stub returns result in flags (result of a cmpl, we need ZF computed).
1377 __ popl(reg); // Discard constant. 1377 __ popl(reg); // Discard constant.
1378 __ popl(reg); // Restore 'reg'. 1378 __ popl(reg); // Restore 'reg'.
1379 } else { 1379 } else {
1380 __ CompareObject(reg, obj); 1380 __ CompareObject(reg, obj);
1381 } 1381 }
1382 return EQUAL; 1382 return EQUAL;
1383 } 1383 }
1384 1384
1385 1385
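
The same pattern shows up in the two identical-value comparison helpers: in EmitEqualityRegConstCompare above (and in EmitEqualityRegRegCompare in the next hunk), the guard for emitting a kRuntimeCall descriptor changes from the raw sentinel test token_pos >= 0 to token_pos.IsReal(). A minimal sketch of that call-site shape, again assuming the illustrative wrapper from the top of the page; AddRuntimeCallDescriptor is a hypothetical stand-in for the real AddCurrentDescriptor call:

// Hypothetical recording hook, present only so the sketch is self-contained.
static void AddRuntimeCallDescriptor(TokenDescriptor token_pos) {
  (void)token_pos;  // The real code records a RawPcDescriptors::kRuntimeCall entry.
}

void MaybeRecordRuntimeCallDescriptor(TokenDescriptor token_pos) {
  // Old style:  if (raw_token_pos >= 0) { ... }
  // New style:  sentinel positions (negative values) are filtered by name.
  if (token_pos.IsReal()) {
    AddRuntimeCallDescriptor(token_pos);
  }
}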
1386 Condition FlowGraphCompiler::EmitEqualityRegRegCompare(Register left, 1386 Condition FlowGraphCompiler::EmitEqualityRegRegCompare(
1387 Register right, 1387 Register left,
1388 bool needs_number_check, 1388 Register right,
1389 intptr_t token_pos) { 1389 bool needs_number_check,
1390 TokenDescriptor token_pos) {
1390 if (needs_number_check) { 1391 if (needs_number_check) {
1391 __ pushl(left); 1392 __ pushl(left);
1392 __ pushl(right); 1393 __ pushl(right);
1393 if (is_optimizing()) { 1394 if (is_optimizing()) {
1394 __ Call(*StubCode::OptimizedIdenticalWithNumberCheck_entry()); 1395 __ Call(*StubCode::OptimizedIdenticalWithNumberCheck_entry());
1395 } else { 1396 } else {
1396 __ Call(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry()); 1397 __ Call(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry());
1397 } 1398 }
1398 if (token_pos >= 0) { 1399 if (token_pos.IsReal()) {
1399 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, 1400 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
1400 Thread::kNoDeoptId, 1401 Thread::kNoDeoptId,
1401 token_pos); 1402 token_pos);
1402 } 1403 }
1403 // Stub returns result in flags (result of a cmpl, we need ZF computed). 1404 // Stub returns result in flags (result of a cmpl, we need ZF computed).
1404 __ popl(right); 1405 __ popl(right);
1405 __ popl(left); 1406 __ popl(left);
1406 } else { 1407 } else {
1407 __ cmpl(left, right); 1408 __ cmpl(left, right);
1408 } 1409 }
(...skipping 76 matching lines...)
1485 } 1486 }
1486 #endif 1487 #endif
1487 1488
1488 1489
1489 void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data, 1490 void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
1490 intptr_t argument_count, 1491 intptr_t argument_count,
1491 const Array& argument_names, 1492 const Array& argument_names,
1492 Label* failed, 1493 Label* failed,
1493 Label* match_found, 1494 Label* match_found,
1494 intptr_t deopt_id, 1495 intptr_t deopt_id,
1495 intptr_t token_index, 1496 TokenDescriptor token_index,
1496 LocationSummary* locs) { 1497 LocationSummary* locs) {
1497 ASSERT(is_optimizing()); 1498 ASSERT(is_optimizing());
1498 __ Comment("EmitTestAndCall"); 1499 __ Comment("EmitTestAndCall");
1499 const Array& arguments_descriptor = 1500 const Array& arguments_descriptor =
1500 Array::ZoneHandle(zone(), ArgumentsDescriptor::New(argument_count, 1501 Array::ZoneHandle(zone(), ArgumentsDescriptor::New(argument_count,
1501 argument_names)); 1502 argument_names));
1502 // Load receiver into EAX. 1503 // Load receiver into EAX.
1503 __ movl(EAX, Address(ESP, (argument_count - 1) * kWordSize)); 1504 __ movl(EAX, Address(ESP, (argument_count - 1) * kWordSize));
1504 __ LoadObject(EDX, arguments_descriptor); 1505 __ LoadObject(EDX, arguments_descriptor);
1505 1506
(...skipping 332 matching lines...)
1838 __ movups(reg, Address(ESP, 0)); 1839 __ movups(reg, Address(ESP, 0));
1839 __ addl(ESP, Immediate(kFpuRegisterSize)); 1840 __ addl(ESP, Immediate(kFpuRegisterSize));
1840 } 1841 }
1841 1842
1842 1843
1843 #undef __ 1844 #undef __
1844 1845
1845 } // namespace dart 1846 } // namespace dart
1846 1847
1847 #endif // defined TARGET_ARCH_IA32 1848 #endif // defined TARGET_ARCH_IA32
