OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_IA32. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_IA32. |
6 #if defined(TARGET_ARCH_IA32) | 6 #if defined(TARGET_ARCH_IA32) |
7 | 7 |
8 #include "vm/flow_graph_compiler.h" | 8 #include "vm/flow_graph_compiler.h" |
9 | 9 |
10 #include "vm/ast_printer.h" | 10 #include "vm/ast_printer.h" |
(...skipping 12 matching lines...) Expand all Loading... |
23 #include "vm/stub_code.h" | 23 #include "vm/stub_code.h" |
24 #include "vm/symbols.h" | 24 #include "vm/symbols.h" |
25 | 25 |
26 namespace dart { | 26 namespace dart { |
27 | 27 |
28 DEFINE_FLAG(bool, trap_on_deoptimization, false, "Trap on deoptimization."); | 28 DEFINE_FLAG(bool, trap_on_deoptimization, false, "Trap on deoptimization."); |
29 DEFINE_FLAG(bool, unbox_mints, true, "Optimize 64-bit integer arithmetic."); | 29 DEFINE_FLAG(bool, unbox_mints, true, "Optimize 64-bit integer arithmetic."); |
30 | 30 |
31 DECLARE_FLAG(bool, enable_simd_inline); | 31 DECLARE_FLAG(bool, enable_simd_inline); |
32 | 32 |
33 | |
34 FlowGraphCompiler::~FlowGraphCompiler() { | 33 FlowGraphCompiler::~FlowGraphCompiler() { |
35 // BlockInfos are zone-allocated, so their destructors are not called. | 34 // BlockInfos are zone-allocated, so their destructors are not called. |
36 // Verify the labels explicitly here. | 35 // Verify the labels explicitly here. |
37 for (int i = 0; i < block_info_.length(); ++i) { | 36 for (int i = 0; i < block_info_.length(); ++i) { |
38 ASSERT(!block_info_[i]->jump_label()->IsLinked()); | 37 ASSERT(!block_info_[i]->jump_label()->IsLinked()); |
39 ASSERT(!block_info_[i]->jump_label()->HasNear()); | 38 ASSERT(!block_info_[i]->jump_label()->HasNear()); |
40 } | 39 } |
41 } | 40 } |
42 | 41 |
43 | |
44 bool FlowGraphCompiler::SupportsUnboxedDoubles() { | 42 bool FlowGraphCompiler::SupportsUnboxedDoubles() { |
45 return true; | 43 return true; |
46 } | 44 } |
47 | 45 |
48 | |
49 bool FlowGraphCompiler::SupportsUnboxedMints() { | 46 bool FlowGraphCompiler::SupportsUnboxedMints() { |
50 return FLAG_unbox_mints; | 47 return FLAG_unbox_mints; |
51 } | 48 } |
52 | 49 |
53 | |
54 bool FlowGraphCompiler::SupportsUnboxedSimd128() { | 50 bool FlowGraphCompiler::SupportsUnboxedSimd128() { |
55 return FLAG_enable_simd_inline; | 51 return FLAG_enable_simd_inline; |
56 } | 52 } |
57 | 53 |
58 | |
59 bool FlowGraphCompiler::SupportsHardwareDivision() { | 54 bool FlowGraphCompiler::SupportsHardwareDivision() { |
60 return true; | 55 return true; |
61 } | 56 } |
62 | 57 |
63 | |
64 bool FlowGraphCompiler::CanConvertUnboxedMintToDouble() { | 58 bool FlowGraphCompiler::CanConvertUnboxedMintToDouble() { |
65 return true; | 59 return true; |
66 } | 60 } |
67 | 61 |
68 | |
69 void FlowGraphCompiler::EnterIntrinsicMode() { | 62 void FlowGraphCompiler::EnterIntrinsicMode() { |
70 ASSERT(!intrinsic_mode()); | 63 ASSERT(!intrinsic_mode()); |
71 intrinsic_mode_ = true; | 64 intrinsic_mode_ = true; |
72 } | 65 } |
73 | 66 |
74 | |
75 void FlowGraphCompiler::ExitIntrinsicMode() { | 67 void FlowGraphCompiler::ExitIntrinsicMode() { |
76 ASSERT(intrinsic_mode()); | 68 ASSERT(intrinsic_mode()); |
77 intrinsic_mode_ = false; | 69 intrinsic_mode_ = false; |
78 } | 70 } |
79 | 71 |
80 | |
81 RawTypedData* CompilerDeoptInfo::CreateDeoptInfo(FlowGraphCompiler* compiler, | 72 RawTypedData* CompilerDeoptInfo::CreateDeoptInfo(FlowGraphCompiler* compiler, |
82 DeoptInfoBuilder* builder, | 73 DeoptInfoBuilder* builder, |
83 const Array& deopt_table) { | 74 const Array& deopt_table) { |
84 if (deopt_env_ == NULL) { | 75 if (deopt_env_ == NULL) { |
85 ++builder->current_info_number_; | 76 ++builder->current_info_number_; |
86 return TypedData::null(); | 77 return TypedData::null(); |
87 } | 78 } |
88 | 79 |
89 intptr_t stack_height = compiler->StackSize(); | 80 intptr_t stack_height = compiler->StackSize(); |
90 AllocateIncomingParametersRecursive(deopt_env_, &stack_height); | 81 AllocateIncomingParametersRecursive(deopt_env_, &stack_height); |
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
155 builder->AddCallerPc(slot_ix++); | 146 builder->AddCallerPc(slot_ix++); |
156 | 147 |
157 // For the outermost environment, set the incoming arguments. | 148 // For the outermost environment, set the incoming arguments. |
158 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { | 149 for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { |
159 builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i), slot_ix++); | 150 builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i), slot_ix++); |
160 } | 151 } |
161 | 152 |
162 return builder->CreateDeoptInfo(deopt_table); | 153 return builder->CreateDeoptInfo(deopt_table); |
163 } | 154 } |
164 | 155 |
165 | |
166 void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler, | 156 void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler, |
167 intptr_t stub_ix) { | 157 intptr_t stub_ix) { |
168 // Calls do not need stubs, they share a deoptimization trampoline. | 158 // Calls do not need stubs, they share a deoptimization trampoline. |
169 ASSERT(reason() != ICData::kDeoptAtCall); | 159 ASSERT(reason() != ICData::kDeoptAtCall); |
170 Assembler* assembler = compiler->assembler(); | 160 Assembler* assembler = compiler->assembler(); |
171 #define __ assembler-> | 161 #define __ assembler-> |
172 __ Comment("%s", Name()); | 162 __ Comment("%s", Name()); |
173 __ Bind(entry_label()); | 163 __ Bind(entry_label()); |
174 if (FLAG_trap_on_deoptimization) { | 164 if (FLAG_trap_on_deoptimization) { |
175 __ int3(); | 165 __ int3(); |
176 } | 166 } |
177 | 167 |
178 ASSERT(deopt_env() != NULL); | 168 ASSERT(deopt_env() != NULL); |
179 __ pushl(CODE_REG); | 169 __ pushl(CODE_REG); |
180 __ Call(*StubCode::Deoptimize_entry()); | 170 __ Call(*StubCode::Deoptimize_entry()); |
181 set_pc_offset(assembler->CodeSize()); | 171 set_pc_offset(assembler->CodeSize()); |
182 __ int3(); | 172 __ int3(); |
183 #undef __ | 173 #undef __ |
184 } | 174 } |
185 | 175 |
186 | |
187 #define __ assembler()-> | 176 #define __ assembler()-> |
188 | 177 |
189 | |
190 // Fall through if bool_register contains null. | 178 // Fall through if bool_register contains null. |
191 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register, | 179 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register, |
192 Label* is_true, | 180 Label* is_true, |
193 Label* is_false) { | 181 Label* is_false) { |
194 const Immediate& raw_null = | 182 const Immediate& raw_null = |
195 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 183 Immediate(reinterpret_cast<intptr_t>(Object::null())); |
196 Label fall_through; | 184 Label fall_through; |
197 __ cmpl(bool_register, raw_null); | 185 __ cmpl(bool_register, raw_null); |
198 __ j(EQUAL, &fall_through, Assembler::kNearJump); | 186 __ j(EQUAL, &fall_through, Assembler::kNearJump); |
199 __ CompareObject(bool_register, Bool::True()); | 187 __ CompareObject(bool_register, Bool::True()); |
200 __ j(EQUAL, is_true); | 188 __ j(EQUAL, is_true); |
201 __ jmp(is_false); | 189 __ jmp(is_false); |
202 __ Bind(&fall_through); | 190 __ Bind(&fall_through); |
203 } | 191 } |
204 | 192 |
205 | |
206 // Clobbers ECX. | 193 // Clobbers ECX. |
207 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub( | 194 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub( |
208 TypeTestStubKind test_kind, | 195 TypeTestStubKind test_kind, |
209 Register instance_reg, | 196 Register instance_reg, |
210 Register instantiator_type_arguments_reg, | 197 Register instantiator_type_arguments_reg, |
211 Register function_type_arguments_reg, | 198 Register function_type_arguments_reg, |
212 Register temp_reg, | 199 Register temp_reg, |
213 Label* is_instance_lbl, | 200 Label* is_instance_lbl, |
214 Label* is_not_instance_lbl) { | 201 Label* is_not_instance_lbl) { |
215 const SubtypeTestCache& type_test_cache = | 202 const SubtypeTestCache& type_test_cache = |
(...skipping 25 matching lines...) Expand all Loading... |
241 // Result is in ECX: null -> not found, otherwise Bool::True or Bool::False. | 228 // Result is in ECX: null -> not found, otherwise Bool::True or Bool::False. |
242 ASSERT(instance_reg != ECX); | 229 ASSERT(instance_reg != ECX); |
243 ASSERT(temp_reg != ECX); | 230 ASSERT(temp_reg != ECX); |
244 __ Drop(2); | 231 __ Drop(2); |
245 __ popl(instance_reg); // Restore receiver. | 232 __ popl(instance_reg); // Restore receiver. |
246 __ popl(temp_reg); // Discard. | 233 __ popl(temp_reg); // Discard. |
247 GenerateBoolToJump(ECX, is_instance_lbl, is_not_instance_lbl); | 234 GenerateBoolToJump(ECX, is_instance_lbl, is_not_instance_lbl); |
248 return type_test_cache.raw(); | 235 return type_test_cache.raw(); |
249 } | 236 } |
250 | 237 |
251 | |
252 // Jumps to labels 'is_instance' or 'is_not_instance' respectively, if | 238 // Jumps to labels 'is_instance' or 'is_not_instance' respectively, if |
253 // type test is conclusive, otherwise fallthrough if a type test could not | 239 // type test is conclusive, otherwise fallthrough if a type test could not |
254 // be completed. | 240 // be completed. |
255 // EAX: instance (must survive). | 241 // EAX: instance (must survive). |
256 // Clobbers ECX, EDI. | 242 // Clobbers ECX, EDI. |
257 RawSubtypeTestCache* | 243 RawSubtypeTestCache* |
258 FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest( | 244 FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest( |
259 TokenPosition token_pos, | 245 TokenPosition token_pos, |
260 const AbstractType& type, | 246 const AbstractType& type, |
261 Label* is_instance_lbl, | 247 Label* is_instance_lbl, |
(...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
319 // Regular subtype test cache involving instance's type arguments. | 305 // Regular subtype test cache involving instance's type arguments. |
320 const Register kInstantiatorTypeArgumentsReg = kNoRegister; | 306 const Register kInstantiatorTypeArgumentsReg = kNoRegister; |
321 const Register kFunctionTypeArgumentsReg = kNoRegister; | 307 const Register kFunctionTypeArgumentsReg = kNoRegister; |
322 const Register kTempReg = EDI; | 308 const Register kTempReg = EDI; |
323 return GenerateCallSubtypeTestStub(kTestTypeTwoArgs, kInstanceReg, | 309 return GenerateCallSubtypeTestStub(kTestTypeTwoArgs, kInstanceReg, |
324 kInstantiatorTypeArgumentsReg, | 310 kInstantiatorTypeArgumentsReg, |
325 kFunctionTypeArgumentsReg, kTempReg, | 311 kFunctionTypeArgumentsReg, kTempReg, |
326 is_instance_lbl, is_not_instance_lbl); | 312 is_instance_lbl, is_not_instance_lbl); |
327 } | 313 } |
328 | 314 |
329 | |
330 void FlowGraphCompiler::CheckClassIds(Register class_id_reg, | 315 void FlowGraphCompiler::CheckClassIds(Register class_id_reg, |
331 const GrowableArray<intptr_t>& class_ids, | 316 const GrowableArray<intptr_t>& class_ids, |
332 Label* is_equal_lbl, | 317 Label* is_equal_lbl, |
333 Label* is_not_equal_lbl) { | 318 Label* is_not_equal_lbl) { |
334 for (intptr_t i = 0; i < class_ids.length(); i++) { | 319 for (intptr_t i = 0; i < class_ids.length(); i++) { |
335 __ cmpl(class_id_reg, Immediate(class_ids[i])); | 320 __ cmpl(class_id_reg, Immediate(class_ids[i])); |
336 __ j(EQUAL, is_equal_lbl); | 321 __ j(EQUAL, is_equal_lbl); |
337 } | 322 } |
338 __ jmp(is_not_equal_lbl); | 323 __ jmp(is_not_equal_lbl); |
339 } | 324 } |
340 | 325 |
341 | |
342 // Testing against an instantiated type with no arguments, without | 326 // Testing against an instantiated type with no arguments, without |
343 // SubtypeTestCache. | 327 // SubtypeTestCache. |
344 // EAX: instance to test against (preserved). | 328 // EAX: instance to test against (preserved). |
345 // Clobbers ECX, EDI. | 329 // Clobbers ECX, EDI. |
346 // Returns true if there is a fallthrough. | 330 // Returns true if there is a fallthrough. |
347 bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest( | 331 bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest( |
348 TokenPosition token_pos, | 332 TokenPosition token_pos, |
349 const AbstractType& type, | 333 const AbstractType& type, |
350 Label* is_instance_lbl, | 334 Label* is_instance_lbl, |
351 Label* is_not_instance_lbl) { | 335 Label* is_not_instance_lbl) { |
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
399 } | 383 } |
400 // Compare if the classes are equal. | 384 // Compare if the classes are equal. |
401 if (!type_class.is_abstract()) { | 385 if (!type_class.is_abstract()) { |
402 __ cmpl(kClassIdReg, Immediate(type_class.id())); | 386 __ cmpl(kClassIdReg, Immediate(type_class.id())); |
403 __ j(EQUAL, is_instance_lbl); | 387 __ j(EQUAL, is_instance_lbl); |
404 } | 388 } |
405 // Otherwise fallthrough. | 389 // Otherwise fallthrough. |
406 return true; | 390 return true; |
407 } | 391 } |
408 | 392 |
409 | |
410 // Uses SubtypeTestCache to store instance class and result. | 393 // Uses SubtypeTestCache to store instance class and result. |
411 // EAX: instance to test. | 394 // EAX: instance to test. |
412 // Clobbers EDI, ECX. | 395 // Clobbers EDI, ECX. |
413 // Immediate class test already done. | 396 // Immediate class test already done. |
414 // TODO(srdjan): Implement a quicker subtype check, as type test | 397 // TODO(srdjan): Implement a quicker subtype check, as type test |
415 // arrays can grow too high, but they may be useful when optimizing | 398 // arrays can grow too high, but they may be useful when optimizing |
416 // code (type-feedback). | 399 // code (type-feedback). |
417 RawSubtypeTestCache* FlowGraphCompiler::GenerateSubtype1TestCacheLookup( | 400 RawSubtypeTestCache* FlowGraphCompiler::GenerateSubtype1TestCacheLookup( |
418 TokenPosition token_pos, | 401 TokenPosition token_pos, |
419 const Class& type_class, | 402 const Class& type_class, |
(...skipping 11 matching lines...) Expand all Loading... |
431 | 414 |
432 const Register kInstantiatorTypeArgumentsReg = kNoRegister; | 415 const Register kInstantiatorTypeArgumentsReg = kNoRegister; |
433 const Register kFunctionTypeArgumentsReg = kNoRegister; | 416 const Register kFunctionTypeArgumentsReg = kNoRegister; |
434 const Register kTempReg = EDI; | 417 const Register kTempReg = EDI; |
435 return GenerateCallSubtypeTestStub(kTestTypeOneArg, kInstanceReg, | 418 return GenerateCallSubtypeTestStub(kTestTypeOneArg, kInstanceReg, |
436 kInstantiatorTypeArgumentsReg, | 419 kInstantiatorTypeArgumentsReg, |
437 kFunctionTypeArgumentsReg, kTempReg, | 420 kFunctionTypeArgumentsReg, kTempReg, |
438 is_instance_lbl, is_not_instance_lbl); | 421 is_instance_lbl, is_not_instance_lbl); |
439 } | 422 } |
440 | 423 |
441 | |
442 // Generates inlined check if 'type' is a type parameter or type itself | 424 // Generates inlined check if 'type' is a type parameter or type itself |
443 // EAX: instance (preserved). | 425 // EAX: instance (preserved). |
444 // Clobbers EDX, EDI, ECX. | 426 // Clobbers EDX, EDI, ECX. |
445 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest( | 427 RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest( |
446 TokenPosition token_pos, | 428 TokenPosition token_pos, |
447 const AbstractType& type, | 429 const AbstractType& type, |
448 Label* is_instance_lbl, | 430 Label* is_instance_lbl, |
449 Label* is_not_instance_lbl) { | 431 Label* is_not_instance_lbl) { |
450 __ Comment("UninstantiatedTypeTest"); | 432 __ Comment("UninstantiatedTypeTest"); |
451 ASSERT(!type.IsInstantiated()); | 433 ASSERT(!type.IsInstantiated()); |
(...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
513 // arguments are determined at runtime by the instantiator(s). | 495 // arguments are determined at runtime by the instantiator(s). |
514 const Register kTempReg = EDI; | 496 const Register kTempReg = EDI; |
515 return GenerateCallSubtypeTestStub(kTestTypeFourArgs, kInstanceReg, | 497 return GenerateCallSubtypeTestStub(kTestTypeFourArgs, kInstanceReg, |
516 kInstantiatorTypeArgumentsReg, | 498 kInstantiatorTypeArgumentsReg, |
517 kFunctionTypeArgumentsReg, kTempReg, | 499 kFunctionTypeArgumentsReg, kTempReg, |
518 is_instance_lbl, is_not_instance_lbl); | 500 is_instance_lbl, is_not_instance_lbl); |
519 } | 501 } |
520 return SubtypeTestCache::null(); | 502 return SubtypeTestCache::null(); |
521 } | 503 } |
522 | 504 |
523 | |
524 // Inputs: | 505 // Inputs: |
525 // - EAX: instance to test against (preserved). | 506 // - EAX: instance to test against (preserved). |
526 // - EDX: optional instantiator type arguments (preserved). | 507 // - EDX: optional instantiator type arguments (preserved). |
527 // - ECX: optional function type arguments (preserved). | 508 // - ECX: optional function type arguments (preserved). |
528 // Clobbers EDI. | 509 // Clobbers EDI. |
529 // Returns: | 510 // Returns: |
530 // - preserved instance in EAX, optional instantiator type arguments in EDX, and | 511 // - preserved instance in EAX, optional instantiator type arguments in EDX, and |
531 // optional function type arguments in RCX. | 512 // optional function type arguments in RCX. |
532 // Note that this inlined code must be followed by the runtime_call code, as it | 513 // Note that this inlined code must be followed by the runtime_call code, as it |
533 // may fall through to it. Otherwise, this inline code will jump to the label | 514 // may fall through to it. Otherwise, this inline code will jump to the label |
(...skipping 22 matching lines...) Expand all Loading... |
556 return GenerateSubtype1TestCacheLookup( | 537 return GenerateSubtype1TestCacheLookup( |
557 token_pos, type_class, is_instance_lbl, is_not_instance_lbl); | 538 token_pos, type_class, is_instance_lbl, is_not_instance_lbl); |
558 } else { | 539 } else { |
559 return SubtypeTestCache::null(); | 540 return SubtypeTestCache::null(); |
560 } | 541 } |
561 } | 542 } |
562 return GenerateUninstantiatedTypeTest(token_pos, type, is_instance_lbl, | 543 return GenerateUninstantiatedTypeTest(token_pos, type, is_instance_lbl, |
563 is_not_instance_lbl); | 544 is_not_instance_lbl); |
564 } | 545 } |
565 | 546 |
566 | |
567 // If instanceof type test cannot be performed successfully at compile time and | 547 // If instanceof type test cannot be performed successfully at compile time and |
568 // therefore eliminated, optimize it by adding inlined tests for: | 548 // therefore eliminated, optimize it by adding inlined tests for: |
569 // - NULL -> return type == Null (type is not Object or dynamic). | 549 // - NULL -> return type == Null (type is not Object or dynamic). |
570 // - Smi -> compile time subtype check (only if dst class is not parameterized). | 550 // - Smi -> compile time subtype check (only if dst class is not parameterized). |
571 // - Class equality (only if class is not parameterized). | 551 // - Class equality (only if class is not parameterized). |
572 // Inputs: | 552 // Inputs: |
573 // - EAX: object. | 553 // - EAX: object. |
574 // - EDX: instantiator type arguments or raw_null. | 554 // - EDX: instantiator type arguments or raw_null. |
575 // - ECX: function type arguments or raw_null. | 555 // - ECX: function type arguments or raw_null. |
576 // Returns: | 556 // Returns: |
(...skipping 29 matching lines...) Expand all Loading... |
606 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); | 586 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); |
607 test_cache = | 587 test_cache = |
608 GenerateInlineInstanceof(token_pos, type, &is_instance, &is_not_instance); | 588 GenerateInlineInstanceof(token_pos, type, &is_instance, &is_not_instance); |
609 | 589 |
610 // test_cache is null if there is no fall-through. | 590 // test_cache is null if there is no fall-through. |
611 Label done; | 591 Label done; |
612 if (!test_cache.IsNull()) { | 592 if (!test_cache.IsNull()) { |
613 // Generate runtime call. | 593 // Generate runtime call. |
614 __ movl(EDX, Address(ESP, 1 * kWordSize)); // Get instantiator type args. | 594 __ movl(EDX, Address(ESP, 1 * kWordSize)); // Get instantiator type args. |
615 __ movl(ECX, Address(ESP, 0 * kWordSize)); // Get function type args. | 595 __ movl(ECX, Address(ESP, 0 * kWordSize)); // Get function type args. |
616 __ PushObject(Object::null_object()); // Make room for the result. | 596 __ PushObject(Object::null_object()); // Make room for the result. |
617 __ pushl(EAX); // Push the instance. | 597 __ pushl(EAX); // Push the instance. |
618 __ PushObject(type); // Push the type. | 598 __ PushObject(type); // Push the type. |
619 __ pushl(EDX); // Instantiator type arguments. | 599 __ pushl(EDX); // Instantiator type arguments. |
620 __ pushl(ECX); // Function type arguments. | 600 __ pushl(ECX); // Function type arguments. |
621 __ LoadObject(EAX, test_cache); | 601 __ LoadObject(EAX, test_cache); |
622 __ pushl(EAX); | 602 __ pushl(EAX); |
623 GenerateRuntimeCall(token_pos, deopt_id, kInstanceofRuntimeEntry, 5, locs); | 603 GenerateRuntimeCall(token_pos, deopt_id, kInstanceofRuntimeEntry, 5, locs); |
624 // Pop the parameters supplied to the runtime entry. The result of the | 604 // Pop the parameters supplied to the runtime entry. The result of the |
625 // instanceof runtime call will be left as the result of the operation. | 605 // instanceof runtime call will be left as the result of the operation. |
626 __ Drop(5); | 606 __ Drop(5); |
627 __ popl(EAX); | 607 __ popl(EAX); |
628 __ jmp(&done, Assembler::kNearJump); | 608 __ jmp(&done, Assembler::kNearJump); |
629 } | 609 } |
630 __ Bind(&is_not_instance); | 610 __ Bind(&is_not_instance); |
631 __ LoadObject(EAX, Bool::Get(false)); | 611 __ LoadObject(EAX, Bool::Get(false)); |
632 __ jmp(&done, Assembler::kNearJump); | 612 __ jmp(&done, Assembler::kNearJump); |
633 | 613 |
634 __ Bind(&is_instance); | 614 __ Bind(&is_instance); |
635 __ LoadObject(EAX, Bool::Get(true)); | 615 __ LoadObject(EAX, Bool::Get(true)); |
636 __ Bind(&done); | 616 __ Bind(&done); |
637 __ popl(ECX); // Remove pushed function type arguments. | 617 __ popl(ECX); // Remove pushed function type arguments. |
638 __ popl(EDX); // Remove pushed instantiator type arguments. | 618 __ popl(EDX); // Remove pushed instantiator type arguments. |
639 } | 619 } |
640 | 620 |
641 | |
642 // Optimize assignable type check by adding inlined tests for: | 621 // Optimize assignable type check by adding inlined tests for: |
643 // - NULL -> return NULL. | 622 // - NULL -> return NULL. |
644 // - Smi -> compile time subtype check (only if dst class is not parameterized). | 623 // - Smi -> compile time subtype check (only if dst class is not parameterized). |
645 // - Class equality (only if class is not parameterized). | 624 // - Class equality (only if class is not parameterized). |
646 // Inputs: | 625 // Inputs: |
647 // - EAX: object. | 626 // - EAX: object. |
648 // - EDX: instantiator type arguments or raw_null. | 627 // - EDX: instantiator type arguments or raw_null. |
649 // - ECX: function type arguments or raw_null. | 628 // - ECX: function type arguments or raw_null. |
650 // Returns: | 629 // Returns: |
651 // - object in EAX for successful assignable check (or throws TypeError). | 630 // - object in EAX for successful assignable check (or throws TypeError). |
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
690 } | 669 } |
691 | 670 |
692 // Generate inline type check, linking to runtime call if not assignable. | 671 // Generate inline type check, linking to runtime call if not assignable. |
693 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); | 672 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); |
694 test_cache = GenerateInlineInstanceof(token_pos, dst_type, &is_assignable, | 673 test_cache = GenerateInlineInstanceof(token_pos, dst_type, &is_assignable, |
695 &runtime_call); | 674 &runtime_call); |
696 | 675 |
697 __ Bind(&runtime_call); | 676 __ Bind(&runtime_call); |
698 __ movl(EDX, Address(ESP, 1 * kWordSize)); // Get instantiator type args. | 677 __ movl(EDX, Address(ESP, 1 * kWordSize)); // Get instantiator type args. |
699 __ movl(ECX, Address(ESP, 0 * kWordSize)); // Get function type args. | 678 __ movl(ECX, Address(ESP, 0 * kWordSize)); // Get function type args. |
700 __ PushObject(Object::null_object()); // Make room for the result. | 679 __ PushObject(Object::null_object()); // Make room for the result. |
701 __ pushl(EAX); // Push the source object. | 680 __ pushl(EAX); // Push the source object. |
702 __ PushObject(dst_type); // Push the type of the destination. | 681 __ PushObject(dst_type); // Push the type of the destination. |
703 __ pushl(EDX); // Instantiator type arguments. | 682 __ pushl(EDX); // Instantiator type arguments. |
704 __ pushl(ECX); // Function type arguments. | 683 __ pushl(ECX); // Function type arguments. |
705 __ PushObject(dst_name); // Push the name of the destination. | 684 __ PushObject(dst_name); // Push the name of the destination. |
706 __ LoadObject(EAX, test_cache); | 685 __ LoadObject(EAX, test_cache); |
707 __ pushl(EAX); | 686 __ pushl(EAX); |
708 GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs); | 687 GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs); |
709 // Pop the parameters supplied to the runtime entry. The result of the | 688 // Pop the parameters supplied to the runtime entry. The result of the |
710 // type check runtime call is the checked value. | 689 // type check runtime call is the checked value. |
711 __ Drop(6); | 690 __ Drop(6); |
712 __ popl(EAX); | 691 __ popl(EAX); |
713 | 692 |
714 __ Bind(&is_assignable); | 693 __ Bind(&is_assignable); |
715 __ popl(ECX); // Remove pushed function type arguments. | 694 __ popl(ECX); // Remove pushed function type arguments. |
716 __ popl(EDX); // Remove pushed instantiator type arguments. | 695 __ popl(EDX); // Remove pushed instantiator type arguments. |
717 } | 696 } |
718 | 697 |
719 | |
720 void FlowGraphCompiler::EmitInstructionEpilogue(Instruction* instr) { | 698 void FlowGraphCompiler::EmitInstructionEpilogue(Instruction* instr) { |
721 if (is_optimizing()) { | 699 if (is_optimizing()) { |
722 return; | 700 return; |
723 } | 701 } |
724 Definition* defn = instr->AsDefinition(); | 702 Definition* defn = instr->AsDefinition(); |
725 if ((defn != NULL) && defn->HasTemp()) { | 703 if ((defn != NULL) && defn->HasTemp()) { |
726 Location value = defn->locs()->out(0); | 704 Location value = defn->locs()->out(0); |
727 if (value.IsRegister()) { | 705 if (value.IsRegister()) { |
728 __ pushl(value.reg()); | 706 __ pushl(value.reg()); |
729 } else if (value.IsConstant()) { | 707 } else if (value.IsConstant()) { |
730 __ PushObject(value.constant()); | 708 __ PushObject(value.constant()); |
731 } else { | 709 } else { |
732 ASSERT(value.IsStackSlot()); | 710 ASSERT(value.IsStackSlot()); |
733 __ pushl(value.ToStackSlotAddress()); | 711 __ pushl(value.ToStackSlotAddress()); |
734 } | 712 } |
735 } | 713 } |
736 } | 714 } |
737 | 715 |
738 | |
739 void FlowGraphCompiler::CopyParameters() { | 716 void FlowGraphCompiler::CopyParameters() { |
740 __ Comment("Copy parameters"); | 717 __ Comment("Copy parameters"); |
741 const Function& function = parsed_function().function(); | 718 const Function& function = parsed_function().function(); |
742 LocalScope* scope = parsed_function().node_sequence()->scope(); | 719 LocalScope* scope = parsed_function().node_sequence()->scope(); |
743 const int num_fixed_params = function.num_fixed_parameters(); | 720 const int num_fixed_params = function.num_fixed_parameters(); |
744 const int num_opt_pos_params = function.NumOptionalPositionalParameters(); | 721 const int num_opt_pos_params = function.NumOptionalPositionalParameters(); |
745 const int num_opt_named_params = function.NumOptionalNamedParameters(); | 722 const int num_opt_named_params = function.NumOptionalNamedParameters(); |
746 const int num_params = | 723 const int num_params = |
747 num_fixed_params + num_opt_pos_params + num_opt_named_params; | 724 num_fixed_params + num_opt_pos_params + num_opt_named_params; |
748 ASSERT(function.NumParameters() == num_params); | 725 ASSERT(function.NumParameters() == num_params); |
(...skipping 180 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
929 __ jmp(&null_args_loop_condition, Assembler::kNearJump); | 906 __ jmp(&null_args_loop_condition, Assembler::kNearJump); |
930 const Address original_argument_addr(EBP, ECX, TIMES_4, | 907 const Address original_argument_addr(EBP, ECX, TIMES_4, |
931 (kParamEndSlotFromFp + 1) * kWordSize); | 908 (kParamEndSlotFromFp + 1) * kWordSize); |
932 __ Bind(&null_args_loop); | 909 __ Bind(&null_args_loop); |
933 __ movl(original_argument_addr, raw_null); | 910 __ movl(original_argument_addr, raw_null); |
934 __ Bind(&null_args_loop_condition); | 911 __ Bind(&null_args_loop_condition); |
935 __ decl(ECX); | 912 __ decl(ECX); |
936 __ j(POSITIVE, &null_args_loop, Assembler::kNearJump); | 913 __ j(POSITIVE, &null_args_loop, Assembler::kNearJump); |
937 } | 914 } |
938 | 915 |
939 | |
940 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) { | 916 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) { |
941 // TOS: return address. | 917 // TOS: return address. |
942 // +1 : receiver. | 918 // +1 : receiver. |
943 // Sequence node has one return node, its input is load field node. | 919 // Sequence node has one return node, its input is load field node. |
944 __ Comment("Inlined Getter"); | 920 __ Comment("Inlined Getter"); |
945 __ movl(EAX, Address(ESP, 1 * kWordSize)); | 921 __ movl(EAX, Address(ESP, 1 * kWordSize)); |
946 __ movl(EAX, FieldAddress(EAX, offset)); | 922 __ movl(EAX, FieldAddress(EAX, offset)); |
947 __ ret(); | 923 __ ret(); |
948 } | 924 } |
949 | 925 |
950 | |
951 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) { | 926 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) { |
952 // TOS: return address. | 927 // TOS: return address. |
953 // +1 : value | 928 // +1 : value |
954 // +2 : receiver. | 929 // +2 : receiver. |
955 // Sequence node has one store node and one return NULL node. | 930 // Sequence node has one store node and one return NULL node. |
956 __ Comment("Inlined Setter"); | 931 __ Comment("Inlined Setter"); |
957 __ movl(EAX, Address(ESP, 2 * kWordSize)); // Receiver. | 932 __ movl(EAX, Address(ESP, 2 * kWordSize)); // Receiver. |
958 __ movl(EBX, Address(ESP, 1 * kWordSize)); // Value. | 933 __ movl(EBX, Address(ESP, 1 * kWordSize)); // Value. |
959 __ StoreIntoObject(EAX, FieldAddress(EAX, offset), EBX); | 934 __ StoreIntoObject(EAX, FieldAddress(EAX, offset), EBX); |
960 const Immediate& raw_null = | 935 const Immediate& raw_null = |
961 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 936 Immediate(reinterpret_cast<intptr_t>(Object::null())); |
962 __ movl(EAX, raw_null); | 937 __ movl(EAX, raw_null); |
963 __ ret(); | 938 __ ret(); |
964 } | 939 } |
965 | 940 |
966 | |
967 // NOTE: If the entry code shape changes, ReturnAddressLocator in profiler.cc | 941 // NOTE: If the entry code shape changes, ReturnAddressLocator in profiler.cc |
968 // needs to be updated to match. | 942 // needs to be updated to match. |
969 void FlowGraphCompiler::EmitFrameEntry() { | 943 void FlowGraphCompiler::EmitFrameEntry() { |
970 const Function& function = parsed_function().function(); | 944 const Function& function = parsed_function().function(); |
971 if (CanOptimizeFunction() && function.IsOptimizable() && | 945 if (CanOptimizeFunction() && function.IsOptimizable() && |
972 (!is_optimizing() || may_reoptimize())) { | 946 (!is_optimizing() || may_reoptimize())) { |
973 __ Comment("Invocation Count Check"); | 947 __ Comment("Invocation Count Check"); |
974 const Register function_reg = EBX; | 948 const Register function_reg = EBX; |
975 __ LoadObject(function_reg, function); | 949 __ LoadObject(function_reg, function); |
976 | 950 |
(...skipping 12 matching lines...) Expand all Loading... |
989 intptr_t extra_slots = StackSize() - flow_graph().num_stack_locals() - | 963 intptr_t extra_slots = StackSize() - flow_graph().num_stack_locals() - |
990 flow_graph().num_copied_params(); | 964 flow_graph().num_copied_params(); |
991 ASSERT(extra_slots >= 0); | 965 ASSERT(extra_slots >= 0); |
992 __ EnterOsrFrame(extra_slots * kWordSize); | 966 __ EnterOsrFrame(extra_slots * kWordSize); |
993 } else { | 967 } else { |
994 ASSERT(StackSize() >= 0); | 968 ASSERT(StackSize() >= 0); |
995 __ EnterDartFrame(StackSize() * kWordSize); | 969 __ EnterDartFrame(StackSize() * kWordSize); |
996 } | 970 } |
997 } | 971 } |
998 | 972 |
999 | |
1000 void FlowGraphCompiler::CompileGraph() { | 973 void FlowGraphCompiler::CompileGraph() { |
1001 InitCompiler(); | 974 InitCompiler(); |
1002 | 975 |
1003 if (TryIntrinsify()) { | 976 if (TryIntrinsify()) { |
1004 // Skip regular code generation. | 977 // Skip regular code generation. |
1005 return; | 978 return; |
1006 } | 979 } |
1007 | 980 |
1008 EmitFrameEntry(); | 981 EmitFrameEntry(); |
1009 | 982 |
(...skipping 115 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1125 // checked during resolution. | 1098 // checked during resolution. |
1126 | 1099 |
1127 EndCodeSourceRange(TokenPosition::kDartCodePrologue); | 1100 EndCodeSourceRange(TokenPosition::kDartCodePrologue); |
1128 ASSERT(!block_order().is_empty()); | 1101 ASSERT(!block_order().is_empty()); |
1129 VisitBlocks(); | 1102 VisitBlocks(); |
1130 | 1103 |
1131 __ int3(); | 1104 __ int3(); |
1132 GenerateDeferredCode(); | 1105 GenerateDeferredCode(); |
1133 } | 1106 } |
1134 | 1107 |
1135 | |
// Emits a call to 'stub_entry' and records call-site metadata (descriptor
// 'kind', token position, live locations). No deopt id is associated with
// this call site (Thread::kNoDeoptId).
void FlowGraphCompiler::GenerateCall(TokenPosition token_pos,
                                     const StubEntry& stub_entry,
                                     RawPcDescriptors::Kind kind,
                                     LocationSummary* locs) {
  __ Call(stub_entry);
  EmitCallsiteMetaData(token_pos, Thread::kNoDeoptId, kind, locs);
}
1143 | 1115 |
1144 | |
// Emits a call to 'stub_entry' with call-site metadata and registers the
// after-call deoptimization point derived from 'deopt_id'.
void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
                                         TokenPosition token_pos,
                                         const StubEntry& stub_entry,
                                         RawPcDescriptors::Kind kind,
                                         LocationSummary* locs) {
  __ Call(stub_entry);
  EmitCallsiteMetaData(token_pos, deopt_id, kind, locs);
  // Marks either the continuation point in unoptimized code or the
  // deoptimization point in optimized code, after call.
  const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id);
  if (is_optimizing()) {
    AddDeoptIndexAtCall(deopt_id_after);
  } else {
    // Add deoptimization continuation point after the call and before the
    // arguments are removed.
    AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos);
  }
}
1163 | 1134 |
1164 | |
// Like GenerateDartCall, but additionally records 'target' as the static
// call target of this call site via AddStaticCallTarget.
void FlowGraphCompiler::GenerateStaticDartCall(intptr_t deopt_id,
                                               TokenPosition token_pos,
                                               const StubEntry& stub_entry,
                                               RawPcDescriptors::Kind kind,
                                               LocationSummary* locs,
                                               const Function& target) {
  GenerateDartCall(deopt_id, token_pos, stub_entry, kind, locs);
  AddStaticCallTarget(target);
}
1174 | 1144 |
1175 | |
// Emits a call into the C++ runtime ('entry' with 'argument_count' args),
// records call-site metadata, and — when a real 'deopt_id' is supplied —
// registers the after-call deoptimization point.
void FlowGraphCompiler::GenerateRuntimeCall(TokenPosition token_pos,
                                            intptr_t deopt_id,
                                            const RuntimeEntry& entry,
                                            intptr_t argument_count,
                                            LocationSummary* locs) {
  __ CallRuntime(entry, argument_count);
  EmitCallsiteMetaData(token_pos, deopt_id, RawPcDescriptors::kOther, locs);
  if (deopt_id != Thread::kNoDeoptId) {
    // Marks either the continuation point in unoptimized code or the
    // deoptimization point in optimized code, after call.
    const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id);
    if (is_optimizing()) {
      AddDeoptIndexAtCall(deopt_id_after);
    } else {
      // Add deoptimization continuation point after the call and before the
      // arguments are removed.
      AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos);
    }
  }
}
1196 | 1165 |
1197 | |
// Emits an unoptimized static call through the stub selected by the number
// of checked arguments in 'ic_data'. The ICData is passed to the stub in
// ECX; the pushed arguments are dropped after the call returns.
void FlowGraphCompiler::EmitUnoptimizedStaticCall(intptr_t argument_count,
                                                  intptr_t deopt_id,
                                                  TokenPosition token_pos,
                                                  LocationSummary* locs,
                                                  const ICData& ic_data) {
  const StubEntry& stub_entry =
      *StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested());
  __ LoadObject(ECX, ic_data);  // Stub input: ICData in ECX.
  GenerateDartCall(deopt_id, token_pos, stub_entry,
                   RawPcDescriptors::kUnoptStaticCall, locs);
  __ Drop(argument_count);  // Remove the pushed arguments.
}
1210 | 1178 |
1211 | |
// Emits code that increments the Smi counter at index 'edge_id' of the
// edge-counters array (clobbers EAX).
void FlowGraphCompiler::EmitEdgeCounter(intptr_t edge_id) {
  // We do not check for overflow when incrementing the edge counter. The
  // function should normally be optimized long before the counter can
  // overflow; and though we do not reset the counters when we optimize or
  // deoptimize, there is a bound on the number of
  // optimization/deoptimization cycles we will attempt.
  ASSERT(!edge_counters_array_.IsNull());
  __ Comment("Edge counter");
  __ LoadObject(EAX, edge_counters_array_);
  __ IncrementSmiField(FieldAddress(EAX, Array::element_offset(edge_id)), 1);
}
1223 | 1190 |
1224 | |
// Emits an instance call from optimized code through 'stub_entry'. The stub
// inputs set up here are the caller function in EBX and the ICData in ECX;
// the pushed arguments are dropped after the call.
void FlowGraphCompiler::EmitOptimizedInstanceCall(const StubEntry& stub_entry,
                                                  const ICData& ic_data,
                                                  intptr_t argument_count,
                                                  intptr_t deopt_id,
                                                  TokenPosition token_pos,
                                                  LocationSummary* locs) {
  ASSERT(Array::Handle(ic_data.arguments_descriptor()).Length() > 0);
  // Each ICData propagated from unoptimized to optimized code contains the
  // function that corresponds to the Dart function of that IC call. Due
  // to inlining in optimized code, that function may not correspond to the
  // top-level function (parsed_function().function()) which could be
  // reoptimized and which counter needs to be incremented.
  // Pass the function explicitly, it is used in IC stub.
  __ LoadObject(EBX, parsed_function().function());
  __ LoadObject(ECX, ic_data);
  GenerateDartCall(deopt_id, token_pos, stub_entry, RawPcDescriptors::kIcCall,
                   locs);
  __ Drop(argument_count);
}
1244 | 1210 |
1245 | |
// Emits an (unoptimized) instance call through 'stub_entry'. The ICData is
// passed to the stub in ECX; the pushed arguments are dropped after the
// call.
void FlowGraphCompiler::EmitInstanceCall(const StubEntry& stub_entry,
                                         const ICData& ic_data,
                                         intptr_t argument_count,
                                         intptr_t deopt_id,
                                         TokenPosition token_pos,
                                         LocationSummary* locs) {
  ASSERT(Array::Handle(ic_data.arguments_descriptor()).Length() > 0);
  __ LoadObject(ECX, ic_data);
  GenerateDartCall(deopt_id, token_pos, stub_entry, RawPcDescriptors::kIcCall,
                   locs);
  __ Drop(argument_count);
}
1258 | 1223 |
1259 | |
1260 void FlowGraphCompiler::EmitMegamorphicInstanceCall( | 1224 void FlowGraphCompiler::EmitMegamorphicInstanceCall( |
1261 const String& name, | 1225 const String& name, |
1262 const Array& arguments_descriptor, | 1226 const Array& arguments_descriptor, |
1263 intptr_t argument_count, | 1227 intptr_t argument_count, |
1264 intptr_t deopt_id, | 1228 intptr_t deopt_id, |
1265 TokenPosition token_pos, | 1229 TokenPosition token_pos, |
1266 LocationSummary* locs, | 1230 LocationSummary* locs, |
1267 intptr_t try_index, | 1231 intptr_t try_index, |
1268 intptr_t slow_path_argument_count) { | 1232 intptr_t slow_path_argument_count) { |
1269 ASSERT(!arguments_descriptor.IsNull() && (arguments_descriptor.Length() > 0)); | 1233 ASSERT(!arguments_descriptor.IsNull() && (arguments_descriptor.Length() > 0)); |
(...skipping 17 matching lines...) Expand all Loading... |
1287 AddDeoptIndexAtCall(deopt_id_after); | 1251 AddDeoptIndexAtCall(deopt_id_after); |
1288 } else { | 1252 } else { |
1289 // Add deoptimization continuation point after the call and before the | 1253 // Add deoptimization continuation point after the call and before the |
1290 // arguments are removed. | 1254 // arguments are removed. |
1291 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); | 1255 AddCurrentDescriptor(RawPcDescriptors::kDeopt, deopt_id_after, token_pos); |
1292 } | 1256 } |
1293 EmitCatchEntryState(pending_deoptimization_env_, try_index); | 1257 EmitCatchEntryState(pending_deoptimization_env_, try_index); |
1294 __ Drop(argument_count); | 1258 __ Drop(argument_count); |
1295 } | 1259 } |
1296 | 1260 |
1297 | |
// Switchable instance calls are only emitted by the precompiler, which does
// not target IA-32; reaching this on IA-32 is a bug.
void FlowGraphCompiler::EmitSwitchableInstanceCall(const ICData& ic_data,
                                                   intptr_t argument_count,
                                                   intptr_t deopt_id,
                                                   TokenPosition token_pos,
                                                   LocationSummary* locs) {
  // Only generated with precompilation.
  UNREACHABLE();
}
1306 | 1269 |
1307 | |
// Emits a static call from optimized code through the patchable
// CallStaticFunction stub. EDX carries the arguments descriptor when the
// callee needs one (optional parameters, or generic with reified generics);
// otherwise it is zeroed (a GC-safe Smi) as the stub expects.
void FlowGraphCompiler::EmitOptimizedStaticCall(
    const Function& function,
    const Array& arguments_descriptor,
    intptr_t argument_count,
    intptr_t deopt_id,
    TokenPosition token_pos,
    LocationSummary* locs) {
  if (function.HasOptionalParameters() ||
      (FLAG_reify_generic_functions && function.IsGeneric())) {
    __ LoadObject(EDX, arguments_descriptor);
  } else {
    __ xorl(EDX, EDX);  // GC safe smi zero because of stub.
  }
  // Do not use the code from the function, but let the code be patched so that
  // we can record the outgoing edges to other code.
  GenerateDartCall(deopt_id, token_pos, *StubCode::CallStaticFunction_entry(),
                   RawPcDescriptors::kOther, locs);
  AddStaticCallTarget(function);
  __ Drop(argument_count);
}
1328 | 1290 |
1329 | |
1330 Condition FlowGraphCompiler::EmitEqualityRegConstCompare( | 1291 Condition FlowGraphCompiler::EmitEqualityRegConstCompare( |
1331 Register reg, | 1292 Register reg, |
1332 const Object& obj, | 1293 const Object& obj, |
1333 bool needs_number_check, | 1294 bool needs_number_check, |
1334 TokenPosition token_pos, | 1295 TokenPosition token_pos, |
1335 intptr_t deopt_id) { | 1296 intptr_t deopt_id) { |
1336 ASSERT(!needs_number_check || | 1297 ASSERT(!needs_number_check || |
1337 (!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint())); | 1298 (!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint())); |
1338 | 1299 |
1339 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) { | 1300 if (obj.IsSmi() && (Smi::Cast(obj).Value() == 0)) { |
(...skipping 13 matching lines...) Expand all Loading... |
1353 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, deopt_id, token_pos); | 1314 AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, deopt_id, token_pos); |
1354 // Stub returns result in flags (result of a cmpl, we need ZF computed). | 1315 // Stub returns result in flags (result of a cmpl, we need ZF computed). |
1355 __ popl(reg); // Discard constant. | 1316 __ popl(reg); // Discard constant. |
1356 __ popl(reg); // Restore 'reg'. | 1317 __ popl(reg); // Restore 'reg'. |
1357 } else { | 1318 } else { |
1358 __ CompareObject(reg, obj); | 1319 __ CompareObject(reg, obj); |
1359 } | 1320 } |
1360 return EQUAL; | 1321 return EQUAL; |
1361 } | 1322 } |
1362 | 1323 |
1363 | |
// Emits an identity comparison of 'left' and 'right'. When
// 'needs_number_check' is set, the identical-with-number-check stub is
// called (it leaves its result in the flags, like cmpl); otherwise a plain
// register compare is emitted. Returns the condition that holds on equality.
Condition FlowGraphCompiler::EmitEqualityRegRegCompare(Register left,
                                                       Register right,
                                                       bool needs_number_check,
                                                       TokenPosition token_pos,
                                                       intptr_t deopt_id) {
  if (needs_number_check) {
    // The stub takes both operands on the stack.
    __ pushl(left);
    __ pushl(right);
    if (is_optimizing()) {
      __ Call(*StubCode::OptimizedIdenticalWithNumberCheck_entry());
    } else {
      __ Call(*StubCode::UnoptimizedIdenticalWithNumberCheck_entry());
    }
    AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, deopt_id, token_pos);
    // Stub returns result in flags (result of a cmpl, we need ZF computed).
    __ popl(right);
    __ popl(left);
  } else {
    __ cmpl(left, right);
  }
  return EQUAL;
}
1386 | 1346 |
1387 | |
1388 // This function must be in sync with FlowGraphCompiler::RecordSafepoint and | 1347 // This function must be in sync with FlowGraphCompiler::RecordSafepoint and |
1389 // FlowGraphCompiler::SlowPathEnvironmentFor. | 1348 // FlowGraphCompiler::SlowPathEnvironmentFor. |
1390 void FlowGraphCompiler::SaveLiveRegisters(LocationSummary* locs) { | 1349 void FlowGraphCompiler::SaveLiveRegisters(LocationSummary* locs) { |
1391 #if defined(DEBUG) | 1350 #if defined(DEBUG) |
1392 locs->CheckWritableInputs(); | 1351 locs->CheckWritableInputs(); |
1393 ClobberDeadTempRegisters(locs); | 1352 ClobberDeadTempRegisters(locs); |
1394 #endif | 1353 #endif |
1395 | 1354 |
1396 // TODO(vegorov): consider saving only caller save (volatile) registers. | 1355 // TODO(vegorov): consider saving only caller save (volatile) registers. |
1397 const intptr_t xmm_regs_count = locs->live_registers()->FpuRegisterCount(); | 1356 const intptr_t xmm_regs_count = locs->live_registers()->FpuRegisterCount(); |
(...skipping 15 matching lines...) Expand all Loading... |
1413 // The order in which the registers are pushed must match the order | 1372 // The order in which the registers are pushed must match the order |
1414 // in which the registers are encoded in the safe point's stack map. | 1373 // in which the registers are encoded in the safe point's stack map. |
1415 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; --i) { | 1374 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; --i) { |
1416 Register reg = static_cast<Register>(i); | 1375 Register reg = static_cast<Register>(i); |
1417 if (locs->live_registers()->ContainsRegister(reg)) { | 1376 if (locs->live_registers()->ContainsRegister(reg)) { |
1418 __ pushl(reg); | 1377 __ pushl(reg); |
1419 } | 1378 } |
1420 } | 1379 } |
1421 } | 1380 } |
1422 | 1381 |
1423 | |
// Restores the registers saved by SaveLiveRegisters. CPU registers are
// popped first, iterating from low to high register number — the reverse of
// the high-to-low push order used when saving — then the XMM registers are
// reloaded from the stack block reserved below them and that block is
// released.
void FlowGraphCompiler::RestoreLiveRegisters(LocationSummary* locs) {
  for (intptr_t i = 0; i < kNumberOfCpuRegisters; ++i) {
    Register reg = static_cast<Register>(i);
    if (locs->live_registers()->ContainsRegister(reg)) {
      __ popl(reg);
    }
  }

  const intptr_t xmm_regs_count = locs->live_registers()->FpuRegisterCount();
  if (xmm_regs_count > 0) {
    // XMM registers have the lowest register number at the lowest address.
    intptr_t offset = 0;
    for (intptr_t i = 0; i < kNumberOfXmmRegisters; ++i) {
      XmmRegister xmm_reg = static_cast<XmmRegister>(i);
      if (locs->live_registers()->ContainsFpuRegister(xmm_reg)) {
        __ movups(xmm_reg, Address(ESP, offset));
        offset += kFpuRegisterSize;
      }
    }
    ASSERT(offset == (xmm_regs_count * kFpuRegisterSize));
    __ addl(ESP, Immediate(offset));  // Release the XMM spill area.
  }
}
1447 | 1405 |
1448 | |
#if defined(DEBUG)
// Debug-only helper: overwrites temp registers that are not in the live set
// with a recognizable garbage value (0xf7) so accidental reads of dead
// temps fail loudly.
void FlowGraphCompiler::ClobberDeadTempRegisters(LocationSummary* locs) {
  // Clobber temporaries that have not been manually preserved.
  for (intptr_t i = 0; i < locs->temp_count(); ++i) {
    Location tmp = locs->temp(i);
    // TODO(zerny): clobber non-live temporary FPU registers.
    if (tmp.IsRegister() &&
        !locs->live_registers()->ContainsRegister(tmp.reg())) {
      __ movl(tmp.reg(), Immediate(0xf7));
    }
  }
}
#endif
1462 | 1419 |
1463 | |
// Test-and-call prologue: loads the receiver (the last pushed argument)
// into EAX and the arguments descriptor into EDX.
void FlowGraphCompiler::EmitTestAndCallLoadReceiver(
    intptr_t argument_count,
    const Array& arguments_descriptor) {
  __ Comment("EmitTestAndCall");
  // Load receiver into EAX.
  __ movl(EAX, Address(ESP, (argument_count - 1) * kWordSize));
  __ LoadObject(EDX, arguments_descriptor);
}
1472 | 1428 |
1473 | |
1474 void FlowGraphCompiler::EmitTestAndCallSmiBranch(Label* label, bool if_smi) { | 1429 void FlowGraphCompiler::EmitTestAndCallSmiBranch(Label* label, bool if_smi) { |
1475 __ testl(EAX, Immediate(kSmiTagMask)); | 1430 __ testl(EAX, Immediate(kSmiTagMask)); |
1476 // Jump if receiver is (not) Smi. | 1431 // Jump if receiver is (not) Smi. |
1477 __ j(if_smi ? ZERO : NOT_ZERO, label); | 1432 __ j(if_smi ? ZERO : NOT_ZERO, label); |
1478 } | 1433 } |
1479 | 1434 |
1480 | |
// Loads the class id of the receiver in EAX into EDI, where the subsequent
// EmitTestAndCallCheckCid comparisons expect it.
void FlowGraphCompiler::EmitTestAndCallLoadCid() {
  __ LoadClassId(EDI, EAX);
}
1484 | 1438 |
1485 | |
1486 int FlowGraphCompiler::EmitTestAndCallCheckCid(Label* next_label, | 1439 int FlowGraphCompiler::EmitTestAndCallCheckCid(Label* next_label, |
1487 const CidRange& range, | 1440 const CidRange& range, |
1488 int bias) { | 1441 int bias) { |
1489 intptr_t cid_start = range.cid_start; | 1442 intptr_t cid_start = range.cid_start; |
1490 if (range.IsSingleCid()) { | 1443 if (range.IsSingleCid()) { |
1491 __ cmpl(EDI, Immediate(cid_start - bias)); | 1444 __ cmpl(EDI, Immediate(cid_start - bias)); |
1492 __ j(NOT_EQUAL, next_label); | 1445 __ j(NOT_EQUAL, next_label); |
1493 } else { | 1446 } else { |
1494 __ addl(EDI, Immediate(bias - cid_start)); | 1447 __ addl(EDI, Immediate(bias - cid_start)); |
1495 bias = cid_start; | 1448 bias = cid_start; |
1496 __ cmpl(EDI, Immediate(range.Extent())); | 1449 __ cmpl(EDI, Immediate(range.Extent())); |
1497 __ j(ABOVE, next_label); // Unsigned higher. | 1450 __ j(ABOVE, next_label); // Unsigned higher. |
1498 } | 1451 } |
1499 return bias; | 1452 return bias; |
1500 } | 1453 } |
1501 | 1454 |
1502 | |
1503 #undef __ | 1455 #undef __ |
1504 #define __ compiler_->assembler()-> | 1456 #define __ compiler_->assembler()-> |
1505 | 1457 |
1506 | |
1507 void ParallelMoveResolver::EmitMove(int index) { | 1458 void ParallelMoveResolver::EmitMove(int index) { |
1508 MoveOperands* move = moves_[index]; | 1459 MoveOperands* move = moves_[index]; |
1509 const Location source = move->src(); | 1460 const Location source = move->src(); |
1510 const Location destination = move->dest(); | 1461 const Location destination = move->dest(); |
1511 | 1462 |
1512 if (source.IsRegister()) { | 1463 if (source.IsRegister()) { |
1513 if (destination.IsRegister()) { | 1464 if (destination.IsRegister()) { |
1514 __ movl(destination.reg(), source.reg()); | 1465 __ movl(destination.reg(), source.reg()); |
1515 } else { | 1466 } else { |
1516 ASSERT(destination.IsStackSlot()); | 1467 ASSERT(destination.IsStackSlot()); |
(...skipping 86 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1603 Immediate(Smi::Cast(constant).Value())); | 1554 Immediate(Smi::Cast(constant).Value())); |
1604 } else { | 1555 } else { |
1605 StoreObject(destination.ToStackSlotAddress(), source.constant()); | 1556 StoreObject(destination.ToStackSlotAddress(), source.constant()); |
1606 } | 1557 } |
1607 } | 1558 } |
1608 } | 1559 } |
1609 | 1560 |
1610 move->Eliminate(); | 1561 move->Eliminate(); |
1611 } | 1562 } |
1612 | 1563 |
1613 | |
1614 void ParallelMoveResolver::EmitSwap(int index) { | 1564 void ParallelMoveResolver::EmitSwap(int index) { |
1615 MoveOperands* move = moves_[index]; | 1565 MoveOperands* move = moves_[index]; |
1616 const Location source = move->src(); | 1566 const Location source = move->src(); |
1617 const Location destination = move->dest(); | 1567 const Location destination = move->dest(); |
1618 | 1568 |
1619 if (source.IsRegister() && destination.IsRegister()) { | 1569 if (source.IsRegister() && destination.IsRegister()) { |
1620 __ xchgl(destination.reg(), source.reg()); | 1570 __ xchgl(destination.reg(), source.reg()); |
1621 } else if (source.IsRegister() && destination.IsStackSlot()) { | 1571 } else if (source.IsRegister() && destination.IsStackSlot()) { |
1622 Exchange(source.reg(), destination.ToStackSlotAddress()); | 1572 Exchange(source.reg(), destination.ToStackSlotAddress()); |
1623 } else if (source.IsStackSlot() && destination.IsRegister()) { | 1573 } else if (source.IsStackSlot() && destination.IsRegister()) { |
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1679 for (int i = 0; i < moves_.length(); ++i) { | 1629 for (int i = 0; i < moves_.length(); ++i) { |
1680 const MoveOperands& other_move = *moves_[i]; | 1630 const MoveOperands& other_move = *moves_[i]; |
1681 if (other_move.Blocks(source)) { | 1631 if (other_move.Blocks(source)) { |
1682 moves_[i]->set_src(destination); | 1632 moves_[i]->set_src(destination); |
1683 } else if (other_move.Blocks(destination)) { | 1633 } else if (other_move.Blocks(destination)) { |
1684 moves_[i]->set_src(source); | 1634 moves_[i]->set_src(source); |
1685 } | 1635 } |
1686 } | 1636 } |
1687 } | 1637 } |
1688 | 1638 |
1689 | |
// Moves one word from 'src' to 'dst' via a scratch register (IA-32 has no
// direct memory-to-memory move).
void ParallelMoveResolver::MoveMemoryToMemory(const Address& dst,
                                              const Address& src) {
  ScratchRegisterScope ensure_scratch(this, kNoRegister);
  __ movl(ensure_scratch.reg(), src);
  __ movl(dst, ensure_scratch.reg());
}
1696 | 1645 |
1697 | |
// Stores 'obj' to memory at 'dst'. Objects that can be encoded directly
// (safe Smis and null) are written as raw immediates; anything else is
// materialized in a scratch register first via LoadObjectSafely.
void ParallelMoveResolver::StoreObject(const Address& dst, const Object& obj) {
  if (Assembler::IsSafeSmi(obj) || obj.IsNull()) {
    __ movl(dst, Immediate(reinterpret_cast<int32_t>(obj.raw())));
  } else {
    ScratchRegisterScope ensure_scratch(this, kNoRegister);
    __ LoadObjectSafely(ensure_scratch.reg(), obj);
    __ movl(dst, ensure_scratch.reg());
  }
}
1707 | 1655 |
1708 | |
// Swaps the contents of register 'reg' and memory operand 'mem' using a
// scratch register (which is guaranteed to differ from 'reg').
void ParallelMoveResolver::Exchange(Register reg, const Address& mem) {
  ScratchRegisterScope ensure_scratch(this, reg);
  __ movl(ensure_scratch.reg(), mem);
  __ movl(mem, reg);
  __ movl(reg, ensure_scratch.reg());
}
1715 | 1662 |
1716 | |
// Swaps the contents of two memory operands using two distinct scratch
// registers.
void ParallelMoveResolver::Exchange(const Address& mem1, const Address& mem2) {
  ScratchRegisterScope ensure_scratch1(this, kNoRegister);
  ScratchRegisterScope ensure_scratch2(this, ensure_scratch1.reg());
  __ movl(ensure_scratch1.reg(), mem1);
  __ movl(ensure_scratch2.reg(), mem2);
  __ movl(mem2, ensure_scratch1.reg());
  __ movl(mem1, ensure_scratch2.reg());
}
1725 | 1671 |
1726 | |
// Register/stack-slot exchange by (base register, offset) pair.
// NOTE(review): never called on IA32 — presumably this overload exists only
// to satisfy the shared ParallelMoveResolver interface used by other
// architectures; confirm against the platform-independent declaration.
void ParallelMoveResolver::Exchange(Register reg,
                                    Register base_reg,
                                    intptr_t stack_offset) {
  UNREACHABLE();
}
1732 | 1677 |
1733 | |
// Stack-slot/stack-slot exchange by (base register, offset) pairs.
// NOTE(review): never called on IA32 — presumably present only to satisfy
// the shared ParallelMoveResolver interface; IA32 uses the Address-based
// Exchange overloads above instead.
void ParallelMoveResolver::Exchange(Register base_reg1,
                                    intptr_t stack_offset1,
                                    Register base_reg2,
                                    intptr_t stack_offset2) {
  UNREACHABLE();
}
1740 | 1684 |
1741 | |
// Emits code that saves a general-purpose scratch register on the stack so
// its live value survives while ScratchRegisterScope borrows it.
void ParallelMoveResolver::SpillScratch(Register reg) {
  __ pushl(reg);
}
1745 | 1688 |
1746 | |
// Emits code that restores a general-purpose scratch register previously
// saved by SpillScratch (must be paired LIFO with the matching pushl).
void ParallelMoveResolver::RestoreScratch(Register reg) {
  __ popl(reg);
}
1750 | 1692 |
1751 | |
// Emits code that saves an FPU (XMM) scratch register on the stack.
// There is no push for XMM registers, so space is reserved manually and the
// full register is stored with an unaligned 128-bit move (movups — ESP is
// not guaranteed 16-byte aligned here).
void ParallelMoveResolver::SpillFpuScratch(FpuRegister reg) {
  __ subl(ESP, Immediate(kFpuRegisterSize));
  __ movups(Address(ESP, 0), reg);
}
1756 | 1697 |
1757 | |
// Emits code that restores an FPU (XMM) scratch register saved by
// SpillFpuScratch: reload the 128-bit value, then release the stack space.
// Must be paired LIFO with the matching SpillFpuScratch.
void ParallelMoveResolver::RestoreFpuScratch(FpuRegister reg) {
  __ movups(reg, Address(ESP, 0));
  __ addl(ESP, Immediate(kFpuRegisterSize));
}
1762 | 1702 |
1763 | |
1764 #undef __ | 1703 #undef __ |
1765 | 1704 |
1766 } // namespace dart | 1705 } // namespace dart |
1767 | 1706 |
1768 #endif // defined TARGET_ARCH_IA32 | 1707 #endif // defined TARGET_ARCH_IA32 |
OLD | NEW |