OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. |
6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
7 | 7 |
8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
9 | 9 |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 78 matching lines...)
89 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; | 89 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; |
90 ASSERT(fp_sp_dist <= 0); | 90 ASSERT(fp_sp_dist <= 0); |
91 __ movq(RDI, RSP); | 91 __ movq(RDI, RSP); |
92 __ subq(RDI, RBP); | 92 __ subq(RDI, RBP); |
93 __ cmpq(RDI, Immediate(fp_sp_dist)); | 93 __ cmpq(RDI, Immediate(fp_sp_dist)); |
94 __ j(EQUAL, &done, Assembler::kNearJump); | 94 __ j(EQUAL, &done, Assembler::kNearJump); |
95 __ int3(); | 95 __ int3(); |
96 __ Bind(&done); | 96 __ Bind(&done); |
97 } | 97 } |
98 #endif | 98 #endif |
99 __ LeaveFrame(); | |
100 __ ret(); | |
101 | 99 |
102 // Generate 8 bytes of NOPs so that the debugger can patch the | 100 __ ReturnPatchable(); |
103 // return pattern with a call to the debug stub. | |
104 // Note that the nop(8) byte pattern is not recognized by the debugger. | |
105 __ nop(1); | |
106 __ nop(1); | |
107 __ nop(1); | |
108 __ nop(1); | |
109 __ nop(1); | |
110 __ nop(1); | |
111 __ nop(1); | |
112 __ nop(1); | |
113 compiler->AddCurrentDescriptor(PcDescriptors::kReturn, | 101 compiler->AddCurrentDescriptor(PcDescriptors::kReturn, |
114 Isolate::kNoDeoptId, | 102 Isolate::kNoDeoptId, |
115 token_pos()); | 103 token_pos()); |
116 } | 104 } |
117 | 105 |
118 | 106 |
119 static Condition NegateCondition(Condition condition) { | 107 static Condition NegateCondition(Condition condition) { |
120 switch (condition) { | 108 switch (condition) { |
121 case EQUAL: return NOT_EQUAL; | 109 case EQUAL: return NOT_EQUAL; |
122 case NOT_EQUAL: return EQUAL; | 110 case NOT_EQUAL: return EQUAL; |
(...skipping 185 matching lines...)
308 return LocationSummary::Make(kNumInputs, | 296 return LocationSummary::Make(kNumInputs, |
309 Location::RequiresRegister(), | 297 Location::RequiresRegister(), |
310 LocationSummary::kNoCall); | 298 LocationSummary::kNoCall); |
311 } | 299 } |
312 | 300 |
313 | 301 |
314 void ConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 302 void ConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
315 // The register allocator drops constant definitions that have no uses. | 303 // The register allocator drops constant definitions that have no uses. |
316 if (!locs()->out().IsInvalid()) { | 304 if (!locs()->out().IsInvalid()) { |
317 Register result = locs()->out().reg(); | 305 Register result = locs()->out().reg(); |
318 __ LoadObject(result, value()); | 306 __ LoadObject(result, value(), PP); |
319 } | 307 } |
320 } | 308 } |
321 | 309 |
322 | 310 |
323 LocationSummary* AssertAssignableInstr::MakeLocationSummary() const { | 311 LocationSummary* AssertAssignableInstr::MakeLocationSummary() const { |
324 const intptr_t kNumInputs = 3; | 312 const intptr_t kNumInputs = 3; |
325 const intptr_t kNumTemps = 0; | 313 const intptr_t kNumTemps = 0; |
326 LocationSummary* summary = | 314 LocationSummary* summary = |
327 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kCall); | 315 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kCall); |
328 summary->set_in(0, Location::RegisterLocation(RAX)); // Value. | 316 summary->set_in(0, Location::RegisterLocation(RAX)); // Value. |
(...skipping 129 matching lines...)
458 const ICData& original_ic_data) { | 446 const ICData& original_ic_data) { |
459 if (!compiler->is_optimizing()) { | 447 if (!compiler->is_optimizing()) { |
460 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, | 448 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, |
461 deopt_id, | 449 deopt_id, |
462 token_pos); | 450 token_pos); |
463 } | 451 } |
464 const int kNumberOfArguments = 2; | 452 const int kNumberOfArguments = 2; |
465 const Array& kNoArgumentNames = Object::null_array(); | 453 const Array& kNoArgumentNames = Object::null_array(); |
466 const int kNumArgumentsChecked = 2; | 454 const int kNumArgumentsChecked = 2; |
467 | 455 |
468 const Immediate& raw_null = | |
469 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
470 Label check_identity; | 456 Label check_identity; |
471 __ cmpq(Address(RSP, 0 * kWordSize), raw_null); | 457 __ LoadObject(TMP, Object::Handle(), PP); |
| 458 __ cmpq(Address(RSP, 0 * kWordSize), TMP); |
472 __ j(EQUAL, &check_identity); | 459 __ j(EQUAL, &check_identity); |
473 __ cmpq(Address(RSP, 1 * kWordSize), raw_null); | 460 __ cmpq(Address(RSP, 1 * kWordSize), TMP); |
474 __ j(EQUAL, &check_identity); | 461 __ j(EQUAL, &check_identity); |
475 | 462 |
476 ICData& equality_ic_data = ICData::ZoneHandle(original_ic_data.raw()); | 463 ICData& equality_ic_data = ICData::ZoneHandle(original_ic_data.raw()); |
477 if (compiler->is_optimizing() && FLAG_propagate_ic_data) { | 464 if (compiler->is_optimizing() && FLAG_propagate_ic_data) { |
478 ASSERT(!original_ic_data.IsNull()); | 465 ASSERT(!original_ic_data.IsNull()); |
479 if (original_ic_data.NumberOfChecks() == 0) { | 466 if (original_ic_data.NumberOfChecks() == 0) { |
480 // IC call for reoptimization populates original ICData. | 467 // IC call for reoptimization populates original ICData. |
481 equality_ic_data = original_ic_data.raw(); | 468 equality_ic_data = original_ic_data.raw(); |
482 } else { | 469 } else { |
483 // Megamorphic call. | 470 // Megamorphic call. |
(...skipping 20 matching lines...)
504 | 491 |
505 __ Bind(&check_identity); | 492 __ Bind(&check_identity); |
506 Label equality_done; | 493 Label equality_done; |
507 if (compiler->is_optimizing()) { | 494 if (compiler->is_optimizing()) { |
508 // No need to update IC data. | 495 // No need to update IC data. |
509 Label is_true; | 496 Label is_true; |
510 __ popq(RAX); | 497 __ popq(RAX); |
511 __ popq(RDX); | 498 __ popq(RDX); |
512 __ cmpq(RAX, RDX); | 499 __ cmpq(RAX, RDX); |
513 __ j(EQUAL, &is_true); | 500 __ j(EQUAL, &is_true); |
514 __ LoadObject(RAX, Bool::Get(kind != Token::kEQ)); | 501 __ LoadObject(RAX, Bool::Get(kind != Token::kEQ), PP); |
515 __ jmp(&equality_done); | 502 __ jmp(&equality_done); |
516 __ Bind(&is_true); | 503 __ Bind(&is_true); |
517 __ LoadObject(RAX, Bool::Get(kind == Token::kEQ)); | 504 __ LoadObject(RAX, Bool::Get(kind == Token::kEQ), PP); |
518 if (kind == Token::kNE) { | 505 if (kind == Token::kNE) { |
519 // Skip not-equal result conversion. | 506 // Skip not-equal result conversion. |
520 __ jmp(&equality_done); | 507 __ jmp(&equality_done); |
521 } | 508 } |
522 } else { | 509 } else { |
523 // Call stub, load IC data in register. The stub will update ICData if | 510 // Call stub, load IC data in register. The stub will update ICData if |
524 // necessary. | 511 // necessary. |
525 Register ic_data_reg = locs->temp(0).reg(); | 512 Register ic_data_reg = locs->temp(0).reg(); |
526 ASSERT(ic_data_reg == RBX); // Stub depends on it. | 513 ASSERT(ic_data_reg == RBX); // Stub depends on it. |
527 __ LoadObject(ic_data_reg, equality_ic_data); | 514 __ LoadObject(ic_data_reg, equality_ic_data, PP); |
528 compiler->GenerateCall(token_pos, | 515 compiler->GenerateCall(token_pos, |
529 &StubCode::EqualityWithNullArgLabel(), | 516 &StubCode::EqualityWithNullArgLabel(), |
530 PcDescriptors::kRuntimeCall, | 517 PcDescriptors::kRuntimeCall, |
531 locs); | 518 locs); |
532 __ Drop(2); | 519 __ Drop(2); |
533 } | 520 } |
534 __ Bind(&check_ne); | 521 __ Bind(&check_ne); |
535 if (kind == Token::kNE) { | 522 if (kind == Token::kNE) { |
536 Label true_label, done; | 523 Label true_label, done; |
537 // Negate the condition: true label returns false and vice versa. | 524 // Negate the condition: true label returns false and vice versa. |
538 __ CompareObject(RAX, Bool::True()); | 525 __ CompareObject(RAX, Bool::True()); |
539 __ j(EQUAL, &true_label, Assembler::kNearJump); | 526 __ j(EQUAL, &true_label, Assembler::kNearJump); |
540 __ LoadObject(RAX, Bool::True()); | 527 __ LoadObject(RAX, Bool::True(), PP); |
541 __ jmp(&done, Assembler::kNearJump); | 528 __ jmp(&done, Assembler::kNearJump); |
542 __ Bind(&true_label); | 529 __ Bind(&true_label); |
543 __ LoadObject(RAX, Bool::False()); | 530 __ LoadObject(RAX, Bool::False(), PP); |
544 __ Bind(&done); | 531 __ Bind(&done); |
545 } | 532 } |
546 __ Bind(&equality_done); | 533 __ Bind(&equality_done); |
547 } | 534 } |
548 | 535 |
549 | 536 |
550 static void LoadValueCid(FlowGraphCompiler* compiler, | 537 static void LoadValueCid(FlowGraphCompiler* compiler, |
551 Register value_cid_reg, | 538 Register value_cid_reg, |
552 Register value_reg, | 539 Register value_reg, |
553 Label* value_is_smi = NULL) { | 540 Label* value_is_smi = NULL) { |
(...skipping 49 matching lines...)
603 // Object.== is same as ===. | 590 // Object.== is same as ===. |
604 __ Drop(2); | 591 __ Drop(2); |
605 __ cmpq(left, right); | 592 __ cmpq(left, right); |
606 if (branch != NULL) { | 593 if (branch != NULL) { |
607 branch->EmitBranchOnCondition(compiler, cond); | 594 branch->EmitBranchOnCondition(compiler, cond); |
608 } else { | 595 } else { |
609 // This case should be rare. | 596 // This case should be rare. |
610 Register result = locs->out().reg(); | 597 Register result = locs->out().reg(); |
611 Label load_true; | 598 Label load_true; |
612 __ j(cond, &load_true, Assembler::kNearJump); | 599 __ j(cond, &load_true, Assembler::kNearJump); |
613 __ LoadObject(result, Bool::False()); | 600 __ LoadObject(result, Bool::False(), PP); |
614 __ jmp(&done); | 601 __ jmp(&done); |
615 __ Bind(&load_true); | 602 __ Bind(&load_true); |
616 __ LoadObject(result, Bool::True()); | 603 __ LoadObject(result, Bool::True(), PP); |
617 } | 604 } |
618 } else { | 605 } else { |
619 const int kNumberOfArguments = 2; | 606 const int kNumberOfArguments = 2; |
620 const Array& kNoArgumentNames = Object::null_array(); | 607 const Array& kNoArgumentNames = Object::null_array(); |
621 compiler->GenerateStaticCall(deopt_id, | 608 compiler->GenerateStaticCall(deopt_id, |
622 token_pos, | 609 token_pos, |
623 target, | 610 target, |
624 kNumberOfArguments, | 611 kNumberOfArguments, |
625 kNoArgumentNames, | 612 kNoArgumentNames, |
626 locs); | 613 locs); |
627 if (branch == NULL) { | 614 if (branch == NULL) { |
628 if (kind == Token::kNE) { | 615 if (kind == Token::kNE) { |
629 Label false_label; | 616 Label false_label; |
630 __ CompareObject(RAX, Bool::True()); | 617 __ CompareObject(RAX, Bool::True()); |
631 __ j(EQUAL, &false_label, Assembler::kNearJump); | 618 __ j(EQUAL, &false_label, Assembler::kNearJump); |
632 __ LoadObject(RAX, Bool::True()); | 619 __ LoadObject(RAX, Bool::True(), PP); |
633 __ jmp(&done); | 620 __ jmp(&done); |
634 __ Bind(&false_label); | 621 __ Bind(&false_label); |
635 __ LoadObject(RAX, Bool::False()); | 622 __ LoadObject(RAX, Bool::False(), PP); |
636 } | 623 } |
637 } else { | 624 } else { |
638 if (branch->is_checked()) { | 625 if (branch->is_checked()) { |
639 EmitAssertBoolean(RAX, token_pos, deopt_id, locs, compiler); | 626 EmitAssertBoolean(RAX, token_pos, deopt_id, locs, compiler); |
640 } | 627 } |
641 __ CompareObject(RAX, Bool::True()); | 628 __ CompareObject(RAX, Bool::True()); |
642 branch->EmitBranchOnCondition(compiler, cond); | 629 branch->EmitBranchOnCondition(compiler, cond); |
643 } | 630 } |
644 } | 631 } |
645 if (i < len - 1) { | 632 if (i < len - 1) { |
(...skipping 13 matching lines...)
659 BranchInstr* branch, | 646 BranchInstr* branch, |
660 intptr_t deopt_id) { | 647 intptr_t deopt_id) { |
661 ASSERT((kind == Token::kEQ) || (kind == Token::kNE)); | 648 ASSERT((kind == Token::kEQ) || (kind == Token::kNE)); |
662 Register left = locs.in(0).reg(); | 649 Register left = locs.in(0).reg(); |
663 Register right = locs.in(1).reg(); | 650 Register right = locs.in(1).reg(); |
664 Register temp = locs.temp(0).reg(); | 651 Register temp = locs.temp(0).reg(); |
665 Label* deopt = compiler->AddDeoptStub(deopt_id, kDeoptEquality); | 652 Label* deopt = compiler->AddDeoptStub(deopt_id, kDeoptEquality); |
666 __ testq(left, Immediate(kSmiTagMask)); | 653 __ testq(left, Immediate(kSmiTagMask)); |
667 __ j(ZERO, deopt); | 654 __ j(ZERO, deopt); |
668 // 'left' is not Smi. | 655 // 'left' is not Smi. |
669 const Immediate& raw_null = | 656 |
670 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
671 Label identity_compare; | 657 Label identity_compare; |
672 __ cmpq(right, raw_null); | 658 __ CompareObject(right, Object::Handle()); |
673 __ j(EQUAL, &identity_compare); | 659 __ j(EQUAL, &identity_compare); |
674 __ cmpq(left, raw_null); | 660 __ CompareObject(left, Object::Handle()); |
675 __ j(EQUAL, &identity_compare); | 661 __ j(EQUAL, &identity_compare); |
676 | 662 |
677 __ LoadClassId(temp, left); | 663 __ LoadClassId(temp, left); |
678 const ICData& ic_data = ICData::Handle(orig_ic_data.AsUnaryClassChecks()); | 664 const ICData& ic_data = ICData::Handle(orig_ic_data.AsUnaryClassChecks()); |
679 const intptr_t len = ic_data.NumberOfChecks(); | 665 const intptr_t len = ic_data.NumberOfChecks(); |
680 for (intptr_t i = 0; i < len; i++) { | 666 for (intptr_t i = 0; i < len; i++) { |
681 __ cmpq(temp, Immediate(ic_data.GetReceiverClassIdAt(i))); | 667 __ cmpq(temp, Immediate(ic_data.GetReceiverClassIdAt(i))); |
682 if (i == (len - 1)) { | 668 if (i == (len - 1)) { |
683 __ j(NOT_EQUAL, deopt); | 669 __ j(NOT_EQUAL, deopt); |
684 } else { | 670 } else { |
685 __ j(EQUAL, &identity_compare); | 671 __ j(EQUAL, &identity_compare); |
686 } | 672 } |
687 } | 673 } |
688 __ Bind(&identity_compare); | 674 __ Bind(&identity_compare); |
689 __ cmpq(left, right); | 675 __ cmpq(left, right); |
690 if (branch == NULL) { | 676 if (branch == NULL) { |
691 Label done, is_equal; | 677 Label done, is_equal; |
692 Register result = locs.out().reg(); | 678 Register result = locs.out().reg(); |
693 __ j(EQUAL, &is_equal, Assembler::kNearJump); | 679 __ j(EQUAL, &is_equal, Assembler::kNearJump); |
694 // Not equal. | 680 // Not equal. |
695 __ LoadObject(result, Bool::Get(kind != Token::kEQ)); | 681 __ LoadObject(result, Bool::Get(kind != Token::kEQ), PP); |
696 __ jmp(&done, Assembler::kNearJump); | 682 __ jmp(&done, Assembler::kNearJump); |
697 __ Bind(&is_equal); | 683 __ Bind(&is_equal); |
698 __ LoadObject(result, Bool::Get(kind == Token::kEQ)); | 684 __ LoadObject(result, Bool::Get(kind == Token::kEQ), PP); |
699 __ Bind(&done); | 685 __ Bind(&done); |
700 } else { | 686 } else { |
701 Condition cond = TokenKindToSmiCondition(kind); | 687 Condition cond = TokenKindToSmiCondition(kind); |
702 branch->EmitBranchOnCondition(compiler, cond); | 688 branch->EmitBranchOnCondition(compiler, cond); |
703 } | 689 } |
704 } | 690 } |
705 | 691 |
706 | 692 |
707 // First test if receiver is NULL, in which case === is applied. | 693 // First test if receiver is NULL, in which case === is applied. |
708 // If type feedback was provided (lists of <class-id, target>), do a | 694 // If type feedback was provided (lists of <class-id, target>), do a |
709 // type by type check (either === or a static call to the operator). | 695 // type by type check (either === or a static call to the operator). |
710 static void EmitGenericEqualityCompare(FlowGraphCompiler* compiler, | 696 static void EmitGenericEqualityCompare(FlowGraphCompiler* compiler, |
711 LocationSummary* locs, | 697 LocationSummary* locs, |
712 Token::Kind kind, | 698 Token::Kind kind, |
713 BranchInstr* branch, | 699 BranchInstr* branch, |
714 const ICData& ic_data, | 700 const ICData& ic_data, |
715 intptr_t deopt_id, | 701 intptr_t deopt_id, |
716 intptr_t token_pos) { | 702 intptr_t token_pos) { |
717 ASSERT((kind == Token::kEQ) || (kind == Token::kNE)); | 703 ASSERT((kind == Token::kEQ) || (kind == Token::kNE)); |
718 ASSERT(!ic_data.IsNull() && (ic_data.NumberOfChecks() > 0)); | 704 ASSERT(!ic_data.IsNull() && (ic_data.NumberOfChecks() > 0)); |
719 Register left = locs->in(0).reg(); | 705 Register left = locs->in(0).reg(); |
720 Register right = locs->in(1).reg(); | 706 Register right = locs->in(1).reg(); |
721 const Immediate& raw_null = | 707 |
722 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
723 Label done, identity_compare, non_null_compare; | 708 Label done, identity_compare, non_null_compare; |
724 __ cmpq(right, raw_null); | 709 __ CompareObject(right, Object::Handle()); |
725 __ j(EQUAL, &identity_compare, Assembler::kNearJump); | 710 __ j(EQUAL, &identity_compare, Assembler::kNearJump); |
726 __ cmpq(left, raw_null); | 711 __ CompareObject(left, Object::Handle()); |
727 __ j(NOT_EQUAL, &non_null_compare, Assembler::kNearJump); | 712 __ j(NOT_EQUAL, &non_null_compare, Assembler::kNearJump); |
728 // Comparison with NULL is "===". | 713 // Comparison with NULL is "===". |
729 __ Bind(&identity_compare); | 714 __ Bind(&identity_compare); |
730 __ cmpq(left, right); | 715 __ cmpq(left, right); |
731 Condition cond = TokenKindToSmiCondition(kind); | 716 Condition cond = TokenKindToSmiCondition(kind); |
732 if (branch != NULL) { | 717 if (branch != NULL) { |
733 branch->EmitBranchOnCondition(compiler, cond); | 718 branch->EmitBranchOnCondition(compiler, cond); |
734 } else { | 719 } else { |
735 Register result = locs->out().reg(); | 720 Register result = locs->out().reg(); |
736 Label load_true; | 721 Label load_true; |
737 __ j(cond, &load_true, Assembler::kNearJump); | 722 __ j(cond, &load_true, Assembler::kNearJump); |
738 __ LoadObject(result, Bool::False()); | 723 __ LoadObject(result, Bool::False(), PP); |
739 __ jmp(&done); | 724 __ jmp(&done); |
740 __ Bind(&load_true); | 725 __ Bind(&load_true); |
741 __ LoadObject(result, Bool::True()); | 726 __ LoadObject(result, Bool::True(), PP); |
742 } | 727 } |
743 __ jmp(&done); | 728 __ jmp(&done); |
744 __ Bind(&non_null_compare); // Receiver is not null. | 729 __ Bind(&non_null_compare); // Receiver is not null. |
745 __ pushq(left); | 730 __ pushq(left); |
746 __ pushq(right); | 731 __ pushq(right); |
747 EmitEqualityAsPolymorphicCall(compiler, ic_data, locs, branch, kind, | 732 EmitEqualityAsPolymorphicCall(compiler, ic_data, locs, branch, kind, |
748 deopt_id, token_pos); | 733 deopt_id, token_pos); |
749 __ Bind(&done); | 734 __ Bind(&done); |
750 } | 735 } |
751 | 736 |
(...skipping 37 matching lines...)
789 } else { | 774 } else { |
790 __ cmpq(left.reg(), right.reg()); | 775 __ cmpq(left.reg(), right.reg()); |
791 } | 776 } |
792 | 777 |
793 if (branch != NULL) { | 778 if (branch != NULL) { |
794 branch->EmitBranchOnCondition(compiler, true_condition); | 779 branch->EmitBranchOnCondition(compiler, true_condition); |
795 } else { | 780 } else { |
796 Register result = locs.out().reg(); | 781 Register result = locs.out().reg(); |
797 Label done, is_true; | 782 Label done, is_true; |
798 __ j(true_condition, &is_true); | 783 __ j(true_condition, &is_true); |
799 __ LoadObject(result, Bool::False()); | 784 __ LoadObject(result, Bool::False(), PP); |
800 __ jmp(&done); | 785 __ jmp(&done); |
801 __ Bind(&is_true); | 786 __ Bind(&is_true); |
802 __ LoadObject(result, Bool::True()); | 787 __ LoadObject(result, Bool::True(), PP); |
803 __ Bind(&done); | 788 __ Bind(&done); |
804 } | 789 } |
805 } | 790 } |
806 | 791 |
807 | 792 |
808 static Condition TokenKindToDoubleCondition(Token::Kind kind) { | 793 static Condition TokenKindToDoubleCondition(Token::Kind kind) { |
809 switch (kind) { | 794 switch (kind) { |
810 case Token::kEQ: return EQUAL; | 795 case Token::kEQ: return EQUAL; |
811 case Token::kNE: return NOT_EQUAL; | 796 case Token::kNE: return NOT_EQUAL; |
812 case Token::kLT: return BELOW; | 797 case Token::kLT: return BELOW; |
(...skipping 707 matching lines...)
1520 if (!compiler->is_optimizing() || (field_cid == kIllegalCid)) { | 1505 if (!compiler->is_optimizing() || (field_cid == kIllegalCid)) { |
1521 if (!compiler->is_optimizing() && (field_reg == kNoRegister)) { | 1506 if (!compiler->is_optimizing() && (field_reg == kNoRegister)) { |
1522 // Currently we can't have different location summaries for optimized | 1507 // Currently we can't have different location summaries for optimized |
1523 // and non-optimized code. So instead we manually pick up a register | 1508 // and non-optimized code. So instead we manually pick up a register |
1524 // that is known to be free because we know how non-optimizing compiler | 1509 // that is known to be free because we know how non-optimizing compiler |
1525 // allocates registers. | 1510 // allocates registers. |
1526 field_reg = RBX; | 1511 field_reg = RBX; |
1527 ASSERT((field_reg != value_reg) && (field_reg != value_cid_reg)); | 1512 ASSERT((field_reg != value_reg) && (field_reg != value_cid_reg)); |
1528 } | 1513 } |
1529 | 1514 |
1530 __ LoadObject(field_reg, Field::ZoneHandle(field().raw())); | 1515 __ LoadObject(field_reg, Field::ZoneHandle(field().raw()), PP); |
1531 | 1516 |
1532 FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset()); | 1517 FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset()); |
1533 FieldAddress field_nullability_operand( | 1518 FieldAddress field_nullability_operand( |
1534 field_reg, Field::is_nullable_offset()); | 1519 field_reg, Field::is_nullable_offset()); |
1535 FieldAddress field_length_operand( | 1520 FieldAddress field_length_operand( |
1536 field_reg, Field::guarded_list_length_offset()); | 1521 field_reg, Field::guarded_list_length_offset()); |
1537 | 1522 |
1538 if (value_cid == kDynamicCid) { | 1523 if (value_cid == kDynamicCid) { |
1539 if (value_cid_reg == kNoRegister) { | 1524 if (value_cid_reg == kNoRegister) { |
1540 ASSERT(!compiler->is_optimizing()); | 1525 ASSERT(!compiler->is_optimizing()); |
(...skipping 125 matching lines...)
1666 } else { | 1651 } else { |
1667 __ movq(field_length_operand, Immediate(Field::kNoFixedLength)); | 1652 __ movq(field_length_operand, Immediate(Field::kNoFixedLength)); |
1668 } | 1653 } |
1669 } | 1654 } |
1670 | 1655 |
1671 if (!ok_is_fall_through) { | 1656 if (!ok_is_fall_through) { |
1672 __ jmp(&ok); | 1657 __ jmp(&ok); |
1673 } | 1658 } |
1674 } else { | 1659 } else { |
1675 if (field_reg != kNoRegister) { | 1660 if (field_reg != kNoRegister) { |
1676 __ LoadObject(field_reg, Field::ZoneHandle(field().raw())); | 1661 __ LoadObject(field_reg, Field::ZoneHandle(field().raw()), PP); |
1677 } | 1662 } |
1678 | 1663 |
1679 if (value_cid == kDynamicCid) { | 1664 if (value_cid == kDynamicCid) { |
1680 // Field's guarded class id is fixed but value's class id is not known. | 1665 // Field's guarded class id is fixed but value's class id is not known. |
1681 __ testq(value_reg, Immediate(kSmiTagMask)); | 1666 __ testq(value_reg, Immediate(kSmiTagMask)); |
1682 | 1667 |
1683 if (field_cid != kSmiCid) { | 1668 if (field_cid != kSmiCid) { |
1684 __ j(ZERO, fail); | 1669 __ j(ZERO, fail); |
1685 __ LoadClassId(value_cid_reg, value_reg); | 1670 __ LoadClassId(value_cid_reg, value_reg); |
1686 __ cmpq(value_cid_reg, Immediate(field_cid)); | 1671 __ cmpq(value_cid_reg, Immediate(field_cid)); |
(...skipping 15 matching lines...)
1702 } else if (RawObject::IsTypedDataClassId(field_cid)) { | 1687 } else if (RawObject::IsTypedDataClassId(field_cid)) { |
1703 // Destroy value_cid_reg (safe because we are finished with it). | 1688 // Destroy value_cid_reg (safe because we are finished with it). |
1704 __ movq(value_cid_reg, | 1689 __ movq(value_cid_reg, |
1705 FieldAddress(value_reg, TypedData::length_offset())); | 1690 FieldAddress(value_reg, TypedData::length_offset())); |
1706 } | 1691 } |
1707 __ cmpq(value_cid_reg, field_length_operand); | 1692 __ cmpq(value_cid_reg, field_length_operand); |
1708 } | 1693 } |
1709 | 1694 |
1710 if (field().is_nullable() && (field_cid != kNullCid)) { | 1695 if (field().is_nullable() && (field_cid != kNullCid)) { |
1711 __ j(EQUAL, &ok); | 1696 __ j(EQUAL, &ok); |
1712 const Immediate& raw_null = | 1697 __ CompareObject(value_reg, Object::Handle()); |
1713 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
1714 __ cmpq(value_reg, raw_null); | |
1715 } | 1698 } |
1716 | 1699 |
1717 if (ok_is_fall_through) { | 1700 if (ok_is_fall_through) { |
1718 __ j(NOT_EQUAL, fail); | 1701 __ j(NOT_EQUAL, fail); |
1719 } else { | 1702 } else { |
1720 __ j(EQUAL, &ok); | 1703 __ j(EQUAL, &ok); |
1721 } | 1704 } |
1722 } else { | 1705 } else { |
1723 // Both value's and field's class id is known. | 1706 // Both value's and field's class id is known. |
1724 if ((value_cid != field_cid) && (value_cid != nullability)) { | 1707 if ((value_cid != field_cid) && (value_cid != nullability)) { |
(...skipping 104 matching lines...)
1829 : Location::RequiresRegister()); | 1812 : Location::RequiresRegister()); |
1830 locs->set_temp(0, Location::RequiresRegister()); | 1813 locs->set_temp(0, Location::RequiresRegister()); |
1831 return locs; | 1814 return locs; |
1832 } | 1815 } |
1833 | 1816 |
1834 | 1817 |
1835 void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1818 void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1836 Register value = locs()->in(0).reg(); | 1819 Register value = locs()->in(0).reg(); |
1837 Register temp = locs()->temp(0).reg(); | 1820 Register temp = locs()->temp(0).reg(); |
1838 | 1821 |
1839 __ LoadObject(temp, field()); | 1822 __ LoadObject(temp, field(), PP); |
1840 if (this->value()->NeedsStoreBuffer()) { | 1823 if (this->value()->NeedsStoreBuffer()) { |
1841 __ StoreIntoObject(temp, | 1824 __ StoreIntoObject(temp, |
1842 FieldAddress(temp, Field::value_offset()), value, CanValueBeSmi()); | 1825 FieldAddress(temp, Field::value_offset()), value, CanValueBeSmi()); |
1843 } else { | 1826 } else { |
1844 __ StoreIntoObjectNoBarrier( | 1827 __ StoreIntoObjectNoBarrier( |
1845 temp, FieldAddress(temp, Field::value_offset()), value); | 1828 temp, FieldAddress(temp, Field::value_offset()), value); |
1846 } | 1829 } |
1847 } | 1830 } |
1848 | 1831 |
1849 | 1832 |
(...skipping 133 matching lines...)
1983 // (or null). | 1966 // (or null). |
1984 ASSERT(!type_arguments().IsUninstantiatedIdentity() && | 1967 ASSERT(!type_arguments().IsUninstantiatedIdentity() && |
1985 !type_arguments().CanShareInstantiatorTypeArguments( | 1968 !type_arguments().CanShareInstantiatorTypeArguments( |
1986 instantiator_class())); | 1969 instantiator_class())); |
1987 // If the instantiator is null and if the type argument vector | 1970 // If the instantiator is null and if the type argument vector |
1988 // instantiated from null becomes a vector of dynamic, then use null as | 1971 // instantiated from null becomes a vector of dynamic, then use null as |
1989 // the type arguments. | 1972 // the type arguments. |
1990 Label type_arguments_instantiated; | 1973 Label type_arguments_instantiated; |
1991 const intptr_t len = type_arguments().Length(); | 1974 const intptr_t len = type_arguments().Length(); |
1992 if (type_arguments().IsRawInstantiatedRaw(len)) { | 1975 if (type_arguments().IsRawInstantiatedRaw(len)) { |
1993 const Immediate& raw_null = | 1976 __ CompareObject(instantiator_reg, Object::Handle()); |
1994 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
1995 __ cmpq(instantiator_reg, raw_null); | |
1996 __ j(EQUAL, &type_arguments_instantiated, Assembler::kNearJump); | 1977 __ j(EQUAL, &type_arguments_instantiated, Assembler::kNearJump); |
1997 } | 1978 } |
1998 // Instantiate non-null type arguments. | 1979 // Instantiate non-null type arguments. |
1999 // A runtime call to instantiate the type arguments is required. | 1980 // A runtime call to instantiate the type arguments is required. |
2000 __ PushObject(Object::ZoneHandle()); // Make room for the result. | 1981 __ PushObject(Object::ZoneHandle()); // Make room for the result. |
2001 __ PushObject(type_arguments()); | 1982 __ PushObject(type_arguments()); |
2002 __ pushq(instantiator_reg); // Push instantiator type arguments. | 1983 __ pushq(instantiator_reg); // Push instantiator type arguments. |
2003 compiler->GenerateCallRuntime(token_pos(), | 1984 compiler->GenerateCallRuntime(token_pos(), |
2004 deopt_id(), | 1985 deopt_id(), |
2005 kInstantiateTypeArgumentsRuntimeEntry, | 1986 kInstantiateTypeArgumentsRuntimeEntry, |
(...skipping 27 matching lines...)
2033 // instantiator_reg is the instantiator type argument vector, i.e. an | 2014 // instantiator_reg is the instantiator type argument vector, i.e. an |
2034 // AbstractTypeArguments object (or null). | 2015 // AbstractTypeArguments object (or null). |
2035 ASSERT(!type_arguments().IsUninstantiatedIdentity() && | 2016 ASSERT(!type_arguments().IsUninstantiatedIdentity() && |
2036 !type_arguments().CanShareInstantiatorTypeArguments( | 2017 !type_arguments().CanShareInstantiatorTypeArguments( |
2037 instantiator_class())); | 2018 instantiator_class())); |
2038 // If the instantiator is null and if the type argument vector | 2019 // If the instantiator is null and if the type argument vector |
2039 // instantiated from null becomes a vector of dynamic, then use null as | 2020 // instantiated from null becomes a vector of dynamic, then use null as |
2040 // the type arguments. | 2021 // the type arguments. |
2041 Label type_arguments_instantiated; | 2022 Label type_arguments_instantiated; |
2042 ASSERT(type_arguments().IsRawInstantiatedRaw(type_arguments().Length())); | 2023 ASSERT(type_arguments().IsRawInstantiatedRaw(type_arguments().Length())); |
2043 const Immediate& raw_null = | 2024 |
2044 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 2025 __ CompareObject(instantiator_reg, Object::Handle()); |
2045 __ cmpq(instantiator_reg, raw_null); | |
2046 __ j(EQUAL, &type_arguments_instantiated, Assembler::kNearJump); | 2026 __ j(EQUAL, &type_arguments_instantiated, Assembler::kNearJump); |
2047 // Instantiate non-null type arguments. | 2027 // Instantiate non-null type arguments. |
2048 // In the non-factory case, we rely on the allocation stub to | 2028 // In the non-factory case, we rely on the allocation stub to |
2049 // instantiate the type arguments. | 2029 // instantiate the type arguments. |
2050 __ LoadObject(result_reg, type_arguments()); | 2030 __ LoadObject(result_reg, type_arguments(), PP); |
2051 // result_reg: uninstantiated type arguments. | 2031 // result_reg: uninstantiated type arguments. |
2052 | 2032 |
2053 __ Bind(&type_arguments_instantiated); | 2033 __ Bind(&type_arguments_instantiated); |
2054 // result_reg: uninstantiated or instantiated type arguments. | 2034 // result_reg: uninstantiated or instantiated type arguments. |
2055 } | 2035 } |
2056 | 2036 |
2057 | 2037 |
2058 LocationSummary* | 2038 LocationSummary* |
2059 ExtractConstructorInstantiatorInstr::MakeLocationSummary() const { | 2039 ExtractConstructorInstantiatorInstr::MakeLocationSummary() const { |
2060 const intptr_t kNumInputs = 1; | 2040 const intptr_t kNumInputs = 1; |
(...skipping 14 matching lines...)
2075 // instantiator_reg is the instantiator AbstractTypeArguments object | 2055 // instantiator_reg is the instantiator AbstractTypeArguments object |
2076 // (or null). | 2056 // (or null). |
2077 ASSERT(!type_arguments().IsUninstantiatedIdentity() && | 2057 ASSERT(!type_arguments().IsUninstantiatedIdentity() && |
2078 !type_arguments().CanShareInstantiatorTypeArguments( | 2058 !type_arguments().CanShareInstantiatorTypeArguments( |
2079 instantiator_class())); | 2059 instantiator_class())); |
2080 | 2060 |
2081 // If the instantiator is null and if the type argument vector | 2061 // If the instantiator is null and if the type argument vector |
2082 // instantiated from null becomes a vector of dynamic, then use null as | 2062 // instantiated from null becomes a vector of dynamic, then use null as |
2083 // the type arguments and do not pass the instantiator. | 2063 // the type arguments and do not pass the instantiator. |
2084 ASSERT(type_arguments().IsRawInstantiatedRaw(type_arguments().Length())); | 2064 ASSERT(type_arguments().IsRawInstantiatedRaw(type_arguments().Length())); |
2085 const Immediate& raw_null = | 2065 |
2086 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
2087 Label instantiator_not_null; | 2066 Label instantiator_not_null; |
2088 __ cmpq(instantiator_reg, raw_null); | 2067 __ CompareObject(instantiator_reg, Object::Handle()); |
2089 __ j(NOT_EQUAL, &instantiator_not_null, Assembler::kNearJump); | 2068 __ j(NOT_EQUAL, &instantiator_not_null, Assembler::kNearJump); |
2090 // Null was used in VisitExtractConstructorTypeArguments as the | 2069 // Null was used in VisitExtractConstructorTypeArguments as the |
2091 // instantiated type arguments, no proper instantiator needed. | 2070 // instantiated type arguments, no proper instantiator needed. |
2092 __ movq(instantiator_reg, | 2071 __ movq(instantiator_reg, |
2093 Immediate(Smi::RawValue(StubCode::kNoInstantiator))); | 2072 Immediate(Smi::RawValue(StubCode::kNoInstantiator))); |
2094 __ Bind(&instantiator_not_null); | 2073 __ Bind(&instantiator_not_null); |
2095 // instantiator_reg: instantiator or kNoInstantiator. | 2074 // instantiator_reg: instantiator or kNoInstantiator. |
2096 } | 2075 } |
2097 | 2076 |
2098 | 2077 |
(...skipping 55 matching lines...)
2154 } | 2133 } |
2155 | 2134 |
2156 | 2135 |
2157 void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2136 void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2158 __ Bind(compiler->GetJumpLabel(this)); | 2137 __ Bind(compiler->GetJumpLabel(this)); |
2159 compiler->AddExceptionHandler(catch_try_index(), | 2138 compiler->AddExceptionHandler(catch_try_index(), |
2160 try_index(), | 2139 try_index(), |
2161 compiler->assembler()->CodeSize(), | 2140 compiler->assembler()->CodeSize(), |
2162 catch_handler_types_, | 2141 catch_handler_types_, |
2163 needs_stacktrace()); | 2142 needs_stacktrace()); |
| 2143 |
| 2144 // Restore the pool pointer. |
| 2145 __ LoadPoolPointer(PP); |
| 2146 |
2164 if (HasParallelMove()) { | 2147 if (HasParallelMove()) { |
2165 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); | 2148 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); |
2166 } | 2149 } |
2167 | 2150 |
2168 // Restore RSP from RBP as we are coming from a throw and the code for | 2151 // Restore RSP from RBP as we are coming from a throw and the code for |
2169 // popping arguments has not been run. | 2152 // popping arguments has not been run. |
2170 const intptr_t fp_sp_dist = | 2153 const intptr_t fp_sp_dist = |
2171 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; | 2154 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; |
2172 ASSERT(fp_sp_dist <= 0); | 2155 ASSERT(fp_sp_dist <= 0); |
2173 __ leaq(RSP, Address(RBP, fp_sp_dist)); | 2156 __ leaq(RSP, Address(RBP, fp_sp_dist)); |
(...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2234 | 2217 |
2235 Register temp = locs()->temp(0).reg(); | 2218 Register temp = locs()->temp(0).reg(); |
2236 // Generate stack overflow check. | 2219 // Generate stack overflow check. |
2237 __ movq(temp, Immediate(Isolate::Current()->stack_limit_address())); | 2220 __ movq(temp, Immediate(Isolate::Current()->stack_limit_address())); |
2238 __ cmpq(RSP, Address(temp, 0)); | 2221 __ cmpq(RSP, Address(temp, 0)); |
2239 __ j(BELOW_EQUAL, slow_path->entry_label()); | 2222 __ j(BELOW_EQUAL, slow_path->entry_label()); |
2240 if (compiler->CanOSRFunction() && in_loop()) { | 2223 if (compiler->CanOSRFunction() && in_loop()) { |
2241 // In unoptimized code check the usage counter to trigger OSR at loop | 2224 // In unoptimized code check the usage counter to trigger OSR at loop |
2242 // stack checks. Use progressively higher thresholds for more deeply | 2225 // stack checks. Use progressively higher thresholds for more deeply |
2243 // nested loops to attempt to hit outer loops with OSR when possible. | 2226 // nested loops to attempt to hit outer loops with OSR when possible. |
2244 __ LoadObject(temp, compiler->parsed_function().function()); | 2227 __ LoadObject(temp, compiler->parsed_function().function(), PP); |
2245 intptr_t threshold = | 2228 intptr_t threshold = |
2246 FLAG_optimization_counter_threshold * (loop_depth() + 1); | 2229 FLAG_optimization_counter_threshold * (loop_depth() + 1); |
2247 __ cmpq(FieldAddress(temp, Function::usage_counter_offset()), | 2230 __ cmpq(FieldAddress(temp, Function::usage_counter_offset()), |
2248 Immediate(threshold)); | 2231 Immediate(threshold)); |
2249 __ j(GREATER_EQUAL, slow_path->entry_label()); | 2232 __ j(GREATER_EQUAL, slow_path->entry_label()); |
2250 } | 2233 } |
2251 __ Bind(slow_path->exit_label()); | 2234 __ Bind(slow_path->exit_label()); |
2252 } | 2235 } |
2253 | 2236 |
2254 | 2237 |
(...skipping 1426 matching lines...)
3681 __ movl(result, Address(RSP, 8)); | 3664 __ movl(result, Address(RSP, 8)); |
3682 break; | 3665 break; |
3683 case MethodRecognizer::kUint32x4GetFlagW: | 3666 case MethodRecognizer::kUint32x4GetFlagW: |
3684 __ movl(result, Address(RSP, 12)); | 3667 __ movl(result, Address(RSP, 12)); |
3685 break; | 3668 break; |
3686 default: UNREACHABLE(); | 3669 default: UNREACHABLE(); |
3687 } | 3670 } |
3688 __ addq(RSP, Immediate(16)); | 3671 __ addq(RSP, Immediate(16)); |
3689 __ testl(result, result); | 3672 __ testl(result, result); |
3690 __ j(NOT_ZERO, &non_zero, Assembler::kNearJump); | 3673 __ j(NOT_ZERO, &non_zero, Assembler::kNearJump); |
3691 __ LoadObject(result, Bool::False()); | 3674 __ LoadObject(result, Bool::False(), PP); |
3692 __ jmp(&done); | 3675 __ jmp(&done); |
3693 __ Bind(&non_zero); | 3676 __ Bind(&non_zero); |
3694 __ LoadObject(result, Bool::True()); | 3677 __ LoadObject(result, Bool::True(), PP); |
3695 __ Bind(&done); | 3678 __ Bind(&done); |
3696 } | 3679 } |
3697 | 3680 |
3698 | 3681 |
3699 LocationSummary* Uint32x4SelectInstr::MakeLocationSummary() const { | 3682 LocationSummary* Uint32x4SelectInstr::MakeLocationSummary() const { |
3700 const intptr_t kNumInputs = 3; | 3683 const intptr_t kNumInputs = 3; |
3701 const intptr_t kNumTemps = 1; | 3684 const intptr_t kNumTemps = 1; |
3702 LocationSummary* summary = | 3685 LocationSummary* summary = |
3703 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); | 3686 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
3704 summary->set_in(0, Location::RequiresFpuRegister()); | 3687 summary->set_in(0, Location::RequiresFpuRegister()); |
(...skipping 493 matching lines...)
4198 // return double.NAN; | 4181 // return double.NAN; |
4199 // } | 4182 // } |
4200 XmmRegister base = locs()->in(0).fpu_reg(); | 4183 XmmRegister base = locs()->in(0).fpu_reg(); |
4201 XmmRegister exp = locs()->in(1).fpu_reg(); | 4184 XmmRegister exp = locs()->in(1).fpu_reg(); |
4202 XmmRegister result = locs()->out().fpu_reg(); | 4185 XmmRegister result = locs()->out().fpu_reg(); |
4203 Register temp = locs()->temp(0).reg(); | 4186 Register temp = locs()->temp(0).reg(); |
4204 XmmRegister zero_temp = locs()->temp(1).fpu_reg(); | 4187 XmmRegister zero_temp = locs()->temp(1).fpu_reg(); |
4205 | 4188 |
4206 Label check_base_is_one; | 4189 Label check_base_is_one; |
4207 // Check if exponent is 0.0 -> return 1.0; | 4190 // Check if exponent is 0.0 -> return 1.0; |
4208 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(0))); | 4191 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(0)), PP); |
4209 __ movsd(zero_temp, FieldAddress(temp, Double::value_offset())); | 4192 __ movsd(zero_temp, FieldAddress(temp, Double::value_offset())); |
4210 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(1))); | 4193 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(1)), PP); |
4211 __ movsd(result, FieldAddress(temp, Double::value_offset())); | 4194 __ movsd(result, FieldAddress(temp, Double::value_offset())); |
4212 // 'result' contains 1.0. | 4195 // 'result' contains 1.0. |
4213 __ comisd(exp, zero_temp); | 4196 __ comisd(exp, zero_temp); |
4214 __ j(PARITY_EVEN, &check_base_is_one, Assembler::kNearJump); // NaN. | 4197 __ j(PARITY_EVEN, &check_base_is_one, Assembler::kNearJump); // NaN. |
4215 __ j(EQUAL, &skip_call, Assembler::kNearJump); // exp is 0, result is 1.0. | 4198 __ j(EQUAL, &skip_call, Assembler::kNearJump); // exp is 0, result is 1.0. |
4216 | 4199 |
4217 Label base_is_nan; | 4200 Label base_is_nan; |
4218 __ Bind(&check_base_is_one); | 4201 __ Bind(&check_base_is_one); |
4219 // Checks if base == 1.0. | 4202 // Checks if base == 1.0. |
4220 __ comisd(base, result); | 4203 __ comisd(base, result); |
(...skipping 77 matching lines...)
4298 summary->AddTemp(Location::RequiresRegister()); | 4281 summary->AddTemp(Location::RequiresRegister()); |
4299 } | 4282 } |
4300 return summary; | 4283 return summary; |
4301 } | 4284 } |
4302 | 4285 |
4303 | 4286 |
4304 void CheckClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4287 void CheckClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4305 if (IsNullCheck()) { | 4288 if (IsNullCheck()) { |
4306 Label* deopt = compiler->AddDeoptStub(deopt_id(), | 4289 Label* deopt = compiler->AddDeoptStub(deopt_id(), |
4307 kDeoptCheckClass); | 4290 kDeoptCheckClass); |
4308 const Immediate& raw_null = | 4291 __ CompareObject(locs()->in(0).reg(), |
4309 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 4292 Object::Handle()); |
4310 __ cmpq(locs()->in(0).reg(), raw_null); | |
4311 __ j(EQUAL, deopt); | 4293 __ j(EQUAL, deopt); |
4312 return; | 4294 return; |
4313 } | 4295 } |
4314 | 4296 |
4315 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) || | 4297 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) || |
4316 (unary_checks().NumberOfChecks() > 1)); | 4298 (unary_checks().NumberOfChecks() > 1)); |
4317 Register value = locs()->in(0).reg(); | 4299 Register value = locs()->in(0).reg(); |
4318 Register temp = locs()->temp(0).reg(); | 4300 Register temp = locs()->temp(0).reg(); |
4319 Label* deopt = compiler->AddDeoptStub(deopt_id(), | 4301 Label* deopt = compiler->AddDeoptStub(deopt_id(), |
4320 kDeoptCheckClass); | 4302 kDeoptCheckClass); |
(...skipping 189 matching lines...)
4510 __ Bind(compiler->GetJumpLabel(this)); | 4492 __ Bind(compiler->GetJumpLabel(this)); |
4511 if (!compiler->is_optimizing()) { | 4493 if (!compiler->is_optimizing()) { |
4512 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, | 4494 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, |
4513 deopt_id_, | 4495 deopt_id_, |
4514 Scanner::kDummyTokenIndex); | 4496 Scanner::kDummyTokenIndex); |
4515 // Add an edge counter. | 4497 // Add an edge counter. |
4516 const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld)); | 4498 const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld)); |
4517 counter.SetAt(0, Smi::Handle(Smi::New(0))); | 4499 counter.SetAt(0, Smi::Handle(Smi::New(0))); |
4518 Label done; | 4500 Label done; |
4519 __ Comment("Edge counter"); | 4501 __ Comment("Edge counter"); |
4520 __ LoadObject(RAX, counter); | 4502 __ LoadObject(RAX, counter, PP); |
4521 __ addq(FieldAddress(RAX, Array::element_offset(0)), | 4503 __ addq(FieldAddress(RAX, Array::element_offset(0)), |
4522 Immediate(Smi::RawValue(1))); | 4504 Immediate(Smi::RawValue(1))); |
4523 __ j(NO_OVERFLOW, &done); | 4505 __ j(NO_OVERFLOW, &done); |
4524 __ movq(FieldAddress(RAX, Array::element_offset(0)), | 4506 __ movq(FieldAddress(RAX, Array::element_offset(0)), |
4525 Immediate(Smi::RawValue(Smi::kMaxValue))); | 4507 Immediate(Smi::RawValue(Smi::kMaxValue))); |
4526 __ Bind(&done); | 4508 __ Bind(&done); |
4527 } | 4509 } |
4528 if (HasParallelMove()) { | 4510 if (HasParallelMove()) { |
4529 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); | 4511 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); |
4530 } | 4512 } |
(...skipping 10 matching lines...)
4541 // Add deoptimization descriptor for deoptimizing instructions that may | 4523 // Add deoptimization descriptor for deoptimizing instructions that may |
4542 // be inserted before this instruction. | 4524 // be inserted before this instruction. |
4543 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, | 4525 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, |
4544 GetDeoptId(), | 4526 GetDeoptId(), |
4545 0); // No token position. | 4527 0); // No token position. |
4546 // Add an edge counter. | 4528 // Add an edge counter. |
4547 const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld)); | 4529 const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld)); |
4548 counter.SetAt(0, Smi::Handle(Smi::New(0))); | 4530 counter.SetAt(0, Smi::Handle(Smi::New(0))); |
4549 Label done; | 4531 Label done; |
4550 __ Comment("Edge counter"); | 4532 __ Comment("Edge counter"); |
4551 __ LoadObject(RAX, counter); | 4533 __ LoadObject(RAX, counter, PP); |
4552 __ addq(FieldAddress(RAX, Array::element_offset(0)), | 4534 __ addq(FieldAddress(RAX, Array::element_offset(0)), |
4553 Immediate(Smi::RawValue(1))); | 4535 Immediate(Smi::RawValue(1))); |
4554 __ j(NO_OVERFLOW, &done); | 4536 __ j(NO_OVERFLOW, &done); |
4555 __ movq(FieldAddress(RAX, Array::element_offset(0)), | 4537 __ movq(FieldAddress(RAX, Array::element_offset(0)), |
4556 Immediate(Smi::RawValue(Smi::kMaxValue))); | 4538 Immediate(Smi::RawValue(Smi::kMaxValue))); |
4557 __ Bind(&done); | 4539 __ Bind(&done); |
4558 } | 4540 } |
4559 if (HasParallelMove()) { | 4541 if (HasParallelMove()) { |
4560 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); | 4542 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); |
4561 } | 4543 } |
(...skipping 62 matching lines...)
4624 // Special code for numbers (compare values instead of references.) | 4606 // Special code for numbers (compare values instead of references.) |
4625 void StrictCompareInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4607 void StrictCompareInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4626 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); | 4608 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); |
4627 Location left = locs()->in(0); | 4609 Location left = locs()->in(0); |
4628 Location right = locs()->in(1); | 4610 Location right = locs()->in(1); |
4629 if (left.IsConstant() && right.IsConstant()) { | 4611 if (left.IsConstant() && right.IsConstant()) { |
4630 // TODO(vegorov): should be eliminated earlier by constant propagation. | 4612 // TODO(vegorov): should be eliminated earlier by constant propagation. |
4631 const bool result = (kind() == Token::kEQ_STRICT) ? | 4613 const bool result = (kind() == Token::kEQ_STRICT) ? |
4632 left.constant().raw() == right.constant().raw() : | 4614 left.constant().raw() == right.constant().raw() : |
4633 left.constant().raw() != right.constant().raw(); | 4615 left.constant().raw() != right.constant().raw(); |
4634 __ LoadObject(locs()->out().reg(), Bool::Get(result)); | 4616 __ LoadObject(locs()->out().reg(), Bool::Get(result), PP); |
4635 return; | 4617 return; |
4636 } | 4618 } |
4637 if (left.IsConstant()) { | 4619 if (left.IsConstant()) { |
4638 compiler->EmitEqualityRegConstCompare(right.reg(), | 4620 compiler->EmitEqualityRegConstCompare(right.reg(), |
4639 left.constant(), | 4621 left.constant(), |
4640 needs_number_check(), | 4622 needs_number_check(), |
4641 token_pos()); | 4623 token_pos()); |
4642 } else if (right.IsConstant()) { | 4624 } else if (right.IsConstant()) { |
4643 compiler->EmitEqualityRegConstCompare(left.reg(), | 4625 compiler->EmitEqualityRegConstCompare(left.reg(), |
4644 right.constant(), | 4626 right.constant(), |
4645 needs_number_check(), | 4627 needs_number_check(), |
4646 token_pos()); | 4628 token_pos()); |
4647 } else { | 4629 } else { |
4648 compiler->EmitEqualityRegRegCompare(left.reg(), | 4630 compiler->EmitEqualityRegRegCompare(left.reg(), |
4649 right.reg(), | 4631 right.reg(), |
4650 needs_number_check(), | 4632 needs_number_check(), |
4651 token_pos()); | 4633 token_pos()); |
4652 } | 4634 } |
4653 | 4635 |
4654 Register result = locs()->out().reg(); | 4636 Register result = locs()->out().reg(); |
4655 Label load_true, done; | 4637 Label load_true, done; |
4656 Condition true_condition = (kind() == Token::kEQ_STRICT) ? EQUAL : NOT_EQUAL; | 4638 Condition true_condition = (kind() == Token::kEQ_STRICT) ? EQUAL : NOT_EQUAL; |
4657 __ j(true_condition, &load_true, Assembler::kNearJump); | 4639 __ j(true_condition, &load_true, Assembler::kNearJump); |
4658 __ LoadObject(result, Bool::False()); | 4640 __ LoadObject(result, Bool::False(), PP); |
4659 __ jmp(&done, Assembler::kNearJump); | 4641 __ jmp(&done, Assembler::kNearJump); |
4660 __ Bind(&load_true); | 4642 __ Bind(&load_true); |
4661 __ LoadObject(result, Bool::True()); | 4643 __ LoadObject(result, Bool::True(), PP); |
4662 __ Bind(&done); | 4644 __ Bind(&done); |
4663 } | 4645 } |
4664 | 4646 |
4665 | 4647 |
4666 void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, | 4648 void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, |
4667 BranchInstr* branch) { | 4649 BranchInstr* branch) { |
4668 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); | 4650 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); |
4669 Location left = locs()->in(0); | 4651 Location left = locs()->in(0); |
4670 Location right = locs()->in(1); | 4652 Location right = locs()->in(1); |
4671 if (left.IsConstant() && right.IsConstant()) { | 4653 if (left.IsConstant() && right.IsConstant()) { |
(...skipping 38 matching lines...)
4710 | 4692 |
4711 | 4693 |
4712 void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4694 void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4713 // The arguments to the stub include the closure, as does the arguments | 4695 // The arguments to the stub include the closure, as does the arguments |
4714 // descriptor. | 4696 // descriptor. |
4715 Register temp_reg = locs()->temp(0).reg(); | 4697 Register temp_reg = locs()->temp(0).reg(); |
4716 int argument_count = ArgumentCount(); | 4698 int argument_count = ArgumentCount(); |
4717 const Array& arguments_descriptor = | 4699 const Array& arguments_descriptor = |
4718 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count, | 4700 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count, |
4719 argument_names())); | 4701 argument_names())); |
4720 __ LoadObject(temp_reg, arguments_descriptor); | 4702 __ LoadObject(temp_reg, arguments_descriptor, PP); |
4721 ASSERT(temp_reg == R10); | 4703 ASSERT(temp_reg == R10); |
4722 compiler->GenerateDartCall(deopt_id(), | 4704 compiler->GenerateDartCall(deopt_id(), |
4723 token_pos(), | 4705 token_pos(), |
4724 &StubCode::CallClosureFunctionLabel(), | 4706 &StubCode::CallClosureFunctionLabel(), |
4725 PcDescriptors::kClosureCall, | 4707 PcDescriptors::kClosureCall, |
4726 locs()); | 4708 locs()); |
4727 __ Drop(argument_count); | 4709 __ Drop(argument_count); |
4728 } | 4710 } |
4729 | 4711 |
4730 | 4712 |
4731 LocationSummary* BooleanNegateInstr::MakeLocationSummary() const { | 4713 LocationSummary* BooleanNegateInstr::MakeLocationSummary() const { |
4732 return LocationSummary::Make(1, | 4714 return LocationSummary::Make(1, |
4733 Location::RequiresRegister(), | 4715 Location::RequiresRegister(), |
4734 LocationSummary::kNoCall); | 4716 LocationSummary::kNoCall); |
4735 } | 4717 } |
4736 | 4718 |
4737 | 4719 |
4738 void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4720 void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4739 Register value = locs()->in(0).reg(); | 4721 Register value = locs()->in(0).reg(); |
4740 Register result = locs()->out().reg(); | 4722 Register result = locs()->out().reg(); |
4741 | 4723 |
4742 Label done; | 4724 Label done; |
4743 __ LoadObject(result, Bool::True()); | 4725 __ LoadObject(result, Bool::True(), PP); |
4744 __ CompareRegisters(result, value); | 4726 __ CompareRegisters(result, value); |
4745 __ j(NOT_EQUAL, &done, Assembler::kNearJump); | 4727 __ j(NOT_EQUAL, &done, Assembler::kNearJump); |
4746 __ LoadObject(result, Bool::False()); | 4728 __ LoadObject(result, Bool::False(), PP); |
4747 __ Bind(&done); | 4729 __ Bind(&done); |
4748 } | 4730 } |
4749 | 4731 |
4750 | 4732 |
4751 LocationSummary* StoreVMFieldInstr::MakeLocationSummary() const { | 4733 LocationSummary* StoreVMFieldInstr::MakeLocationSummary() const { |
4752 const intptr_t kNumInputs = 2; | 4734 const intptr_t kNumInputs = 2; |
4753 const intptr_t kNumTemps = 0; | 4735 const intptr_t kNumTemps = 0; |
4754 LocationSummary* locs = | 4736 LocationSummary* locs = |
4755 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); | 4737 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
4756 locs->set_in(0, value()->NeedsStoreBuffer() ? Location::WritableRegister() | 4738 locs->set_in(0, value()->NeedsStoreBuffer() ? Location::WritableRegister() |
(...skipping 49 matching lines...)
4806 PcDescriptors::kOther, | 4788 PcDescriptors::kOther, |
4807 locs()); | 4789 locs()); |
4808 __ Drop(2); // Discard type arguments and receiver. | 4790 __ Drop(2); // Discard type arguments and receiver. |
4809 } | 4791 } |
4810 | 4792 |
4811 } // namespace dart | 4793 } // namespace dart |
4812 | 4794 |
4813 #undef __ | 4795 #undef __ |
4814 | 4796 |
4815 #endif // defined TARGET_ARCH_X64 | 4797 #endif // defined TARGET_ARCH_X64 |