OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. |
6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
7 | 7 |
8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
9 | 9 |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 78 matching lines...)
89 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; | 89 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; |
90 ASSERT(fp_sp_dist <= 0); | 90 ASSERT(fp_sp_dist <= 0); |
91 __ movq(RDI, RSP); | 91 __ movq(RDI, RSP); |
92 __ subq(RDI, RBP); | 92 __ subq(RDI, RBP); |
93 __ cmpq(RDI, Immediate(fp_sp_dist)); | 93 __ cmpq(RDI, Immediate(fp_sp_dist)); |
94 __ j(EQUAL, &done, Assembler::kNearJump); | 94 __ j(EQUAL, &done, Assembler::kNearJump); |
95 __ int3(); | 95 __ int3(); |
96 __ Bind(&done); | 96 __ Bind(&done); |
97 } | 97 } |
98 #endif | 98 #endif |
99 __ LeaveFrame(); | 99 __ LeaveFrame(true); |
100 __ ret(); | 100 __ ret(); |
101 | 101 |
102 // Generate 8 bytes of NOPs so that the debugger can patch the | 102 // Generate 4 bytes of NOPs so that the debugger can patch the |
103 // return pattern with a call to the debug stub. | 103 // return pattern with a call to the debug stub. |
104 // Note that the nop(8) byte pattern is not recognized by the debugger. | 104 // Note that the nop(8) byte pattern is not recognized by the debugger. |
105 __ nop(1); | 105 __ nop(1); |
106 __ nop(1); | 106 __ nop(1); |
107 __ nop(1); | 107 __ nop(1); |
108 __ nop(1); | 108 __ nop(1); |
109 __ nop(1); | |
110 __ nop(1); | |
111 __ nop(1); | |
112 __ nop(1); | |
113 compiler->AddCurrentDescriptor(PcDescriptors::kReturn, | 109 compiler->AddCurrentDescriptor(PcDescriptors::kReturn, |
114 Isolate::kNoDeoptId, | 110 Isolate::kNoDeoptId, |
115 token_pos()); | 111 token_pos()); |
116 } | 112 } |
117 | 113 |
118 | 114 |
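The return sequence above now calls LeaveFrame(true) and reserves only 4 bytes of single-byte NOPs for the debugger's return-pattern patch. Below is a minimal consolidated sketch of the NEW-side epilogue, built only from calls that appear in this hunk; the reading that the `true` argument also restores the caller's pool pointer is an assumption suggested by the LoadPoolPointer() addition later in this file, not something this hunk states, and the #ifdef DEBUG frame-distance check is omitted.

    // Hypothetical helper sketching the NEW-side ReturnInstr epilogue.
    // The meaning of LeaveFrame(true) is assumed, not confirmed by this diff.
    static void EmitReturnEpilogueSketch(FlowGraphCompiler* compiler,
                                         intptr_t token_pos) {
      Assembler* assembler = compiler->assembler();
      assembler->LeaveFrame(true);  // Tear down the frame (and, presumably, restore PP).
      assembler->ret();
      // Four single-byte NOPs: per the comment above, the debugger does not
      // recognize the multi-byte NOP encoding, so the pad is emitted one
      // nop(1) at a time for it to patch with a call to the debug stub.
      for (intptr_t i = 0; i < 4; i++) {
        assembler->nop(1);
      }
      compiler->AddCurrentDescriptor(PcDescriptors::kReturn,
                                     Isolate::kNoDeoptId,
                                     token_pos);
    }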
119 static Condition NegateCondition(Condition condition) { | 115 static Condition NegateCondition(Condition condition) { |
120 switch (condition) { | 116 switch (condition) { |
121 case EQUAL: return NOT_EQUAL; | 117 case EQUAL: return NOT_EQUAL; |
122 case NOT_EQUAL: return EQUAL; | 118 case NOT_EQUAL: return EQUAL; |
(...skipping 185 matching lines...)
308 return LocationSummary::Make(kNumInputs, | 304 return LocationSummary::Make(kNumInputs, |
309 Location::RequiresRegister(), | 305 Location::RequiresRegister(), |
310 LocationSummary::kNoCall); | 306 LocationSummary::kNoCall); |
311 } | 307 } |
312 | 308 |
313 | 309 |
314 void ConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 310 void ConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
315 // The register allocator drops constant definitions that have no uses. | 311 // The register allocator drops constant definitions that have no uses. |
316 if (!locs()->out().IsInvalid()) { | 312 if (!locs()->out().IsInvalid()) { |
317 Register result = locs()->out().reg(); | 313 Register result = locs()->out().reg(); |
318 __ LoadObject(result, value()); | 314 __ LoadObject(result, value(), Assembler::kNotPatchable); |
319 } | 315 } |
320 } | 316 } |
321 | 317 |
322 | 318 |
323 LocationSummary* AssertAssignableInstr::MakeLocationSummary() const { | 319 LocationSummary* AssertAssignableInstr::MakeLocationSummary() const { |
324 const intptr_t kNumInputs = 3; | 320 const intptr_t kNumInputs = 3; |
325 const intptr_t kNumTemps = 0; | 321 const intptr_t kNumTemps = 0; |
326 LocationSummary* summary = | 322 LocationSummary* summary = |
327 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kCall); | 323 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kCall); |
328 summary->set_in(0, Location::RegisterLocation(RAX)); // Value. | 324 summary->set_in(0, Location::RegisterLocation(RAX)); // Value. |
(...skipping 128 matching lines...)
457 const ICData& original_ic_data) { | 453 const ICData& original_ic_data) { |
458 if (!compiler->is_optimizing()) { | 454 if (!compiler->is_optimizing()) { |
459 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, | 455 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, |
460 deopt_id, | 456 deopt_id, |
461 token_pos); | 457 token_pos); |
462 } | 458 } |
463 const int kNumberOfArguments = 2; | 459 const int kNumberOfArguments = 2; |
464 const Array& kNoArgumentNames = Object::null_array(); | 460 const Array& kNoArgumentNames = Object::null_array(); |
465 const int kNumArgumentsChecked = 2; | 461 const int kNumArgumentsChecked = 2; |
466 | 462 |
467 const Immediate& raw_null = | |
468 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
469 Label check_identity; | 463 Label check_identity; |
470 __ cmpq(Address(RSP, 0 * kWordSize), raw_null); | 464 __ LoadObject(TMP, Object::Handle(Object::null())); |
| 465 __ cmpq(Address(RSP, 0 * kWordSize), TMP); |
471 __ j(EQUAL, &check_identity); | 466 __ j(EQUAL, &check_identity); |
472 __ cmpq(Address(RSP, 1 * kWordSize), raw_null); | 467 __ cmpq(Address(RSP, 1 * kWordSize), TMP); |
473 __ j(EQUAL, &check_identity); | 468 __ j(EQUAL, &check_identity); |
474 | 469 |
475 ICData& equality_ic_data = ICData::ZoneHandle(original_ic_data.raw()); | 470 ICData& equality_ic_data = ICData::ZoneHandle(original_ic_data.raw()); |
476 if (compiler->is_optimizing() && FLAG_propagate_ic_data) { | 471 if (compiler->is_optimizing() && FLAG_propagate_ic_data) { |
477 ASSERT(!original_ic_data.IsNull()); | 472 ASSERT(!original_ic_data.IsNull()); |
478 if (original_ic_data.NumberOfChecks() == 0) { | 473 if (original_ic_data.NumberOfChecks() == 0) { |
479 // IC call for reoptimization populates original ICData. | 474 // IC call for reoptimization populates original ICData. |
480 equality_ic_data = original_ic_data.raw(); | 475 equality_ic_data = original_ic_data.raw(); |
481 } else { | 476 } else { |
482 // Megamorphic call. | 477 // Megamorphic call. |
(...skipping 20 matching lines...)
503 | 498 |
504 __ Bind(&check_identity); | 499 __ Bind(&check_identity); |
505 Label equality_done; | 500 Label equality_done; |
506 if (compiler->is_optimizing()) { | 501 if (compiler->is_optimizing()) { |
507 // No need to update IC data. | 502 // No need to update IC data. |
508 Label is_true; | 503 Label is_true; |
509 __ popq(RAX); | 504 __ popq(RAX); |
510 __ popq(RDX); | 505 __ popq(RDX); |
511 __ cmpq(RAX, RDX); | 506 __ cmpq(RAX, RDX); |
512 __ j(EQUAL, &is_true); | 507 __ j(EQUAL, &is_true); |
513 __ LoadObject(RAX, (kind == Token::kEQ) ? Bool::False() : Bool::True()); | 508 __ LoadObject(RAX, (kind == Token::kEQ) ? Bool::False() : Bool::True(), |
| 509 Assembler::kNotPatchable); |
514 __ jmp(&equality_done); | 510 __ jmp(&equality_done); |
515 __ Bind(&is_true); | 511 __ Bind(&is_true); |
516 __ LoadObject(RAX, (kind == Token::kEQ) ? Bool::True() : Bool::False()); | 512 __ LoadObject(RAX, (kind == Token::kEQ) ? Bool::True() : Bool::False(), |
| 513 Assembler::kNotPatchable); |
517 if (kind == Token::kNE) { | 514 if (kind == Token::kNE) { |
518 // Skip not-equal result conversion. | 515 // Skip not-equal result conversion. |
519 __ jmp(&equality_done); | 516 __ jmp(&equality_done); |
520 } | 517 } |
521 } else { | 518 } else { |
522 // Call stub, load IC data in register. The stub will update ICData if | 519 // Call stub, load IC data in register. The stub will update ICData if |
523 // necessary. | 520 // necessary. |
524 Register ic_data_reg = locs->temp(0).reg(); | 521 Register ic_data_reg = locs->temp(0).reg(); |
525 ASSERT(ic_data_reg == RBX); // Stub depends on it. | 522 ASSERT(ic_data_reg == RBX); // Stub depends on it. |
526 __ LoadObject(ic_data_reg, equality_ic_data); | 523 __ LoadObject(ic_data_reg, equality_ic_data); |
527 compiler->GenerateCall(token_pos, | 524 compiler->GenerateCall(token_pos, |
528 &StubCode::EqualityWithNullArgLabel(), | 525 &StubCode::EqualityWithNullArgLabel(), |
529 PcDescriptors::kRuntimeCall, | 526 PcDescriptors::kRuntimeCall, |
530 locs); | 527 locs); |
531 __ Drop(2); | 528 __ Drop(2); |
532 } | 529 } |
533 __ Bind(&check_ne); | 530 __ Bind(&check_ne); |
534 if (kind == Token::kNE) { | 531 if (kind == Token::kNE) { |
535 Label true_label, done; | 532 Label true_label, done; |
536 // Negate the condition: true label returns false and vice versa. | 533 // Negate the condition: true label returns false and vice versa. |
537 __ CompareObject(RAX, Bool::True()); | 534 __ CompareObject(RAX, Bool::True()); |
538 __ j(EQUAL, &true_label, Assembler::kNearJump); | 535 __ j(EQUAL, &true_label, Assembler::kNearJump); |
539 __ LoadObject(RAX, Bool::True()); | 536 __ LoadObject(RAX, Bool::True(), Assembler::kNotPatchable); |
540 __ jmp(&done, Assembler::kNearJump); | 537 __ jmp(&done, Assembler::kNearJump); |
541 __ Bind(&true_label); | 538 __ Bind(&true_label); |
542 __ LoadObject(RAX, Bool::False()); | 539 __ LoadObject(RAX, Bool::False(), Assembler::kNotPatchable); |
543 __ Bind(&done); | 540 __ Bind(&done); |
544 } | 541 } |
545 __ Bind(&equality_done); | 542 __ Bind(&equality_done); |
546 } | 543 } |
547 | 544 |
548 | 545 |
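Several hunks in this file replace comparisons against a raw Immediate(reinterpret_cast<intptr_t>(Object::null())) with LoadObject/CompareObject calls, and the LoadObject calls gain an explicit Assembler::kNotPatchable argument. A minimal sketch of the two null-check emission patterns follows, using only assembler calls visible in this diff; the claim that CompareObject materializes the object through the pool pointer rather than as an inline heap address is an inference from the change, not stated here.

    // OLD pattern: the current null pointer is baked into the instruction
    // stream as a 64-bit immediate.
    static void CompareWithNullViaImmediate(Assembler* assembler, Register reg) {
      const Immediate& raw_null =
          Immediate(reinterpret_cast<intptr_t>(Object::null()));
      assembler->cmpq(reg, raw_null);
    }

    // NEW pattern: the assembler materializes the object itself (presumably a
    // pool-relative load), so no raw heap address appears in the code.
    static void CompareWithNullViaObject(Assembler* assembler, Register reg) {
      assembler->CompareObject(reg, Object::Handle(Object::null()));
    }

The kNotPatchable hint added to the LoadObject calls reads as the same idea: loads that never need runtime patching can use the compact, non-patchable encoding. That is an inference from the name only.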
549 static void LoadValueCid(FlowGraphCompiler* compiler, | 546 static void LoadValueCid(FlowGraphCompiler* compiler, |
550 Register value_cid_reg, | 547 Register value_cid_reg, |
551 Register value_reg, | 548 Register value_reg, |
552 Label* value_is_smi = NULL) { | 549 Label* value_is_smi = NULL) { |
(...skipping 49 matching lines...)
602 // Object.== is same as ===. | 599 // Object.== is same as ===. |
603 __ Drop(2); | 600 __ Drop(2); |
604 __ cmpq(left, right); | 601 __ cmpq(left, right); |
605 if (branch != NULL) { | 602 if (branch != NULL) { |
606 branch->EmitBranchOnCondition(compiler, cond); | 603 branch->EmitBranchOnCondition(compiler, cond); |
607 } else { | 604 } else { |
608 // This case should be rare. | 605 // This case should be rare. |
609 Register result = locs->out().reg(); | 606 Register result = locs->out().reg(); |
610 Label load_true; | 607 Label load_true; |
611 __ j(cond, &load_true, Assembler::kNearJump); | 608 __ j(cond, &load_true, Assembler::kNearJump); |
612 __ LoadObject(result, Bool::False()); | 609 __ LoadObject(result, Bool::False(), Assembler::kNotPatchable); |
613 __ jmp(&done); | 610 __ jmp(&done); |
614 __ Bind(&load_true); | 611 __ Bind(&load_true); |
615 __ LoadObject(result, Bool::True()); | 612 __ LoadObject(result, Bool::True(), Assembler::kNotPatchable); |
616 } | 613 } |
617 } else { | 614 } else { |
618 const int kNumberOfArguments = 2; | 615 const int kNumberOfArguments = 2; |
619 const Array& kNoArgumentNames = Object::null_array(); | 616 const Array& kNoArgumentNames = Object::null_array(); |
620 compiler->GenerateStaticCall(deopt_id, | 617 compiler->GenerateStaticCall(deopt_id, |
621 token_pos, | 618 token_pos, |
622 target, | 619 target, |
623 kNumberOfArguments, | 620 kNumberOfArguments, |
624 kNoArgumentNames, | 621 kNoArgumentNames, |
625 locs); | 622 locs); |
626 if (branch == NULL) { | 623 if (branch == NULL) { |
627 if (kind == Token::kNE) { | 624 if (kind == Token::kNE) { |
628 Label false_label; | 625 Label false_label; |
629 __ CompareObject(RAX, Bool::True()); | 626 __ CompareObject(RAX, Bool::True()); |
630 __ j(EQUAL, &false_label, Assembler::kNearJump); | 627 __ j(EQUAL, &false_label, Assembler::kNearJump); |
631 __ LoadObject(RAX, Bool::True()); | 628 __ LoadObject(RAX, Bool::True(), Assembler::kNotPatchable); |
632 __ jmp(&done); | 629 __ jmp(&done); |
633 __ Bind(&false_label); | 630 __ Bind(&false_label); |
634 __ LoadObject(RAX, Bool::False()); | 631 __ LoadObject(RAX, Bool::False(), Assembler::kNotPatchable); |
635 } | 632 } |
636 } else { | 633 } else { |
637 if (branch->is_checked()) { | 634 if (branch->is_checked()) { |
638 EmitAssertBoolean(RAX, token_pos, deopt_id, locs, compiler); | 635 EmitAssertBoolean(RAX, token_pos, deopt_id, locs, compiler); |
639 } | 636 } |
640 __ CompareObject(RAX, Bool::True()); | 637 __ CompareObject(RAX, Bool::True()); |
641 branch->EmitBranchOnCondition(compiler, cond); | 638 branch->EmitBranchOnCondition(compiler, cond); |
642 } | 639 } |
643 } | 640 } |
644 if (i < len - 1) { | 641 if (i < len - 1) { |
(...skipping 13 matching lines...)
658 BranchInstr* branch, | 655 BranchInstr* branch, |
659 intptr_t deopt_id) { | 656 intptr_t deopt_id) { |
660 ASSERT((kind == Token::kEQ) || (kind == Token::kNE)); | 657 ASSERT((kind == Token::kEQ) || (kind == Token::kNE)); |
661 Register left = locs.in(0).reg(); | 658 Register left = locs.in(0).reg(); |
662 Register right = locs.in(1).reg(); | 659 Register right = locs.in(1).reg(); |
663 Register temp = locs.temp(0).reg(); | 660 Register temp = locs.temp(0).reg(); |
664 Label* deopt = compiler->AddDeoptStub(deopt_id, kDeoptEquality); | 661 Label* deopt = compiler->AddDeoptStub(deopt_id, kDeoptEquality); |
665 __ testq(left, Immediate(kSmiTagMask)); | 662 __ testq(left, Immediate(kSmiTagMask)); |
666 __ j(ZERO, deopt); | 663 __ j(ZERO, deopt); |
667 // 'left' is not Smi. | 664 // 'left' is not Smi. |
668 const Immediate& raw_null = | 665 |
669 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
670 Label identity_compare; | 666 Label identity_compare; |
671 __ cmpq(right, raw_null); | 667 __ CompareObject(right, Object::Handle(Object::null())); |
672 __ j(EQUAL, &identity_compare); | 668 __ j(EQUAL, &identity_compare); |
673 __ cmpq(left, raw_null); | 669 __ CompareObject(left, Object::Handle(Object::null())); |
674 __ j(EQUAL, &identity_compare); | 670 __ j(EQUAL, &identity_compare); |
675 | 671 |
676 __ LoadClassId(temp, left); | 672 __ LoadClassId(temp, left); |
677 const ICData& ic_data = ICData::Handle(orig_ic_data.AsUnaryClassChecks()); | 673 const ICData& ic_data = ICData::Handle(orig_ic_data.AsUnaryClassChecks()); |
678 const intptr_t len = ic_data.NumberOfChecks(); | 674 const intptr_t len = ic_data.NumberOfChecks(); |
679 for (intptr_t i = 0; i < len; i++) { | 675 for (intptr_t i = 0; i < len; i++) { |
680 __ cmpq(temp, Immediate(ic_data.GetReceiverClassIdAt(i))); | 676 __ cmpq(temp, Immediate(ic_data.GetReceiverClassIdAt(i))); |
681 if (i == (len - 1)) { | 677 if (i == (len - 1)) { |
682 __ j(NOT_EQUAL, deopt); | 678 __ j(NOT_EQUAL, deopt); |
683 } else { | 679 } else { |
684 __ j(EQUAL, &identity_compare); | 680 __ j(EQUAL, &identity_compare); |
685 } | 681 } |
686 } | 682 } |
687 __ Bind(&identity_compare); | 683 __ Bind(&identity_compare); |
688 __ cmpq(left, right); | 684 __ cmpq(left, right); |
689 if (branch == NULL) { | 685 if (branch == NULL) { |
690 Label done, is_equal; | 686 Label done, is_equal; |
691 Register result = locs.out().reg(); | 687 Register result = locs.out().reg(); |
692 __ j(EQUAL, &is_equal, Assembler::kNearJump); | 688 __ j(EQUAL, &is_equal, Assembler::kNearJump); |
693 // Not equal. | 689 // Not equal. |
694 __ LoadObject(result, (kind == Token::kEQ) ? Bool::False() : Bool::True()); | 690 __ LoadObject(result, (kind == Token::kEQ) ? Bool::False() : Bool::True(), |
| 691 Assembler::kNotPatchable); |
695 __ jmp(&done, Assembler::kNearJump); | 692 __ jmp(&done, Assembler::kNearJump); |
696 __ Bind(&is_equal); | 693 __ Bind(&is_equal); |
697 __ LoadObject(result, (kind == Token::kEQ) ? Bool::True() : Bool::False()); | 694 __ LoadObject(result, (kind == Token::kEQ) ? Bool::True() : Bool::False(), |
| 695 Assembler::kNotPatchable); |
698 __ Bind(&done); | 696 __ Bind(&done); |
699 } else { | 697 } else { |
700 Condition cond = TokenKindToSmiCondition(kind); | 698 Condition cond = TokenKindToSmiCondition(kind); |
701 branch->EmitBranchOnCondition(compiler, cond); | 699 branch->EmitBranchOnCondition(compiler, cond); |
702 } | 700 } |
703 } | 701 } |
704 | 702 |
705 | 703 |
706 // First test if receiver is NULL, in which case === is applied. | 704 // First test if receiver is NULL, in which case === is applied. |
707 // If type feedback was provided (lists of <class-id, target>), do a | 705 // If type feedback was provided (lists of <class-id, target>), do a |
708 // type by type check (either === or static call to the operator). | 706 // type by type check (either === or static call to the operator). |
709 static void EmitGenericEqualityCompare(FlowGraphCompiler* compiler, | 707 static void EmitGenericEqualityCompare(FlowGraphCompiler* compiler, |
710 LocationSummary* locs, | 708 LocationSummary* locs, |
711 Token::Kind kind, | 709 Token::Kind kind, |
712 BranchInstr* branch, | 710 BranchInstr* branch, |
713 const ICData& ic_data, | 711 const ICData& ic_data, |
714 intptr_t deopt_id, | 712 intptr_t deopt_id, |
715 intptr_t token_pos) { | 713 intptr_t token_pos) { |
716 ASSERT((kind == Token::kEQ) || (kind == Token::kNE)); | 714 ASSERT((kind == Token::kEQ) || (kind == Token::kNE)); |
717 ASSERT(!ic_data.IsNull() && (ic_data.NumberOfChecks() > 0)); | 715 ASSERT(!ic_data.IsNull() && (ic_data.NumberOfChecks() > 0)); |
718 Register left = locs->in(0).reg(); | 716 Register left = locs->in(0).reg(); |
719 Register right = locs->in(1).reg(); | 717 Register right = locs->in(1).reg(); |
720 const Immediate& raw_null = | 718 |
721 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
722 Label done, identity_compare, non_null_compare; | 719 Label done, identity_compare, non_null_compare; |
723 __ cmpq(right, raw_null); | 720 __ CompareObject(right, Object::Handle(Object::null())); |
724 __ j(EQUAL, &identity_compare, Assembler::kNearJump); | 721 __ j(EQUAL, &identity_compare, Assembler::kNearJump); |
725 __ cmpq(left, raw_null); | 722 __ CompareObject(left, Object::Handle(Object::null())); |
726 __ j(NOT_EQUAL, &non_null_compare, Assembler::kNearJump); | 723 __ j(NOT_EQUAL, &non_null_compare, Assembler::kNearJump); |
727 // Comparison with NULL is "===". | 724 // Comparison with NULL is "===". |
728 __ Bind(&identity_compare); | 725 __ Bind(&identity_compare); |
729 __ cmpq(left, right); | 726 __ cmpq(left, right); |
730 Condition cond = TokenKindToSmiCondition(kind); | 727 Condition cond = TokenKindToSmiCondition(kind); |
731 if (branch != NULL) { | 728 if (branch != NULL) { |
732 branch->EmitBranchOnCondition(compiler, cond); | 729 branch->EmitBranchOnCondition(compiler, cond); |
733 } else { | 730 } else { |
734 Register result = locs->out().reg(); | 731 Register result = locs->out().reg(); |
735 Label load_true; | 732 Label load_true; |
736 __ j(cond, &load_true, Assembler::kNearJump); | 733 __ j(cond, &load_true, Assembler::kNearJump); |
737 __ LoadObject(result, Bool::False()); | 734 __ LoadObject(result, Bool::False(), Assembler::kNotPatchable); |
738 __ jmp(&done); | 735 __ jmp(&done); |
739 __ Bind(&load_true); | 736 __ Bind(&load_true); |
740 __ LoadObject(result, Bool::True()); | 737 __ LoadObject(result, Bool::True(), Assembler::kNotPatchable); |
741 } | 738 } |
742 __ jmp(&done); | 739 __ jmp(&done); |
743 __ Bind(&non_null_compare); // Receiver is not null. | 740 __ Bind(&non_null_compare); // Receiver is not null. |
744 __ pushq(left); | 741 __ pushq(left); |
745 __ pushq(right); | 742 __ pushq(right); |
746 EmitEqualityAsPolymorphicCall(compiler, ic_data, locs, branch, kind, | 743 EmitEqualityAsPolymorphicCall(compiler, ic_data, locs, branch, kind, |
747 deopt_id, token_pos); | 744 deopt_id, token_pos); |
748 __ Bind(&done); | 745 __ Bind(&done); |
749 } | 746 } |
750 | 747 |
(...skipping 37 matching lines...)
788 } else { | 785 } else { |
789 __ cmpq(left.reg(), right.reg()); | 786 __ cmpq(left.reg(), right.reg()); |
790 } | 787 } |
791 | 788 |
792 if (branch != NULL) { | 789 if (branch != NULL) { |
793 branch->EmitBranchOnCondition(compiler, true_condition); | 790 branch->EmitBranchOnCondition(compiler, true_condition); |
794 } else { | 791 } else { |
795 Register result = locs.out().reg(); | 792 Register result = locs.out().reg(); |
796 Label done, is_true; | 793 Label done, is_true; |
797 __ j(true_condition, &is_true); | 794 __ j(true_condition, &is_true); |
798 __ LoadObject(result, Bool::False()); | 795 __ LoadObject(result, Bool::False(), Assembler::kNotPatchable); |
799 __ jmp(&done); | 796 __ jmp(&done); |
800 __ Bind(&is_true); | 797 __ Bind(&is_true); |
801 __ LoadObject(result, Bool::True()); | 798 __ LoadObject(result, Bool::True(), Assembler::kNotPatchable); |
802 __ Bind(&done); | 799 __ Bind(&done); |
803 } | 800 } |
804 } | 801 } |
805 | 802 |
806 | 803 |
807 static Condition TokenKindToDoubleCondition(Token::Kind kind) { | 804 static Condition TokenKindToDoubleCondition(Token::Kind kind) { |
808 switch (kind) { | 805 switch (kind) { |
809 case Token::kEQ: return EQUAL; | 806 case Token::kEQ: return EQUAL; |
810 case Token::kNE: return NOT_EQUAL; | 807 case Token::kNE: return NOT_EQUAL; |
811 case Token::kLT: return BELOW; | 808 case Token::kLT: return BELOW; |
(...skipping 782 matching lines...)
1594 if (!compiler->is_optimizing() || (field_cid == kIllegalCid)) { | 1591 if (!compiler->is_optimizing() || (field_cid == kIllegalCid)) { |
1595 if (!compiler->is_optimizing() && (field_reg == kNoRegister)) { | 1592 if (!compiler->is_optimizing() && (field_reg == kNoRegister)) { |
1596 // Currently we can't have different location summaries for optimized | 1593 // Currently we can't have different location summaries for optimized |
1597 // and non-optimized code. So instead we manually pick up a register | 1594 // and non-optimized code. So instead we manually pick up a register |
1598 // that is known to be free because we know how non-optimizing compiler | 1595 // that is known to be free because we know how non-optimizing compiler |
1599 // allocates registers. | 1596 // allocates registers. |
1600 field_reg = RBX; | 1597 field_reg = RBX; |
1601 ASSERT((field_reg != value_reg) && (field_reg != value_cid_reg)); | 1598 ASSERT((field_reg != value_reg) && (field_reg != value_cid_reg)); |
1602 } | 1599 } |
1603 | 1600 |
1604 __ LoadObject(field_reg, Field::ZoneHandle(field().raw())); | 1601 __ LoadObject(field_reg, Field::ZoneHandle(field().raw()), |
| 1602 Assembler::kNotPatchable); |
1605 | 1603 |
1606 FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset()); | 1604 FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset()); |
1607 FieldAddress field_nullability_operand( | 1605 FieldAddress field_nullability_operand( |
1608 field_reg, Field::is_nullable_offset()); | 1606 field_reg, Field::is_nullable_offset()); |
1609 FieldAddress field_length_operand( | 1607 FieldAddress field_length_operand( |
1610 field_reg, Field::guarded_list_length_offset()); | 1608 field_reg, Field::guarded_list_length_offset()); |
1611 | 1609 |
1612 if (value_cid == kDynamicCid) { | 1610 if (value_cid == kDynamicCid) { |
1613 if (value_cid_reg == kNoRegister) { | 1611 if (value_cid_reg == kNoRegister) { |
1614 ASSERT(!compiler->is_optimizing()); | 1612 ASSERT(!compiler->is_optimizing()); |
(...skipping 125 matching lines...)
1740 } else { | 1738 } else { |
1741 __ movq(field_length_operand, Immediate(Field::kNoFixedLength)); | 1739 __ movq(field_length_operand, Immediate(Field::kNoFixedLength)); |
1742 } | 1740 } |
1743 } | 1741 } |
1744 | 1742 |
1745 if (!ok_is_fall_through) { | 1743 if (!ok_is_fall_through) { |
1746 __ jmp(&ok); | 1744 __ jmp(&ok); |
1747 } | 1745 } |
1748 } else { | 1746 } else { |
1749 if (field_reg != kNoRegister) { | 1747 if (field_reg != kNoRegister) { |
1750 __ LoadObject(field_reg, Field::ZoneHandle(field().raw())); | 1748 __ LoadObject(field_reg, Field::ZoneHandle(field().raw()), |
| 1749 Assembler::kNotPatchable); |
1751 } | 1750 } |
1752 | 1751 |
1753 if (value_cid == kDynamicCid) { | 1752 if (value_cid == kDynamicCid) { |
1754 // Field's guarded class id is fixed but value's class id is not known. | 1753 // Field's guarded class id is fixed but value's class id is not known. |
1755 __ testq(value_reg, Immediate(kSmiTagMask)); | 1754 __ testq(value_reg, Immediate(kSmiTagMask)); |
1756 | 1755 |
1757 if (field_cid != kSmiCid) { | 1756 if (field_cid != kSmiCid) { |
1758 __ j(ZERO, fail); | 1757 __ j(ZERO, fail); |
1759 __ LoadClassId(value_cid_reg, value_reg); | 1758 __ LoadClassId(value_cid_reg, value_reg); |
1760 __ cmpq(value_cid_reg, Immediate(field_cid)); | 1759 __ cmpq(value_cid_reg, Immediate(field_cid)); |
(...skipping 15 matching lines...)
1776 } else if (RawObject::IsTypedDataClassId(field_cid)) { | 1775 } else if (RawObject::IsTypedDataClassId(field_cid)) { |
1777 // Destroy value_cid_reg (safe because we are finished with it). | 1776 // Destroy value_cid_reg (safe because we are finished with it). |
1778 __ movq(value_cid_reg, | 1777 __ movq(value_cid_reg, |
1779 FieldAddress(value_reg, TypedData::length_offset())); | 1778 FieldAddress(value_reg, TypedData::length_offset())); |
1780 } | 1779 } |
1781 __ cmpq(value_cid_reg, field_length_operand); | 1780 __ cmpq(value_cid_reg, field_length_operand); |
1782 } | 1781 } |
1783 | 1782 |
1784 if (field().is_nullable() && (field_cid != kNullCid)) { | 1783 if (field().is_nullable() && (field_cid != kNullCid)) { |
1785 __ j(EQUAL, &ok); | 1784 __ j(EQUAL, &ok); |
1786 const Immediate& raw_null = | 1785 __ CompareObject(value_reg, Object::Handle(Object::null())); |
1787 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
1788 __ cmpq(value_reg, raw_null); | |
1789 } | 1786 } |
1790 | 1787 |
1791 if (ok_is_fall_through) { | 1788 if (ok_is_fall_through) { |
1792 __ j(NOT_EQUAL, fail); | 1789 __ j(NOT_EQUAL, fail); |
1793 } else { | 1790 } else { |
1794 __ j(EQUAL, &ok); | 1791 __ j(EQUAL, &ok); |
1795 } | 1792 } |
1796 } else { | 1793 } else { |
1797 // Both value's and field's class id is known. | 1794 // Both value's and field's class id is known. |
1798 if ((value_cid != field_cid) && (value_cid != nullability)) { | 1795 if ((value_cid != field_cid) && (value_cid != nullability)) { |
(...skipping 104 matching lines...)
1903 : Location::RequiresRegister()); | 1900 : Location::RequiresRegister()); |
1904 locs->set_temp(0, Location::RequiresRegister()); | 1901 locs->set_temp(0, Location::RequiresRegister()); |
1905 return locs; | 1902 return locs; |
1906 } | 1903 } |
1907 | 1904 |
1908 | 1905 |
1909 void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1906 void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1910 Register value = locs()->in(0).reg(); | 1907 Register value = locs()->in(0).reg(); |
1911 Register temp = locs()->temp(0).reg(); | 1908 Register temp = locs()->temp(0).reg(); |
1912 | 1909 |
1913 __ LoadObject(temp, field()); | 1910 __ LoadObject(temp, field(), Assembler::kNotPatchable); |
1914 if (this->value()->NeedsStoreBuffer()) { | 1911 if (this->value()->NeedsStoreBuffer()) { |
1915 __ StoreIntoObject(temp, | 1912 __ StoreIntoObject(temp, |
1916 FieldAddress(temp, Field::value_offset()), value, CanValueBeSmi()); | 1913 FieldAddress(temp, Field::value_offset()), value, CanValueBeSmi()); |
1917 } else { | 1914 } else { |
1918 __ StoreIntoObjectNoBarrier( | 1915 __ StoreIntoObjectNoBarrier( |
1919 temp, FieldAddress(temp, Field::value_offset()), value); | 1916 temp, FieldAddress(temp, Field::value_offset()), value); |
1920 } | 1917 } |
1921 } | 1918 } |
1922 | 1919 |
1923 | 1920 |
(...skipping 131 matching lines...)
2055 // (or null). | 2052 // (or null). |
2056 ASSERT(!type_arguments().IsUninstantiatedIdentity() && | 2053 ASSERT(!type_arguments().IsUninstantiatedIdentity() && |
2057 !type_arguments().CanShareInstantiatorTypeArguments( | 2054 !type_arguments().CanShareInstantiatorTypeArguments( |
2058 instantiator_class())); | 2055 instantiator_class())); |
2059 // If the instantiator is null and if the type argument vector | 2056 // If the instantiator is null and if the type argument vector |
2060 // instantiated from null becomes a vector of dynamic, then use null as | 2057 // instantiated from null becomes a vector of dynamic, then use null as |
2061 // the type arguments. | 2058 // the type arguments. |
2062 Label type_arguments_instantiated; | 2059 Label type_arguments_instantiated; |
2063 const intptr_t len = type_arguments().Length(); | 2060 const intptr_t len = type_arguments().Length(); |
2064 if (type_arguments().IsRawInstantiatedRaw(len)) { | 2061 if (type_arguments().IsRawInstantiatedRaw(len)) { |
2065 const Immediate& raw_null = | 2062 __ CompareObject(instantiator_reg, Object::Handle(Object::null())); |
2066 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
2067 __ cmpq(instantiator_reg, raw_null); | |
2068 __ j(EQUAL, &type_arguments_instantiated, Assembler::kNearJump); | 2063 __ j(EQUAL, &type_arguments_instantiated, Assembler::kNearJump); |
2069 } | 2064 } |
2070 // Instantiate non-null type arguments. | 2065 // Instantiate non-null type arguments. |
2071 // A runtime call to instantiate the type arguments is required. | 2066 // A runtime call to instantiate the type arguments is required. |
2072 __ PushObject(Object::ZoneHandle()); // Make room for the result. | 2067 __ PushObject(Object::ZoneHandle()); // Make room for the result. |
2073 __ PushObject(type_arguments()); | 2068 __ PushObject(type_arguments()); |
2074 __ pushq(instantiator_reg); // Push instantiator type arguments. | 2069 __ pushq(instantiator_reg); // Push instantiator type arguments. |
2075 compiler->GenerateCallRuntime(token_pos(), | 2070 compiler->GenerateCallRuntime(token_pos(), |
2076 deopt_id(), | 2071 deopt_id(), |
2077 kInstantiateTypeArgumentsRuntimeEntry, | 2072 kInstantiateTypeArgumentsRuntimeEntry, |
(...skipping 26 matching lines...)
2104 // instantiator_reg is the instantiator type argument vector, i.e. an | 2099 // instantiator_reg is the instantiator type argument vector, i.e. an |
2105 // AbstractTypeArguments object (or null). | 2100 // AbstractTypeArguments object (or null). |
2106 ASSERT(!type_arguments().IsUninstantiatedIdentity() && | 2101 ASSERT(!type_arguments().IsUninstantiatedIdentity() && |
2107 !type_arguments().CanShareInstantiatorTypeArguments( | 2102 !type_arguments().CanShareInstantiatorTypeArguments( |
2108 instantiator_class())); | 2103 instantiator_class())); |
2109 // If the instantiator is null and if the type argument vector | 2104 // If the instantiator is null and if the type argument vector |
2110 // instantiated from null becomes a vector of dynamic, then use null as | 2105 // instantiated from null becomes a vector of dynamic, then use null as |
2111 // the type arguments. | 2106 // the type arguments. |
2112 Label type_arguments_instantiated; | 2107 Label type_arguments_instantiated; |
2113 ASSERT(type_arguments().IsRawInstantiatedRaw(type_arguments().Length())); | 2108 ASSERT(type_arguments().IsRawInstantiatedRaw(type_arguments().Length())); |
2114 const Immediate& raw_null = | 2109 |
2115 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 2110 __ CompareObject(instantiator_reg, Object::Handle(Object::null())); |
2116 __ cmpq(instantiator_reg, raw_null); | |
2117 __ j(EQUAL, &type_arguments_instantiated, Assembler::kNearJump); | 2111 __ j(EQUAL, &type_arguments_instantiated, Assembler::kNearJump); |
2118 // Instantiate non-null type arguments. | 2112 // Instantiate non-null type arguments. |
2119 // In the non-factory case, we rely on the allocation stub to | 2113 // In the non-factory case, we rely on the allocation stub to |
2120 // instantiate the type arguments. | 2114 // instantiate the type arguments. |
2121 __ LoadObject(result_reg, type_arguments()); | 2115 __ LoadObject(result_reg, type_arguments(), Assembler::kNotPatchable); |
2122 // result_reg: uninstantiated type arguments. | 2116 // result_reg: uninstantiated type arguments. |
2123 | 2117 |
2124 __ Bind(&type_arguments_instantiated); | 2118 __ Bind(&type_arguments_instantiated); |
2125 // result_reg: uninstantiated or instantiated type arguments. | 2119 // result_reg: uninstantiated or instantiated type arguments. |
2126 } | 2120 } |
2127 | 2121 |
2128 | 2122 |
2129 LocationSummary* | 2123 LocationSummary* |
2130 ExtractConstructorInstantiatorInstr::MakeLocationSummary() const { | 2124 ExtractConstructorInstantiatorInstr::MakeLocationSummary() const { |
2131 const intptr_t kNumInputs = 1; | 2125 const intptr_t kNumInputs = 1; |
(...skipping 14 matching lines...)
2146 // instantiator_reg is the instantiator AbstractTypeArguments object | 2140 // instantiator_reg is the instantiator AbstractTypeArguments object |
2147 // (or null). | 2141 // (or null). |
2148 ASSERT(!type_arguments().IsUninstantiatedIdentity() && | 2142 ASSERT(!type_arguments().IsUninstantiatedIdentity() && |
2149 !type_arguments().CanShareInstantiatorTypeArguments( | 2143 !type_arguments().CanShareInstantiatorTypeArguments( |
2150 instantiator_class())); | 2144 instantiator_class())); |
2151 | 2145 |
2152 // If the instantiator is null and if the type argument vector | 2146 // If the instantiator is null and if the type argument vector |
2153 // instantiated from null becomes a vector of dynamic, then use null as | 2147 // instantiated from null becomes a vector of dynamic, then use null as |
2154 // the type arguments and do not pass the instantiator. | 2148 // the type arguments and do not pass the instantiator. |
2155 ASSERT(type_arguments().IsRawInstantiatedRaw(type_arguments().Length())); | 2149 ASSERT(type_arguments().IsRawInstantiatedRaw(type_arguments().Length())); |
2156 const Immediate& raw_null = | 2150 |
2157 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
2158 Label instantiator_not_null; | 2151 Label instantiator_not_null; |
2159 __ cmpq(instantiator_reg, raw_null); | 2152 __ CompareObject(instantiator_reg, Object::Handle(Object::null())); |
2160 __ j(NOT_EQUAL, &instantiator_not_null, Assembler::kNearJump); | 2153 __ j(NOT_EQUAL, &instantiator_not_null, Assembler::kNearJump); |
2161 // Null was used in VisitExtractConstructorTypeArguments as the | 2154 // Null was used in VisitExtractConstructorTypeArguments as the |
2162 // instantiated type arguments, no proper instantiator needed. | 2155 // instantiated type arguments, no proper instantiator needed. |
2163 __ movq(instantiator_reg, | 2156 __ movq(instantiator_reg, |
2164 Immediate(Smi::RawValue(StubCode::kNoInstantiator))); | 2157 Immediate(Smi::RawValue(StubCode::kNoInstantiator))); |
2165 __ Bind(&instantiator_not_null); | 2158 __ Bind(&instantiator_not_null); |
2166 // instantiator_reg: instantiator or kNoInstantiator. | 2159 // instantiator_reg: instantiator or kNoInstantiator. |
2167 } | 2160 } |
2168 | 2161 |
2169 | 2162 |
(...skipping 53 matching lines...)
2223 return NULL; | 2216 return NULL; |
2224 } | 2217 } |
2225 | 2218 |
2226 | 2219 |
2227 void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2220 void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2228 __ Bind(compiler->GetJumpLabel(this)); | 2221 __ Bind(compiler->GetJumpLabel(this)); |
2229 compiler->AddExceptionHandler(catch_try_index(), | 2222 compiler->AddExceptionHandler(catch_try_index(), |
2230 try_index(), | 2223 try_index(), |
2231 compiler->assembler()->CodeSize(), | 2224 compiler->assembler()->CodeSize(), |
2232 catch_handler_types_); | 2225 catch_handler_types_); |
| 2226 |
| 2227 // Restore the pool pointer. |
| 2228 __ LoadPoolPointer(); |
| 2229 |
2233 if (HasParallelMove()) { | 2230 if (HasParallelMove()) { |
2234 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); | 2231 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); |
2235 } | 2232 } |
2236 | 2233 |
2237 // Restore RSP from RBP as we are coming from a throw and the code for | 2234 // Restore RSP from RBP as we are coming from a throw and the code for |
2238 // popping arguments has not been run. | 2235 // popping arguments has not been run. |
2239 const intptr_t fp_sp_dist = | 2236 const intptr_t fp_sp_dist = |
2240 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; | 2237 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; |
2241 ASSERT(fp_sp_dist <= 0); | 2238 ASSERT(fp_sp_dist <= 0); |
2242 __ leaq(RSP, Address(RBP, fp_sp_dist)); | 2239 __ leaq(RSP, Address(RBP, fp_sp_dist)); |
(...skipping 59 matching lines...)
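The CatchBlockEntryInstr hunk above adds a LoadPoolPointer() call right after the exception handler is registered. The ordering matters: control reaches a catch entry directly from the throw path, so the function's own prologue has not re-established register state here, and (by the same logic as the RSP-from-RBP restore in that hunk) any pool-relative LoadObject emitted later in the handler would otherwise go through a stale pool pointer. A brief sketch of the intended ordering, assuming the same Assembler interface used in the hunk:

    // Hypothetical sketch of the catch-entry ordering after this change.
    static void CatchEntryOrderSketch(FlowGraphCompiler* compiler) {
      Assembler* assembler = compiler->assembler();
      // 1. The handler is registered at the current code offset
      //    (AddExceptionHandler, as in the hunk above).
      // 2. The pool pointer is reloaded before any pool-relative access.
      assembler->LoadPoolPointer();
      // 3. Only then do the parallel moves run and RSP get restored from RBP,
      //    since the throw path skipped the normal argument-popping epilogue.
    }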
2302 | 2299 |
2303 Register temp = locs()->temp(0).reg(); | 2300 Register temp = locs()->temp(0).reg(); |
2304 // Generate stack overflow check. | 2301 // Generate stack overflow check. |
2305 __ movq(temp, Immediate(Isolate::Current()->stack_limit_address())); | 2302 __ movq(temp, Immediate(Isolate::Current()->stack_limit_address())); |
2306 __ cmpq(RSP, Address(temp, 0)); | 2303 __ cmpq(RSP, Address(temp, 0)); |
2307 __ j(BELOW_EQUAL, slow_path->entry_label()); | 2304 __ j(BELOW_EQUAL, slow_path->entry_label()); |
2308 if (compiler->CanOSRFunction() && in_loop()) { | 2305 if (compiler->CanOSRFunction() && in_loop()) { |
2309 // In unoptimized code check the usage counter to trigger OSR at loop | 2306 // In unoptimized code check the usage counter to trigger OSR at loop |
2310 // stack checks. Use progressively higher thresholds for more deeply | 2307 // stack checks. Use progressively higher thresholds for more deeply |
2311 // nested loops to attempt to hit outer loops with OSR when possible. | 2308 // nested loops to attempt to hit outer loops with OSR when possible. |
2312 __ LoadObject(temp, compiler->parsed_function().function()); | 2309 __ LoadObject(temp, compiler->parsed_function().function(), |
| 2310 Assembler::kNotPatchable); |
2313 intptr_t threshold = | 2311 intptr_t threshold = |
2314 FLAG_optimization_counter_threshold * (loop_depth() + 1); | 2312 FLAG_optimization_counter_threshold * (loop_depth() + 1); |
2315 __ cmpq(FieldAddress(temp, Function::usage_counter_offset()), | 2313 __ cmpq(FieldAddress(temp, Function::usage_counter_offset()), |
2316 Immediate(threshold)); | 2314 Immediate(threshold)); |
2317 __ j(GREATER_EQUAL, slow_path->entry_label()); | 2315 __ j(GREATER_EQUAL, slow_path->entry_label()); |
2318 } | 2316 } |
2319 __ Bind(slow_path->exit_label()); | 2317 __ Bind(slow_path->exit_label()); |
2320 } | 2318 } |
2321 | 2319 |
2322 | 2320 |
(...skipping 1426 matching lines...)
3749 __ movl(result, Address(RSP, 8)); | 3747 __ movl(result, Address(RSP, 8)); |
3750 break; | 3748 break; |
3751 case MethodRecognizer::kUint32x4GetFlagW: | 3749 case MethodRecognizer::kUint32x4GetFlagW: |
3752 __ movl(result, Address(RSP, 12)); | 3750 __ movl(result, Address(RSP, 12)); |
3753 break; | 3751 break; |
3754 default: UNREACHABLE(); | 3752 default: UNREACHABLE(); |
3755 } | 3753 } |
3756 __ addq(RSP, Immediate(16)); | 3754 __ addq(RSP, Immediate(16)); |
3757 __ testl(result, result); | 3755 __ testl(result, result); |
3758 __ j(NOT_ZERO, &non_zero, Assembler::kNearJump); | 3756 __ j(NOT_ZERO, &non_zero, Assembler::kNearJump); |
3759 __ LoadObject(result, Bool::False()); | 3757 __ LoadObject(result, Bool::False(), Assembler::kNotPatchable); |
3760 __ jmp(&done); | 3758 __ jmp(&done); |
3761 __ Bind(&non_zero); | 3759 __ Bind(&non_zero); |
3762 __ LoadObject(result, Bool::True()); | 3760 __ LoadObject(result, Bool::True(), Assembler::kNotPatchable); |
3763 __ Bind(&done); | 3761 __ Bind(&done); |
3764 } | 3762 } |
3765 | 3763 |
3766 | 3764 |
3767 LocationSummary* Uint32x4SelectInstr::MakeLocationSummary() const { | 3765 LocationSummary* Uint32x4SelectInstr::MakeLocationSummary() const { |
3768 const intptr_t kNumInputs = 3; | 3766 const intptr_t kNumInputs = 3; |
3769 const intptr_t kNumTemps = 1; | 3767 const intptr_t kNumTemps = 1; |
3770 LocationSummary* summary = | 3768 LocationSummary* summary = |
3771 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); | 3769 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
3772 summary->set_in(0, Location::RequiresFpuRegister()); | 3770 summary->set_in(0, Location::RequiresFpuRegister()); |
(...skipping 593 matching lines...)
4366 summary->AddTemp(Location::RequiresRegister()); | 4364 summary->AddTemp(Location::RequiresRegister()); |
4367 } | 4365 } |
4368 return summary; | 4366 return summary; |
4369 } | 4367 } |
4370 | 4368 |
4371 | 4369 |
4372 void CheckClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4370 void CheckClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4373 if (IsNullCheck()) { | 4371 if (IsNullCheck()) { |
4374 Label* deopt = compiler->AddDeoptStub(deopt_id(), | 4372 Label* deopt = compiler->AddDeoptStub(deopt_id(), |
4375 kDeoptCheckClass); | 4373 kDeoptCheckClass); |
4376 const Immediate& raw_null = | 4374 __ CompareObject(locs()->in(0).reg(), |
4377 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 4375 Object::Handle(Object::null())); |
4378 __ cmpq(locs()->in(0).reg(), raw_null); | |
4379 __ j(EQUAL, deopt); | 4376 __ j(EQUAL, deopt); |
4380 return; | 4377 return; |
4381 } | 4378 } |
4382 | 4379 |
4383 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) || | 4380 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) || |
4384 (unary_checks().NumberOfChecks() > 1)); | 4381 (unary_checks().NumberOfChecks() > 1)); |
4385 Register value = locs()->in(0).reg(); | 4382 Register value = locs()->in(0).reg(); |
4386 Register temp = locs()->temp(0).reg(); | 4383 Register temp = locs()->temp(0).reg(); |
4387 Label* deopt = compiler->AddDeoptStub(deopt_id(), | 4384 Label* deopt = compiler->AddDeoptStub(deopt_id(), |
4388 kDeoptCheckClass); | 4385 kDeoptCheckClass); |
(...skipping 256 matching lines...)
4645 // Special code for numbers (compare values instead of references.) | 4642 // Special code for numbers (compare values instead of references.) |
4646 void StrictCompareInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4643 void StrictCompareInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4647 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); | 4644 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); |
4648 Location left = locs()->in(0); | 4645 Location left = locs()->in(0); |
4649 Location right = locs()->in(1); | 4646 Location right = locs()->in(1); |
4650 if (left.IsConstant() && right.IsConstant()) { | 4647 if (left.IsConstant() && right.IsConstant()) { |
4651 // TODO(vegorov): should be eliminated earlier by constant propagation. | 4648 // TODO(vegorov): should be eliminated earlier by constant propagation. |
4652 const bool result = (kind() == Token::kEQ_STRICT) ? | 4649 const bool result = (kind() == Token::kEQ_STRICT) ? |
4653 left.constant().raw() == right.constant().raw() : | 4650 left.constant().raw() == right.constant().raw() : |
4654 left.constant().raw() != right.constant().raw(); | 4651 left.constant().raw() != right.constant().raw(); |
4655 __ LoadObject(locs()->out().reg(), result ? Bool::True() : Bool::False()); | 4652 __ LoadObject(locs()->out().reg(), result ? Bool::True() : Bool::False(), |
| 4653 Assembler::kNotPatchable); |
4656 return; | 4654 return; |
4657 } | 4655 } |
4658 if (left.IsConstant()) { | 4656 if (left.IsConstant()) { |
4659 compiler->EmitEqualityRegConstCompare(right.reg(), | 4657 compiler->EmitEqualityRegConstCompare(right.reg(), |
4660 left.constant(), | 4658 left.constant(), |
4661 needs_number_check(), | 4659 needs_number_check(), |
4662 token_pos()); | 4660 token_pos()); |
4663 } else if (right.IsConstant()) { | 4661 } else if (right.IsConstant()) { |
4664 compiler->EmitEqualityRegConstCompare(left.reg(), | 4662 compiler->EmitEqualityRegConstCompare(left.reg(), |
4665 right.constant(), | 4663 right.constant(), |
4666 needs_number_check(), | 4664 needs_number_check(), |
4667 token_pos()); | 4665 token_pos()); |
4668 } else { | 4666 } else { |
4669 compiler->EmitEqualityRegRegCompare(left.reg(), | 4667 compiler->EmitEqualityRegRegCompare(left.reg(), |
4670 right.reg(), | 4668 right.reg(), |
4671 needs_number_check(), | 4669 needs_number_check(), |
4672 token_pos()); | 4670 token_pos()); |
4673 } | 4671 } |
4674 | 4672 |
4675 Register result = locs()->out().reg(); | 4673 Register result = locs()->out().reg(); |
4676 Label load_true, done; | 4674 Label load_true, done; |
4677 Condition true_condition = (kind() == Token::kEQ_STRICT) ? EQUAL : NOT_EQUAL; | 4675 Condition true_condition = (kind() == Token::kEQ_STRICT) ? EQUAL : NOT_EQUAL; |
4678 __ j(true_condition, &load_true, Assembler::kNearJump); | 4676 __ j(true_condition, &load_true, Assembler::kNearJump); |
4679 __ LoadObject(result, Bool::False()); | 4677 __ LoadObject(result, Bool::False(), Assembler::kNotPatchable); |
4680 __ jmp(&done, Assembler::kNearJump); | 4678 __ jmp(&done, Assembler::kNearJump); |
4681 __ Bind(&load_true); | 4679 __ Bind(&load_true); |
4682 __ LoadObject(result, Bool::True()); | 4680 __ LoadObject(result, Bool::True(), Assembler::kNotPatchable); |
4683 __ Bind(&done); | 4681 __ Bind(&done); |
4684 } | 4682 } |
4685 | 4683 |
4686 | 4684 |
4687 void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, | 4685 void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, |
4688 BranchInstr* branch) { | 4686 BranchInstr* branch) { |
4689 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); | 4687 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); |
4690 Location left = locs()->in(0); | 4688 Location left = locs()->in(0); |
4691 Location right = locs()->in(1); | 4689 Location right = locs()->in(1); |
4692 if (left.IsConstant() && right.IsConstant()) { | 4690 if (left.IsConstant() && right.IsConstant()) { |
(...skipping 38 matching lines...)
4731 | 4729 |
4732 | 4730 |
4733 void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4731 void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4734 // The arguments to the stub include the closure, as does the arguments | 4732 // The arguments to the stub include the closure, as does the arguments |
4735 // descriptor. | 4733 // descriptor. |
4736 Register temp_reg = locs()->temp(0).reg(); | 4734 Register temp_reg = locs()->temp(0).reg(); |
4737 int argument_count = ArgumentCount(); | 4735 int argument_count = ArgumentCount(); |
4738 const Array& arguments_descriptor = | 4736 const Array& arguments_descriptor = |
4739 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count, | 4737 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count, |
4740 argument_names())); | 4738 argument_names())); |
4741 __ LoadObject(temp_reg, arguments_descriptor); | 4739 __ LoadObject(temp_reg, arguments_descriptor, Assembler::kNotPatchable); |
4742 ASSERT(temp_reg == R10); | 4740 ASSERT(temp_reg == R10); |
4743 compiler->GenerateDartCall(deopt_id(), | 4741 compiler->GenerateDartCall(deopt_id(), |
4744 token_pos(), | 4742 token_pos(), |
4745 &StubCode::CallClosureFunctionLabel(), | 4743 &StubCode::CallClosureFunctionLabel(), |
4746 PcDescriptors::kClosureCall, | 4744 PcDescriptors::kClosureCall, |
4747 locs()); | 4745 locs()); |
4748 __ Drop(argument_count); | 4746 __ Drop(argument_count); |
4749 } | 4747 } |
4750 | 4748 |
4751 | 4749 |
4752 LocationSummary* BooleanNegateInstr::MakeLocationSummary() const { | 4750 LocationSummary* BooleanNegateInstr::MakeLocationSummary() const { |
4753 return LocationSummary::Make(1, | 4751 return LocationSummary::Make(1, |
4754 Location::RequiresRegister(), | 4752 Location::RequiresRegister(), |
4755 LocationSummary::kNoCall); | 4753 LocationSummary::kNoCall); |
4756 } | 4754 } |
4757 | 4755 |
4758 | 4756 |
4759 void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4757 void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4760 Register value = locs()->in(0).reg(); | 4758 Register value = locs()->in(0).reg(); |
4761 Register result = locs()->out().reg(); | 4759 Register result = locs()->out().reg(); |
4762 | 4760 |
4763 Label done; | 4761 Label done; |
4764 __ LoadObject(result, Bool::True()); | 4762 __ LoadObject(result, Bool::True(), Assembler::kNotPatchable); |
4765 __ CompareRegisters(result, value); | 4763 __ CompareRegisters(result, value); |
4766 __ j(NOT_EQUAL, &done, Assembler::kNearJump); | 4764 __ j(NOT_EQUAL, &done, Assembler::kNearJump); |
4767 __ LoadObject(result, Bool::False()); | 4765 __ LoadObject(result, Bool::False(), Assembler::kNotPatchable); |
4768 __ Bind(&done); | 4766 __ Bind(&done); |
4769 } | 4767 } |
4770 | 4768 |
4771 | 4769 |
4772 LocationSummary* StoreVMFieldInstr::MakeLocationSummary() const { | 4770 LocationSummary* StoreVMFieldInstr::MakeLocationSummary() const { |
4773 const intptr_t kNumInputs = 2; | 4771 const intptr_t kNumInputs = 2; |
4774 const intptr_t kNumTemps = 0; | 4772 const intptr_t kNumTemps = 0; |
4775 LocationSummary* locs = | 4773 LocationSummary* locs = |
4776 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); | 4774 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
4777 locs->set_in(0, value()->NeedsStoreBuffer() ? Location::WritableRegister() | 4775 locs->set_in(0, value()->NeedsStoreBuffer() ? Location::WritableRegister() |
(...skipping 49 matching lines...)
4827 PcDescriptors::kOther, | 4825 PcDescriptors::kOther, |
4828 locs()); | 4826 locs()); |
4829 __ Drop(2); // Discard type arguments and receiver. | 4827 __ Drop(2); // Discard type arguments and receiver. |
4830 } | 4828 } |
4831 | 4829 |
4832 } // namespace dart | 4830 } // namespace dart |
4833 | 4831 |
4834 #undef __ | 4832 #undef __ |
4835 | 4833 |
4836 #endif // defined TARGET_ARCH_X64 | 4834 #endif // defined TARGET_ARCH_X64 |