OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. |
6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
7 | 7 |
8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
9 | 9 |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
89 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; | 89 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; |
90 ASSERT(fp_sp_dist <= 0); | 90 ASSERT(fp_sp_dist <= 0); |
91 __ movq(RDI, RSP); | 91 __ movq(RDI, RSP); |
92 __ subq(RDI, RBP); | 92 __ subq(RDI, RBP); |
93 __ cmpq(RDI, Immediate(fp_sp_dist)); | 93 __ cmpq(RDI, Immediate(fp_sp_dist)); |
94 __ j(EQUAL, &done, Assembler::kNearJump); | 94 __ j(EQUAL, &done, Assembler::kNearJump); |
95 __ int3(); | 95 __ int3(); |
96 __ Bind(&done); | 96 __ Bind(&done); |
97 } | 97 } |
98 #endif | 98 #endif |
99 __ LeaveFrame(); | 99 __ LeaveFrameWithPP(); |
100 __ ret(); | 100 __ ret(); |
101 | 101 |
102 // Generate 8 bytes of NOPs so that the debugger can patch the | 102 // Generate 4 bytes of NOPs so that the debugger can patch the |
103 // return pattern with a call to the debug stub. | 103 // return pattern with a call to the debug stub. |
104 // Note that the nop(8) byte pattern is not recognized by the debugger. | 104 // Note that the nop(4) byte pattern is not recognized by the debugger. |
105 __ nop(1); | 105 __ nop(1); |
106 __ nop(1); | 106 __ nop(1); |
107 __ nop(1); | 107 __ nop(1); |
108 __ nop(1); | 108 __ nop(1); |
109 __ nop(1); | |
110 __ nop(1); | |
111 __ nop(1); | |
112 __ nop(1); | |
113 compiler->AddCurrentDescriptor(PcDescriptors::kReturn, | 109 compiler->AddCurrentDescriptor(PcDescriptors::kReturn, |
114 Isolate::kNoDeoptId, | 110 Isolate::kNoDeoptId, |
115 token_pos()); | 111 token_pos()); |
116 } | 112 } |
117 | 113 |
118 | 114 |
119 static Condition NegateCondition(Condition condition) { | 115 static Condition NegateCondition(Condition condition) { |
120 switch (condition) { | 116 switch (condition) { |
121 case EQUAL: return NOT_EQUAL; | 117 case EQUAL: return NOT_EQUAL; |
122 case NOT_EQUAL: return EQUAL; | 118 case NOT_EQUAL: return EQUAL; |
(...skipping 185 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
308 return LocationSummary::Make(kNumInputs, | 304 return LocationSummary::Make(kNumInputs, |
309 Location::RequiresRegister(), | 305 Location::RequiresRegister(), |
310 LocationSummary::kNoCall); | 306 LocationSummary::kNoCall); |
311 } | 307 } |
312 | 308 |
313 | 309 |
314 void ConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 310 void ConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
315 // The register allocator drops constant definitions that have no uses. | 311 // The register allocator drops constant definitions that have no uses. |
316 if (!locs()->out().IsInvalid()) { | 312 if (!locs()->out().IsInvalid()) { |
317 Register result = locs()->out().reg(); | 313 Register result = locs()->out().reg(); |
318 __ LoadObject(result, value()); | 314 __ LoadObjectFromPool(result, value(), Assembler::kNotPatchable, PP); |
319 } | 315 } |
320 } | 316 } |
321 | 317 |
322 | 318 |
323 LocationSummary* AssertAssignableInstr::MakeLocationSummary() const { | 319 LocationSummary* AssertAssignableInstr::MakeLocationSummary() const { |
324 const intptr_t kNumInputs = 3; | 320 const intptr_t kNumInputs = 3; |
325 const intptr_t kNumTemps = 0; | 321 const intptr_t kNumTemps = 0; |
326 LocationSummary* summary = | 322 LocationSummary* summary = |
327 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kCall); | 323 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kCall); |
328 summary->set_in(0, Location::RegisterLocation(RAX)); // Value. | 324 summary->set_in(0, Location::RegisterLocation(RAX)); // Value. |
(...skipping 129 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
458 const ICData& original_ic_data) { | 454 const ICData& original_ic_data) { |
459 if (!compiler->is_optimizing()) { | 455 if (!compiler->is_optimizing()) { |
460 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, | 456 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, |
461 deopt_id, | 457 deopt_id, |
462 token_pos); | 458 token_pos); |
463 } | 459 } |
464 const int kNumberOfArguments = 2; | 460 const int kNumberOfArguments = 2; |
465 const Array& kNoArgumentNames = Object::null_array(); | 461 const Array& kNoArgumentNames = Object::null_array(); |
466 const int kNumArgumentsChecked = 2; | 462 const int kNumArgumentsChecked = 2; |
467 | 463 |
468 const Immediate& raw_null = | |
469 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
470 Label check_identity; | 464 Label check_identity; |
471 __ cmpq(Address(RSP, 0 * kWordSize), raw_null); | 465 __ LoadObjectFromPool(TMP, Object::Handle(Object::null()), |
| 466 Assembler::kNotPatchable, PP); |
| 467 __ cmpq(Address(RSP, 0 * kWordSize), TMP); |
472 __ j(EQUAL, &check_identity); | 468 __ j(EQUAL, &check_identity); |
473 __ cmpq(Address(RSP, 1 * kWordSize), raw_null); | 469 __ cmpq(Address(RSP, 1 * kWordSize), TMP); |
474 __ j(EQUAL, &check_identity); | 470 __ j(EQUAL, &check_identity); |
475 | 471 |
476 ICData& equality_ic_data = ICData::ZoneHandle(original_ic_data.raw()); | 472 ICData& equality_ic_data = ICData::ZoneHandle(original_ic_data.raw()); |
477 if (compiler->is_optimizing() && FLAG_propagate_ic_data) { | 473 if (compiler->is_optimizing() && FLAG_propagate_ic_data) { |
478 ASSERT(!original_ic_data.IsNull()); | 474 ASSERT(!original_ic_data.IsNull()); |
479 if (original_ic_data.NumberOfChecks() == 0) { | 475 if (original_ic_data.NumberOfChecks() == 0) { |
480 // IC call for reoptimization populates original ICData. | 476 // IC call for reoptimization populates original ICData. |
481 equality_ic_data = original_ic_data.raw(); | 477 equality_ic_data = original_ic_data.raw(); |
482 } else { | 478 } else { |
483 // Megamorphic call. | 479 // Megamorphic call. |
(...skipping 20 matching lines...) Expand all Loading... |
504 | 500 |
505 __ Bind(&check_identity); | 501 __ Bind(&check_identity); |
506 Label equality_done; | 502 Label equality_done; |
507 if (compiler->is_optimizing()) { | 503 if (compiler->is_optimizing()) { |
508 // No need to update IC data. | 504 // No need to update IC data. |
509 Label is_true; | 505 Label is_true; |
510 __ popq(RAX); | 506 __ popq(RAX); |
511 __ popq(RDX); | 507 __ popq(RDX); |
512 __ cmpq(RAX, RDX); | 508 __ cmpq(RAX, RDX); |
513 __ j(EQUAL, &is_true); | 509 __ j(EQUAL, &is_true); |
514 __ LoadObject(RAX, Bool::Get(kind != Token::kEQ)); | 510 __ LoadObjectFromPool(RAX, Bool::Get(kind != Token::kEQ), |
| 511 Assembler::kNotPatchable, PP); |
515 __ jmp(&equality_done); | 512 __ jmp(&equality_done); |
516 __ Bind(&is_true); | 513 __ Bind(&is_true); |
517 __ LoadObject(RAX, Bool::Get(kind == Token::kEQ)); | 514 __ LoadObjectFromPool(RAX, Bool::Get(kind == Token::kEQ), |
| 515 Assembler::kNotPatchable, PP); |
518 if (kind == Token::kNE) { | 516 if (kind == Token::kNE) { |
519 // Skip not-equal result conversion. | 517 // Skip not-equal result conversion. |
520 __ jmp(&equality_done); | 518 __ jmp(&equality_done); |
521 } | 519 } |
522 } else { | 520 } else { |
523 // Call stub, load IC data in register. The stub will update ICData if | 521 // Call stub, load IC data in register. The stub will update ICData if |
524 // necessary. | 522 // necessary. |
525 Register ic_data_reg = locs->temp(0).reg(); | 523 Register ic_data_reg = locs->temp(0).reg(); |
526 ASSERT(ic_data_reg == RBX); // Stub depends on it. | 524 ASSERT(ic_data_reg == RBX); // Stub depends on it. |
527 __ LoadObject(ic_data_reg, equality_ic_data); | 525 __ LoadObject(ic_data_reg, equality_ic_data); |
528 compiler->GenerateCall(token_pos, | 526 compiler->GenerateCall(token_pos, |
529 &StubCode::EqualityWithNullArgLabel(), | 527 &StubCode::EqualityWithNullArgLabel(), |
530 PcDescriptors::kRuntimeCall, | 528 PcDescriptors::kRuntimeCall, |
531 locs); | 529 locs); |
532 __ Drop(2); | 530 __ Drop(2); |
533 } | 531 } |
534 __ Bind(&check_ne); | 532 __ Bind(&check_ne); |
535 if (kind == Token::kNE) { | 533 if (kind == Token::kNE) { |
536 Label true_label, done; | 534 Label true_label, done; |
537 // Negate the condition: true label returns false and vice versa. | 535 // Negate the condition: true label returns false and vice versa. |
538 __ CompareObject(RAX, Bool::True()); | 536 __ CompareObject(RAX, Bool::True()); |
539 __ j(EQUAL, &true_label, Assembler::kNearJump); | 537 __ j(EQUAL, &true_label, Assembler::kNearJump); |
540 __ LoadObject(RAX, Bool::True()); | 538 __ LoadObjectFromPool(RAX, Bool::True(), Assembler::kNotPatchable, PP); |
541 __ jmp(&done, Assembler::kNearJump); | 539 __ jmp(&done, Assembler::kNearJump); |
542 __ Bind(&true_label); | 540 __ Bind(&true_label); |
543 __ LoadObject(RAX, Bool::False()); | 541 __ LoadObjectFromPool(RAX, Bool::False(), Assembler::kNotPatchable, PP); |
544 __ Bind(&done); | 542 __ Bind(&done); |
545 } | 543 } |
546 __ Bind(&equality_done); | 544 __ Bind(&equality_done); |
547 } | 545 } |
548 | 546 |
549 | 547 |
550 static void LoadValueCid(FlowGraphCompiler* compiler, | 548 static void LoadValueCid(FlowGraphCompiler* compiler, |
551 Register value_cid_reg, | 549 Register value_cid_reg, |
552 Register value_reg, | 550 Register value_reg, |
553 Label* value_is_smi = NULL) { | 551 Label* value_is_smi = NULL) { |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
603 // Object.== is same as ===. | 601 // Object.== is same as ===. |
604 __ Drop(2); | 602 __ Drop(2); |
605 __ cmpq(left, right); | 603 __ cmpq(left, right); |
606 if (branch != NULL) { | 604 if (branch != NULL) { |
607 branch->EmitBranchOnCondition(compiler, cond); | 605 branch->EmitBranchOnCondition(compiler, cond); |
608 } else { | 606 } else { |
609 // This case should be rare. | 607 // This case should be rare. |
610 Register result = locs->out().reg(); | 608 Register result = locs->out().reg(); |
611 Label load_true; | 609 Label load_true; |
612 __ j(cond, &load_true, Assembler::kNearJump); | 610 __ j(cond, &load_true, Assembler::kNearJump); |
613 __ LoadObject(result, Bool::False()); | 611 __ LoadObjectFromPool(result, Bool::False(), |
| 612 Assembler::kNotPatchable, PP); |
614 __ jmp(&done); | 613 __ jmp(&done); |
615 __ Bind(&load_true); | 614 __ Bind(&load_true); |
616 __ LoadObject(result, Bool::True()); | 615 __ LoadObjectFromPool(result, Bool::True(), |
| 616 Assembler::kNotPatchable, PP); |
617 } | 617 } |
618 } else { | 618 } else { |
619 const int kNumberOfArguments = 2; | 619 const int kNumberOfArguments = 2; |
620 const Array& kNoArgumentNames = Object::null_array(); | 620 const Array& kNoArgumentNames = Object::null_array(); |
621 compiler->GenerateStaticCall(deopt_id, | 621 compiler->GenerateStaticCall(deopt_id, |
622 token_pos, | 622 token_pos, |
623 target, | 623 target, |
624 kNumberOfArguments, | 624 kNumberOfArguments, |
625 kNoArgumentNames, | 625 kNoArgumentNames, |
626 locs); | 626 locs); |
627 if (branch == NULL) { | 627 if (branch == NULL) { |
628 if (kind == Token::kNE) { | 628 if (kind == Token::kNE) { |
629 Label false_label; | 629 Label false_label; |
630 __ CompareObject(RAX, Bool::True()); | 630 __ CompareObject(RAX, Bool::True()); |
631 __ j(EQUAL, &false_label, Assembler::kNearJump); | 631 __ j(EQUAL, &false_label, Assembler::kNearJump); |
632 __ LoadObject(RAX, Bool::True()); | 632 __ LoadObjectFromPool(RAX, Bool::True(), |
| 633 Assembler::kNotPatchable, PP); |
633 __ jmp(&done); | 634 __ jmp(&done); |
634 __ Bind(&false_label); | 635 __ Bind(&false_label); |
635 __ LoadObject(RAX, Bool::False()); | 636 __ LoadObjectFromPool(RAX, Bool::False(), |
| 637 Assembler::kNotPatchable, PP); |
636 } | 638 } |
637 } else { | 639 } else { |
638 if (branch->is_checked()) { | 640 if (branch->is_checked()) { |
639 EmitAssertBoolean(RAX, token_pos, deopt_id, locs, compiler); | 641 EmitAssertBoolean(RAX, token_pos, deopt_id, locs, compiler); |
640 } | 642 } |
641 __ CompareObject(RAX, Bool::True()); | 643 __ CompareObject(RAX, Bool::True()); |
642 branch->EmitBranchOnCondition(compiler, cond); | 644 branch->EmitBranchOnCondition(compiler, cond); |
643 } | 645 } |
644 } | 646 } |
645 if (i < len - 1) { | 647 if (i < len - 1) { |
(...skipping 13 matching lines...) Expand all Loading... |
659 BranchInstr* branch, | 661 BranchInstr* branch, |
660 intptr_t deopt_id) { | 662 intptr_t deopt_id) { |
661 ASSERT((kind == Token::kEQ) || (kind == Token::kNE)); | 663 ASSERT((kind == Token::kEQ) || (kind == Token::kNE)); |
662 Register left = locs.in(0).reg(); | 664 Register left = locs.in(0).reg(); |
663 Register right = locs.in(1).reg(); | 665 Register right = locs.in(1).reg(); |
664 Register temp = locs.temp(0).reg(); | 666 Register temp = locs.temp(0).reg(); |
665 Label* deopt = compiler->AddDeoptStub(deopt_id, kDeoptEquality); | 667 Label* deopt = compiler->AddDeoptStub(deopt_id, kDeoptEquality); |
666 __ testq(left, Immediate(kSmiTagMask)); | 668 __ testq(left, Immediate(kSmiTagMask)); |
667 __ j(ZERO, deopt); | 669 __ j(ZERO, deopt); |
668 // 'left' is not Smi. | 670 // 'left' is not Smi. |
669 const Immediate& raw_null = | 671 |
670 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
671 Label identity_compare; | 672 Label identity_compare; |
672 __ cmpq(right, raw_null); | 673 __ CompareObject(right, Object::Handle(Object::null())); |
673 __ j(EQUAL, &identity_compare); | 674 __ j(EQUAL, &identity_compare); |
674 __ cmpq(left, raw_null); | 675 __ CompareObject(left, Object::Handle(Object::null())); |
675 __ j(EQUAL, &identity_compare); | 676 __ j(EQUAL, &identity_compare); |
676 | 677 |
677 __ LoadClassId(temp, left); | 678 __ LoadClassId(temp, left); |
678 const ICData& ic_data = ICData::Handle(orig_ic_data.AsUnaryClassChecks()); | 679 const ICData& ic_data = ICData::Handle(orig_ic_data.AsUnaryClassChecks()); |
679 const intptr_t len = ic_data.NumberOfChecks(); | 680 const intptr_t len = ic_data.NumberOfChecks(); |
680 for (intptr_t i = 0; i < len; i++) { | 681 for (intptr_t i = 0; i < len; i++) { |
681 __ cmpq(temp, Immediate(ic_data.GetReceiverClassIdAt(i))); | 682 __ cmpq(temp, Immediate(ic_data.GetReceiverClassIdAt(i))); |
682 if (i == (len - 1)) { | 683 if (i == (len - 1)) { |
683 __ j(NOT_EQUAL, deopt); | 684 __ j(NOT_EQUAL, deopt); |
684 } else { | 685 } else { |
685 __ j(EQUAL, &identity_compare); | 686 __ j(EQUAL, &identity_compare); |
686 } | 687 } |
687 } | 688 } |
688 __ Bind(&identity_compare); | 689 __ Bind(&identity_compare); |
689 __ cmpq(left, right); | 690 __ cmpq(left, right); |
690 if (branch == NULL) { | 691 if (branch == NULL) { |
691 Label done, is_equal; | 692 Label done, is_equal; |
692 Register result = locs.out().reg(); | 693 Register result = locs.out().reg(); |
693 __ j(EQUAL, &is_equal, Assembler::kNearJump); | 694 __ j(EQUAL, &is_equal, Assembler::kNearJump); |
694 // Not equal. | 695 // Not equal. |
695 __ LoadObject(result, Bool::Get(kind != Token::kEQ)); | 696 __ LoadObjectFromPool(result, Bool::Get(kind != Token::kEQ), |
| 697 Assembler::kNotPatchable, PP); |
696 __ jmp(&done, Assembler::kNearJump); | 698 __ jmp(&done, Assembler::kNearJump); |
697 __ Bind(&is_equal); | 699 __ Bind(&is_equal); |
698 __ LoadObject(result, Bool::Get(kind == Token::kEQ)); | 700 __ LoadObjectFromPool(result, Bool::Get(kind == Token::kEQ), |
| 701 Assembler::kNotPatchable, PP); |
699 __ Bind(&done); | 702 __ Bind(&done); |
700 } else { | 703 } else { |
701 Condition cond = TokenKindToSmiCondition(kind); | 704 Condition cond = TokenKindToSmiCondition(kind); |
702 branch->EmitBranchOnCondition(compiler, cond); | 705 branch->EmitBranchOnCondition(compiler, cond); |
703 } | 706 } |
704 } | 707 } |
705 | 708 |
706 | 709 |
707 // First test if receiver is NULL, in which case === is applied. | 710 // First test if receiver is NULL, in which case === is applied. |
708 // If type feedback was provided (lists of <class-id, target>), do a | 711 // If type feedback was provided (lists of <class-id, target>), do a |
709 // type by type check (either === or a static call to the operator). | 712 // type by type check (either === or a static call to the operator). |
710 static void EmitGenericEqualityCompare(FlowGraphCompiler* compiler, | 713 static void EmitGenericEqualityCompare(FlowGraphCompiler* compiler, |
711 LocationSummary* locs, | 714 LocationSummary* locs, |
712 Token::Kind kind, | 715 Token::Kind kind, |
713 BranchInstr* branch, | 716 BranchInstr* branch, |
714 const ICData& ic_data, | 717 const ICData& ic_data, |
715 intptr_t deopt_id, | 718 intptr_t deopt_id, |
716 intptr_t token_pos) { | 719 intptr_t token_pos) { |
717 ASSERT((kind == Token::kEQ) || (kind == Token::kNE)); | 720 ASSERT((kind == Token::kEQ) || (kind == Token::kNE)); |
718 ASSERT(!ic_data.IsNull() && (ic_data.NumberOfChecks() > 0)); | 721 ASSERT(!ic_data.IsNull() && (ic_data.NumberOfChecks() > 0)); |
719 Register left = locs->in(0).reg(); | 722 Register left = locs->in(0).reg(); |
720 Register right = locs->in(1).reg(); | 723 Register right = locs->in(1).reg(); |
721 const Immediate& raw_null = | 724 |
722 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
723 Label done, identity_compare, non_null_compare; | 725 Label done, identity_compare, non_null_compare; |
724 __ cmpq(right, raw_null); | 726 __ CompareObject(right, Object::Handle(Object::null())); |
725 __ j(EQUAL, &identity_compare, Assembler::kNearJump); | 727 __ j(EQUAL, &identity_compare, Assembler::kNearJump); |
726 __ cmpq(left, raw_null); | 728 __ CompareObject(left, Object::Handle(Object::null())); |
727 __ j(NOT_EQUAL, &non_null_compare, Assembler::kNearJump); | 729 __ j(NOT_EQUAL, &non_null_compare, Assembler::kNearJump); |
728 // Comparison with NULL is "===". | 730 // Comparison with NULL is "===". |
729 __ Bind(&identity_compare); | 731 __ Bind(&identity_compare); |
730 __ cmpq(left, right); | 732 __ cmpq(left, right); |
731 Condition cond = TokenKindToSmiCondition(kind); | 733 Condition cond = TokenKindToSmiCondition(kind); |
732 if (branch != NULL) { | 734 if (branch != NULL) { |
733 branch->EmitBranchOnCondition(compiler, cond); | 735 branch->EmitBranchOnCondition(compiler, cond); |
734 } else { | 736 } else { |
735 Register result = locs->out().reg(); | 737 Register result = locs->out().reg(); |
736 Label load_true; | 738 Label load_true; |
737 __ j(cond, &load_true, Assembler::kNearJump); | 739 __ j(cond, &load_true, Assembler::kNearJump); |
738 __ LoadObject(result, Bool::False()); | 740 __ LoadObjectFromPool(result, Bool::False(), Assembler::kNotPatchable, PP); |
739 __ jmp(&done); | 741 __ jmp(&done); |
740 __ Bind(&load_true); | 742 __ Bind(&load_true); |
741 __ LoadObject(result, Bool::True()); | 743 __ LoadObjectFromPool(result, Bool::True(), Assembler::kNotPatchable, PP); |
742 } | 744 } |
743 __ jmp(&done); | 745 __ jmp(&done); |
744 __ Bind(&non_null_compare); // Receiver is not null. | 746 __ Bind(&non_null_compare); // Receiver is not null. |
745 __ pushq(left); | 747 __ pushq(left); |
746 __ pushq(right); | 748 __ pushq(right); |
747 EmitEqualityAsPolymorphicCall(compiler, ic_data, locs, branch, kind, | 749 EmitEqualityAsPolymorphicCall(compiler, ic_data, locs, branch, kind, |
748 deopt_id, token_pos); | 750 deopt_id, token_pos); |
749 __ Bind(&done); | 751 __ Bind(&done); |
750 } | 752 } |
751 | 753 |
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
789 } else { | 791 } else { |
790 __ cmpq(left.reg(), right.reg()); | 792 __ cmpq(left.reg(), right.reg()); |
791 } | 793 } |
792 | 794 |
793 if (branch != NULL) { | 795 if (branch != NULL) { |
794 branch->EmitBranchOnCondition(compiler, true_condition); | 796 branch->EmitBranchOnCondition(compiler, true_condition); |
795 } else { | 797 } else { |
796 Register result = locs.out().reg(); | 798 Register result = locs.out().reg(); |
797 Label done, is_true; | 799 Label done, is_true; |
798 __ j(true_condition, &is_true); | 800 __ j(true_condition, &is_true); |
799 __ LoadObject(result, Bool::False()); | 801 __ LoadObjectFromPool(result, Bool::False(), Assembler::kNotPatchable, PP); |
800 __ jmp(&done); | 802 __ jmp(&done); |
801 __ Bind(&is_true); | 803 __ Bind(&is_true); |
802 __ LoadObject(result, Bool::True()); | 804 __ LoadObjectFromPool(result, Bool::True(), Assembler::kNotPatchable, PP); |
803 __ Bind(&done); | 805 __ Bind(&done); |
804 } | 806 } |
805 } | 807 } |
806 | 808 |
807 | 809 |
808 static Condition TokenKindToDoubleCondition(Token::Kind kind) { | 810 static Condition TokenKindToDoubleCondition(Token::Kind kind) { |
809 switch (kind) { | 811 switch (kind) { |
810 case Token::kEQ: return EQUAL; | 812 case Token::kEQ: return EQUAL; |
811 case Token::kNE: return NOT_EQUAL; | 813 case Token::kNE: return NOT_EQUAL; |
812 case Token::kLT: return BELOW; | 814 case Token::kLT: return BELOW; |
(...skipping 707 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1520 if (!compiler->is_optimizing() || (field_cid == kIllegalCid)) { | 1522 if (!compiler->is_optimizing() || (field_cid == kIllegalCid)) { |
1521 if (!compiler->is_optimizing() && (field_reg == kNoRegister)) { | 1523 if (!compiler->is_optimizing() && (field_reg == kNoRegister)) { |
1522 // Currently we can't have different location summaries for optimized | 1524 // Currently we can't have different location summaries for optimized |
1523 // and non-optimized code. So instead we manually pick up a register | 1525 // and non-optimized code. So instead we manually pick up a register |
1524 // that is known to be free because we know how non-optimizing compiler | 1526 // that is known to be free because we know how non-optimizing compiler |
1525 // allocates registers. | 1527 // allocates registers. |
1526 field_reg = RBX; | 1528 field_reg = RBX; |
1527 ASSERT((field_reg != value_reg) && (field_reg != value_cid_reg)); | 1529 ASSERT((field_reg != value_reg) && (field_reg != value_cid_reg)); |
1528 } | 1530 } |
1529 | 1531 |
1530 __ LoadObject(field_reg, Field::ZoneHandle(field().raw())); | 1532 __ LoadObjectFromPool(field_reg, Field::ZoneHandle(field().raw()), |
| 1533 Assembler::kNotPatchable, PP); |
1531 | 1534 |
1532 FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset()); | 1535 FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset()); |
1533 FieldAddress field_nullability_operand( | 1536 FieldAddress field_nullability_operand( |
1534 field_reg, Field::is_nullable_offset()); | 1537 field_reg, Field::is_nullable_offset()); |
1535 FieldAddress field_length_operand( | 1538 FieldAddress field_length_operand( |
1536 field_reg, Field::guarded_list_length_offset()); | 1539 field_reg, Field::guarded_list_length_offset()); |
1537 | 1540 |
1538 if (value_cid == kDynamicCid) { | 1541 if (value_cid == kDynamicCid) { |
1539 if (value_cid_reg == kNoRegister) { | 1542 if (value_cid_reg == kNoRegister) { |
1540 ASSERT(!compiler->is_optimizing()); | 1543 ASSERT(!compiler->is_optimizing()); |
(...skipping 125 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1666 } else { | 1669 } else { |
1667 __ movq(field_length_operand, Immediate(Field::kNoFixedLength)); | 1670 __ movq(field_length_operand, Immediate(Field::kNoFixedLength)); |
1668 } | 1671 } |
1669 } | 1672 } |
1670 | 1673 |
1671 if (!ok_is_fall_through) { | 1674 if (!ok_is_fall_through) { |
1672 __ jmp(&ok); | 1675 __ jmp(&ok); |
1673 } | 1676 } |
1674 } else { | 1677 } else { |
1675 if (field_reg != kNoRegister) { | 1678 if (field_reg != kNoRegister) { |
1676 __ LoadObject(field_reg, Field::ZoneHandle(field().raw())); | 1679 __ LoadObjectFromPool(field_reg, Field::ZoneHandle(field().raw()), |
| 1680 Assembler::kNotPatchable, PP); |
1677 } | 1681 } |
1678 | 1682 |
1679 if (value_cid == kDynamicCid) { | 1683 if (value_cid == kDynamicCid) { |
1680 // Field's guarded class id is fixed but value's class id is not known. | 1684 // Field's guarded class id is fixed but value's class id is not known. |
1681 __ testq(value_reg, Immediate(kSmiTagMask)); | 1685 __ testq(value_reg, Immediate(kSmiTagMask)); |
1682 | 1686 |
1683 if (field_cid != kSmiCid) { | 1687 if (field_cid != kSmiCid) { |
1684 __ j(ZERO, fail); | 1688 __ j(ZERO, fail); |
1685 __ LoadClassId(value_cid_reg, value_reg); | 1689 __ LoadClassId(value_cid_reg, value_reg); |
1686 __ cmpq(value_cid_reg, Immediate(field_cid)); | 1690 __ cmpq(value_cid_reg, Immediate(field_cid)); |
(...skipping 15 matching lines...) Expand all Loading... |
1702 } else if (RawObject::IsTypedDataClassId(field_cid)) { | 1706 } else if (RawObject::IsTypedDataClassId(field_cid)) { |
1703 // Destroy value_cid_reg (safe because we are finished with it). | 1707 // Destroy value_cid_reg (safe because we are finished with it). |
1704 __ movq(value_cid_reg, | 1708 __ movq(value_cid_reg, |
1705 FieldAddress(value_reg, TypedData::length_offset())); | 1709 FieldAddress(value_reg, TypedData::length_offset())); |
1706 } | 1710 } |
1707 __ cmpq(value_cid_reg, field_length_operand); | 1711 __ cmpq(value_cid_reg, field_length_operand); |
1708 } | 1712 } |
1709 | 1713 |
1710 if (field().is_nullable() && (field_cid != kNullCid)) { | 1714 if (field().is_nullable() && (field_cid != kNullCid)) { |
1711 __ j(EQUAL, &ok); | 1715 __ j(EQUAL, &ok); |
1712 const Immediate& raw_null = | 1716 __ CompareObject(value_reg, Object::Handle(Object::null())); |
1713 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
1714 __ cmpq(value_reg, raw_null); | |
1715 } | 1717 } |
1716 | 1718 |
1717 if (ok_is_fall_through) { | 1719 if (ok_is_fall_through) { |
1718 __ j(NOT_EQUAL, fail); | 1720 __ j(NOT_EQUAL, fail); |
1719 } else { | 1721 } else { |
1720 __ j(EQUAL, &ok); | 1722 __ j(EQUAL, &ok); |
1721 } | 1723 } |
1722 } else { | 1724 } else { |
1723 // Both value's and field's class id is known. | 1725 // Both value's and field's class id is known. |
1724 if ((value_cid != field_cid) && (value_cid != nullability)) { | 1726 if ((value_cid != field_cid) && (value_cid != nullability)) { |
(...skipping 104 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1829 : Location::RequiresRegister()); | 1831 : Location::RequiresRegister()); |
1830 locs->set_temp(0, Location::RequiresRegister()); | 1832 locs->set_temp(0, Location::RequiresRegister()); |
1831 return locs; | 1833 return locs; |
1832 } | 1834 } |
1833 | 1835 |
1834 | 1836 |
1835 void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1837 void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1836 Register value = locs()->in(0).reg(); | 1838 Register value = locs()->in(0).reg(); |
1837 Register temp = locs()->temp(0).reg(); | 1839 Register temp = locs()->temp(0).reg(); |
1838 | 1840 |
1839 __ LoadObject(temp, field()); | 1841 __ LoadObjectFromPool(temp, field(), Assembler::kNotPatchable, PP); |
1840 if (this->value()->NeedsStoreBuffer()) { | 1842 if (this->value()->NeedsStoreBuffer()) { |
1841 __ StoreIntoObject(temp, | 1843 __ StoreIntoObject(temp, |
1842 FieldAddress(temp, Field::value_offset()), value, CanValueBeSmi()); | 1844 FieldAddress(temp, Field::value_offset()), value, CanValueBeSmi()); |
1843 } else { | 1845 } else { |
1844 __ StoreIntoObjectNoBarrier( | 1846 __ StoreIntoObjectNoBarrier( |
1845 temp, FieldAddress(temp, Field::value_offset()), value); | 1847 temp, FieldAddress(temp, Field::value_offset()), value); |
1846 } | 1848 } |
1847 } | 1849 } |
1848 | 1850 |
1849 | 1851 |
(...skipping 133 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1983 // (or null). | 1985 // (or null). |
1984 ASSERT(!type_arguments().IsUninstantiatedIdentity() && | 1986 ASSERT(!type_arguments().IsUninstantiatedIdentity() && |
1985 !type_arguments().CanShareInstantiatorTypeArguments( | 1987 !type_arguments().CanShareInstantiatorTypeArguments( |
1986 instantiator_class())); | 1988 instantiator_class())); |
1987 // If the instantiator is null and if the type argument vector | 1989 // If the instantiator is null and if the type argument vector |
1988 // instantiated from null becomes a vector of dynamic, then use null as | 1990 // instantiated from null becomes a vector of dynamic, then use null as |
1989 // the type arguments. | 1991 // the type arguments. |
1990 Label type_arguments_instantiated; | 1992 Label type_arguments_instantiated; |
1991 const intptr_t len = type_arguments().Length(); | 1993 const intptr_t len = type_arguments().Length(); |
1992 if (type_arguments().IsRawInstantiatedRaw(len)) { | 1994 if (type_arguments().IsRawInstantiatedRaw(len)) { |
1993 const Immediate& raw_null = | 1995 __ CompareObject(instantiator_reg, Object::Handle(Object::null())); |
1994 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
1995 __ cmpq(instantiator_reg, raw_null); | |
1996 __ j(EQUAL, &type_arguments_instantiated, Assembler::kNearJump); | 1996 __ j(EQUAL, &type_arguments_instantiated, Assembler::kNearJump); |
1997 } | 1997 } |
1998 // Instantiate non-null type arguments. | 1998 // Instantiate non-null type arguments. |
1999 // A runtime call to instantiate the type arguments is required. | 1999 // A runtime call to instantiate the type arguments is required. |
2000 __ PushObject(Object::ZoneHandle()); // Make room for the result. | 2000 __ PushObject(Object::ZoneHandle()); // Make room for the result. |
2001 __ PushObject(type_arguments()); | 2001 __ PushObject(type_arguments()); |
2002 __ pushq(instantiator_reg); // Push instantiator type arguments. | 2002 __ pushq(instantiator_reg); // Push instantiator type arguments. |
2003 compiler->GenerateCallRuntime(token_pos(), | 2003 compiler->GenerateCallRuntime(token_pos(), |
2004 deopt_id(), | 2004 deopt_id(), |
2005 kInstantiateTypeArgumentsRuntimeEntry, | 2005 kInstantiateTypeArgumentsRuntimeEntry, |
(...skipping 27 matching lines...) Expand all Loading... |
2033 // instantiator_reg is the instantiator type argument vector, i.e. an | 2033 // instantiator_reg is the instantiator type argument vector, i.e. an |
2034 // AbstractTypeArguments object (or null). | 2034 // AbstractTypeArguments object (or null). |
2035 ASSERT(!type_arguments().IsUninstantiatedIdentity() && | 2035 ASSERT(!type_arguments().IsUninstantiatedIdentity() && |
2036 !type_arguments().CanShareInstantiatorTypeArguments( | 2036 !type_arguments().CanShareInstantiatorTypeArguments( |
2037 instantiator_class())); | 2037 instantiator_class())); |
2038 // If the instantiator is null and if the type argument vector | 2038 // If the instantiator is null and if the type argument vector |
2039 // instantiated from null becomes a vector of dynamic, then use null as | 2039 // instantiated from null becomes a vector of dynamic, then use null as |
2040 // the type arguments. | 2040 // the type arguments. |
2041 Label type_arguments_instantiated; | 2041 Label type_arguments_instantiated; |
2042 ASSERT(type_arguments().IsRawInstantiatedRaw(type_arguments().Length())); | 2042 ASSERT(type_arguments().IsRawInstantiatedRaw(type_arguments().Length())); |
2043 const Immediate& raw_null = | 2043 |
2044 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 2044 __ CompareObject(instantiator_reg, Object::Handle(Object::null())); |
2045 __ cmpq(instantiator_reg, raw_null); | |
2046 __ j(EQUAL, &type_arguments_instantiated, Assembler::kNearJump); | 2045 __ j(EQUAL, &type_arguments_instantiated, Assembler::kNearJump); |
2047 // Instantiate non-null type arguments. | 2046 // Instantiate non-null type arguments. |
2048 // In the non-factory case, we rely on the allocation stub to | 2047 // In the non-factory case, we rely on the allocation stub to |
2049 // instantiate the type arguments. | 2048 // instantiate the type arguments. |
2050 __ LoadObject(result_reg, type_arguments()); | 2049 __ LoadObjectFromPool(result_reg, type_arguments(), |
| 2050 Assembler::kNotPatchable, PP); |
2051 // result_reg: uninstantiated type arguments. | 2051 // result_reg: uninstantiated type arguments. |
2052 | 2052 |
2053 __ Bind(&type_arguments_instantiated); | 2053 __ Bind(&type_arguments_instantiated); |
2054 // result_reg: uninstantiated or instantiated type arguments. | 2054 // result_reg: uninstantiated or instantiated type arguments. |
2055 } | 2055 } |
2056 | 2056 |
2057 | 2057 |
2058 LocationSummary* | 2058 LocationSummary* |
2059 ExtractConstructorInstantiatorInstr::MakeLocationSummary() const { | 2059 ExtractConstructorInstantiatorInstr::MakeLocationSummary() const { |
2060 const intptr_t kNumInputs = 1; | 2060 const intptr_t kNumInputs = 1; |
(...skipping 14 matching lines...) Expand all Loading... |
2075 // instantiator_reg is the instantiator AbstractTypeArguments object | 2075 // instantiator_reg is the instantiator AbstractTypeArguments object |
2076 // (or null). | 2076 // (or null). |
2077 ASSERT(!type_arguments().IsUninstantiatedIdentity() && | 2077 ASSERT(!type_arguments().IsUninstantiatedIdentity() && |
2078 !type_arguments().CanShareInstantiatorTypeArguments( | 2078 !type_arguments().CanShareInstantiatorTypeArguments( |
2079 instantiator_class())); | 2079 instantiator_class())); |
2080 | 2080 |
2081 // If the instantiator is null and if the type argument vector | 2081 // If the instantiator is null and if the type argument vector |
2082 // instantiated from null becomes a vector of dynamic, then use null as | 2082 // instantiated from null becomes a vector of dynamic, then use null as |
2083 // the type arguments and do not pass the instantiator. | 2083 // the type arguments and do not pass the instantiator. |
2084 ASSERT(type_arguments().IsRawInstantiatedRaw(type_arguments().Length())); | 2084 ASSERT(type_arguments().IsRawInstantiatedRaw(type_arguments().Length())); |
2085 const Immediate& raw_null = | 2085 |
2086 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
2087 Label instantiator_not_null; | 2086 Label instantiator_not_null; |
2088 __ cmpq(instantiator_reg, raw_null); | 2087 __ CompareObject(instantiator_reg, Object::Handle(Object::null())); |
2089 __ j(NOT_EQUAL, &instantiator_not_null, Assembler::kNearJump); | 2088 __ j(NOT_EQUAL, &instantiator_not_null, Assembler::kNearJump); |
2090 // Null was used in VisitExtractConstructorTypeArguments as the | 2089 // Null was used in VisitExtractConstructorTypeArguments as the |
2091 // instantiated type arguments, no proper instantiator needed. | 2090 // instantiated type arguments, no proper instantiator needed. |
2092 __ movq(instantiator_reg, | 2091 __ movq(instantiator_reg, |
2093 Immediate(Smi::RawValue(StubCode::kNoInstantiator))); | 2092 Immediate(Smi::RawValue(StubCode::kNoInstantiator))); |
2094 __ Bind(&instantiator_not_null); | 2093 __ Bind(&instantiator_not_null); |
2095 // instantiator_reg: instantiator or kNoInstantiator. | 2094 // instantiator_reg: instantiator or kNoInstantiator. |
2096 } | 2095 } |
2097 | 2096 |
2098 | 2097 |
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2154 } | 2153 } |
2155 | 2154 |
2156 | 2155 |
2157 void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2156 void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2158 __ Bind(compiler->GetJumpLabel(this)); | 2157 __ Bind(compiler->GetJumpLabel(this)); |
2159 compiler->AddExceptionHandler(catch_try_index(), | 2158 compiler->AddExceptionHandler(catch_try_index(), |
2160 try_index(), | 2159 try_index(), |
2161 compiler->assembler()->CodeSize(), | 2160 compiler->assembler()->CodeSize(), |
2162 catch_handler_types_, | 2161 catch_handler_types_, |
2163 needs_stacktrace()); | 2162 needs_stacktrace()); |
| 2163 |
| 2164 // Restore the pool pointer. |
| 2165 __ LoadPoolPointer(PP); |
| 2166 |
2164 if (HasParallelMove()) { | 2167 if (HasParallelMove()) { |
2165 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); | 2168 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); |
2166 } | 2169 } |
2167 | 2170 |
2168 // Restore RSP from RBP as we are coming from a throw and the code for | 2171 // Restore RSP from RBP as we are coming from a throw and the code for |
2169 // popping arguments has not been run. | 2172 // popping arguments has not been run. |
2170 const intptr_t fp_sp_dist = | 2173 const intptr_t fp_sp_dist = |
2171 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; | 2174 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; |
2172 ASSERT(fp_sp_dist <= 0); | 2175 ASSERT(fp_sp_dist <= 0); |
2173 __ leaq(RSP, Address(RBP, fp_sp_dist)); | 2176 __ leaq(RSP, Address(RBP, fp_sp_dist)); |
(...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2234 | 2237 |
2235 Register temp = locs()->temp(0).reg(); | 2238 Register temp = locs()->temp(0).reg(); |
2236 // Generate stack overflow check. | 2239 // Generate stack overflow check. |
2237 __ movq(temp, Immediate(Isolate::Current()->stack_limit_address())); | 2240 __ movq(temp, Immediate(Isolate::Current()->stack_limit_address())); |
2238 __ cmpq(RSP, Address(temp, 0)); | 2241 __ cmpq(RSP, Address(temp, 0)); |
2239 __ j(BELOW_EQUAL, slow_path->entry_label()); | 2242 __ j(BELOW_EQUAL, slow_path->entry_label()); |
2240 if (compiler->CanOSRFunction() && in_loop()) { | 2243 if (compiler->CanOSRFunction() && in_loop()) { |
2241 // In unoptimized code check the usage counter to trigger OSR at loop | 2244 // In unoptimized code check the usage counter to trigger OSR at loop |
2242 // stack checks. Use progressively higher thresholds for more deeply | 2245 // stack checks. Use progressively higher thresholds for more deeply |
2243 // nested loops to attempt to hit outer loops with OSR when possible. | 2246 // nested loops to attempt to hit outer loops with OSR when possible. |
2244 __ LoadObject(temp, compiler->parsed_function().function()); | 2247 __ LoadObjectFromPool(temp, compiler->parsed_function().function(), |
| 2248 Assembler::kNotPatchable, PP); |
2245 intptr_t threshold = | 2249 intptr_t threshold = |
2246 FLAG_optimization_counter_threshold * (loop_depth() + 1); | 2250 FLAG_optimization_counter_threshold * (loop_depth() + 1); |
2247 __ cmpq(FieldAddress(temp, Function::usage_counter_offset()), | 2251 __ cmpq(FieldAddress(temp, Function::usage_counter_offset()), |
2248 Immediate(threshold)); | 2252 Immediate(threshold)); |
2249 __ j(GREATER_EQUAL, slow_path->entry_label()); | 2253 __ j(GREATER_EQUAL, slow_path->entry_label()); |
2250 } | 2254 } |
2251 __ Bind(slow_path->exit_label()); | 2255 __ Bind(slow_path->exit_label()); |
2252 } | 2256 } |
2253 | 2257 |
2254 | 2258 |
(...skipping 1426 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3681 __ movl(result, Address(RSP, 8)); | 3685 __ movl(result, Address(RSP, 8)); |
3682 break; | 3686 break; |
3683 case MethodRecognizer::kUint32x4GetFlagW: | 3687 case MethodRecognizer::kUint32x4GetFlagW: |
3684 __ movl(result, Address(RSP, 12)); | 3688 __ movl(result, Address(RSP, 12)); |
3685 break; | 3689 break; |
3686 default: UNREACHABLE(); | 3690 default: UNREACHABLE(); |
3687 } | 3691 } |
3688 __ addq(RSP, Immediate(16)); | 3692 __ addq(RSP, Immediate(16)); |
3689 __ testl(result, result); | 3693 __ testl(result, result); |
3690 __ j(NOT_ZERO, &non_zero, Assembler::kNearJump); | 3694 __ j(NOT_ZERO, &non_zero, Assembler::kNearJump); |
3691 __ LoadObject(result, Bool::False()); | 3695 __ LoadObjectFromPool(result, Bool::False(), Assembler::kNotPatchable, PP); |
3692 __ jmp(&done); | 3696 __ jmp(&done); |
3693 __ Bind(&non_zero); | 3697 __ Bind(&non_zero); |
3694 __ LoadObject(result, Bool::True()); | 3698 __ LoadObjectFromPool(result, Bool::True(), Assembler::kNotPatchable, PP); |
3695 __ Bind(&done); | 3699 __ Bind(&done); |
3696 } | 3700 } |
3697 | 3701 |
3698 | 3702 |
3699 LocationSummary* Uint32x4SelectInstr::MakeLocationSummary() const { | 3703 LocationSummary* Uint32x4SelectInstr::MakeLocationSummary() const { |
3700 const intptr_t kNumInputs = 3; | 3704 const intptr_t kNumInputs = 3; |
3701 const intptr_t kNumTemps = 1; | 3705 const intptr_t kNumTemps = 1; |
3702 LocationSummary* summary = | 3706 LocationSummary* summary = |
3703 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); | 3707 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
3704 summary->set_in(0, Location::RequiresFpuRegister()); | 3708 summary->set_in(0, Location::RequiresFpuRegister()); |
(...skipping 493 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4198 // return double.NAN; | 4202 // return double.NAN; |
4199 // } | 4203 // } |
4200 XmmRegister base = locs()->in(0).fpu_reg(); | 4204 XmmRegister base = locs()->in(0).fpu_reg(); |
4201 XmmRegister exp = locs()->in(1).fpu_reg(); | 4205 XmmRegister exp = locs()->in(1).fpu_reg(); |
4202 XmmRegister result = locs()->out().fpu_reg(); | 4206 XmmRegister result = locs()->out().fpu_reg(); |
4203 Register temp = locs()->temp(0).reg(); | 4207 Register temp = locs()->temp(0).reg(); |
4204 XmmRegister zero_temp = locs()->temp(1).fpu_reg(); | 4208 XmmRegister zero_temp = locs()->temp(1).fpu_reg(); |
4205 | 4209 |
4206 Label check_base_is_one; | 4210 Label check_base_is_one; |
4207 // Check if exponent is 0.0 -> return 1.0; | 4211 // Check if exponent is 0.0 -> return 1.0; |
4208 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(0))); | 4212 __ LoadObjectFromPool(temp, Double::ZoneHandle(Double::NewCanonical(0)), |
| 4213 Assembler::kNotPatchable, PP); |
4209 __ movsd(zero_temp, FieldAddress(temp, Double::value_offset())); | 4214 __ movsd(zero_temp, FieldAddress(temp, Double::value_offset())); |
4210 __ LoadObject(temp, Double::ZoneHandle(Double::NewCanonical(1))); | 4215 __ LoadObjectFromPool(temp, Double::ZoneHandle(Double::NewCanonical(1)), |
| 4216 Assembler::kNotPatchable, PP); |
4211 __ movsd(result, FieldAddress(temp, Double::value_offset())); | 4217 __ movsd(result, FieldAddress(temp, Double::value_offset())); |
4212 // 'result' contains 1.0. | 4218 // 'result' contains 1.0. |
4213 __ comisd(exp, zero_temp); | 4219 __ comisd(exp, zero_temp); |
4214 __ j(PARITY_EVEN, &check_base_is_one, Assembler::kNearJump); // NaN. | 4220 __ j(PARITY_EVEN, &check_base_is_one, Assembler::kNearJump); // NaN. |
4215 __ j(EQUAL, &skip_call, Assembler::kNearJump); // exp is 0, result is 1.0. | 4221 __ j(EQUAL, &skip_call, Assembler::kNearJump); // exp is 0, result is 1.0. |
4216 | 4222 |
4217 Label base_is_nan; | 4223 Label base_is_nan; |
4218 __ Bind(&check_base_is_one); | 4224 __ Bind(&check_base_is_one); |
4219 // Checks if base == 1.0. | 4225 // Checks if base == 1.0. |
4220 __ comisd(base, result); | 4226 __ comisd(base, result); |
(...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4298 summary->AddTemp(Location::RequiresRegister()); | 4304 summary->AddTemp(Location::RequiresRegister()); |
4299 } | 4305 } |
4300 return summary; | 4306 return summary; |
4301 } | 4307 } |
4302 | 4308 |
4303 | 4309 |
4304 void CheckClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4310 void CheckClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4305 if (IsNullCheck()) { | 4311 if (IsNullCheck()) { |
4306 Label* deopt = compiler->AddDeoptStub(deopt_id(), | 4312 Label* deopt = compiler->AddDeoptStub(deopt_id(), |
4307 kDeoptCheckClass); | 4313 kDeoptCheckClass); |
4308 const Immediate& raw_null = | 4314 __ CompareObject(locs()->in(0).reg(), |
4309 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 4315 Object::Handle(Object::null())); |
4310 __ cmpq(locs()->in(0).reg(), raw_null); | |
4311 __ j(EQUAL, deopt); | 4316 __ j(EQUAL, deopt); |
4312 return; | 4317 return; |
4313 } | 4318 } |
4314 | 4319 |
4315 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) || | 4320 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) || |
4316 (unary_checks().NumberOfChecks() > 1)); | 4321 (unary_checks().NumberOfChecks() > 1)); |
4317 Register value = locs()->in(0).reg(); | 4322 Register value = locs()->in(0).reg(); |
4318 Register temp = locs()->temp(0).reg(); | 4323 Register temp = locs()->temp(0).reg(); |
4319 Label* deopt = compiler->AddDeoptStub(deopt_id(), | 4324 Label* deopt = compiler->AddDeoptStub(deopt_id(), |
4320 kDeoptCheckClass); | 4325 kDeoptCheckClass); |
(...skipping 189 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4510 __ Bind(compiler->GetJumpLabel(this)); | 4515 __ Bind(compiler->GetJumpLabel(this)); |
4511 if (!compiler->is_optimizing()) { | 4516 if (!compiler->is_optimizing()) { |
4512 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, | 4517 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, |
4513 deopt_id_, | 4518 deopt_id_, |
4514 Scanner::kDummyTokenIndex); | 4519 Scanner::kDummyTokenIndex); |
4515 // Add an edge counter. | 4520 // Add an edge counter. |
4516 const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld)); | 4521 const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld)); |
4517 counter.SetAt(0, Smi::Handle(Smi::New(0))); | 4522 counter.SetAt(0, Smi::Handle(Smi::New(0))); |
4518 Label done; | 4523 Label done; |
4519 __ Comment("Edge counter"); | 4524 __ Comment("Edge counter"); |
4520 __ LoadObject(RAX, counter); | 4525 __ LoadObjectFromPool(RAX, counter, Assembler::kNotPatchable, PP); |
4521 __ addq(FieldAddress(RAX, Array::element_offset(0)), | 4526 __ addq(FieldAddress(RAX, Array::element_offset(0)), |
4522 Immediate(Smi::RawValue(1))); | 4527 Immediate(Smi::RawValue(1))); |
4523 __ j(NO_OVERFLOW, &done); | 4528 __ j(NO_OVERFLOW, &done); |
4524 __ movq(FieldAddress(RAX, Array::element_offset(0)), | 4529 __ movq(FieldAddress(RAX, Array::element_offset(0)), |
4525 Immediate(Smi::RawValue(Smi::kMaxValue))); | 4530 Immediate(Smi::RawValue(Smi::kMaxValue))); |
4526 __ Bind(&done); | 4531 __ Bind(&done); |
4527 } | 4532 } |
4528 if (HasParallelMove()) { | 4533 if (HasParallelMove()) { |
4529 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); | 4534 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); |
4530 } | 4535 } |
(...skipping 10 matching lines...) Expand all Loading... |
4541 // Add deoptimization descriptor for deoptimizing instructions that may | 4546 // Add deoptimization descriptor for deoptimizing instructions that may |
4542 // be inserted before this instruction. | 4547 // be inserted before this instruction. |
4543 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, | 4548 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, |
4544 GetDeoptId(), | 4549 GetDeoptId(), |
4545 0); // No token position. | 4550 0); // No token position. |
4546 // Add an edge counter. | 4551 // Add an edge counter. |
4547 const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld)); | 4552 const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld)); |
4548 counter.SetAt(0, Smi::Handle(Smi::New(0))); | 4553 counter.SetAt(0, Smi::Handle(Smi::New(0))); |
4549 Label done; | 4554 Label done; |
4550 __ Comment("Edge counter"); | 4555 __ Comment("Edge counter"); |
4551 __ LoadObject(RAX, counter); | 4556 __ LoadObjectFromPool(RAX, counter, Assembler::kNotPatchable, PP); |
4552 __ addq(FieldAddress(RAX, Array::element_offset(0)), | 4557 __ addq(FieldAddress(RAX, Array::element_offset(0)), |
4553 Immediate(Smi::RawValue(1))); | 4558 Immediate(Smi::RawValue(1))); |
4554 __ j(NO_OVERFLOW, &done); | 4559 __ j(NO_OVERFLOW, &done); |
4555 __ movq(FieldAddress(RAX, Array::element_offset(0)), | 4560 __ movq(FieldAddress(RAX, Array::element_offset(0)), |
4556 Immediate(Smi::RawValue(Smi::kMaxValue))); | 4561 Immediate(Smi::RawValue(Smi::kMaxValue))); |
4557 __ Bind(&done); | 4562 __ Bind(&done); |
4558 } | 4563 } |
4559 if (HasParallelMove()) { | 4564 if (HasParallelMove()) { |
4560 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); | 4565 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); |
4561 } | 4566 } |
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4624 // Special code for numbers (compare values instead of references.) | 4629 // Special code for numbers (compare values instead of references.) |
4625 void StrictCompareInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4630 void StrictCompareInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4626 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); | 4631 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); |
4627 Location left = locs()->in(0); | 4632 Location left = locs()->in(0); |
4628 Location right = locs()->in(1); | 4633 Location right = locs()->in(1); |
4629 if (left.IsConstant() && right.IsConstant()) { | 4634 if (left.IsConstant() && right.IsConstant()) { |
4630 // TODO(vegorov): should be eliminated earlier by constant propagation. | 4635 // TODO(vegorov): should be eliminated earlier by constant propagation. |
4631 const bool result = (kind() == Token::kEQ_STRICT) ? | 4636 const bool result = (kind() == Token::kEQ_STRICT) ? |
4632 left.constant().raw() == right.constant().raw() : | 4637 left.constant().raw() == right.constant().raw() : |
4633 left.constant().raw() != right.constant().raw(); | 4638 left.constant().raw() != right.constant().raw(); |
4634 __ LoadObject(locs()->out().reg(), Bool::Get(result)); | 4639 __ LoadObjectFromPool(locs()->out().reg(), Bool::Get(result), |
| 4640 Assembler::kNotPatchable, PP); |
4635 return; | 4641 return; |
4636 } | 4642 } |
4637 if (left.IsConstant()) { | 4643 if (left.IsConstant()) { |
4638 compiler->EmitEqualityRegConstCompare(right.reg(), | 4644 compiler->EmitEqualityRegConstCompare(right.reg(), |
4639 left.constant(), | 4645 left.constant(), |
4640 needs_number_check(), | 4646 needs_number_check(), |
4641 token_pos()); | 4647 token_pos()); |
4642 } else if (right.IsConstant()) { | 4648 } else if (right.IsConstant()) { |
4643 compiler->EmitEqualityRegConstCompare(left.reg(), | 4649 compiler->EmitEqualityRegConstCompare(left.reg(), |
4644 right.constant(), | 4650 right.constant(), |
4645 needs_number_check(), | 4651 needs_number_check(), |
4646 token_pos()); | 4652 token_pos()); |
4647 } else { | 4653 } else { |
4648 compiler->EmitEqualityRegRegCompare(left.reg(), | 4654 compiler->EmitEqualityRegRegCompare(left.reg(), |
4649 right.reg(), | 4655 right.reg(), |
4650 needs_number_check(), | 4656 needs_number_check(), |
4651 token_pos()); | 4657 token_pos()); |
4652 } | 4658 } |
4653 | 4659 |
4654 Register result = locs()->out().reg(); | 4660 Register result = locs()->out().reg(); |
4655 Label load_true, done; | 4661 Label load_true, done; |
4656 Condition true_condition = (kind() == Token::kEQ_STRICT) ? EQUAL : NOT_EQUAL; | 4662 Condition true_condition = (kind() == Token::kEQ_STRICT) ? EQUAL : NOT_EQUAL; |
4657 __ j(true_condition, &load_true, Assembler::kNearJump); | 4663 __ j(true_condition, &load_true, Assembler::kNearJump); |
4658 __ LoadObject(result, Bool::False()); | 4664 __ LoadObjectFromPool(result, Bool::False(), Assembler::kNotPatchable, PP); |
4659 __ jmp(&done, Assembler::kNearJump); | 4665 __ jmp(&done, Assembler::kNearJump); |
4660 __ Bind(&load_true); | 4666 __ Bind(&load_true); |
4661 __ LoadObject(result, Bool::True()); | 4667 __ LoadObjectFromPool(result, Bool::True(), Assembler::kNotPatchable, PP); |
4662 __ Bind(&done); | 4668 __ Bind(&done); |
4663 } | 4669 } |
4664 | 4670 |
4665 | 4671 |
4666 void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, | 4672 void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, |
4667 BranchInstr* branch) { | 4673 BranchInstr* branch) { |
4668 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); | 4674 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); |
4669 Location left = locs()->in(0); | 4675 Location left = locs()->in(0); |
4670 Location right = locs()->in(1); | 4676 Location right = locs()->in(1); |
4671 if (left.IsConstant() && right.IsConstant()) { | 4677 if (left.IsConstant() && right.IsConstant()) { |
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4710 | 4716 |
4711 | 4717 |
4712 void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4718 void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4713 // The arguments to the stub include the closure, as does the arguments | 4719 // The arguments to the stub include the closure, as does the arguments |
4714 // descriptor. | 4720 // descriptor. |
4715 Register temp_reg = locs()->temp(0).reg(); | 4721 Register temp_reg = locs()->temp(0).reg(); |
4716 int argument_count = ArgumentCount(); | 4722 int argument_count = ArgumentCount(); |
4717 const Array& arguments_descriptor = | 4723 const Array& arguments_descriptor = |
4718 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count, | 4724 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count, |
4719 argument_names())); | 4725 argument_names())); |
4720 __ LoadObject(temp_reg, arguments_descriptor); | 4726 __ LoadObjectFromPool(temp_reg, arguments_descriptor, |
| 4727 Assembler::kNotPatchable, PP); |
4721 ASSERT(temp_reg == R10); | 4728 ASSERT(temp_reg == R10); |
4722 compiler->GenerateDartCall(deopt_id(), | 4729 compiler->GenerateDartCall(deopt_id(), |
4723 token_pos(), | 4730 token_pos(), |
4724 &StubCode::CallClosureFunctionLabel(), | 4731 &StubCode::CallClosureFunctionLabel(), |
4725 PcDescriptors::kClosureCall, | 4732 PcDescriptors::kClosureCall, |
4726 locs()); | 4733 locs()); |
4727 __ Drop(argument_count); | 4734 __ Drop(argument_count); |
4728 } | 4735 } |
4729 | 4736 |
4730 | 4737 |
4731 LocationSummary* BooleanNegateInstr::MakeLocationSummary() const { | 4738 LocationSummary* BooleanNegateInstr::MakeLocationSummary() const { |
4732 return LocationSummary::Make(1, | 4739 return LocationSummary::Make(1, |
4733 Location::RequiresRegister(), | 4740 Location::RequiresRegister(), |
4734 LocationSummary::kNoCall); | 4741 LocationSummary::kNoCall); |
4735 } | 4742 } |
4736 | 4743 |
4737 | 4744 |
4738 void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4745 void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4739 Register value = locs()->in(0).reg(); | 4746 Register value = locs()->in(0).reg(); |
4740 Register result = locs()->out().reg(); | 4747 Register result = locs()->out().reg(); |
4741 | 4748 |
4742 Label done; | 4749 Label done; |
4743 __ LoadObject(result, Bool::True()); | 4750 __ LoadObjectFromPool(result, Bool::True(), Assembler::kNotPatchable, PP); |
4744 __ CompareRegisters(result, value); | 4751 __ CompareRegisters(result, value); |
4745 __ j(NOT_EQUAL, &done, Assembler::kNearJump); | 4752 __ j(NOT_EQUAL, &done, Assembler::kNearJump); |
4746 __ LoadObject(result, Bool::False()); | 4753 __ LoadObjectFromPool(result, Bool::False(), Assembler::kNotPatchable, PP); |
4747 __ Bind(&done); | 4754 __ Bind(&done); |
4748 } | 4755 } |
4749 | 4756 |
4750 | 4757 |
4751 LocationSummary* StoreVMFieldInstr::MakeLocationSummary() const { | 4758 LocationSummary* StoreVMFieldInstr::MakeLocationSummary() const { |
4752 const intptr_t kNumInputs = 2; | 4759 const intptr_t kNumInputs = 2; |
4753 const intptr_t kNumTemps = 0; | 4760 const intptr_t kNumTemps = 0; |
4754 LocationSummary* locs = | 4761 LocationSummary* locs = |
4755 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); | 4762 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
4756 locs->set_in(0, value()->NeedsStoreBuffer() ? Location::WritableRegister() | 4763 locs->set_in(0, value()->NeedsStoreBuffer() ? Location::WritableRegister() |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4806 PcDescriptors::kOther, | 4813 PcDescriptors::kOther, |
4807 locs()); | 4814 locs()); |
4808 __ Drop(2); // Discard type arguments and receiver. | 4815 __ Drop(2); // Discard type arguments and receiver. |
4809 } | 4816 } |
4810 | 4817 |
4811 } // namespace dart | 4818 } // namespace dart |
4812 | 4819 |
4813 #undef __ | 4820 #undef __ |
4814 | 4821 |
4815 #endif // defined TARGET_ARCH_X64 | 4822 #endif // defined TARGET_ARCH_X64 |
OLD | NEW |