Chromium Code Reviews

Side by Side Diff: src/ia32/code-stubs-ia32.cc

Issue 7063017: Rename TypeRecording...Stub into ...Stub. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 9 years, 7 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 499 matching lines...)
510 __ j(greater, &negative, Label::kNear); 510 __ j(greater, &negative, Label::kNear);
511 __ mov(ecx, scratch2); 511 __ mov(ecx, scratch2);
512 __ jmp(&done, Label::kNear); 512 __ jmp(&done, Label::kNear);
513 __ bind(&negative); 513 __ bind(&negative);
514 __ sub(ecx, Operand(scratch2)); 514 __ sub(ecx, Operand(scratch2));
515 __ bind(&done); 515 __ bind(&done);
516 } 516 }
517 } 517 }
518 518
519 519
520 Handle<Code> GetTypeRecordingUnaryOpStub(int key, 520 Handle<Code> GetUnaryOpStub(int key,
Søren Thygesen Gjesse 2011/05/24 11:33:11 Indentation.
fschneider 2011/05/24 12:16:41 Done.
521 TRUnaryOpIC::TypeInfo type_info) { 521 UnaryOpIC::TypeInfo type_info) {
522 TypeRecordingUnaryOpStub stub(key, type_info); 522 UnaryOpStub stub(key, type_info);
523 return stub.GetCode(); 523 return stub.GetCode();
524 } 524 }
525 525
526 526
527 const char* TypeRecordingUnaryOpStub::GetName() { 527 const char* UnaryOpStub::GetName() {
528 if (name_ != NULL) return name_; 528 if (name_ != NULL) return name_;
529 const int kMaxNameLength = 100; 529 const int kMaxNameLength = 100;
530 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( 530 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
531 kMaxNameLength); 531 kMaxNameLength);
532 if (name_ == NULL) return "OOM"; 532 if (name_ == NULL) return "OOM";
533 const char* op_name = Token::Name(op_); 533 const char* op_name = Token::Name(op_);
534 const char* overwrite_name = NULL; // Make g++ happy. 534 const char* overwrite_name = NULL; // Make g++ happy.
535 switch (mode_) { 535 switch (mode_) {
536 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; 536 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
537 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; 537 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
538 } 538 }
539 539
540 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), 540 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
541 "TypeRecordingUnaryOpStub_%s_%s_%s", 541 "UnaryOpStub_%s_%s_%s",
542 op_name, 542 op_name,
543 overwrite_name, 543 overwrite_name,
544 TRUnaryOpIC::GetName(operand_type_)); 544 UnaryOpIC::GetName(operand_type_));
545 return name_; 545 return name_;
546 } 546 }
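The stub name is assembled from the token name, the overwrite mode chosen above, and the recorded operand type. A minimal standalone sketch of the same formatting call, with hypothetical inputs standing in for Token::Name(op_), the overwrite label, and UnaryOpIC::GetName(operand_type_):

#include <cstdio>

int main() {
  char name[100];
  // Hypothetical inputs; the stub substitutes Token::Name(op_), the
  // overwrite label picked in the switch above, and
  // UnaryOpIC::GetName(operand_type_).
  std::snprintf(name, sizeof name, "UnaryOpStub_%s_%s_%s",
                "SUB", "Overwrite", "Smi");
  std::printf("%s\n", name);  // prints UnaryOpStub_SUB_Overwrite_Smi
  return 0;
}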
547 547
548 548
549 // TODO(svenpanne): Use virtual functions instead of switch. 549 // TODO(svenpanne): Use virtual functions instead of switch.
550 void TypeRecordingUnaryOpStub::Generate(MacroAssembler* masm) { 550 void UnaryOpStub::Generate(MacroAssembler* masm) {
551 switch (operand_type_) { 551 switch (operand_type_) {
552 case TRUnaryOpIC::UNINITIALIZED: 552 case UnaryOpIC::UNINITIALIZED:
553 GenerateTypeTransition(masm); 553 GenerateTypeTransition(masm);
554 break; 554 break;
555 case TRUnaryOpIC::SMI: 555 case UnaryOpIC::SMI:
556 GenerateSmiStub(masm); 556 GenerateSmiStub(masm);
557 break; 557 break;
558 case TRUnaryOpIC::HEAP_NUMBER: 558 case UnaryOpIC::HEAP_NUMBER:
559 GenerateHeapNumberStub(masm); 559 GenerateHeapNumberStub(masm);
560 break; 560 break;
561 case TRUnaryOpIC::GENERIC: 561 case UnaryOpIC::GENERIC:
562 GenerateGenericStub(masm); 562 GenerateGenericStub(masm);
563 break; 563 break;
564 } 564 }
565 } 565 }
566 566
567 567
568 void TypeRecordingUnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { 568 void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
569 __ pop(ecx); // Save return address. 569 __ pop(ecx); // Save return address.
570 __ push(eax); 570 __ push(eax);
571 // the argument is now on top. 571 // the argument is now on top.
572 // Push this stub's key. Although the operation and the type info are 572 // Push this stub's key. Although the operation and the type info are
573 // encoded into the key, the encoding is opaque, so push them too. 573 // encoded into the key, the encoding is opaque, so push them too.
574 __ push(Immediate(Smi::FromInt(MinorKey()))); 574 __ push(Immediate(Smi::FromInt(MinorKey())));
575 __ push(Immediate(Smi::FromInt(op_))); 575 __ push(Immediate(Smi::FromInt(op_)));
576 __ push(Immediate(Smi::FromInt(operand_type_))); 576 __ push(Immediate(Smi::FromInt(operand_type_)));
577 577
578 __ push(ecx); // Push return address. 578 __ push(ecx); // Push return address.
579 579
580 // Patch the caller to an appropriate specialized stub and return the 580 // Patch the caller to an appropriate specialized stub and return the
581 // operation result to the caller of the stub. 581 // operation result to the caller of the stub.
582 __ TailCallExternalReference( 582 __ TailCallExternalReference(
583 ExternalReference(IC_Utility(IC::kTypeRecordingUnaryOp_Patch), 583 ExternalReference(IC_Utility(IC::kUnaryOp_Patch),
584 masm->isolate()), 584 masm->isolate()), 4, 1);
585 4,
586 1);
587 } 585 }
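For orientation, the pushes above hand the runtime patch routine four arguments under the re-pushed return address. A sketch of the resulting ia32 stack, top of stack first (illustrative comment only, assuming esp points at the return address after the final push):

// [esp + 0 * kPointerSize]  return address (re-pushed from ecx)
// [esp + 1 * kPointerSize]  Smi::FromInt(operand_type_)
// [esp + 2 * kPointerSize]  Smi::FromInt(op_)
// [esp + 3 * kPointerSize]  Smi::FromInt(MinorKey())
// [esp + 4 * kPointerSize]  eax, the unary operand
// TailCallExternalReference then jumps to IC::kUnaryOp_Patch with 4 args.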
588 586
589 587
590 // TODO(svenpanne): Use virtual functions instead of switch. 588 // TODO(svenpanne): Use virtual functions instead of switch.
591 void TypeRecordingUnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { 589 void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
592 switch (op_) { 590 switch (op_) {
593 case Token::SUB: 591 case Token::SUB:
594 GenerateSmiStubSub(masm); 592 GenerateSmiStubSub(masm);
595 break; 593 break;
596 case Token::BIT_NOT: 594 case Token::BIT_NOT:
597 GenerateSmiStubBitNot(masm); 595 GenerateSmiStubBitNot(masm);
598 break; 596 break;
599 default: 597 default:
600 UNREACHABLE(); 598 UNREACHABLE();
601 } 599 }
602 } 600 }
603 601
604 602
605 void TypeRecordingUnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { 603 void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
606 Label non_smi, undo, slow; 604 Label non_smi, undo, slow;
607 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, 605 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow,
608 Label::kNear, Label::kNear, Label::kNear); 606 Label::kNear, Label::kNear, Label::kNear);
609 __ bind(&undo); 607 __ bind(&undo);
610 GenerateSmiCodeUndo(masm); 608 GenerateSmiCodeUndo(masm);
611 __ bind(&non_smi); 609 __ bind(&non_smi);
612 __ bind(&slow); 610 __ bind(&slow);
613 GenerateTypeTransition(masm); 611 GenerateTypeTransition(masm);
614 } 612 }
615 613
616 614
617 void TypeRecordingUnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { 615 void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
618 Label non_smi; 616 Label non_smi;
619 GenerateSmiCodeBitNot(masm, &non_smi); 617 GenerateSmiCodeBitNot(masm, &non_smi);
620 __ bind(&non_smi); 618 __ bind(&non_smi);
621 GenerateTypeTransition(masm); 619 GenerateTypeTransition(masm);
622 } 620 }
623 621
624 622
625 void TypeRecordingUnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, 623 void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
Søren Thygesen Gjesse 2011/05/24 11:33:11 Indentation.
fschneider 2011/05/24 12:16:41 Done.
626 Label* non_smi, 624 Label* non_smi,
627 Label* undo, 625 Label* undo,
628 Label* slow, 626 Label* slow,
629 Label::Distance non_smi_near, 627 Label::Distance non_smi_near,
630 Label::Distance undo_near, 628 Label::Distance undo_near,
631 Label::Distance slow_near) { 629 Label::Distance slow_near) {
632 // Check whether the value is a smi. 630 // Check whether the value is a smi.
633 __ test(eax, Immediate(kSmiTagMask)); 631 __ test(eax, Immediate(kSmiTagMask));
634 __ j(not_zero, non_smi, non_smi_near); 632 __ j(not_zero, non_smi, non_smi_near);
635 633
636 // We can't handle -0 with smis, so use a type transition for that case. 634 // We can't handle -0 with smis, so use a type transition for that case.
637 __ test(eax, Operand(eax)); 635 __ test(eax, Operand(eax));
638 __ j(zero, slow, slow_near); 636 __ j(zero, slow, slow_near);
639 637
640 // Try optimistic subtraction '0 - value', saving operand in eax for undo. 638 // Try optimistic subtraction '0 - value', saving operand in eax for undo.
641 __ mov(edx, Operand(eax)); 639 __ mov(edx, Operand(eax));
642 __ Set(eax, Immediate(0)); 640 __ Set(eax, Immediate(0));
643 __ sub(eax, Operand(edx)); 641 __ sub(eax, Operand(edx));
644 __ j(overflow, undo, undo_near); 642 __ j(overflow, undo, undo_near);
645 __ ret(0); 643 __ ret(0);
646 } 644 }
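The smi fast path above has two deliberate escape hatches: smi zero goes to the slow path because its negation is -0, which only a heap number can represent, and the optimistic 0 - value subtraction jumps to the undo path when negating the most negative smi overflows. A minimal standalone sketch of the same logic on ia32-style tagged smis (not V8 code; __builtin_sub_overflow assumes GCC or Clang):

#include <cstdint>
#include <optional>

// Illustrative: an ia32 smi is the value shifted left by one, tag bit 0 == 0.
constexpr int32_t kSmiTag = 0;
constexpr int32_t kSmiTagMask = 1;

std::optional<int32_t> NegateSmi(int32_t tagged) {
  if ((tagged & kSmiTagMask) != kSmiTag) return std::nullopt;  // non_smi path
  if (tagged == 0) return std::nullopt;            // -0 needs a heap number
  int32_t result;
  if (__builtin_sub_overflow(0, tagged, &result))  // most negative smi
    return std::nullopt;                           // undo path
  return result;  // 0 - (v << 1) == (-v) << 1, still a tagged smi
}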
647 645
648 646
649 void TypeRecordingUnaryOpStub::GenerateSmiCodeBitNot( 647 void UnaryOpStub::GenerateSmiCodeBitNot(
650 MacroAssembler* masm, 648 MacroAssembler* masm,
651 Label* non_smi, 649 Label* non_smi,
652 Label::Distance non_smi_near) { 650 Label::Distance non_smi_near) {
653 // Check whether the value is a smi. 651 // Check whether the value is a smi.
654 __ test(eax, Immediate(kSmiTagMask)); 652 __ test(eax, Immediate(kSmiTagMask));
655 __ j(not_zero, non_smi, non_smi_near); 653 __ j(not_zero, non_smi, non_smi_near);
656 654
657 // Flip bits and revert inverted smi-tag. 655 // Flip bits and revert inverted smi-tag.
658 __ not_(eax); 656 __ not_(eax);
659 __ and_(eax, ~kSmiTagMask); 657 __ and_(eax, ~kSmiTagMask);
660 __ ret(0); 658 __ ret(0);
661 } 659 }
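The BIT_NOT fast path works directly on the tagged value: with a smi encoded as value << 1, inverting every bit produces (~value << 1) | 1, so clearing the now-set tag bit leaves the correctly tagged ~value. A standalone sketch (not V8 code):

#include <cassert>
#include <cstdint>

constexpr int32_t kSmiTagMask = 1;

int32_t SmiBitNot(int32_t tagged) {
  assert((tagged & kSmiTagMask) == 0);  // caller already checked the smi tag
  return ~tagged & ~kSmiTagMask;        // not_(eax); and_(eax, ~kSmiTagMask)
}

// Example: value 5 is tagged as 10; SmiBitNot(10) == -12, which is ~5 tagged.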
662 660
663 661
664 void TypeRecordingUnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) { 662 void UnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) {
665 __ mov(eax, Operand(edx)); 663 __ mov(eax, Operand(edx));
666 } 664 }
667 665
668 666
669 // TODO(svenpanne): Use virtual functions instead of switch. 667 // TODO(svenpanne): Use virtual functions instead of switch.
670 void TypeRecordingUnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { 668 void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
671 switch (op_) { 669 switch (op_) {
672 case Token::SUB: 670 case Token::SUB:
673 GenerateHeapNumberStubSub(masm); 671 GenerateHeapNumberStubSub(masm);
674 break; 672 break;
675 case Token::BIT_NOT: 673 case Token::BIT_NOT:
676 GenerateHeapNumberStubBitNot(masm); 674 GenerateHeapNumberStubBitNot(masm);
677 break; 675 break;
678 default: 676 default:
679 UNREACHABLE(); 677 UNREACHABLE();
680 } 678 }
681 } 679 }
682 680
683 681
684 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { 682 void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) {
685 Label non_smi, undo, slow, call_builtin; 683 Label non_smi, undo, slow, call_builtin;
686 GenerateSmiCodeSub(masm, &non_smi, &undo, &call_builtin, Label::kNear); 684 GenerateSmiCodeSub(masm, &non_smi, &undo, &call_builtin, Label::kNear);
687 __ bind(&non_smi); 685 __ bind(&non_smi);
688 GenerateHeapNumberCodeSub(masm, &slow); 686 GenerateHeapNumberCodeSub(masm, &slow);
689 __ bind(&undo); 687 __ bind(&undo);
690 GenerateSmiCodeUndo(masm); 688 GenerateSmiCodeUndo(masm);
691 __ bind(&slow); 689 __ bind(&slow);
692 GenerateTypeTransition(masm); 690 GenerateTypeTransition(masm);
693 __ bind(&call_builtin); 691 __ bind(&call_builtin);
694 GenerateGenericCodeFallback(masm); 692 GenerateGenericCodeFallback(masm);
695 } 693 }
696 694
697 695
698 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( 696 void UnaryOpStub::GenerateHeapNumberStubBitNot(
699 MacroAssembler* masm) { 697 MacroAssembler* masm) {
700 Label non_smi, slow; 698 Label non_smi, slow;
701 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); 699 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
702 __ bind(&non_smi); 700 __ bind(&non_smi);
703 GenerateHeapNumberCodeBitNot(masm, &slow); 701 GenerateHeapNumberCodeBitNot(masm, &slow);
704 __ bind(&slow); 702 __ bind(&slow);
705 GenerateTypeTransition(masm); 703 GenerateTypeTransition(masm);
706 } 704 }
707 705
708 706
709 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, 707 void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
Søren Thygesen Gjesse 2011/05/24 11:33:11 Indentation.
fschneider 2011/05/24 12:16:41 Done.
710 Label* slow) { 708 Label* slow) {
711 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); 709 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
712 __ cmp(edx, masm->isolate()->factory()->heap_number_map()); 710 __ cmp(edx, masm->isolate()->factory()->heap_number_map());
713 __ j(not_equal, slow); 711 __ j(not_equal, slow);
714 712
715 if (mode_ == UNARY_OVERWRITE) { 713 if (mode_ == UNARY_OVERWRITE) {
716 __ xor_(FieldOperand(eax, HeapNumber::kExponentOffset), 714 __ xor_(FieldOperand(eax, HeapNumber::kExponentOffset),
717 Immediate(HeapNumber::kSignMask)); // Flip sign. 715 Immediate(HeapNumber::kSignMask)); // Flip sign.
718 } else { 716 } else {
719 __ mov(edx, Operand(eax)); 717 __ mov(edx, Operand(eax));
(...skipping 15 matching lines...)
735 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset)); 733 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
736 __ xor_(ecx, HeapNumber::kSignMask); // Flip sign. 734 __ xor_(ecx, HeapNumber::kSignMask); // Flip sign.
737 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx); 735 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
738 __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset)); 736 __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
739 __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx); 737 __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
740 } 738 }
741 __ ret(0); 739 __ ret(0);
742 } 740 }
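Both branches negate the heap number by toggling the IEEE-754 sign bit: UNARY_OVERWRITE XORs HeapNumber::kSignMask into the word at kExponentOffset in place, while the other branch builds the result in a separate heap number, storing the sign-flipped exponent word and copying the mantissa word. A standalone sketch of the underlying bit trick (not V8 code; it flips bit 63 of the 64-bit pattern, the same bit kSignMask selects in the upper 32-bit word):

#include <cstdint>
#include <cstring>

double NegateBySignFlip(double x) {
  uint64_t bits;
  std::memcpy(&bits, &x, sizeof bits);  // read the raw IEEE-754 encoding
  bits ^= uint64_t{1} << 63;            // flip only the sign bit
  std::memcpy(&x, &bits, sizeof bits);
  return x;                             // works for 0.0, infinities and NaN too
}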
743 741
744 742
745 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeBitNot( 743 void UnaryOpStub::GenerateHeapNumberCodeBitNot(
Søren Thygesen Gjesse 2011/05/24 11:33:11 Move MacroAssembler* masm, up one line and full indent.
fschneider 2011/05/24 12:16:41 Done.
746 MacroAssembler* masm, 744 MacroAssembler* masm,
747 Label* slow) { 745 Label* slow) {
748 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); 746 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
749 __ cmp(edx, masm->isolate()->factory()->heap_number_map()); 747 __ cmp(edx, masm->isolate()->factory()->heap_number_map());
750 __ j(not_equal, slow); 748 __ j(not_equal, slow);
751 749
752 // Convert the heap number in eax to an untagged integer in ecx. 750 // Convert the heap number in eax to an untagged integer in ecx.
753 IntegerConvert(masm, eax, CpuFeatures::IsSupported(SSE3), slow); 751 IntegerConvert(masm, eax, CpuFeatures::IsSupported(SSE3), slow);
754 752
755 // Do the bitwise operation and check if the result fits in a smi. 753 // Do the bitwise operation and check if the result fits in a smi.
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after
796 __ push(ecx); 794 __ push(ecx);
797 __ fild_s(Operand(esp, 0)); 795 __ fild_s(Operand(esp, 0));
798 __ pop(ecx); 796 __ pop(ecx);
799 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); 797 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
800 } 798 }
801 __ ret(0); 799 __ ret(0);
802 } 800 }
803 801
804 802
805 // TODO(svenpanne): Use virtual functions instead of switch. 803 // TODO(svenpanne): Use virtual functions instead of switch.
806 void TypeRecordingUnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { 804 void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
807 switch (op_) { 805 switch (op_) {
808 case Token::SUB: 806 case Token::SUB:
809 GenerateGenericStubSub(masm); 807 GenerateGenericStubSub(masm);
810 break; 808 break;
811 case Token::BIT_NOT: 809 case Token::BIT_NOT:
812 GenerateGenericStubBitNot(masm); 810 GenerateGenericStubBitNot(masm);
813 break; 811 break;
814 default: 812 default:
815 UNREACHABLE(); 813 UNREACHABLE();
816 } 814 }
817 } 815 }
818 816
819 817
820 void TypeRecordingUnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { 818 void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
821 Label non_smi, undo, slow; 819 Label non_smi, undo, slow;
822 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, Label::kNear); 820 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, Label::kNear);
823 __ bind(&non_smi); 821 __ bind(&non_smi);
824 GenerateHeapNumberCodeSub(masm, &slow); 822 GenerateHeapNumberCodeSub(masm, &slow);
825 __ bind(&undo); 823 __ bind(&undo);
826 GenerateSmiCodeUndo(masm); 824 GenerateSmiCodeUndo(masm);
827 __ bind(&slow); 825 __ bind(&slow);
828 GenerateGenericCodeFallback(masm); 826 GenerateGenericCodeFallback(masm);
829 } 827 }
830 828
831 829
832 void TypeRecordingUnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { 830 void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
833 Label non_smi, slow; 831 Label non_smi, slow;
834 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); 832 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
835 __ bind(&non_smi); 833 __ bind(&non_smi);
836 GenerateHeapNumberCodeBitNot(masm, &slow); 834 GenerateHeapNumberCodeBitNot(masm, &slow);
837 __ bind(&slow); 835 __ bind(&slow);
838 GenerateGenericCodeFallback(masm); 836 GenerateGenericCodeFallback(masm);
839 } 837 }
840 838
841 839
842 void TypeRecordingUnaryOpStub::GenerateGenericCodeFallback( 840 void UnaryOpStub::GenerateGenericCodeFallback(
Søren Thygesen Gjesse 2011/05/24 11:33:11 Fits one line?
fschneider 2011/05/24 12:16:41 Done.
843 MacroAssembler* masm) { 841 MacroAssembler* masm) {
844 // Handle the slow case by jumping to the corresponding JavaScript builtin. 842 // Handle the slow case by jumping to the corresponding JavaScript builtin.
845 __ pop(ecx); // pop return address. 843 __ pop(ecx); // pop return address.
846 __ push(eax); 844 __ push(eax);
847 __ push(ecx); // push return address 845 __ push(ecx); // push return address
848 switch (op_) { 846 switch (op_) {
849 case Token::SUB: 847 case Token::SUB:
850 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); 848 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
851 break; 849 break;
852 case Token::BIT_NOT: 850 case Token::BIT_NOT:
853 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); 851 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
854 break; 852 break;
855 default: 853 default:
856 UNREACHABLE(); 854 UNREACHABLE();
857 } 855 }
858 } 856 }
859 857
860 858
861 Handle<Code> GetTypeRecordingBinaryOpStub(int key, 859 Handle<Code> GetBinaryOpStub(int key,
862 TRBinaryOpIC::TypeInfo type_info, 860 BinaryOpIC::TypeInfo type_info,
863 TRBinaryOpIC::TypeInfo result_type_info) { 861 BinaryOpIC::TypeInfo result_type_info) {
864 TypeRecordingBinaryOpStub stub(key, type_info, result_type_info); 862 BinaryOpStub stub(key, type_info, result_type_info);
865 return stub.GetCode(); 863 return stub.GetCode();
866 } 864 }
867 865
868 866
869 void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { 867 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
870 __ pop(ecx); // Save return address. 868 __ pop(ecx); // Save return address.
871 __ push(edx); 869 __ push(edx);
872 __ push(eax); 870 __ push(eax);
873 // Left and right arguments are now on top. 871 // Left and right arguments are now on top.
874 // Push this stub's key. Although the operation and the type info are 872 // Push this stub's key. Although the operation and the type info are
875 // encoded into the key, the encoding is opaque, so push them too. 873 // encoded into the key, the encoding is opaque, so push them too.
876 __ push(Immediate(Smi::FromInt(MinorKey()))); 874 __ push(Immediate(Smi::FromInt(MinorKey())));
877 __ push(Immediate(Smi::FromInt(op_))); 875 __ push(Immediate(Smi::FromInt(op_)));
878 __ push(Immediate(Smi::FromInt(operands_type_))); 876 __ push(Immediate(Smi::FromInt(operands_type_)));
879 877
880 __ push(ecx); // Push return address. 878 __ push(ecx); // Push return address.
881 879
882 // Patch the caller to an appropriate specialized stub and return the 880 // Patch the caller to an appropriate specialized stub and return the
883 // operation result to the caller of the stub. 881 // operation result to the caller of the stub.
884 __ TailCallExternalReference( 882 __ TailCallExternalReference(
885 ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch), 883 ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
886 masm->isolate()), 884 masm->isolate()),
887 5, 885 5,
888 1); 886 1);
889 } 887 }
890 888
891 889
892 // Prepare for a type transition runtime call when the args are already on 890 // Prepare for a type transition runtime call when the args are already on
893 // the stack, under the return address. 891 // the stack, under the return address.
894 void TypeRecordingBinaryOpStub::GenerateTypeTransitionWithSavedArgs( 892 void BinaryOpStub::GenerateTypeTransitionWithSavedArgs(
Søren Thygesen Gjesse 2011/05/24 11:33:11 Fits one line?
fschneider 2011/05/24 12:16:41 Done.
895 MacroAssembler* masm) { 893 MacroAssembler* masm) {
896 __ pop(ecx); // Save return address. 894 __ pop(ecx); // Save return address.
897 // Left and right arguments are already on top of the stack. 895 // Left and right arguments are already on top of the stack.
898 // Push this stub's key. Although the operation and the type info are 896 // Push this stub's key. Although the operation and the type info are
899 // encoded into the key, the encoding is opaque, so push them too. 897 // encoded into the key, the encoding is opaque, so push them too.
900 __ push(Immediate(Smi::FromInt(MinorKey()))); 898 __ push(Immediate(Smi::FromInt(MinorKey())));
901 __ push(Immediate(Smi::FromInt(op_))); 899 __ push(Immediate(Smi::FromInt(op_)));
902 __ push(Immediate(Smi::FromInt(operands_type_))); 900 __ push(Immediate(Smi::FromInt(operands_type_)));
903 901
904 __ push(ecx); // Push return address. 902 __ push(ecx); // Push return address.
905 903
906 // Patch the caller to an appropriate specialized stub and return the 904 // Patch the caller to an appropriate specialized stub and return the
907 // operation result to the caller of the stub. 905 // operation result to the caller of the stub.
908 __ TailCallExternalReference( 906 __ TailCallExternalReference(
909 ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch), 907 ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
910 masm->isolate()), 908 masm->isolate()),
911 5, 909 5,
912 1); 910 1);
913 } 911 }
914 912
915 913
916 void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) { 914 void BinaryOpStub::Generate(MacroAssembler* masm) {
917 switch (operands_type_) { 915 switch (operands_type_) {
918 case TRBinaryOpIC::UNINITIALIZED: 916 case BinaryOpIC::UNINITIALIZED:
919 GenerateTypeTransition(masm); 917 GenerateTypeTransition(masm);
920 break; 918 break;
921 case TRBinaryOpIC::SMI: 919 case BinaryOpIC::SMI:
922 GenerateSmiStub(masm); 920 GenerateSmiStub(masm);
923 break; 921 break;
924 case TRBinaryOpIC::INT32: 922 case BinaryOpIC::INT32:
925 GenerateInt32Stub(masm); 923 GenerateInt32Stub(masm);
926 break; 924 break;
927 case TRBinaryOpIC::HEAP_NUMBER: 925 case BinaryOpIC::HEAP_NUMBER:
928 GenerateHeapNumberStub(masm); 926 GenerateHeapNumberStub(masm);
929 break; 927 break;
930 case TRBinaryOpIC::ODDBALL: 928 case BinaryOpIC::ODDBALL:
931 GenerateOddballStub(masm); 929 GenerateOddballStub(masm);
932 break; 930 break;
933 case TRBinaryOpIC::BOTH_STRING: 931 case BinaryOpIC::BOTH_STRING:
934 GenerateBothStringStub(masm); 932 GenerateBothStringStub(masm);
935 break; 933 break;
936 case TRBinaryOpIC::STRING: 934 case BinaryOpIC::STRING:
937 GenerateStringStub(masm); 935 GenerateStringStub(masm);
938 break; 936 break;
939 case TRBinaryOpIC::GENERIC: 937 case BinaryOpIC::GENERIC:
940 GenerateGeneric(masm); 938 GenerateGeneric(masm);
941 break; 939 break;
942 default: 940 default:
943 UNREACHABLE(); 941 UNREACHABLE();
944 } 942 }
945 } 943 }
946 944
947 945
948 const char* TypeRecordingBinaryOpStub::GetName() { 946 const char* BinaryOpStub::GetName() {
949 if (name_ != NULL) return name_; 947 if (name_ != NULL) return name_;
950 const int kMaxNameLength = 100; 948 const int kMaxNameLength = 100;
951 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( 949 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
952 kMaxNameLength); 950 kMaxNameLength);
953 if (name_ == NULL) return "OOM"; 951 if (name_ == NULL) return "OOM";
954 const char* op_name = Token::Name(op_); 952 const char* op_name = Token::Name(op_);
955 const char* overwrite_name; 953 const char* overwrite_name;
956 switch (mode_) { 954 switch (mode_) {
957 case NO_OVERWRITE: overwrite_name = "Alloc"; break; 955 case NO_OVERWRITE: overwrite_name = "Alloc"; break;
958 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break; 956 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
959 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break; 957 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
960 default: overwrite_name = "UnknownOverwrite"; break; 958 default: overwrite_name = "UnknownOverwrite"; break;
961 } 959 }
962 960
963 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), 961 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
964 "TypeRecordingBinaryOpStub_%s_%s_%s", 962 "BinaryOpStub_%s_%s_%s",
965 op_name, 963 op_name,
966 overwrite_name, 964 overwrite_name,
967 TRBinaryOpIC::GetName(operands_type_)); 965 BinaryOpIC::GetName(operands_type_));
968 return name_; 966 return name_;
969 } 967 }
970 968
971 969
972 void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, 970 void BinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
Søren Thygesen Gjesse 2011/05/24 11:33:11 MacroAssembler* masm, on a separate line
fschneider 2011/05/24 12:16:41 Done.
973 Label* slow, 971 Label* slow,
974 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) { 972 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
975 // 1. Move arguments into edx, eax except for DIV and MOD, which need the 973 // 1. Move arguments into edx, eax except for DIV and MOD, which need the
976 // dividend in eax and edx free for the division. Use eax, ebx for those. 974 // dividend in eax and edx free for the division. Use eax, ebx for those.
977 Comment load_comment(masm, "-- Load arguments"); 975 Comment load_comment(masm, "-- Load arguments");
978 Register left = edx; 976 Register left = edx;
979 Register right = eax; 977 Register right = eax;
980 if (op_ == Token::DIV || op_ == Token::MOD) { 978 if (op_ == Token::DIV || op_ == Token::MOD) {
981 left = eax; 979 left = eax;
982 right = ebx; 980 right = ebx;
(...skipping 354 matching lines...)
1337 __ mov(edx, eax); 1335 __ mov(edx, eax);
1338 __ mov(eax, ebx); 1336 __ mov(eax, ebx);
1339 break; 1337 break;
1340 1338
1341 default: 1339 default:
1342 break; 1340 break;
1343 } 1341 }
1344 } 1342 }
1345 1343
1346 1344
1347 void TypeRecordingBinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { 1345 void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
1348 Label call_runtime; 1346 Label call_runtime;
1349 1347
1350 switch (op_) { 1348 switch (op_) {
1351 case Token::ADD: 1349 case Token::ADD:
1352 case Token::SUB: 1350 case Token::SUB:
1353 case Token::MUL: 1351 case Token::MUL:
1354 case Token::DIV: 1352 case Token::DIV:
1355 break; 1353 break;
1356 case Token::MOD: 1354 case Token::MOD:
1357 case Token::BIT_OR: 1355 case Token::BIT_OR:
1358 case Token::BIT_AND: 1356 case Token::BIT_AND:
1359 case Token::BIT_XOR: 1357 case Token::BIT_XOR:
1360 case Token::SAR: 1358 case Token::SAR:
1361 case Token::SHL: 1359 case Token::SHL:
1362 case Token::SHR: 1360 case Token::SHR:
1363 GenerateRegisterArgsPush(masm); 1361 GenerateRegisterArgsPush(masm);
1364 break; 1362 break;
1365 default: 1363 default:
1366 UNREACHABLE(); 1364 UNREACHABLE();
1367 } 1365 }
1368 1366
1369 if (result_type_ == TRBinaryOpIC::UNINITIALIZED || 1367 if (result_type_ == BinaryOpIC::UNINITIALIZED ||
1370 result_type_ == TRBinaryOpIC::SMI) { 1368 result_type_ == BinaryOpIC::SMI) {
1371 GenerateSmiCode(masm, &call_runtime, NO_HEAPNUMBER_RESULTS); 1369 GenerateSmiCode(masm, &call_runtime, NO_HEAPNUMBER_RESULTS);
1372 } else { 1370 } else {
1373 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); 1371 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
1374 } 1372 }
1375 __ bind(&call_runtime); 1373 __ bind(&call_runtime);
1376 switch (op_) { 1374 switch (op_) {
1377 case Token::ADD: 1375 case Token::ADD:
1378 case Token::SUB: 1376 case Token::SUB:
1379 case Token::MUL: 1377 case Token::MUL:
1380 case Token::DIV: 1378 case Token::DIV:
1381 GenerateTypeTransition(masm); 1379 GenerateTypeTransition(masm);
1382 break; 1380 break;
1383 case Token::MOD: 1381 case Token::MOD:
1384 case Token::BIT_OR: 1382 case Token::BIT_OR:
1385 case Token::BIT_AND: 1383 case Token::BIT_AND:
1386 case Token::BIT_XOR: 1384 case Token::BIT_XOR:
1387 case Token::SAR: 1385 case Token::SAR:
1388 case Token::SHL: 1386 case Token::SHL:
1389 case Token::SHR: 1387 case Token::SHR:
1390 GenerateTypeTransitionWithSavedArgs(masm); 1388 GenerateTypeTransitionWithSavedArgs(masm);
1391 break; 1389 break;
1392 default: 1390 default:
1393 UNREACHABLE(); 1391 UNREACHABLE();
1394 } 1392 }
1395 } 1393 }
1396 1394
1397 1395
1398 void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) { 1396 void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
1399 ASSERT(operands_type_ == TRBinaryOpIC::STRING); 1397 ASSERT(operands_type_ == BinaryOpIC::STRING);
1400 ASSERT(op_ == Token::ADD); 1398 ASSERT(op_ == Token::ADD);
1401 // Try to add arguments as strings, otherwise, transition to the generic 1399 // Try to add arguments as strings, otherwise, transition to the generic
1402 // TRBinaryOpIC type. 1400 // BinaryOpIC type.
1403 GenerateAddStrings(masm); 1401 GenerateAddStrings(masm);
1404 GenerateTypeTransition(masm); 1402 GenerateTypeTransition(masm);
1405 } 1403 }
1406 1404
1407 1405
1408 void TypeRecordingBinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) { 1406 void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
1409 Label call_runtime; 1407 Label call_runtime;
1410 ASSERT(operands_type_ == TRBinaryOpIC::BOTH_STRING); 1408 ASSERT(operands_type_ == BinaryOpIC::BOTH_STRING);
1411 ASSERT(op_ == Token::ADD); 1409 ASSERT(op_ == Token::ADD);
1412 // If both arguments are strings, call the string add stub. 1410 // If both arguments are strings, call the string add stub.
1413 // Otherwise, do a transition. 1411 // Otherwise, do a transition.
1414 1412
1415 // Registers containing left and right operands respectively. 1413 // Registers containing left and right operands respectively.
1416 Register left = edx; 1414 Register left = edx;
1417 Register right = eax; 1415 Register right = eax;
1418 1416
1419 // Test if left operand is a string. 1417 // Test if left operand is a string.
1420 __ test(left, Immediate(kSmiTagMask)); 1418 __ test(left, Immediate(kSmiTagMask));
1421 __ j(zero, &call_runtime); 1419 __ j(zero, &call_runtime);
1422 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx); 1420 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
1423 __ j(above_equal, &call_runtime); 1421 __ j(above_equal, &call_runtime);
1424 1422
1425 // Test if right operand is a string. 1423 // Test if right operand is a string.
1426 __ test(right, Immediate(kSmiTagMask)); 1424 __ test(right, Immediate(kSmiTagMask));
1427 __ j(zero, &call_runtime); 1425 __ j(zero, &call_runtime);
1428 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx); 1426 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
1429 __ j(above_equal, &call_runtime); 1427 __ j(above_equal, &call_runtime);
1430 1428
1431 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB); 1429 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
1432 GenerateRegisterArgsPush(masm); 1430 GenerateRegisterArgsPush(masm);
1433 __ TailCallStub(&string_add_stub); 1431 __ TailCallStub(&string_add_stub);
1434 1432
1435 __ bind(&call_runtime); 1433 __ bind(&call_runtime);
1436 GenerateTypeTransition(masm); 1434 GenerateTypeTransition(masm);
1437 } 1435 }
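The per-operand string test above has two parts: a smi can never be a string, so j(zero) after the tag test bails out, and for heap objects CmpObjectType compares the map's instance type against FIRST_NONSTRING_TYPE, with string types sorting below it, so above_equal also bails out. A small sketch of the predicate (not V8 code; the instance type would really be read from the object's map, and the constants are placeholders):

#include <cstdint>

constexpr int32_t kSmiTagMask = 1;

bool IsHeapString(int32_t tagged_value, int instance_type,
                  int first_nonstring_type) {
  if ((tagged_value & kSmiTagMask) == 0) return false;  // smi, not a string
  return instance_type < first_nonstring_type;          // strings sort first
}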
1438 1436
1439 1437
1440 void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) { 1438 void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
1441 Label call_runtime; 1439 Label call_runtime;
1442 ASSERT(operands_type_ == TRBinaryOpIC::INT32); 1440 ASSERT(operands_type_ == BinaryOpIC::INT32);
1443 1441
1444 // Floating point case. 1442 // Floating point case.
1445 switch (op_) { 1443 switch (op_) {
1446 case Token::ADD: 1444 case Token::ADD:
1447 case Token::SUB: 1445 case Token::SUB:
1448 case Token::MUL: 1446 case Token::MUL:
1449 case Token::DIV: { 1447 case Token::DIV: {
1450 Label not_floats; 1448 Label not_floats;
1451 Label not_int32; 1449 Label not_int32;
1452 if (CpuFeatures::IsSupported(SSE2)) { 1450 if (CpuFeatures::IsSupported(SSE2)) {
1453 CpuFeatures::Scope use_sse2(SSE2); 1451 CpuFeatures::Scope use_sse2(SSE2);
1454 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats); 1452 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
1455 FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx); 1453 FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx);
1456 switch (op_) { 1454 switch (op_) {
1457 case Token::ADD: __ addsd(xmm0, xmm1); break; 1455 case Token::ADD: __ addsd(xmm0, xmm1); break;
1458 case Token::SUB: __ subsd(xmm0, xmm1); break; 1456 case Token::SUB: __ subsd(xmm0, xmm1); break;
1459 case Token::MUL: __ mulsd(xmm0, xmm1); break; 1457 case Token::MUL: __ mulsd(xmm0, xmm1); break;
1460 case Token::DIV: __ divsd(xmm0, xmm1); break; 1458 case Token::DIV: __ divsd(xmm0, xmm1); break;
1461 default: UNREACHABLE(); 1459 default: UNREACHABLE();
1462 } 1460 }
1463 // Check result type if it is currently Int32. 1461 // Check result type if it is currently Int32.
1464 if (result_type_ <= TRBinaryOpIC::INT32) { 1462 if (result_type_ <= BinaryOpIC::INT32) {
1465 __ cvttsd2si(ecx, Operand(xmm0)); 1463 __ cvttsd2si(ecx, Operand(xmm0));
1466 __ cvtsi2sd(xmm2, Operand(ecx)); 1464 __ cvtsi2sd(xmm2, Operand(ecx));
1467 __ ucomisd(xmm0, xmm2); 1465 __ ucomisd(xmm0, xmm2);
1468 __ j(not_zero, &not_int32); 1466 __ j(not_zero, &not_int32);
1469 __ j(carry, &not_int32); 1467 __ j(carry, &not_int32);
1470 } 1468 }
1471 GenerateHeapResultAllocation(masm, &call_runtime); 1469 GenerateHeapResultAllocation(masm, &call_runtime);
1472 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); 1470 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1473 __ ret(0); 1471 __ ret(0);
1474 } else { // SSE2 not available, use FPU. 1472 } else { // SSE2 not available, use FPU.
(...skipping 157 matching lines...)
1632 break; 1630 break;
1633 case Token::SHR: 1631 case Token::SHR:
1634 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); 1632 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
1635 break; 1633 break;
1636 default: 1634 default:
1637 UNREACHABLE(); 1635 UNREACHABLE();
1638 } 1636 }
1639 } 1637 }
1640 1638
1641 1639
1642 void TypeRecordingBinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { 1640 void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
1643 if (op_ == Token::ADD) { 1641 if (op_ == Token::ADD) {
1644 // Handle string addition here, because it is the only operation 1642 // Handle string addition here, because it is the only operation
1645 // that does not do a ToNumber conversion on the operands. 1643 // that does not do a ToNumber conversion on the operands.
1646 GenerateAddStrings(masm); 1644 GenerateAddStrings(masm);
1647 } 1645 }
1648 1646
1649 Factory* factory = masm->isolate()->factory(); 1647 Factory* factory = masm->isolate()->factory();
1650 1648
1651 // Convert odd ball arguments to numbers. 1649 // Convert odd ball arguments to numbers.
1652 Label check, done; 1650 Label check, done;
(...skipping 12 matching lines...)
1665 __ xor_(eax, Operand(eax)); 1663 __ xor_(eax, Operand(eax));
1666 } else { 1664 } else {
1667 __ mov(eax, Immediate(factory->nan_value())); 1665 __ mov(eax, Immediate(factory->nan_value()));
1668 } 1666 }
1669 __ bind(&done); 1667 __ bind(&done);
1670 1668
1671 GenerateHeapNumberStub(masm); 1669 GenerateHeapNumberStub(masm);
1672 } 1670 }
1673 1671
1674 1672
1675 void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { 1673 void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
1676 Label call_runtime; 1674 Label call_runtime;
1677 1675
1678 // Floating point case. 1676 // Floating point case.
1679 switch (op_) { 1677 switch (op_) {
1680 case Token::ADD: 1678 case Token::ADD:
1681 case Token::SUB: 1679 case Token::SUB:
1682 case Token::MUL: 1680 case Token::MUL:
1683 case Token::DIV: { 1681 case Token::DIV: {
1684 Label not_floats; 1682 Label not_floats;
1685 if (CpuFeatures::IsSupported(SSE2)) { 1683 if (CpuFeatures::IsSupported(SSE2)) {
(...skipping 160 matching lines...)
1846 break; 1844 break;
1847 case Token::SHR: 1845 case Token::SHR:
1848 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); 1846 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
1849 break; 1847 break;
1850 default: 1848 default:
1851 UNREACHABLE(); 1849 UNREACHABLE();
1852 } 1850 }
1853 } 1851 }
1854 1852
1855 1853
1856 void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) { 1854 void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
1857 Label call_runtime; 1855 Label call_runtime;
1858 1856
1859 Counters* counters = masm->isolate()->counters(); 1857 Counters* counters = masm->isolate()->counters();
1860 __ IncrementCounter(counters->generic_binary_stub_calls(), 1); 1858 __ IncrementCounter(counters->generic_binary_stub_calls(), 1);
1861 1859
1862 switch (op_) { 1860 switch (op_) {
1863 case Token::ADD: 1861 case Token::ADD:
1864 case Token::SUB: 1862 case Token::SUB:
1865 case Token::MUL: 1863 case Token::MUL:
1866 case Token::DIV: 1864 case Token::DIV:
(...skipping 176 matching lines...)
2043 break; 2041 break;
2044 case Token::SHR: 2042 case Token::SHR:
2045 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); 2043 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
2046 break; 2044 break;
2047 default: 2045 default:
2048 UNREACHABLE(); 2046 UNREACHABLE();
2049 } 2047 }
2050 } 2048 }
2051 2049
2052 2050
2053 void TypeRecordingBinaryOpStub::GenerateAddStrings(MacroAssembler* masm) { 2051 void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
2054 ASSERT(op_ == Token::ADD); 2052 ASSERT(op_ == Token::ADD);
2055 Label left_not_string, call_runtime; 2053 Label left_not_string, call_runtime;
2056 2054
2057 // Registers containing left and right operands respectively. 2055 // Registers containing left and right operands respectively.
2058 Register left = edx; 2056 Register left = edx;
2059 Register right = eax; 2057 Register right = eax;
2060 2058
2061 // Test if left operand is a string. 2059 // Test if left operand is a string.
2062 __ test(left, Immediate(kSmiTagMask)); 2060 __ test(left, Immediate(kSmiTagMask));
2063 __ j(zero, &left_not_string, Label::kNear); 2061 __ j(zero, &left_not_string, Label::kNear);
(...skipping 13 matching lines...)
2077 2075
2078 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); 2076 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB);
2079 GenerateRegisterArgsPush(masm); 2077 GenerateRegisterArgsPush(masm);
2080 __ TailCallStub(&string_add_right_stub); 2078 __ TailCallStub(&string_add_right_stub);
2081 2079
2082 // Neither argument is a string. 2080 // Neither argument is a string.
2083 __ bind(&call_runtime); 2081 __ bind(&call_runtime);
2084 } 2082 }
2085 2083
2086 2084
2087 void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation( 2085 void BinaryOpStub::GenerateHeapResultAllocation(
2088 MacroAssembler* masm, 2086 MacroAssembler* masm,
2089 Label* alloc_failure) { 2087 Label* alloc_failure) {
2090 Label skip_allocation; 2088 Label skip_allocation;
2091 OverwriteMode mode = mode_; 2089 OverwriteMode mode = mode_;
2092 switch (mode) { 2090 switch (mode) {
2093 case OVERWRITE_LEFT: { 2091 case OVERWRITE_LEFT: {
2094 // If the argument in edx is already an object, we skip the 2092 // If the argument in edx is already an object, we skip the
2095 // allocation of a heap number. 2093 // allocation of a heap number.
2096 __ test(edx, Immediate(kSmiTagMask)); 2094 __ test(edx, Immediate(kSmiTagMask));
2097 __ j(not_zero, &skip_allocation); 2095 __ j(not_zero, &skip_allocation);
(...skipping 21 matching lines...)
2119 // Now eax can be overwritten losing one of the arguments as we are 2117 // Now eax can be overwritten losing one of the arguments as we are
2120 // now done and will not need it any more. 2118 // now done and will not need it any more.
2121 __ mov(eax, ebx); 2119 __ mov(eax, ebx);
2122 __ bind(&skip_allocation); 2120 __ bind(&skip_allocation);
2123 break; 2121 break;
2124 default: UNREACHABLE(); 2122 default: UNREACHABLE();
2125 } 2123 }
2126 } 2124 }
2127 2125
2128 2126
2129 void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { 2127 void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
2130 __ pop(ecx); 2128 __ pop(ecx);
2131 __ push(edx); 2129 __ push(edx);
2132 __ push(eax); 2130 __ push(eax);
2133 __ push(ecx); 2131 __ push(ecx);
2134 } 2132 }
2135 2133
2136 2134
2137 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { 2135 void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
2138 // TAGGED case: 2136 // TAGGED case:
2139 // Input: 2137 // Input:
(...skipping 4051 matching lines...)
6191 __ Drop(1); 6189 __ Drop(1);
6192 __ ret(2 * kPointerSize); 6190 __ ret(2 * kPointerSize);
6193 } 6191 }
6194 6192
6195 6193
6196 #undef __ 6194 #undef __
6197 6195
6198 } } // namespace v8::internal 6196 } } // namespace v8::internal
6199 6197
6200 #endif // V8_TARGET_ARCH_IA32 6198 #endif // V8_TARGET_ARCH_IA32
