Chromium Code Reviews

Side by Side Diff: src/ia32/code-stubs-ia32.cc

Issue 7063017: Rename TypeRecording...Stub into ...Stub. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 7 months ago
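
The patch itself is mechanical: the TypeRecording prefix on the stub classes and the TR prefix on their IC helpers are dropped, so TypeRecordingUnaryOpStub/TRUnaryOpIC become UnaryOpStub/UnaryOpIC, and likewise for the binary variants. A minimal sketch of what a call site looks like before and after the rename (the wrapper function below is hypothetical and only for illustration; the class and enum names are the ones touched by this patch):

// Before this patch (hypothetical caller):
//   Handle<Code> code = GetTypeRecordingUnaryOpStub(key, TRUnaryOpIC::SMI);
//
// After this patch, the same lookup uses the shortened names:
Handle<Code> GetNegateStubForSmi(int key) {
  UnaryOpStub stub(key, UnaryOpIC::SMI);  // type feedback recorded so far: operand is a smi
  return stub.GetCode();                  // compile (or fetch) the specialized stub code
}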
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 499 matching lines...)
510 __ j(greater, &negative, Label::kNear); 510 __ j(greater, &negative, Label::kNear);
511 __ mov(ecx, scratch2); 511 __ mov(ecx, scratch2);
512 __ jmp(&done, Label::kNear); 512 __ jmp(&done, Label::kNear);
513 __ bind(&negative); 513 __ bind(&negative);
514 __ sub(ecx, Operand(scratch2)); 514 __ sub(ecx, Operand(scratch2));
515 __ bind(&done); 515 __ bind(&done);
516 } 516 }
517 } 517 }
518 518
519 519
520 Handle<Code> GetTypeRecordingUnaryOpStub(int key, 520 Handle<Code> GetUnaryOpStub(int key, UnaryOpIC::TypeInfo type_info) {
521 TRUnaryOpIC::TypeInfo type_info) { 521 UnaryOpStub stub(key, type_info);
522 TypeRecordingUnaryOpStub stub(key, type_info);
523 return stub.GetCode(); 522 return stub.GetCode();
524 } 523 }
525 524
526 525
527 const char* TypeRecordingUnaryOpStub::GetName() { 526 const char* UnaryOpStub::GetName() {
528 if (name_ != NULL) return name_; 527 if (name_ != NULL) return name_;
529 const int kMaxNameLength = 100; 528 const int kMaxNameLength = 100;
530 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( 529 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
531 kMaxNameLength); 530 kMaxNameLength);
532 if (name_ == NULL) return "OOM"; 531 if (name_ == NULL) return "OOM";
533 const char* op_name = Token::Name(op_); 532 const char* op_name = Token::Name(op_);
534 const char* overwrite_name = NULL; // Make g++ happy. 533 const char* overwrite_name = NULL; // Make g++ happy.
535 switch (mode_) { 534 switch (mode_) {
536 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; 535 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
537 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; 536 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
538 } 537 }
539 538
540 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), 539 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
541 "TypeRecordingUnaryOpStub_%s_%s_%s", 540 "UnaryOpStub_%s_%s_%s",
542 op_name, 541 op_name,
543 overwrite_name, 542 overwrite_name,
544 TRUnaryOpIC::GetName(operand_type_)); 543 UnaryOpIC::GetName(operand_type_));
545 return name_; 544 return name_;
546 } 545 }
547 546
548 547
549 // TODO(svenpanne): Use virtual functions instead of switch. 548 // TODO(svenpanne): Use virtual functions instead of switch.
550 void TypeRecordingUnaryOpStub::Generate(MacroAssembler* masm) { 549 void UnaryOpStub::Generate(MacroAssembler* masm) {
551 switch (operand_type_) { 550 switch (operand_type_) {
552 case TRUnaryOpIC::UNINITIALIZED: 551 case UnaryOpIC::UNINITIALIZED:
553 GenerateTypeTransition(masm); 552 GenerateTypeTransition(masm);
554 break; 553 break;
555 case TRUnaryOpIC::SMI: 554 case UnaryOpIC::SMI:
556 GenerateSmiStub(masm); 555 GenerateSmiStub(masm);
557 break; 556 break;
558 case TRUnaryOpIC::HEAP_NUMBER: 557 case UnaryOpIC::HEAP_NUMBER:
559 GenerateHeapNumberStub(masm); 558 GenerateHeapNumberStub(masm);
560 break; 559 break;
561 case TRUnaryOpIC::GENERIC: 560 case UnaryOpIC::GENERIC:
562 GenerateGenericStub(masm); 561 GenerateGenericStub(masm);
563 break; 562 break;
564 } 563 }
565 } 564 }
566 565
567 566
568 void TypeRecordingUnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { 567 void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
569 __ pop(ecx); // Save return address. 568 __ pop(ecx); // Save return address.
570 __ push(eax); 569 __ push(eax);
571 // the argument is now on top. 570 // the argument is now on top.
572 // Push this stub's key. Although the operation and the type info are 571 // Push this stub's key. Although the operation and the type info are
573 // encoded into the key, the encoding is opaque, so push them too. 572 // encoded into the key, the encoding is opaque, so push them too.
574 __ push(Immediate(Smi::FromInt(MinorKey()))); 573 __ push(Immediate(Smi::FromInt(MinorKey())));
575 __ push(Immediate(Smi::FromInt(op_))); 574 __ push(Immediate(Smi::FromInt(op_)));
576 __ push(Immediate(Smi::FromInt(operand_type_))); 575 __ push(Immediate(Smi::FromInt(operand_type_)));
577 576
578 __ push(ecx); // Push return address. 577 __ push(ecx); // Push return address.
579 578
580 // Patch the caller to an appropriate specialized stub and return the 579 // Patch the caller to an appropriate specialized stub and return the
581 // operation result to the caller of the stub. 580 // operation result to the caller of the stub.
582 __ TailCallExternalReference( 581 __ TailCallExternalReference(
583 ExternalReference(IC_Utility(IC::kTypeRecordingUnaryOp_Patch), 582 ExternalReference(IC_Utility(IC::kUnaryOp_Patch),
584 masm->isolate()), 583 masm->isolate()), 4, 1);
585 4,
586 1);
587 } 584 }
588 585
589 586
590 // TODO(svenpanne): Use virtual functions instead of switch. 587 // TODO(svenpanne): Use virtual functions instead of switch.
591 void TypeRecordingUnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { 588 void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
592 switch (op_) { 589 switch (op_) {
593 case Token::SUB: 590 case Token::SUB:
594 GenerateSmiStubSub(masm); 591 GenerateSmiStubSub(masm);
595 break; 592 break;
596 case Token::BIT_NOT: 593 case Token::BIT_NOT:
597 GenerateSmiStubBitNot(masm); 594 GenerateSmiStubBitNot(masm);
598 break; 595 break;
599 default: 596 default:
600 UNREACHABLE(); 597 UNREACHABLE();
601 } 598 }
602 } 599 }
603 600
604 601
605 void TypeRecordingUnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { 602 void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
606 Label non_smi, undo, slow; 603 Label non_smi, undo, slow;
607 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, 604 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow,
608 Label::kNear, Label::kNear, Label::kNear); 605 Label::kNear, Label::kNear, Label::kNear);
609 __ bind(&undo); 606 __ bind(&undo);
610 GenerateSmiCodeUndo(masm); 607 GenerateSmiCodeUndo(masm);
611 __ bind(&non_smi); 608 __ bind(&non_smi);
612 __ bind(&slow); 609 __ bind(&slow);
613 GenerateTypeTransition(masm); 610 GenerateTypeTransition(masm);
614 } 611 }
615 612
616 613
617 void TypeRecordingUnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { 614 void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
618 Label non_smi; 615 Label non_smi;
619 GenerateSmiCodeBitNot(masm, &non_smi); 616 GenerateSmiCodeBitNot(masm, &non_smi);
620 __ bind(&non_smi); 617 __ bind(&non_smi);
621 GenerateTypeTransition(masm); 618 GenerateTypeTransition(masm);
622 } 619 }
623 620
624 621
625 void TypeRecordingUnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, 622 void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
626 Label* non_smi, 623 Label* non_smi,
627 Label* undo, 624 Label* undo,
628 Label* slow, 625 Label* slow,
629 Label::Distance non_smi_near, 626 Label::Distance non_smi_near,
630 Label::Distance undo_near, 627 Label::Distance undo_near,
631 Label::Distance slow_near) { 628 Label::Distance slow_near) {
632 // Check whether the value is a smi. 629 // Check whether the value is a smi.
633 __ test(eax, Immediate(kSmiTagMask)); 630 __ test(eax, Immediate(kSmiTagMask));
634 __ j(not_zero, non_smi, non_smi_near); 631 __ j(not_zero, non_smi, non_smi_near);
635 632
636 // We can't handle -0 with smis, so use a type transition for that case. 633 // We can't handle -0 with smis, so use a type transition for that case.
637 __ test(eax, Operand(eax)); 634 __ test(eax, Operand(eax));
638 __ j(zero, slow, slow_near); 635 __ j(zero, slow, slow_near);
639 636
640 // Try optimistic subtraction '0 - value', saving operand in eax for undo. 637 // Try optimistic subtraction '0 - value', saving operand in eax for undo.
641 __ mov(edx, Operand(eax)); 638 __ mov(edx, Operand(eax));
642 __ Set(eax, Immediate(0)); 639 __ Set(eax, Immediate(0));
643 __ sub(eax, Operand(edx)); 640 __ sub(eax, Operand(edx));
644 __ j(overflow, undo, undo_near); 641 __ j(overflow, undo, undo_near);
645 __ ret(0); 642 __ ret(0);
646 } 643 }
647 644
648 645
649 void TypeRecordingUnaryOpStub::GenerateSmiCodeBitNot( 646 void UnaryOpStub::GenerateSmiCodeBitNot(
650 MacroAssembler* masm, 647 MacroAssembler* masm,
651 Label* non_smi, 648 Label* non_smi,
652 Label::Distance non_smi_near) { 649 Label::Distance non_smi_near) {
653 // Check whether the value is a smi. 650 // Check whether the value is a smi.
654 __ test(eax, Immediate(kSmiTagMask)); 651 __ test(eax, Immediate(kSmiTagMask));
655 __ j(not_zero, non_smi, non_smi_near); 652 __ j(not_zero, non_smi, non_smi_near);
656 653
657 // Flip bits and revert inverted smi-tag. 654 // Flip bits and revert inverted smi-tag.
658 __ not_(eax); 655 __ not_(eax);
659 __ and_(eax, ~kSmiTagMask); 656 __ and_(eax, ~kSmiTagMask);
660 __ ret(0); 657 __ ret(0);
661 } 658 }
662 659
663 660
664 void TypeRecordingUnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) { 661 void UnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) {
665 __ mov(eax, Operand(edx)); 662 __ mov(eax, Operand(edx));
666 } 663 }
667 664
668 665
669 // TODO(svenpanne): Use virtual functions instead of switch. 666 // TODO(svenpanne): Use virtual functions instead of switch.
670 void TypeRecordingUnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { 667 void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
671 switch (op_) { 668 switch (op_) {
672 case Token::SUB: 669 case Token::SUB:
673 GenerateHeapNumberStubSub(masm); 670 GenerateHeapNumberStubSub(masm);
674 break; 671 break;
675 case Token::BIT_NOT: 672 case Token::BIT_NOT:
676 GenerateHeapNumberStubBitNot(masm); 673 GenerateHeapNumberStubBitNot(masm);
677 break; 674 break;
678 default: 675 default:
679 UNREACHABLE(); 676 UNREACHABLE();
680 } 677 }
681 } 678 }
682 679
683 680
684 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { 681 void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) {
685 Label non_smi, undo, slow, call_builtin; 682 Label non_smi, undo, slow, call_builtin;
686 GenerateSmiCodeSub(masm, &non_smi, &undo, &call_builtin, Label::kNear); 683 GenerateSmiCodeSub(masm, &non_smi, &undo, &call_builtin, Label::kNear);
687 __ bind(&non_smi); 684 __ bind(&non_smi);
688 GenerateHeapNumberCodeSub(masm, &slow); 685 GenerateHeapNumberCodeSub(masm, &slow);
689 __ bind(&undo); 686 __ bind(&undo);
690 GenerateSmiCodeUndo(masm); 687 GenerateSmiCodeUndo(masm);
691 __ bind(&slow); 688 __ bind(&slow);
692 GenerateTypeTransition(masm); 689 GenerateTypeTransition(masm);
693 __ bind(&call_builtin); 690 __ bind(&call_builtin);
694 GenerateGenericCodeFallback(masm); 691 GenerateGenericCodeFallback(masm);
695 } 692 }
696 693
697 694
698 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( 695 void UnaryOpStub::GenerateHeapNumberStubBitNot(
699 MacroAssembler* masm) { 696 MacroAssembler* masm) {
700 Label non_smi, slow; 697 Label non_smi, slow;
701 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); 698 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
702 __ bind(&non_smi); 699 __ bind(&non_smi);
703 GenerateHeapNumberCodeBitNot(masm, &slow); 700 GenerateHeapNumberCodeBitNot(masm, &slow);
704 __ bind(&slow); 701 __ bind(&slow);
705 GenerateTypeTransition(masm); 702 GenerateTypeTransition(masm);
706 } 703 }
707 704
708 705
709 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, 706 void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
710 Label* slow) { 707 Label* slow) {
711 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); 708 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
712 __ cmp(edx, masm->isolate()->factory()->heap_number_map()); 709 __ cmp(edx, masm->isolate()->factory()->heap_number_map());
713 __ j(not_equal, slow); 710 __ j(not_equal, slow);
714 711
715 if (mode_ == UNARY_OVERWRITE) { 712 if (mode_ == UNARY_OVERWRITE) {
716 __ xor_(FieldOperand(eax, HeapNumber::kExponentOffset), 713 __ xor_(FieldOperand(eax, HeapNumber::kExponentOffset),
717 Immediate(HeapNumber::kSignMask)); // Flip sign. 714 Immediate(HeapNumber::kSignMask)); // Flip sign.
718 } else { 715 } else {
719 __ mov(edx, Operand(eax)); 716 __ mov(edx, Operand(eax));
720 // edx: operand 717 // edx: operand
(...skipping 14 matching lines...)
735 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset)); 732 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
736 __ xor_(ecx, HeapNumber::kSignMask); // Flip sign. 733 __ xor_(ecx, HeapNumber::kSignMask); // Flip sign.
737 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx); 734 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
738 __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset)); 735 __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
739 __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx); 736 __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
740 } 737 }
741 __ ret(0); 738 __ ret(0);
742 } 739 }
743 740
744 741
745 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeBitNot( 742 void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
746 MacroAssembler* masm, 743 Label* slow) {
747 Label* slow) {
748 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); 744 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
749 __ cmp(edx, masm->isolate()->factory()->heap_number_map()); 745 __ cmp(edx, masm->isolate()->factory()->heap_number_map());
750 __ j(not_equal, slow); 746 __ j(not_equal, slow);
751 747
752 // Convert the heap number in eax to an untagged integer in ecx. 748 // Convert the heap number in eax to an untagged integer in ecx.
753 IntegerConvert(masm, eax, CpuFeatures::IsSupported(SSE3), slow); 749 IntegerConvert(masm, eax, CpuFeatures::IsSupported(SSE3), slow);
754 750
755 // Do the bitwise operation and check if the result fits in a smi. 751 // Do the bitwise operation and check if the result fits in a smi.
756 Label try_float; 752 Label try_float;
757 __ not_(ecx); 753 __ not_(ecx);
(...skipping 38 matching lines...)
796 __ push(ecx); 792 __ push(ecx);
797 __ fild_s(Operand(esp, 0)); 793 __ fild_s(Operand(esp, 0));
798 __ pop(ecx); 794 __ pop(ecx);
799 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); 795 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
800 } 796 }
801 __ ret(0); 797 __ ret(0);
802 } 798 }
803 799
804 800
805 // TODO(svenpanne): Use virtual functions instead of switch. 801 // TODO(svenpanne): Use virtual functions instead of switch.
806 void TypeRecordingUnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { 802 void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
807 switch (op_) { 803 switch (op_) {
808 case Token::SUB: 804 case Token::SUB:
809 GenerateGenericStubSub(masm); 805 GenerateGenericStubSub(masm);
810 break; 806 break;
811 case Token::BIT_NOT: 807 case Token::BIT_NOT:
812 GenerateGenericStubBitNot(masm); 808 GenerateGenericStubBitNot(masm);
813 break; 809 break;
814 default: 810 default:
815 UNREACHABLE(); 811 UNREACHABLE();
816 } 812 }
817 } 813 }
818 814
819 815
820 void TypeRecordingUnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { 816 void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
821 Label non_smi, undo, slow; 817 Label non_smi, undo, slow;
822 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, Label::kNear); 818 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, Label::kNear);
823 __ bind(&non_smi); 819 __ bind(&non_smi);
824 GenerateHeapNumberCodeSub(masm, &slow); 820 GenerateHeapNumberCodeSub(masm, &slow);
825 __ bind(&undo); 821 __ bind(&undo);
826 GenerateSmiCodeUndo(masm); 822 GenerateSmiCodeUndo(masm);
827 __ bind(&slow); 823 __ bind(&slow);
828 GenerateGenericCodeFallback(masm); 824 GenerateGenericCodeFallback(masm);
829 } 825 }
830 826
831 827
832 void TypeRecordingUnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { 828 void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
833 Label non_smi, slow; 829 Label non_smi, slow;
834 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); 830 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
835 __ bind(&non_smi); 831 __ bind(&non_smi);
836 GenerateHeapNumberCodeBitNot(masm, &slow); 832 GenerateHeapNumberCodeBitNot(masm, &slow);
837 __ bind(&slow); 833 __ bind(&slow);
838 GenerateGenericCodeFallback(masm); 834 GenerateGenericCodeFallback(masm);
839 } 835 }
840 836
841 837
842 void TypeRecordingUnaryOpStub::GenerateGenericCodeFallback( 838 void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
843 MacroAssembler* masm) {
844 // Handle the slow case by jumping to the corresponding JavaScript builtin. 839 // Handle the slow case by jumping to the corresponding JavaScript builtin.
845 __ pop(ecx); // pop return address. 840 __ pop(ecx); // pop return address.
846 __ push(eax); 841 __ push(eax);
847 __ push(ecx); // push return address 842 __ push(ecx); // push return address
848 switch (op_) { 843 switch (op_) {
849 case Token::SUB: 844 case Token::SUB:
850 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); 845 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
851 break; 846 break;
852 case Token::BIT_NOT: 847 case Token::BIT_NOT:
853 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); 848 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
854 break; 849 break;
855 default: 850 default:
856 UNREACHABLE(); 851 UNREACHABLE();
857 } 852 }
858 } 853 }
859 854
860 855
861 Handle<Code> GetTypeRecordingBinaryOpStub(int key, 856 Handle<Code> GetBinaryOpStub(int key,
862 TRBinaryOpIC::TypeInfo type_info, 857 BinaryOpIC::TypeInfo type_info,
863 TRBinaryOpIC::TypeInfo result_type_info) { 858 BinaryOpIC::TypeInfo result_type_info) {
864 TypeRecordingBinaryOpStub stub(key, type_info, result_type_info); 859 BinaryOpStub stub(key, type_info, result_type_info);
865 return stub.GetCode(); 860 return stub.GetCode();
866 } 861 }
867 862
868 863
869 void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { 864 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
870 __ pop(ecx); // Save return address. 865 __ pop(ecx); // Save return address.
871 __ push(edx); 866 __ push(edx);
872 __ push(eax); 867 __ push(eax);
873 // Left and right arguments are now on top. 868 // Left and right arguments are now on top.
874 // Push this stub's key. Although the operation and the type info are 869 // Push this stub's key. Although the operation and the type info are
875 // encoded into the key, the encoding is opaque, so push them too. 870 // encoded into the key, the encoding is opaque, so push them too.
876 __ push(Immediate(Smi::FromInt(MinorKey()))); 871 __ push(Immediate(Smi::FromInt(MinorKey())));
877 __ push(Immediate(Smi::FromInt(op_))); 872 __ push(Immediate(Smi::FromInt(op_)));
878 __ push(Immediate(Smi::FromInt(operands_type_))); 873 __ push(Immediate(Smi::FromInt(operands_type_)));
879 874
880 __ push(ecx); // Push return address. 875 __ push(ecx); // Push return address.
881 876
882 // Patch the caller to an appropriate specialized stub and return the 877 // Patch the caller to an appropriate specialized stub and return the
883 // operation result to the caller of the stub. 878 // operation result to the caller of the stub.
884 __ TailCallExternalReference( 879 __ TailCallExternalReference(
885 ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch), 880 ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
886 masm->isolate()), 881 masm->isolate()),
887 5, 882 5,
888 1); 883 1);
889 } 884 }
890 885
891 886
892 // Prepare for a type transition runtime call when the args are already on 887 // Prepare for a type transition runtime call when the args are already on
893 // the stack, under the return address. 888 // the stack, under the return address.
894 void TypeRecordingBinaryOpStub::GenerateTypeTransitionWithSavedArgs( 889 void BinaryOpStub::GenerateTypeTransitionWithSavedArgs(MacroAssembler* masm) {
895 MacroAssembler* masm) {
896 __ pop(ecx); // Save return address. 890 __ pop(ecx); // Save return address.
897 // Left and right arguments are already on top of the stack. 891 // Left and right arguments are already on top of the stack.
898 // Push this stub's key. Although the operation and the type info are 892 // Push this stub's key. Although the operation and the type info are
899 // encoded into the key, the encoding is opaque, so push them too. 893 // encoded into the key, the encoding is opaque, so push them too.
900 __ push(Immediate(Smi::FromInt(MinorKey()))); 894 __ push(Immediate(Smi::FromInt(MinorKey())));
901 __ push(Immediate(Smi::FromInt(op_))); 895 __ push(Immediate(Smi::FromInt(op_)));
902 __ push(Immediate(Smi::FromInt(operands_type_))); 896 __ push(Immediate(Smi::FromInt(operands_type_)));
903 897
904 __ push(ecx); // Push return address. 898 __ push(ecx); // Push return address.
905 899
906 // Patch the caller to an appropriate specialized stub and return the 900 // Patch the caller to an appropriate specialized stub and return the
907 // operation result to the caller of the stub. 901 // operation result to the caller of the stub.
908 __ TailCallExternalReference( 902 __ TailCallExternalReference(
909 ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch), 903 ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
910 masm->isolate()), 904 masm->isolate()),
911 5, 905 5,
912 1); 906 1);
913 } 907 }
914 908
915 909
916 void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) { 910 void BinaryOpStub::Generate(MacroAssembler* masm) {
917 switch (operands_type_) { 911 switch (operands_type_) {
918 case TRBinaryOpIC::UNINITIALIZED: 912 case BinaryOpIC::UNINITIALIZED:
919 GenerateTypeTransition(masm); 913 GenerateTypeTransition(masm);
920 break; 914 break;
921 case TRBinaryOpIC::SMI: 915 case BinaryOpIC::SMI:
922 GenerateSmiStub(masm); 916 GenerateSmiStub(masm);
923 break; 917 break;
924 case TRBinaryOpIC::INT32: 918 case BinaryOpIC::INT32:
925 GenerateInt32Stub(masm); 919 GenerateInt32Stub(masm);
926 break; 920 break;
927 case TRBinaryOpIC::HEAP_NUMBER: 921 case BinaryOpIC::HEAP_NUMBER:
928 GenerateHeapNumberStub(masm); 922 GenerateHeapNumberStub(masm);
929 break; 923 break;
930 case TRBinaryOpIC::ODDBALL: 924 case BinaryOpIC::ODDBALL:
931 GenerateOddballStub(masm); 925 GenerateOddballStub(masm);
932 break; 926 break;
933 case TRBinaryOpIC::BOTH_STRING: 927 case BinaryOpIC::BOTH_STRING:
934 GenerateBothStringStub(masm); 928 GenerateBothStringStub(masm);
935 break; 929 break;
936 case TRBinaryOpIC::STRING: 930 case BinaryOpIC::STRING:
937 GenerateStringStub(masm); 931 GenerateStringStub(masm);
938 break; 932 break;
939 case TRBinaryOpIC::GENERIC: 933 case BinaryOpIC::GENERIC:
940 GenerateGeneric(masm); 934 GenerateGeneric(masm);
941 break; 935 break;
942 default: 936 default:
943 UNREACHABLE(); 937 UNREACHABLE();
944 } 938 }
945 } 939 }
946 940
947 941
948 const char* TypeRecordingBinaryOpStub::GetName() { 942 const char* BinaryOpStub::GetName() {
949 if (name_ != NULL) return name_; 943 if (name_ != NULL) return name_;
950 const int kMaxNameLength = 100; 944 const int kMaxNameLength = 100;
951 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( 945 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
952 kMaxNameLength); 946 kMaxNameLength);
953 if (name_ == NULL) return "OOM"; 947 if (name_ == NULL) return "OOM";
954 const char* op_name = Token::Name(op_); 948 const char* op_name = Token::Name(op_);
955 const char* overwrite_name; 949 const char* overwrite_name;
956 switch (mode_) { 950 switch (mode_) {
957 case NO_OVERWRITE: overwrite_name = "Alloc"; break; 951 case NO_OVERWRITE: overwrite_name = "Alloc"; break;
958 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break; 952 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
959 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break; 953 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
960 default: overwrite_name = "UnknownOverwrite"; break; 954 default: overwrite_name = "UnknownOverwrite"; break;
961 } 955 }
962 956
963 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), 957 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
964 "TypeRecordingBinaryOpStub_%s_%s_%s", 958 "BinaryOpStub_%s_%s_%s",
965 op_name, 959 op_name,
966 overwrite_name, 960 overwrite_name,
967 TRBinaryOpIC::GetName(operands_type_)); 961 BinaryOpIC::GetName(operands_type_));
968 return name_; 962 return name_;
969 } 963 }
970 964
971 965
972 void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, 966 void BinaryOpStub::GenerateSmiCode(
967 MacroAssembler* masm,
973 Label* slow, 968 Label* slow,
974 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) { 969 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
975 // 1. Move arguments into edx, eax except for DIV and MOD, which need the 970 // 1. Move arguments into edx, eax except for DIV and MOD, which need the
976 // dividend in eax and edx free for the division. Use eax, ebx for those. 971 // dividend in eax and edx free for the division. Use eax, ebx for those.
977 Comment load_comment(masm, "-- Load arguments"); 972 Comment load_comment(masm, "-- Load arguments");
978 Register left = edx; 973 Register left = edx;
979 Register right = eax; 974 Register right = eax;
980 if (op_ == Token::DIV || op_ == Token::MOD) { 975 if (op_ == Token::DIV || op_ == Token::MOD) {
981 left = eax; 976 left = eax;
982 right = ebx; 977 right = ebx;
(...skipping 354 matching lines...)
1337 __ mov(edx, eax); 1332 __ mov(edx, eax);
1338 __ mov(eax, ebx); 1333 __ mov(eax, ebx);
1339 break; 1334 break;
1340 1335
1341 default: 1336 default:
1342 break; 1337 break;
1343 } 1338 }
1344 } 1339 }
1345 1340
1346 1341
1347 void TypeRecordingBinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { 1342 void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
1348 Label call_runtime; 1343 Label call_runtime;
1349 1344
1350 switch (op_) { 1345 switch (op_) {
1351 case Token::ADD: 1346 case Token::ADD:
1352 case Token::SUB: 1347 case Token::SUB:
1353 case Token::MUL: 1348 case Token::MUL:
1354 case Token::DIV: 1349 case Token::DIV:
1355 break; 1350 break;
1356 case Token::MOD: 1351 case Token::MOD:
1357 case Token::BIT_OR: 1352 case Token::BIT_OR:
1358 case Token::BIT_AND: 1353 case Token::BIT_AND:
1359 case Token::BIT_XOR: 1354 case Token::BIT_XOR:
1360 case Token::SAR: 1355 case Token::SAR:
1361 case Token::SHL: 1356 case Token::SHL:
1362 case Token::SHR: 1357 case Token::SHR:
1363 GenerateRegisterArgsPush(masm); 1358 GenerateRegisterArgsPush(masm);
1364 break; 1359 break;
1365 default: 1360 default:
1366 UNREACHABLE(); 1361 UNREACHABLE();
1367 } 1362 }
1368 1363
1369 if (result_type_ == TRBinaryOpIC::UNINITIALIZED || 1364 if (result_type_ == BinaryOpIC::UNINITIALIZED ||
1370 result_type_ == TRBinaryOpIC::SMI) { 1365 result_type_ == BinaryOpIC::SMI) {
1371 GenerateSmiCode(masm, &call_runtime, NO_HEAPNUMBER_RESULTS); 1366 GenerateSmiCode(masm, &call_runtime, NO_HEAPNUMBER_RESULTS);
1372 } else { 1367 } else {
1373 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); 1368 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
1374 } 1369 }
1375 __ bind(&call_runtime); 1370 __ bind(&call_runtime);
1376 switch (op_) { 1371 switch (op_) {
1377 case Token::ADD: 1372 case Token::ADD:
1378 case Token::SUB: 1373 case Token::SUB:
1379 case Token::MUL: 1374 case Token::MUL:
1380 case Token::DIV: 1375 case Token::DIV:
1381 GenerateTypeTransition(masm); 1376 GenerateTypeTransition(masm);
1382 break; 1377 break;
1383 case Token::MOD: 1378 case Token::MOD:
1384 case Token::BIT_OR: 1379 case Token::BIT_OR:
1385 case Token::BIT_AND: 1380 case Token::BIT_AND:
1386 case Token::BIT_XOR: 1381 case Token::BIT_XOR:
1387 case Token::SAR: 1382 case Token::SAR:
1388 case Token::SHL: 1383 case Token::SHL:
1389 case Token::SHR: 1384 case Token::SHR:
1390 GenerateTypeTransitionWithSavedArgs(masm); 1385 GenerateTypeTransitionWithSavedArgs(masm);
1391 break; 1386 break;
1392 default: 1387 default:
1393 UNREACHABLE(); 1388 UNREACHABLE();
1394 } 1389 }
1395 } 1390 }
1396 1391
1397 1392
1398 void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) { 1393 void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
1399 ASSERT(operands_type_ == TRBinaryOpIC::STRING); 1394 ASSERT(operands_type_ == BinaryOpIC::STRING);
1400 ASSERT(op_ == Token::ADD); 1395 ASSERT(op_ == Token::ADD);
1401 // Try to add arguments as strings, otherwise, transition to the generic 1396 // Try to add arguments as strings, otherwise, transition to the generic
1402 // TRBinaryOpIC type. 1397 // BinaryOpIC type.
1403 GenerateAddStrings(masm); 1398 GenerateAddStrings(masm);
1404 GenerateTypeTransition(masm); 1399 GenerateTypeTransition(masm);
1405 } 1400 }
1406 1401
1407 1402
1408 void TypeRecordingBinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) { 1403 void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
1409 Label call_runtime; 1404 Label call_runtime;
1410 ASSERT(operands_type_ == TRBinaryOpIC::BOTH_STRING); 1405 ASSERT(operands_type_ == BinaryOpIC::BOTH_STRING);
1411 ASSERT(op_ == Token::ADD); 1406 ASSERT(op_ == Token::ADD);
1412 // If both arguments are strings, call the string add stub. 1407 // If both arguments are strings, call the string add stub.
1413 // Otherwise, do a transition. 1408 // Otherwise, do a transition.
1414 1409
1415 // Registers containing left and right operands respectively. 1410 // Registers containing left and right operands respectively.
1416 Register left = edx; 1411 Register left = edx;
1417 Register right = eax; 1412 Register right = eax;
1418 1413
1419 // Test if left operand is a string. 1414 // Test if left operand is a string.
1420 __ test(left, Immediate(kSmiTagMask)); 1415 __ test(left, Immediate(kSmiTagMask));
1421 __ j(zero, &call_runtime); 1416 __ j(zero, &call_runtime);
1422 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx); 1417 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
1423 __ j(above_equal, &call_runtime); 1418 __ j(above_equal, &call_runtime);
1424 1419
1425 // Test if right operand is a string. 1420 // Test if right operand is a string.
1426 __ test(right, Immediate(kSmiTagMask)); 1421 __ test(right, Immediate(kSmiTagMask));
1427 __ j(zero, &call_runtime); 1422 __ j(zero, &call_runtime);
1428 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx); 1423 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
1429 __ j(above_equal, &call_runtime); 1424 __ j(above_equal, &call_runtime);
1430 1425
1431 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB); 1426 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
1432 GenerateRegisterArgsPush(masm); 1427 GenerateRegisterArgsPush(masm);
1433 __ TailCallStub(&string_add_stub); 1428 __ TailCallStub(&string_add_stub);
1434 1429
1435 __ bind(&call_runtime); 1430 __ bind(&call_runtime);
1436 GenerateTypeTransition(masm); 1431 GenerateTypeTransition(masm);
1437 } 1432 }
1438 1433
1439 1434
1440 void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) { 1435 void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
1441 Label call_runtime; 1436 Label call_runtime;
1442 ASSERT(operands_type_ == TRBinaryOpIC::INT32); 1437 ASSERT(operands_type_ == BinaryOpIC::INT32);
1443 1438
1444 // Floating point case. 1439 // Floating point case.
1445 switch (op_) { 1440 switch (op_) {
1446 case Token::ADD: 1441 case Token::ADD:
1447 case Token::SUB: 1442 case Token::SUB:
1448 case Token::MUL: 1443 case Token::MUL:
1449 case Token::DIV: { 1444 case Token::DIV: {
1450 Label not_floats; 1445 Label not_floats;
1451 Label not_int32; 1446 Label not_int32;
1452 if (CpuFeatures::IsSupported(SSE2)) { 1447 if (CpuFeatures::IsSupported(SSE2)) {
1453 CpuFeatures::Scope use_sse2(SSE2); 1448 CpuFeatures::Scope use_sse2(SSE2);
1454 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats); 1449 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
1455 FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx); 1450 FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx);
1456 switch (op_) { 1451 switch (op_) {
1457 case Token::ADD: __ addsd(xmm0, xmm1); break; 1452 case Token::ADD: __ addsd(xmm0, xmm1); break;
1458 case Token::SUB: __ subsd(xmm0, xmm1); break; 1453 case Token::SUB: __ subsd(xmm0, xmm1); break;
1459 case Token::MUL: __ mulsd(xmm0, xmm1); break; 1454 case Token::MUL: __ mulsd(xmm0, xmm1); break;
1460 case Token::DIV: __ divsd(xmm0, xmm1); break; 1455 case Token::DIV: __ divsd(xmm0, xmm1); break;
1461 default: UNREACHABLE(); 1456 default: UNREACHABLE();
1462 } 1457 }
1463 // Check result type if it is currently Int32. 1458 // Check result type if it is currently Int32.
1464 if (result_type_ <= TRBinaryOpIC::INT32) { 1459 if (result_type_ <= BinaryOpIC::INT32) {
1465 __ cvttsd2si(ecx, Operand(xmm0)); 1460 __ cvttsd2si(ecx, Operand(xmm0));
1466 __ cvtsi2sd(xmm2, Operand(ecx)); 1461 __ cvtsi2sd(xmm2, Operand(ecx));
1467 __ ucomisd(xmm0, xmm2); 1462 __ ucomisd(xmm0, xmm2);
1468 __ j(not_zero, &not_int32); 1463 __ j(not_zero, &not_int32);
1469 __ j(carry, &not_int32); 1464 __ j(carry, &not_int32);
1470 } 1465 }
1471 GenerateHeapResultAllocation(masm, &call_runtime); 1466 GenerateHeapResultAllocation(masm, &call_runtime);
1472 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); 1467 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1473 __ ret(0); 1468 __ ret(0);
1474 } else { // SSE2 not available, use FPU. 1469 } else { // SSE2 not available, use FPU.
(...skipping 157 matching lines...)
1632 break; 1627 break;
1633 case Token::SHR: 1628 case Token::SHR:
1634 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); 1629 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
1635 break; 1630 break;
1636 default: 1631 default:
1637 UNREACHABLE(); 1632 UNREACHABLE();
1638 } 1633 }
1639 } 1634 }
1640 1635
1641 1636
1642 void TypeRecordingBinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { 1637 void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
1643 if (op_ == Token::ADD) { 1638 if (op_ == Token::ADD) {
1644 // Handle string addition here, because it is the only operation 1639 // Handle string addition here, because it is the only operation
1645 // that does not do a ToNumber conversion on the operands. 1640 // that does not do a ToNumber conversion on the operands.
1646 GenerateAddStrings(masm); 1641 GenerateAddStrings(masm);
1647 } 1642 }
1648 1643
1649 Factory* factory = masm->isolate()->factory(); 1644 Factory* factory = masm->isolate()->factory();
1650 1645
1651 // Convert odd ball arguments to numbers. 1646 // Convert odd ball arguments to numbers.
1652 Label check, done; 1647 Label check, done;
(...skipping 12 matching lines...)
1665 __ xor_(eax, Operand(eax)); 1660 __ xor_(eax, Operand(eax));
1666 } else { 1661 } else {
1667 __ mov(eax, Immediate(factory->nan_value())); 1662 __ mov(eax, Immediate(factory->nan_value()));
1668 } 1663 }
1669 __ bind(&done); 1664 __ bind(&done);
1670 1665
1671 GenerateHeapNumberStub(masm); 1666 GenerateHeapNumberStub(masm);
1672 } 1667 }
1673 1668
1674 1669
1675 void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { 1670 void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
1676 Label call_runtime; 1671 Label call_runtime;
1677 1672
1678 // Floating point case. 1673 // Floating point case.
1679 switch (op_) { 1674 switch (op_) {
1680 case Token::ADD: 1675 case Token::ADD:
1681 case Token::SUB: 1676 case Token::SUB:
1682 case Token::MUL: 1677 case Token::MUL:
1683 case Token::DIV: { 1678 case Token::DIV: {
1684 Label not_floats; 1679 Label not_floats;
1685 if (CpuFeatures::IsSupported(SSE2)) { 1680 if (CpuFeatures::IsSupported(SSE2)) {
(...skipping 160 matching lines...)
1846 break; 1841 break;
1847 case Token::SHR: 1842 case Token::SHR:
1848 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); 1843 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
1849 break; 1844 break;
1850 default: 1845 default:
1851 UNREACHABLE(); 1846 UNREACHABLE();
1852 } 1847 }
1853 } 1848 }
1854 1849
1855 1850
1856 void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) { 1851 void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
1857 Label call_runtime; 1852 Label call_runtime;
1858 1853
1859 Counters* counters = masm->isolate()->counters(); 1854 Counters* counters = masm->isolate()->counters();
1860 __ IncrementCounter(counters->generic_binary_stub_calls(), 1); 1855 __ IncrementCounter(counters->generic_binary_stub_calls(), 1);
1861 1856
1862 switch (op_) { 1857 switch (op_) {
1863 case Token::ADD: 1858 case Token::ADD:
1864 case Token::SUB: 1859 case Token::SUB:
1865 case Token::MUL: 1860 case Token::MUL:
1866 case Token::DIV: 1861 case Token::DIV:
(...skipping 176 matching lines...)
2043 break; 2038 break;
2044 case Token::SHR: 2039 case Token::SHR:
2045 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); 2040 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
2046 break; 2041 break;
2047 default: 2042 default:
2048 UNREACHABLE(); 2043 UNREACHABLE();
2049 } 2044 }
2050 } 2045 }
2051 2046
2052 2047
2053 void TypeRecordingBinaryOpStub::GenerateAddStrings(MacroAssembler* masm) { 2048 void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
2054 ASSERT(op_ == Token::ADD); 2049 ASSERT(op_ == Token::ADD);
2055 Label left_not_string, call_runtime; 2050 Label left_not_string, call_runtime;
2056 2051
2057 // Registers containing left and right operands respectively. 2052 // Registers containing left and right operands respectively.
2058 Register left = edx; 2053 Register left = edx;
2059 Register right = eax; 2054 Register right = eax;
2060 2055
2061 // Test if left operand is a string. 2056 // Test if left operand is a string.
2062 __ test(left, Immediate(kSmiTagMask)); 2057 __ test(left, Immediate(kSmiTagMask));
2063 __ j(zero, &left_not_string, Label::kNear); 2058 __ j(zero, &left_not_string, Label::kNear);
(...skipping 13 matching lines...)
2077 2072
2078 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); 2073 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB);
2079 GenerateRegisterArgsPush(masm); 2074 GenerateRegisterArgsPush(masm);
2080 __ TailCallStub(&string_add_right_stub); 2075 __ TailCallStub(&string_add_right_stub);
2081 2076
2082 // Neither argument is a string. 2077 // Neither argument is a string.
2083 __ bind(&call_runtime); 2078 __ bind(&call_runtime);
2084 } 2079 }
2085 2080
2086 2081
2087 void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation( 2082 void BinaryOpStub::GenerateHeapResultAllocation(
2088 MacroAssembler* masm, 2083 MacroAssembler* masm,
2089 Label* alloc_failure) { 2084 Label* alloc_failure) {
2090 Label skip_allocation; 2085 Label skip_allocation;
2091 OverwriteMode mode = mode_; 2086 OverwriteMode mode = mode_;
2092 switch (mode) { 2087 switch (mode) {
2093 case OVERWRITE_LEFT: { 2088 case OVERWRITE_LEFT: {
2094 // If the argument in edx is already an object, we skip the 2089 // If the argument in edx is already an object, we skip the
2095 // allocation of a heap number. 2090 // allocation of a heap number.
2096 __ test(edx, Immediate(kSmiTagMask)); 2091 __ test(edx, Immediate(kSmiTagMask));
2097 __ j(not_zero, &skip_allocation); 2092 __ j(not_zero, &skip_allocation);
(...skipping 21 matching lines...) Expand all
2119 // Now eax can be overwritten losing one of the arguments as we are 2114 // Now eax can be overwritten losing one of the arguments as we are
2120 // now done and will not need it any more. 2115 // now done and will not need it any more.
2121 __ mov(eax, ebx); 2116 __ mov(eax, ebx);
2122 __ bind(&skip_allocation); 2117 __ bind(&skip_allocation);
2123 break; 2118 break;
2124 default: UNREACHABLE(); 2119 default: UNREACHABLE();
2125 } 2120 }
2126 } 2121 }
2127 2122
2128 2123
2129 void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { 2124 void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
2130 __ pop(ecx); 2125 __ pop(ecx);
2131 __ push(edx); 2126 __ push(edx);
2132 __ push(eax); 2127 __ push(eax);
2133 __ push(ecx); 2128 __ push(ecx);
2134 } 2129 }
2135 2130
2136 2131
2137 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { 2132 void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
2138 // TAGGED case: 2133 // TAGGED case:
2139 // Input: 2134 // Input:
(...skipping 4051 matching lines...)
6191 __ Drop(1); 6186 __ Drop(1);
6192 __ ret(2 * kPointerSize); 6187 __ ret(2 * kPointerSize);
6193 } 6188 }
6194 6189
6195 6190
6196 #undef __ 6191 #undef __
6197 6192
6198 } } // namespace v8::internal 6193 } } // namespace v8::internal
6199 6194
6200 #endif // V8_TARGET_ARCH_IA32 6195 #endif // V8_TARGET_ARCH_IA32