OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 569 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
580 __ And(t0, a3, Operand(kIsNotStringMask)); | 580 __ And(t0, a3, Operand(kIsNotStringMask)); |
581 __ Branch(&convert_argument, ne, t0, Operand(zero_reg)); | 581 __ Branch(&convert_argument, ne, t0, Operand(zero_reg)); |
582 __ mov(argument, a0); | 582 __ mov(argument, a0); |
583 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0); | 583 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0); |
584 __ Branch(&argument_is_string); | 584 __ Branch(&argument_is_string); |
585 | 585 |
586 // Invoke the conversion builtin and put the result into a2. | 586 // Invoke the conversion builtin and put the result into a2. |
587 __ bind(&convert_argument); | 587 __ bind(&convert_argument); |
588 __ push(function); // Preserve the function. | 588 __ push(function); // Preserve the function. |
589 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0); | 589 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0); |
590 __ EnterInternalFrame(); | 590 { |
591 __ push(v0); | 591 FrameScope scope(masm, StackFrame::INTERNAL); |
592 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); | 592 __ push(v0); |
593 __ LeaveInternalFrame(); | 593 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); |
| 594 } |
594 __ pop(function); | 595 __ pop(function); |
595 __ mov(argument, v0); | 596 __ mov(argument, v0); |
596 __ Branch(&argument_is_string); | 597 __ Branch(&argument_is_string); |
597 | 598 |
598 // Load the empty string into a2, remove the receiver from the | 599 // Load the empty string into a2, remove the receiver from the |
599 // stack, and jump back to the case where the argument is a string. | 600 // stack, and jump back to the case where the argument is a string. |
600 __ bind(&no_arguments); | 601 __ bind(&no_arguments); |
601 __ LoadRoot(argument, Heap::kEmptyStringRootIndex); | 602 __ LoadRoot(argument, Heap::kEmptyStringRootIndex); |
602 __ Drop(1); | 603 __ Drop(1); |
603 __ Branch(&argument_is_string); | 604 __ Branch(&argument_is_string); |
604 | 605 |
605 // At this point the argument is already a string. Call runtime to | 606 // At this point the argument is already a string. Call runtime to |
606 // create a string wrapper. | 607 // create a string wrapper. |
607 __ bind(&gc_required); | 608 __ bind(&gc_required); |
608 __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0); | 609 __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0); |
609 __ EnterInternalFrame(); | 610 { |
610 __ push(argument); | 611 FrameScope scope(masm, StackFrame::INTERNAL); |
611 __ CallRuntime(Runtime::kNewStringWrapper, 1); | 612 __ push(argument); |
612 __ LeaveInternalFrame(); | 613 __ CallRuntime(Runtime::kNewStringWrapper, 1); |
| 614 } |
613 __ Ret(); | 615 __ Ret(); |
614 } | 616 } |
615 | 617 |
616 | 618 |
617 void Builtins::Generate_JSConstructCall(MacroAssembler* masm) { | 619 void Builtins::Generate_JSConstructCall(MacroAssembler* masm) { |
618 // ----------- S t a t e ------------- | 620 // ----------- S t a t e ------------- |
619 // -- a0 : number of arguments | 621 // -- a0 : number of arguments |
620 // -- a1 : constructor function | 622 // -- a1 : constructor function |
621 // -- ra : return address | 623 // -- ra : return address |
622 // -- sp[...]: constructor arguments | 624 // -- sp[...]: constructor arguments |
623 // ----------------------------------- | 625 // ----------------------------------- |
624 | 626 |
625 Label non_function_call; | 627 Label slow, non_function_call; |
626 // Check that the function is not a smi. | 628 // Check that the function is not a smi. |
627 __ And(t0, a1, Operand(kSmiTagMask)); | 629 __ And(t0, a1, Operand(kSmiTagMask)); |
628 __ Branch(&non_function_call, eq, t0, Operand(zero_reg)); | 630 __ Branch(&non_function_call, eq, t0, Operand(zero_reg)); |
629 // Check that the function is a JSFunction. | 631 // Check that the function is a JSFunction. |
630 __ GetObjectType(a1, a2, a2); | 632 __ GetObjectType(a1, a2, a2); |
631 __ Branch(&non_function_call, ne, a2, Operand(JS_FUNCTION_TYPE)); | 633 __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE)); |
632 | 634 |
633 // Jump to the function-specific construct stub. | 635 // Jump to the function-specific construct stub. |
634 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 636 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
635 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kConstructStubOffset)); | 637 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kConstructStubOffset)); |
636 __ Addu(t9, a2, Operand(Code::kHeaderSize - kHeapObjectTag)); | 638 __ Addu(t9, a2, Operand(Code::kHeaderSize - kHeapObjectTag)); |
637 __ Jump(t9); | 639 __ Jump(t9); |
638 | 640 |
639 // a0: number of arguments | 641 // a0: number of arguments |
640 // a1: called object | 642 // a1: called object |
| 643 // a2: object type |
| 644 Label do_call; |
| 645 __ bind(&slow); |
| 646 __ Branch(&non_function_call, ne, a2, Operand(JS_FUNCTION_PROXY_TYPE)); |
| 647 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); |
| 648 __ jmp(&do_call); |
| 649 |
641 __ bind(&non_function_call); | 650 __ bind(&non_function_call); |
| 651 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); |
| 652 __ bind(&do_call); |
642 // CALL_NON_FUNCTION expects the non-function constructor as receiver | 653 // CALL_NON_FUNCTION expects the non-function constructor as receiver |
643 // (instead of the original receiver from the call site). The receiver is | 654 // (instead of the original receiver from the call site). The receiver is |
644 // stack element argc. | 655 // stack element argc. |
645 // Set expected number of arguments to zero (not changing a0). | 656 // Set expected number of arguments to zero (not changing a0). |
646 __ mov(a2, zero_reg); | 657 __ mov(a2, zero_reg); |
647 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); | |
648 __ SetCallKind(t1, CALL_AS_METHOD); | 658 __ SetCallKind(t1, CALL_AS_METHOD); |
649 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 659 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
650 RelocInfo::CODE_TARGET); | 660 RelocInfo::CODE_TARGET); |
651 } | 661 } |
652 | 662 |
653 | 663 |
654 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 664 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
655 bool is_api_function, | 665 bool is_api_function, |
656 bool count_constructions) { | 666 bool count_constructions) { |
657 // Should never count constructions for api objects. | 667 // Should never count constructions for api objects. |
658 ASSERT(!is_api_function || !count_constructions); | 668 ASSERT(!is_api_function || !count_constructions); |
659 | 669 |
660 Isolate* isolate = masm->isolate(); | 670 Isolate* isolate = masm->isolate(); |
661 | 671 |
662 // ----------- S t a t e ------------- | 672 // ----------- S t a t e ------------- |
663 // -- a0 : number of arguments | 673 // -- a0 : number of arguments |
664 // -- a1 : constructor function | 674 // -- a1 : constructor function |
665 // -- ra : return address | 675 // -- ra : return address |
666 // -- sp[...]: constructor arguments | 676 // -- sp[...]: constructor arguments |
667 // ----------------------------------- | 677 // ----------------------------------- |
668 | 678 |
669 // Enter a construct frame. | 679 // Enter a construct frame. |
670 __ EnterConstructFrame(); | 680 { |
671 | 681 FrameScope scope(masm, StackFrame::CONSTRUCT); |
672 // Preserve the two incoming parameters on the stack. | 682 |
673 __ sll(a0, a0, kSmiTagSize); // Tag arguments count. | 683 // Preserve the two incoming parameters on the stack. |
674 __ MultiPushReversed(a0.bit() | a1.bit()); | 684 __ sll(a0, a0, kSmiTagSize); // Tag arguments count. |
675 | 685 __ MultiPushReversed(a0.bit() | a1.bit()); |
676 // Use t7 to hold undefined, which is used in several places below. | 686 |
677 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex); | 687 // Use t7 to hold undefined, which is used in several places below. |
678 | 688 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex); |
679 Label rt_call, allocated; | 689 |
680 // Try to allocate the object without transitioning into C code. If any of the | 690 Label rt_call, allocated; |
681 // preconditions is not met, the code bails out to the runtime call. | 691 // Try to allocate the object without transitioning into C code. If any of |
682 if (FLAG_inline_new) { | 692 // the preconditions is not met, the code bails out to the runtime call. |
683 Label undo_allocation; | 693 if (FLAG_inline_new) { |
| 694 Label undo_allocation; |
684 #ifdef ENABLE_DEBUGGER_SUPPORT | 695 #ifdef ENABLE_DEBUGGER_SUPPORT |
685 ExternalReference debug_step_in_fp = | 696 ExternalReference debug_step_in_fp = |
686 ExternalReference::debug_step_in_fp_address(isolate); | 697 ExternalReference::debug_step_in_fp_address(isolate); |
687 __ li(a2, Operand(debug_step_in_fp)); | 698 __ li(a2, Operand(debug_step_in_fp)); |
688 __ lw(a2, MemOperand(a2)); | 699 __ lw(a2, MemOperand(a2)); |
689 __ Branch(&rt_call, ne, a2, Operand(zero_reg)); | 700 __ Branch(&rt_call, ne, a2, Operand(zero_reg)); |
690 #endif | 701 #endif |
691 | 702 |
692 // Load the initial map and verify that it is in fact a map. | 703 // Load the initial map and verify that it is in fact a map. |
| 704 // a1: constructor function |
| 705 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); |
| 706 __ And(t0, a2, Operand(kSmiTagMask)); |
| 707 __ Branch(&rt_call, eq, t0, Operand(zero_reg)); |
| 708 __ GetObjectType(a2, a3, t4); |
| 709 __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE)); |
| 710 |
| 711 // Check that the constructor is not constructing a JSFunction (see |
| 712 // comments in Runtime_NewObject in runtime.cc). In which case the |
| 713 // initial map's instance type would be JS_FUNCTION_TYPE. |
| 714 // a1: constructor function |
| 715 // a2: initial map |
| 716 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset)); |
| 717 __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE)); |
| 718 |
| 719 if (count_constructions) { |
| 720 Label allocate; |
| 721 // Decrease generous allocation count. |
| 722 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 723 MemOperand constructor_count = |
| 724 FieldMemOperand(a3, SharedFunctionInfo::kConstructionCountOffset); |
| 725 __ lbu(t0, constructor_count); |
| 726 __ Subu(t0, t0, Operand(1)); |
| 727 __ sb(t0, constructor_count); |
| 728 __ Branch(&allocate, ne, t0, Operand(zero_reg)); |
| 729 |
| 730 __ Push(a1, a2); |
| 731 |
| 732 __ push(a1); // Constructor. |
| 733 // The call will replace the stub, so the countdown is only done once. |
| 734 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); |
| 735 |
| 736 __ pop(a2); |
| 737 __ pop(a1); |
| 738 |
| 739 __ bind(&allocate); |
| 740 } |
| 741 |
| 742 // Now allocate the JSObject on the heap. |
| 743 // a1: constructor function |
| 744 // a2: initial map |
| 745 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset)); |
| 746 __ AllocateInNewSpace(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS); |
| 747 |
| 748 // Allocated the JSObject, now initialize the fields. Map is set to |
| 749 // initial map and properties and elements are set to empty fixed array. |
| 750 // a1: constructor function |
| 751 // a2: initial map |
| 752 // a3: object size |
| 753 // t4: JSObject (not tagged) |
| 754 __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex); |
| 755 __ mov(t5, t4); |
| 756 __ sw(a2, MemOperand(t5, JSObject::kMapOffset)); |
| 757 __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset)); |
| 758 __ sw(t6, MemOperand(t5, JSObject::kElementsOffset)); |
| 759 __ Addu(t5, t5, Operand(3*kPointerSize)); |
| 760 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset); |
| 761 ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset); |
| 762 ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset); |
| 763 |
| 764 // Fill all the in-object properties with appropriate filler. |
| 765 // a1: constructor function |
| 766 // a2: initial map |
| 767 // a3: object size (in words) |
| 768 // t4: JSObject (not tagged) |
| 769 // t5: First in-object property of JSObject (not tagged) |
| 770 __ sll(t0, a3, kPointerSizeLog2); |
| 771 __ addu(t6, t4, t0); // End of object. |
| 772 ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize); |
| 773 { Label loop, entry; |
| 774 if (count_constructions) { |
| 775 // To allow for truncation. |
| 776 __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex); |
| 777 } else { |
| 778 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex); |
| 779 } |
| 780 __ jmp(&entry); |
| 781 __ bind(&loop); |
| 782 __ sw(t7, MemOperand(t5, 0)); |
| 783 __ addiu(t5, t5, kPointerSize); |
| 784 __ bind(&entry); |
| 785 __ Branch(&loop, Uless, t5, Operand(t6)); |
| 786 } |
| 787 |
| 788 // Add the object tag to make the JSObject real, so that we can continue |
| 789 // and jump into the continuation code at any time from now on. Any |
| 790 // failures need to undo the allocation, so that the heap is in a |
| 791 // consistent state and verifiable. |
| 792 __ Addu(t4, t4, Operand(kHeapObjectTag)); |
| 793 |
| 794 // Check if a non-empty properties array is needed. Continue with |
| 795 // allocated object if not fall through to runtime call if it is. |
| 796 // a1: constructor function |
| 797 // t4: JSObject |
| 798 // t5: start of next object (not tagged) |
| 799 __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset)); |
| 800 // The field instance sizes contains both pre-allocated property fields |
| 801 // and in-object properties. |
| 802 __ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset)); |
| 803 __ And(t6, |
| 804 a0, |
| 805 Operand(0x000000FF << Map::kPreAllocatedPropertyFieldsByte * 8)); |
| 806 __ srl(t0, t6, Map::kPreAllocatedPropertyFieldsByte * 8); |
| 807 __ Addu(a3, a3, Operand(t0)); |
| 808 __ And(t6, a0, Operand(0x000000FF << Map::kInObjectPropertiesByte * 8)); |
| 809 __ srl(t0, t6, Map::kInObjectPropertiesByte * 8); |
| 810 __ subu(a3, a3, t0); |
| 811 |
| 812 // Done if no extra properties are to be allocated. |
| 813 __ Branch(&allocated, eq, a3, Operand(zero_reg)); |
| 814 __ Assert(greater_equal, "Property allocation count failed.", |
| 815 a3, Operand(zero_reg)); |
| 816 |
| 817 // Scale the number of elements by pointer size and add the header for |
| 818 // FixedArrays to the start of the next object calculation from above. |
| 819 // a1: constructor |
| 820 // a3: number of elements in properties array |
| 821 // t4: JSObject |
| 822 // t5: start of next object |
| 823 __ Addu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize)); |
| 824 __ AllocateInNewSpace( |
| 825 a0, |
| 826 t5, |
| 827 t6, |
| 828 a2, |
| 829 &undo_allocation, |
| 830 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS)); |
| 831 |
| 832 // Initialize the FixedArray. |
| 833 // a1: constructor |
| 834 // a3: number of elements in properties array (un-tagged) |
| 835 // t4: JSObject |
| 836 // t5: start of next object |
| 837 __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex); |
| 838 __ mov(a2, t5); |
| 839 __ sw(t6, MemOperand(a2, JSObject::kMapOffset)); |
| 840 __ sll(a0, a3, kSmiTagSize); |
| 841 __ sw(a0, MemOperand(a2, FixedArray::kLengthOffset)); |
| 842 __ Addu(a2, a2, Operand(2 * kPointerSize)); |
| 843 |
| 844 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset); |
| 845 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset); |
| 846 |
| 847 // Initialize the fields to undefined. |
| 848 // a1: constructor |
| 849 // a2: First element of FixedArray (not tagged) |
| 850 // a3: number of elements in properties array |
| 851 // t4: JSObject |
| 852 // t5: FixedArray (not tagged) |
| 853 __ sll(t3, a3, kPointerSizeLog2); |
| 854 __ addu(t6, a2, t3); // End of object. |
| 855 ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize); |
| 856 { Label loop, entry; |
| 857 if (count_constructions) { |
| 858 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex); |
| 859 } else if (FLAG_debug_code) { |
| 860 __ LoadRoot(t8, Heap::kUndefinedValueRootIndex); |
| 861 __ Assert(eq, "Undefined value not loaded.", t7, Operand(t8)); |
| 862 } |
| 863 __ jmp(&entry); |
| 864 __ bind(&loop); |
| 865 __ sw(t7, MemOperand(a2)); |
| 866 __ addiu(a2, a2, kPointerSize); |
| 867 __ bind(&entry); |
| 868 __ Branch(&loop, less, a2, Operand(t6)); |
| 869 } |
| 870 |
| 871 // Store the initialized FixedArray into the properties field of |
| 872 // the JSObject. |
| 873 // a1: constructor function |
| 874 // t4: JSObject |
| 875 // t5: FixedArray (not tagged) |
| 876 __ Addu(t5, t5, Operand(kHeapObjectTag)); // Add the heap tag. |
| 877 __ sw(t5, FieldMemOperand(t4, JSObject::kPropertiesOffset)); |
| 878 |
| 879 // Continue with JSObject being successfully allocated. |
| 880 // a1: constructor function |
 | 881 // t4: JSObject |
| 882 __ jmp(&allocated); |
| 883 |
| 884 // Undo the setting of the new top so that the heap is verifiable. For |
| 885 // example, the map's unused properties potentially do not match the |
| 886 // allocated objects unused properties. |
| 887 // t4: JSObject (previous new top) |
| 888 __ bind(&undo_allocation); |
| 889 __ UndoAllocationInNewSpace(t4, t5); |
| 890 } |
| 891 |
| 892 __ bind(&rt_call); |
| 893 // Allocate the new receiver object using the runtime call. |
693 // a1: constructor function | 894 // a1: constructor function |
694 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); | 895 __ push(a1); // Argument for Runtime_NewObject. |
695 __ And(t0, a2, Operand(kSmiTagMask)); | 896 __ CallRuntime(Runtime::kNewObject, 1); |
696 __ Branch(&rt_call, eq, t0, Operand(zero_reg)); | 897 __ mov(t4, v0); |
697 __ GetObjectType(a2, a3, t4); | 898 |
698 __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE)); | 899 // Receiver for constructor call allocated. |
699 | 900 // t4: JSObject |
700 // Check that the constructor is not constructing a JSFunction (see comments | 901 __ bind(&allocated); |
701 // in Runtime_NewObject in runtime.cc). In which case the initial map's | 902 __ push(t4); |
702 // instance type would be JS_FUNCTION_TYPE. | 903 |
| 904 // Push the function and the allocated receiver from the stack. |
| 905 // sp[0]: receiver (newly allocated object) |
| 906 // sp[1]: constructor function |
| 907 // sp[2]: number of arguments (smi-tagged) |
| 908 __ lw(a1, MemOperand(sp, kPointerSize)); |
| 909 __ MultiPushReversed(a1.bit() | t4.bit()); |
| 910 |
| 911 // Reload the number of arguments from the stack. |
703 // a1: constructor function | 912 // a1: constructor function |
704 // a2: initial map | 913 // sp[0]: receiver |
705 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset)); | 914 // sp[1]: constructor function |
706 __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE)); | 915 // sp[2]: receiver |
707 | 916 // sp[3]: constructor function |
708 if (count_constructions) { | 917 // sp[4]: number of arguments (smi-tagged) |
709 Label allocate; | 918 __ lw(a3, MemOperand(sp, 4 * kPointerSize)); |
710 // Decrease generous allocation count. | 919 |
711 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 920 // Setup pointer to last argument. |
712 MemOperand constructor_count = | 921 __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset)); |
713 FieldMemOperand(a3, SharedFunctionInfo::kConstructionCountOffset); | 922 |
714 __ lbu(t0, constructor_count); | 923 // Setup number of arguments for function call below. |
715 __ Subu(t0, t0, Operand(1)); | 924 __ srl(a0, a3, kSmiTagSize); |
716 __ sb(t0, constructor_count); | 925 |
717 __ Branch(&allocate, ne, t0, Operand(zero_reg)); | 926 // Copy arguments and receiver to the expression stack. |
718 | 927 // a0: number of arguments |
719 __ Push(a1, a2); | 928 // a1: constructor function |
720 | 929 // a2: address of last argument (caller sp) |
721 __ push(a1); // Constructor. | 930 // a3: number of arguments (smi-tagged) |
722 // The call will replace the stub, so the countdown is only done once. | 931 // sp[0]: receiver |
723 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); | 932 // sp[1]: constructor function |
724 | 933 // sp[2]: receiver |
725 __ pop(a2); | 934 // sp[3]: constructor function |
726 __ pop(a1); | 935 // sp[4]: number of arguments (smi-tagged) |
727 | 936 Label loop, entry; |
728 __ bind(&allocate); | 937 __ jmp(&entry); |
| 938 __ bind(&loop); |
| 939 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
| 940 __ Addu(t0, a2, Operand(t0)); |
| 941 __ lw(t1, MemOperand(t0)); |
| 942 __ push(t1); |
| 943 __ bind(&entry); |
| 944 __ Addu(a3, a3, Operand(-2)); |
| 945 __ Branch(&loop, greater_equal, a3, Operand(zero_reg)); |
| 946 |
| 947 // Call the function. |
| 948 // a0: number of arguments |
| 949 // a1: constructor function |
| 950 if (is_api_function) { |
| 951 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); |
| 952 Handle<Code> code = |
| 953 masm->isolate()->builtins()->HandleApiCallConstruct(); |
| 954 ParameterCount expected(0); |
| 955 __ InvokeCode(code, expected, expected, |
| 956 RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD); |
| 957 } else { |
| 958 ParameterCount actual(a0); |
| 959 __ InvokeFunction(a1, actual, CALL_FUNCTION, |
| 960 NullCallWrapper(), CALL_AS_METHOD); |
729 } | 961 } |
730 | 962 |
731 // Now allocate the JSObject on the heap. | 963 // Pop the function from the stack. |
732 // a1: constructor function | 964 // v0: result |
733 // a2: initial map | 965 // sp[0]: constructor function |
734 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset)); | 966 // sp[2]: receiver |
735 __ AllocateInNewSpace(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS); | 967 // sp[3]: constructor function |
736 | 968 // sp[4]: number of arguments (smi-tagged) |
737 // Allocated the JSObject, now initialize the fields. Map is set to initial | 969 __ Pop(); |
738 // map and properties and elements are set to empty fixed array. | 970 |
739 // a1: constructor function | 971 // Restore context from the frame. |
740 // a2: initial map | 972 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
741 // a3: object size | 973 |
742 // t4: JSObject (not tagged) | 974 // If the result is an object (in the ECMA sense), we should get rid |
743 __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex); | 975 // of the receiver and use the result; see ECMA-262 section 13.2.2-7 |
744 __ mov(t5, t4); | 976 // on page 74. |
745 __ sw(a2, MemOperand(t5, JSObject::kMapOffset)); | 977 Label use_receiver, exit; |
746 __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset)); | 978 |
747 __ sw(t6, MemOperand(t5, JSObject::kElementsOffset)); | 979 // If the result is a smi, it is *not* an object in the ECMA sense. |
748 __ Addu(t5, t5, Operand(3*kPointerSize)); | 980 // v0: result |
749 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset); | 981 // sp[0]: receiver (newly allocated object) |
750 ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset); | 982 // sp[1]: constructor function |
751 ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset); | 983 // sp[2]: number of arguments (smi-tagged) |
752 | 984 __ And(t0, v0, Operand(kSmiTagMask)); |
753 // Fill all the in-object properties with appropriate filler. | 985 __ Branch(&use_receiver, eq, t0, Operand(zero_reg)); |
754 // a1: constructor function | 986 |
755 // a2: initial map | 987 // If the type of the result (stored in its map) is less than |
756 // a3: object size (in words) | 988 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense. |
757 // t4: JSObject (not tagged) | 989 __ GetObjectType(v0, a3, a3); |
758 // t5: First in-object property of JSObject (not tagged) | 990 __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE)); |
759 __ sll(t0, a3, kPointerSizeLog2); | 991 |
760 __ addu(t6, t4, t0); // End of object. | 992 // Throw away the result of the constructor invocation and use the |
761 ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize); | 993 // on-stack receiver as the result. |
762 { Label loop, entry; | 994 __ bind(&use_receiver); |
763 if (count_constructions) { | 995 __ lw(v0, MemOperand(sp)); |
764 // To allow for truncation. | 996 |
765 __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex); | 997 // Remove receiver from the stack, remove caller arguments, and |
766 } else { | 998 // return. |
767 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex); | 999 __ bind(&exit); |
768 } | 1000 // v0: result |
769 __ jmp(&entry); | 1001 // sp[0]: receiver (newly allocated object) |
770 __ bind(&loop); | 1002 // sp[1]: constructor function |
771 __ sw(t7, MemOperand(t5, 0)); | 1003 // sp[2]: number of arguments (smi-tagged) |
772 __ addiu(t5, t5, kPointerSize); | 1004 __ lw(a1, MemOperand(sp, 2 * kPointerSize)); |
773 __ bind(&entry); | 1005 |
774 __ Branch(&loop, Uless, t5, Operand(t6)); | 1006 // Leave construct frame. |
775 } | |
776 | |
777 // Add the object tag to make the JSObject real, so that we can continue and | |
778 // jump into the continuation code at any time from now on. Any failures | |
779 // need to undo the allocation, so that the heap is in a consistent state | |
780 // and verifiable. | |
781 __ Addu(t4, t4, Operand(kHeapObjectTag)); | |
782 | |
783 // Check if a non-empty properties array is needed. Continue with allocated | |
784 // object if not fall through to runtime call if it is. | |
785 // a1: constructor function | |
786 // t4: JSObject | |
787 // t5: start of next object (not tagged) | |
788 __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset)); | |
789 // The field instance sizes contains both pre-allocated property fields and | |
790 // in-object properties. | |
791 __ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset)); | |
792 __ And(t6, | |
793 a0, | |
794 Operand(0x000000FF << Map::kPreAllocatedPropertyFieldsByte * 8)); | |
795 __ srl(t0, t6, Map::kPreAllocatedPropertyFieldsByte * 8); | |
796 __ Addu(a3, a3, Operand(t0)); | |
797 __ And(t6, a0, Operand(0x000000FF << Map::kInObjectPropertiesByte * 8)); | |
798 __ srl(t0, t6, Map::kInObjectPropertiesByte * 8); | |
799 __ subu(a3, a3, t0); | |
800 | |
801 // Done if no extra properties are to be allocated. | |
802 __ Branch(&allocated, eq, a3, Operand(zero_reg)); | |
803 __ Assert(greater_equal, "Property allocation count failed.", | |
804 a3, Operand(zero_reg)); | |
805 | |
806 // Scale the number of elements by pointer size and add the header for | |
807 // FixedArrays to the start of the next object calculation from above. | |
808 // a1: constructor | |
809 // a3: number of elements in properties array | |
810 // t4: JSObject | |
811 // t5: start of next object | |
812 __ Addu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize)); | |
813 __ AllocateInNewSpace( | |
814 a0, | |
815 t5, | |
816 t6, | |
817 a2, | |
818 &undo_allocation, | |
819 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS)); | |
820 | |
821 // Initialize the FixedArray. | |
822 // a1: constructor | |
823 // a3: number of elements in properties array (un-tagged) | |
824 // t4: JSObject | |
825 // t5: start of next object | |
826 __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex); | |
827 __ mov(a2, t5); | |
828 __ sw(t6, MemOperand(a2, JSObject::kMapOffset)); | |
829 __ sll(a0, a3, kSmiTagSize); | |
830 __ sw(a0, MemOperand(a2, FixedArray::kLengthOffset)); | |
831 __ Addu(a2, a2, Operand(2 * kPointerSize)); | |
832 | |
833 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset); | |
834 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset); | |
835 | |
836 // Initialize the fields to undefined. | |
837 // a1: constructor | |
838 // a2: First element of FixedArray (not tagged) | |
839 // a3: number of elements in properties array | |
840 // t4: JSObject | |
841 // t5: FixedArray (not tagged) | |
842 __ sll(t3, a3, kPointerSizeLog2); | |
843 __ addu(t6, a2, t3); // End of object. | |
844 ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize); | |
845 { Label loop, entry; | |
846 if (count_constructions) { | |
847 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex); | |
848 } else if (FLAG_debug_code) { | |
849 __ LoadRoot(t8, Heap::kUndefinedValueRootIndex); | |
850 __ Assert(eq, "Undefined value not loaded.", t7, Operand(t8)); | |
851 } | |
852 __ jmp(&entry); | |
853 __ bind(&loop); | |
854 __ sw(t7, MemOperand(a2)); | |
855 __ addiu(a2, a2, kPointerSize); | |
856 __ bind(&entry); | |
857 __ Branch(&loop, less, a2, Operand(t6)); | |
858 } | |
859 | |
860 // Store the initialized FixedArray into the properties field of | |
861 // the JSObject. | |
862 // a1: constructor function | |
863 // t4: JSObject | |
864 // t5: FixedArray (not tagged) | |
865 __ Addu(t5, t5, Operand(kHeapObjectTag)); // Add the heap tag. | |
866 __ sw(t5, FieldMemOperand(t4, JSObject::kPropertiesOffset)); | |
867 | |
868 // Continue with JSObject being successfully allocated. | |
869 // a1: constructor function | |
870 // a4: JSObject | |
871 __ jmp(&allocated); | |
872 | |
873 // Undo the setting of the new top so that the heap is verifiable. For | |
874 // example, the map's unused properties potentially do not match the | |
875 // allocated objects unused properties. | |
876 // t4: JSObject (previous new top) | |
877 __ bind(&undo_allocation); | |
878 __ UndoAllocationInNewSpace(t4, t5); | |
879 } | 1007 } |
880 | 1008 |
881 __ bind(&rt_call); | |
882 // Allocate the new receiver object using the runtime call. | |
883 // a1: constructor function | |
884 __ push(a1); // Argument for Runtime_NewObject. | |
885 __ CallRuntime(Runtime::kNewObject, 1); | |
886 __ mov(t4, v0); | |
887 | |
888 // Receiver for constructor call allocated. | |
889 // t4: JSObject | |
890 __ bind(&allocated); | |
891 __ push(t4); | |
892 | |
893 // Push the function and the allocated receiver from the stack. | |
894 // sp[0]: receiver (newly allocated object) | |
895 // sp[1]: constructor function | |
896 // sp[2]: number of arguments (smi-tagged) | |
897 __ lw(a1, MemOperand(sp, kPointerSize)); | |
898 __ MultiPushReversed(a1.bit() | t4.bit()); | |
899 | |
900 // Reload the number of arguments from the stack. | |
901 // a1: constructor function | |
902 // sp[0]: receiver | |
903 // sp[1]: constructor function | |
904 // sp[2]: receiver | |
905 // sp[3]: constructor function | |
906 // sp[4]: number of arguments (smi-tagged) | |
907 __ lw(a3, MemOperand(sp, 4 * kPointerSize)); | |
908 | |
909 // Setup pointer to last argument. | |
910 __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset)); | |
911 | |
912 // Setup number of arguments for function call below. | |
913 __ srl(a0, a3, kSmiTagSize); | |
914 | |
915 // Copy arguments and receiver to the expression stack. | |
916 // a0: number of arguments | |
917 // a1: constructor function | |
918 // a2: address of last argument (caller sp) | |
919 // a3: number of arguments (smi-tagged) | |
920 // sp[0]: receiver | |
921 // sp[1]: constructor function | |
922 // sp[2]: receiver | |
923 // sp[3]: constructor function | |
924 // sp[4]: number of arguments (smi-tagged) | |
925 Label loop, entry; | |
926 __ jmp(&entry); | |
927 __ bind(&loop); | |
928 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); | |
929 __ Addu(t0, a2, Operand(t0)); | |
930 __ lw(t1, MemOperand(t0)); | |
931 __ push(t1); | |
932 __ bind(&entry); | |
933 __ Addu(a3, a3, Operand(-2)); | |
934 __ Branch(&loop, greater_equal, a3, Operand(zero_reg)); | |
935 | |
936 // Call the function. | |
937 // a0: number of arguments | |
938 // a1: constructor function | |
939 if (is_api_function) { | |
940 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); | |
941 Handle<Code> code = | |
942 masm->isolate()->builtins()->HandleApiCallConstruct(); | |
943 ParameterCount expected(0); | |
944 __ InvokeCode(code, expected, expected, | |
945 RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD); | |
946 } else { | |
947 ParameterCount actual(a0); | |
948 __ InvokeFunction(a1, actual, CALL_FUNCTION, | |
949 NullCallWrapper(), CALL_AS_METHOD); | |
950 } | |
951 | |
952 // Pop the function from the stack. | |
953 // v0: result | |
954 // sp[0]: constructor function | |
955 // sp[2]: receiver | |
956 // sp[3]: constructor function | |
957 // sp[4]: number of arguments (smi-tagged) | |
958 __ Pop(); | |
959 | |
960 // Restore context from the frame. | |
961 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
962 | |
963 // If the result is an object (in the ECMA sense), we should get rid | |
964 // of the receiver and use the result; see ECMA-262 section 13.2.2-7 | |
965 // on page 74. | |
966 Label use_receiver, exit; | |
967 | |
968 // If the result is a smi, it is *not* an object in the ECMA sense. | |
969 // v0: result | |
970 // sp[0]: receiver (newly allocated object) | |
971 // sp[1]: constructor function | |
972 // sp[2]: number of arguments (smi-tagged) | |
973 __ And(t0, v0, Operand(kSmiTagMask)); | |
974 __ Branch(&use_receiver, eq, t0, Operand(zero_reg)); | |
975 | |
976 // If the type of the result (stored in its map) is less than | |
977 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense. | |
978 __ GetObjectType(v0, a3, a3); | |
979 __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE)); | |
980 | |
981 // Throw away the result of the constructor invocation and use the | |
982 // on-stack receiver as the result. | |
983 __ bind(&use_receiver); | |
984 __ lw(v0, MemOperand(sp)); | |
985 | |
986 // Remove receiver from the stack, remove caller arguments, and | |
987 // return. | |
988 __ bind(&exit); | |
989 // v0: result | |
990 // sp[0]: receiver (newly allocated object) | |
991 // sp[1]: constructor function | |
992 // sp[2]: number of arguments (smi-tagged) | |
993 __ lw(a1, MemOperand(sp, 2 * kPointerSize)); | |
994 __ LeaveConstructFrame(); | |
995 __ sll(t0, a1, kPointerSizeLog2 - 1); | 1009 __ sll(t0, a1, kPointerSizeLog2 - 1); |
996 __ Addu(sp, sp, t0); | 1010 __ Addu(sp, sp, t0); |
997 __ Addu(sp, sp, kPointerSize); | 1011 __ Addu(sp, sp, kPointerSize); |
998 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2); | 1012 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2); |
999 __ Ret(); | 1013 __ Ret(); |
1000 } | 1014 } |
1001 | 1015 |
1002 | 1016 |
1003 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { | 1017 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { |
1004 Generate_JSConstructStubHelper(masm, false, true); | 1018 Generate_JSConstructStubHelper(masm, false, true); |
(...skipping 19 matching lines...) Expand all Loading... |
1024 // -- a1: function | 1038 // -- a1: function |
1025 // -- a2: receiver_pointer | 1039 // -- a2: receiver_pointer |
1026 // -- a3: argc | 1040 // -- a3: argc |
1027 // -- s0: argv | 1041 // -- s0: argv |
1028 // ----------------------------------- | 1042 // ----------------------------------- |
1029 | 1043 |
1030 // Clear the context before we push it when entering the JS frame. | 1044 // Clear the context before we push it when entering the JS frame. |
1031 __ mov(cp, zero_reg); | 1045 __ mov(cp, zero_reg); |
1032 | 1046 |
1033 // Enter an internal frame. | 1047 // Enter an internal frame. |
1034 __ EnterInternalFrame(); | 1048 { |
| 1049 FrameScope scope(masm, StackFrame::INTERNAL); |
1035 | 1050 |
1036 // Set up the context from the function argument. | 1051 // Set up the context from the function argument. |
1037 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); | 1052 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); |
1038 | 1053 |
1039 // Set up the roots register. | 1054 // Set up the roots register. |
1040 ExternalReference roots_address = | 1055 ExternalReference roots_address = |
1041 ExternalReference::roots_address(masm->isolate()); | 1056 ExternalReference::roots_address(masm->isolate()); |
1042 __ li(s6, Operand(roots_address)); | 1057 __ li(s6, Operand(roots_address)); |
1043 | 1058 |
1044 // Push the function and the receiver onto the stack. | 1059 // Push the function and the receiver onto the stack. |
1045 __ Push(a1, a2); | 1060 __ Push(a1, a2); |
1046 | 1061 |
1047 // Copy arguments to the stack in a loop. | 1062 // Copy arguments to the stack in a loop. |
1048 // a3: argc | 1063 // a3: argc |
1049 // s0: argv, ie points to first arg | 1064 // s0: argv, ie points to first arg |
1050 Label loop, entry; | 1065 Label loop, entry; |
1051 __ sll(t0, a3, kPointerSizeLog2); | 1066 __ sll(t0, a3, kPointerSizeLog2); |
1052 __ addu(t2, s0, t0); | 1067 __ addu(t2, s0, t0); |
1053 __ b(&entry); | 1068 __ b(&entry); |
1054 __ nop(); // Branch delay slot nop. | 1069 __ nop(); // Branch delay slot nop. |
1055 // t2 points past last arg. | 1070 // t2 points past last arg. |
1056 __ bind(&loop); | 1071 __ bind(&loop); |
1057 __ lw(t0, MemOperand(s0)); // Read next parameter. | 1072 __ lw(t0, MemOperand(s0)); // Read next parameter. |
1058 __ addiu(s0, s0, kPointerSize); | 1073 __ addiu(s0, s0, kPointerSize); |
1059 __ lw(t0, MemOperand(t0)); // Dereference handle. | 1074 __ lw(t0, MemOperand(t0)); // Dereference handle. |
1060 __ push(t0); // Push parameter. | 1075 __ push(t0); // Push parameter. |
1061 __ bind(&entry); | 1076 __ bind(&entry); |
1062 __ Branch(&loop, ne, s0, Operand(t2)); | 1077 __ Branch(&loop, ne, s0, Operand(t2)); |
1063 | 1078 |
1064 // Initialize all JavaScript callee-saved registers, since they will be seen | 1079 // Initialize all JavaScript callee-saved registers, since they will be seen |
1065 // by the garbage collector as part of handlers. | 1080 // by the garbage collector as part of handlers. |
1066 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); | 1081 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); |
1067 __ mov(s1, t0); | 1082 __ mov(s1, t0); |
1068 __ mov(s2, t0); | 1083 __ mov(s2, t0); |
1069 __ mov(s3, t0); | 1084 __ mov(s3, t0); |
1070 __ mov(s4, t0); | 1085 __ mov(s4, t0); |
1071 __ mov(s5, t0); | 1086 __ mov(s5, t0); |
1072 // s6 holds the root address. Do not clobber. | 1087 // s6 holds the root address. Do not clobber. |
1073 // s7 is cp. Do not init. | 1088 // s7 is cp. Do not init. |
1074 | 1089 |
1075 // Invoke the code and pass argc as a0. | 1090 // Invoke the code and pass argc as a0. |
1076 __ mov(a0, a3); | 1091 __ mov(a0, a3); |
1077 if (is_construct) { | 1092 if (is_construct) { |
1078 __ Call(masm->isolate()->builtins()->JSConstructCall()); | 1093 __ Call(masm->isolate()->builtins()->JSConstructCall()); |
1079 } else { | 1094 } else { |
1080 ParameterCount actual(a0); | 1095 ParameterCount actual(a0); |
1081 __ InvokeFunction(a1, actual, CALL_FUNCTION, | 1096 __ InvokeFunction(a1, actual, CALL_FUNCTION, |
1082 NullCallWrapper(), CALL_AS_METHOD); | 1097 NullCallWrapper(), CALL_AS_METHOD); |
| 1098 } |
| 1099 |
| 1100 // Leave internal frame. |
1083 } | 1101 } |
1084 | 1102 |
1085 __ LeaveInternalFrame(); | |
1086 | |
1087 __ Jump(ra); | 1103 __ Jump(ra); |
1088 } | 1104 } |
1089 | 1105 |
1090 | 1106 |
1091 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { | 1107 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { |
1092 Generate_JSEntryTrampolineHelper(masm, false); | 1108 Generate_JSEntryTrampolineHelper(masm, false); |
1093 } | 1109 } |
1094 | 1110 |
1095 | 1111 |
1096 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 1112 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
1097 Generate_JSEntryTrampolineHelper(masm, true); | 1113 Generate_JSEntryTrampolineHelper(masm, true); |
1098 } | 1114 } |
1099 | 1115 |
1100 | 1116 |
1101 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { | 1117 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { |
1102 // Enter an internal frame. | 1118 // Enter an internal frame. |
1103 __ EnterInternalFrame(); | 1119 { |
| 1120 FrameScope scope(masm, StackFrame::INTERNAL); |
1104 | 1121 |
1105 // Preserve the function. | 1122 // Preserve the function. |
1106 __ push(a1); | 1123 __ push(a1); |
1107 // Push call kind information. | 1124 // Push call kind information. |
1108 __ push(t1); | 1125 __ push(t1); |
1109 | 1126 |
1110 // Push the function on the stack as the argument to the runtime function. | 1127 // Push the function on the stack as the argument to the runtime function. |
1111 __ push(a1); | 1128 __ push(a1); |
1112 // Call the runtime function. | 1129 // Call the runtime function. |
1113 __ CallRuntime(Runtime::kLazyCompile, 1); | 1130 __ CallRuntime(Runtime::kLazyCompile, 1); |
1114 // Calculate the entry point. | 1131 // Calculate the entry point. |
1115 __ addiu(t9, v0, Code::kHeaderSize - kHeapObjectTag); | 1132 __ addiu(t9, v0, Code::kHeaderSize - kHeapObjectTag); |
1116 | 1133 |
1117 // Restore call kind information. | 1134 // Restore call kind information. |
1118 __ pop(t1); | 1135 __ pop(t1); |
1119 // Restore saved function. | 1136 // Restore saved function. |
1120 __ pop(a1); | 1137 __ pop(a1); |
1121 | 1138 |
1122 // Tear down temporary frame. | 1139 // Tear down temporary frame. |
1123 __ LeaveInternalFrame(); | 1140 } |
1124 | 1141 |
1125 // Do a tail-call of the compiled function. | 1142 // Do a tail-call of the compiled function. |
1126 __ Jump(t9); | 1143 __ Jump(t9); |
1127 } | 1144 } |
1128 | 1145 |
1129 | 1146 |
1130 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { | 1147 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { |
1131 // Enter an internal frame. | 1148 // Enter an internal frame. |
1132 __ EnterInternalFrame(); | 1149 { |
| 1150 FrameScope scope(masm, StackFrame::INTERNAL); |
1133 | 1151 |
1134 // Preserve the function. | 1152 // Preserve the function. |
1135 __ push(a1); | 1153 __ push(a1); |
1136 // Push call kind information. | 1154 // Push call kind information. |
1137 __ push(t1); | 1155 __ push(t1); |
1138 | 1156 |
1139 // Push the function on the stack as the argument to the runtime function. | 1157 // Push the function on the stack as the argument to the runtime function. |
1140 __ push(a1); | 1158 __ push(a1); |
1141 __ CallRuntime(Runtime::kLazyRecompile, 1); | 1159 __ CallRuntime(Runtime::kLazyRecompile, 1); |
1142 // Calculate the entry point. | 1160 // Calculate the entry point. |
1143 __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); | 1161 __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); |
1144 | 1162 |
1145 // Restore call kind information. | 1163 // Restore call kind information. |
1146 __ pop(t1); | 1164 __ pop(t1); |
1147 // Restore saved function. | 1165 // Restore saved function. |
1148 __ pop(a1); | 1166 __ pop(a1); |
1149 | 1167 |
1150 // Tear down temporary frame. | 1168 // Tear down temporary frame. |
1151 __ LeaveInternalFrame(); | 1169 } |
1152 | 1170 |
1153 // Do a tail-call of the compiled function. | 1171 // Do a tail-call of the compiled function. |
1154 __ Jump(t9); | 1172 __ Jump(t9); |
1155 } | 1173 } |
1156 | 1174 |
1157 | 1175 |
1158 // These functions are called from C++ but cannot be used in live code. | 1176 // These functions are called from C++ but cannot be used in live code. |
1159 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { | 1177 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { |
1160 __ Abort("Call to unimplemented function in builtins-mips.cc"); | 1178 __ Abort("Call to unimplemented function in builtins-mips.cc"); |
1161 } | 1179 } |
(...skipping 21 matching lines...) Expand all Loading... |
1183 __ Branch(&done, ne, a0, Operand(zero_reg)); | 1201 __ Branch(&done, ne, a0, Operand(zero_reg)); |
1184 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex); | 1202 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex); |
1185 __ push(t2); | 1203 __ push(t2); |
1186 __ Addu(a0, a0, Operand(1)); | 1204 __ Addu(a0, a0, Operand(1)); |
1187 __ bind(&done); | 1205 __ bind(&done); |
1188 } | 1206 } |
1189 | 1207 |
1190 // 2. Get the function to call (passed as receiver) from the stack, check | 1208 // 2. Get the function to call (passed as receiver) from the stack, check |
1191 // if it is a function. | 1209 // if it is a function. |
1192 // a0: actual number of arguments | 1210 // a0: actual number of arguments |
1193 Label non_function; | 1211 Label slow, non_function; |
1194 __ sll(at, a0, kPointerSizeLog2); | 1212 __ sll(at, a0, kPointerSizeLog2); |
1195 __ addu(at, sp, at); | 1213 __ addu(at, sp, at); |
1196 __ lw(a1, MemOperand(at)); | 1214 __ lw(a1, MemOperand(at)); |
1197 __ And(at, a1, Operand(kSmiTagMask)); | 1215 __ And(at, a1, Operand(kSmiTagMask)); |
1198 __ Branch(&non_function, eq, at, Operand(zero_reg)); | 1216 __ Branch(&non_function, eq, at, Operand(zero_reg)); |
1199 __ GetObjectType(a1, a2, a2); | 1217 __ GetObjectType(a1, a2, a2); |
1200 __ Branch(&non_function, ne, a2, Operand(JS_FUNCTION_TYPE)); | 1218 __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE)); |
1201 | 1219 |
1202 // 3a. Patch the first argument if necessary when calling a function. | 1220 // 3a. Patch the first argument if necessary when calling a function. |
1203 // a0: actual number of arguments | 1221 // a0: actual number of arguments |
1204 // a1: function | 1222 // a1: function |
1205 Label shift_arguments; | 1223 Label shift_arguments; |
| 1224 __ li(t0, Operand(0, RelocInfo::NONE)); // Indicate regular JS_FUNCTION. |
1206 { Label convert_to_object, use_global_receiver, patch_receiver; | 1225 { Label convert_to_object, use_global_receiver, patch_receiver; |
1207 // Change context eagerly in case we need the global receiver. | 1226 // Change context eagerly in case we need the global receiver. |
1208 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); | 1227 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); |
1209 | 1228 |
1210 // Do not transform the receiver for strict mode functions. | 1229 // Do not transform the receiver for strict mode functions. |
1211 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 1230 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
1212 __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset)); | 1231 __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset)); |
1213 __ And(t0, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + | 1232 __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + |
1214 kSmiTagSize))); | 1233 kSmiTagSize))); |
1215 __ Branch(&shift_arguments, ne, t0, Operand(zero_reg)); | 1234 __ Branch(&shift_arguments, ne, t3, Operand(zero_reg)); |
1216 | 1235 |
1217 // Do not transform the receiver for native (Compilerhints already in a3). | 1236 // Do not transform the receiver for native (Compilerhints already in a3). |
1218 __ And(t0, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); | 1237 __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); |
1219 __ Branch(&shift_arguments, ne, t0, Operand(zero_reg)); | 1238 __ Branch(&shift_arguments, ne, t3, Operand(zero_reg)); |
1220 | 1239 |
1221 // Compute the receiver in non-strict mode. | 1240 // Compute the receiver in non-strict mode. |
1222 // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2). | 1241 // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2). |
1223 __ sll(at, a0, kPointerSizeLog2); | 1242 __ sll(at, a0, kPointerSizeLog2); |
1224 __ addu(a2, sp, at); | 1243 __ addu(a2, sp, at); |
1225 __ lw(a2, MemOperand(a2, -kPointerSize)); | 1244 __ lw(a2, MemOperand(a2, -kPointerSize)); |
1226 // a0: actual number of arguments | 1245 // a0: actual number of arguments |
1227 // a1: function | 1246 // a1: function |
1228 // a2: first argument | 1247 // a2: first argument |
1229 __ JumpIfSmi(a2, &convert_to_object, t2); | 1248 __ JumpIfSmi(a2, &convert_to_object, t2); |
1230 | 1249 |
1231 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); | 1250 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); |
1232 __ Branch(&use_global_receiver, eq, a2, Operand(a3)); | 1251 __ Branch(&use_global_receiver, eq, a2, Operand(a3)); |
1233 __ LoadRoot(a3, Heap::kNullValueRootIndex); | 1252 __ LoadRoot(a3, Heap::kNullValueRootIndex); |
1234 __ Branch(&use_global_receiver, eq, a2, Operand(a3)); | 1253 __ Branch(&use_global_receiver, eq, a2, Operand(a3)); |
1235 | 1254 |
1236 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); | 1255 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
1237 __ GetObjectType(a2, a3, a3); | 1256 __ GetObjectType(a2, a3, a3); |
1238 __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE)); | 1257 __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE)); |
1239 | 1258 |
1240 __ bind(&convert_to_object); | 1259 __ bind(&convert_to_object); |
1241 __ EnterInternalFrame(); // In order to preserve argument count. | 1260 // Enter an internal frame in order to preserve argument count. |
1242 __ sll(a0, a0, kSmiTagSize); // Smi tagged. | 1261 { |
1243 __ push(a0); | 1262 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1263 __ sll(a0, a0, kSmiTagSize); // Smi tagged. |
| 1264 __ push(a0); |
1244 | 1265 |
1245 __ push(a2); | 1266 __ push(a2); |
1246 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 1267 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
1247 __ mov(a2, v0); | 1268 __ mov(a2, v0); |
1248 | 1269 |
1249 __ pop(a0); | 1270 __ pop(a0); |
1250 __ sra(a0, a0, kSmiTagSize); // Un-tag. | 1271 __ sra(a0, a0, kSmiTagSize); // Un-tag. |
1251 __ LeaveInternalFrame(); | 1272 // Leave internal frame. |
1252 // Restore the function to a1. | 1273 } |
| 1274 // Restore the function to a1, and the flag to t0. |
1253 __ sll(at, a0, kPointerSizeLog2); | 1275 __ sll(at, a0, kPointerSizeLog2); |
1254 __ addu(at, sp, at); | 1276 __ addu(at, sp, at); |
1255 __ lw(a1, MemOperand(at)); | 1277 __ lw(a1, MemOperand(at)); |
| 1278 __ li(t0, Operand(0, RelocInfo::NONE)); |
1256 __ Branch(&patch_receiver); | 1279 __ Branch(&patch_receiver); |
1257 | 1280 |
1258 // Use the global receiver object from the called function as the | 1281 // Use the global receiver object from the called function as the |
1259 // receiver. | 1282 // receiver. |
1260 __ bind(&use_global_receiver); | 1283 __ bind(&use_global_receiver); |
1261 const int kGlobalIndex = | 1284 const int kGlobalIndex = |
1262 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; | 1285 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; |
1263 __ lw(a2, FieldMemOperand(cp, kGlobalIndex)); | 1286 __ lw(a2, FieldMemOperand(cp, kGlobalIndex)); |
1264 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalContextOffset)); | 1287 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalContextOffset)); |
1265 __ lw(a2, FieldMemOperand(a2, kGlobalIndex)); | 1288 __ lw(a2, FieldMemOperand(a2, kGlobalIndex)); |
1266 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset)); | 1289 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset)); |
1267 | 1290 |
1268 __ bind(&patch_receiver); | 1291 __ bind(&patch_receiver); |
1269 __ sll(at, a0, kPointerSizeLog2); | 1292 __ sll(at, a0, kPointerSizeLog2); |
1270 __ addu(a3, sp, at); | 1293 __ addu(a3, sp, at); |
1271 __ sw(a2, MemOperand(a3, -kPointerSize)); | 1294 __ sw(a2, MemOperand(a3, -kPointerSize)); |
1272 | 1295 |
1273 __ Branch(&shift_arguments); | 1296 __ Branch(&shift_arguments); |
1274 } | 1297 } |
1275 | 1298 |
1276 // 3b. Patch the first argument when calling a non-function. The | 1299 // 3b. Check for function proxy. |
| 1300 __ bind(&slow); |
| 1301 __ li(t0, Operand(1, RelocInfo::NONE)); // Indicate function proxy. |
| 1302 __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE)); |
| 1303 |
| 1304 __ bind(&non_function); |
| 1305 __ li(t0, Operand(2, RelocInfo::NONE)); // Indicate non-function. |
| 1306 |
| 1307 // 3c. Patch the first argument when calling a non-function. The |
1277 // CALL_NON_FUNCTION builtin expects the non-function callee as | 1308 // CALL_NON_FUNCTION builtin expects the non-function callee as |
1278 // receiver, so overwrite the first argument which will ultimately | 1309 // receiver, so overwrite the first argument which will ultimately |
1279 // become the receiver. | 1310 // become the receiver. |
1280 // a0: actual number of arguments | 1311 // a0: actual number of arguments |
1281 // a1: function | 1312 // a1: function |
1282 __ bind(&non_function); | 1313 // t0: call type (0: JS function, 1: function proxy, 2: non-function) |
1283 // Restore the function in case it has been modified. | |
1284 __ sll(at, a0, kPointerSizeLog2); | 1314 __ sll(at, a0, kPointerSizeLog2); |
1285 __ addu(a2, sp, at); | 1315 __ addu(a2, sp, at); |
1286 __ sw(a1, MemOperand(a2, -kPointerSize)); | 1316 __ sw(a1, MemOperand(a2, -kPointerSize)); |
1287 // Clear a1 to indicate a non-function being called. | |
1288 __ mov(a1, zero_reg); | |
1289 | 1317 |
1290 // 4. Shift arguments and return address one slot down on the stack | 1318 // 4. Shift arguments and return address one slot down on the stack |
1291 // (overwriting the original receiver). Adjust argument count to make | 1319 // (overwriting the original receiver). Adjust argument count to make |
1292 // the original first argument the new receiver. | 1320 // the original first argument the new receiver. |
1293 // a0: actual number of arguments | 1321 // a0: actual number of arguments |
1294 // a1: function | 1322 // a1: function |
| 1323 // t0: call type (0: JS function, 1: function proxy, 2: non-function) |
1295 __ bind(&shift_arguments); | 1324 __ bind(&shift_arguments); |
1296 { Label loop; | 1325 { Label loop; |
1297 // Calculate the copy start address (destination). Copy end address is sp. | 1326 // Calculate the copy start address (destination). Copy end address is sp. |
1298 __ sll(at, a0, kPointerSizeLog2); | 1327 __ sll(at, a0, kPointerSizeLog2); |
1299 __ addu(a2, sp, at); | 1328 __ addu(a2, sp, at); |
1300 | 1329 |
1301 __ bind(&loop); | 1330 __ bind(&loop); |
1302 __ lw(at, MemOperand(a2, -kPointerSize)); | 1331 __ lw(at, MemOperand(a2, -kPointerSize)); |
1303 __ sw(at, MemOperand(a2)); | 1332 __ sw(at, MemOperand(a2)); |
1304 __ Subu(a2, a2, Operand(kPointerSize)); | 1333 __ Subu(a2, a2, Operand(kPointerSize)); |
1305 __ Branch(&loop, ne, a2, Operand(sp)); | 1334 __ Branch(&loop, ne, a2, Operand(sp)); |
1306 // Adjust the actual number of arguments and remove the top element | 1335 // Adjust the actual number of arguments and remove the top element |
1307 // (which is a copy of the last argument). | 1336 // (which is a copy of the last argument). |
1308 __ Subu(a0, a0, Operand(1)); | 1337 __ Subu(a0, a0, Operand(1)); |
1309 __ Pop(); | 1338 __ Pop(); |
1310 } | 1339 } |
1311 | 1340 |
1312 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin. | 1341 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, |
| 1342 // or a function proxy via CALL_FUNCTION_PROXY. |
1313 // a0: actual number of arguments | 1343 // a0: actual number of arguments |
1314 // a1: function | 1344 // a1: function |
1315 { Label function; | 1345 // t0: call type (0: JS function, 1: function proxy, 2: non-function) |
1316 __ Branch(&function, ne, a1, Operand(zero_reg)); | 1346 { Label function, non_proxy; |
1317 __ mov(a2, zero_reg); // expected arguments is 0 for CALL_NON_FUNCTION | 1347 __ Branch(&function, eq, t0, Operand(zero_reg)); |
| 1348 // Expected number of arguments is 0 for CALL_NON_FUNCTION. |
| 1349 __ mov(a2, zero_reg); |
| 1350 __ SetCallKind(t1, CALL_AS_METHOD); |
| 1351 __ Branch(&non_proxy, ne, t0, Operand(1)); |
| 1352 |
| 1353 __ push(a1); // Re-add proxy object as additional argument. |
| 1354 __ Addu(a0, a0, Operand(1)); |
| 1355 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY); |
| 1356 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 1357 RelocInfo::CODE_TARGET); |
| 1358 |
| 1359 __ bind(&non_proxy); |
1318 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION); | 1360 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION); |
1319 __ SetCallKind(t1, CALL_AS_METHOD); | |
1320 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 1361 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
1321 RelocInfo::CODE_TARGET); | 1362 RelocInfo::CODE_TARGET); |
1322 __ bind(&function); | 1363 __ bind(&function); |
1323 } | 1364 } |
1324 | 1365 |
1325 // 5b. Get the code to call from the function and check that the number of | 1366 // 5b. Get the code to call from the function and check that the number of |
1326 // expected arguments matches what we're providing. If so, jump | 1367 // expected arguments matches what we're providing. If so, jump |
1327 // (tail-call) to the code in register edx without checking arguments. | 1368 // (tail-call) to the code in register edx without checking arguments. |
1328 // a0: actual number of arguments | 1369 // a0: actual number of arguments |
1329 // a1: function | 1370 // a1: function |
(...skipping 13 matching lines...) Expand all Loading... |
1343 } | 1384 } |
1344 | 1385 |
1345 | 1386 |
1346 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { | 1387 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { |
1347 const int kIndexOffset = -5 * kPointerSize; | 1388 const int kIndexOffset = -5 * kPointerSize; |
1348 const int kLimitOffset = -4 * kPointerSize; | 1389 const int kLimitOffset = -4 * kPointerSize; |
1349 const int kArgsOffset = 2 * kPointerSize; | 1390 const int kArgsOffset = 2 * kPointerSize; |
1350 const int kRecvOffset = 3 * kPointerSize; | 1391 const int kRecvOffset = 3 * kPointerSize; |
1351 const int kFunctionOffset = 4 * kPointerSize; | 1392 const int kFunctionOffset = 4 * kPointerSize; |
1352 | 1393 |
1353 __ EnterInternalFrame(); | 1394 { |
| 1395 FrameScope frame_scope(masm, StackFrame::INTERNAL); |
| 1396 __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function. |
| 1397 __ push(a0); |
| 1398 __ lw(a0, MemOperand(fp, kArgsOffset)); // Get the args array. |
| 1399 __ push(a0); |
| 1400 // Returns (in v0) number of arguments to copy to stack as Smi. |
| 1401 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); |
1354 | 1402 |
1355 __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function. | 1403 // Check the stack for overflow. We are not trying to catch |
1356 __ push(a0); | 1404 // interruptions (e.g. debug break and preemption) here, so the "real stack |
1357 __ lw(a0, MemOperand(fp, kArgsOffset)); // Get the args array. | 1405 // limit" is checked. |
1358 __ push(a0); | 1406 Label okay; |
1359 // Returns (in v0) number of arguments to copy to stack as Smi. | 1407 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); |
1360 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); | 1408 // Make a2 the space we have left. The stack might already be overflowed |
| 1409 // here which will cause a2 to become negative. |
| 1410 __ subu(a2, sp, a2); |
| 1411 // Check if the arguments will overflow the stack. |
| 1412 __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize); |
| 1413 __ Branch(&okay, gt, a2, Operand(t3)); // Signed comparison. |
1361 | 1414 |
1362 // Check the stack for overflow. We are not trying need to catch | 1415 // Out of stack space. |
1363 // interruptions (e.g. debug break and preemption) here, so the "real stack | 1416 __ lw(a1, MemOperand(fp, kFunctionOffset)); |
1364 // limit" is checked. | 1417 __ push(a1); |
1365 Label okay; | 1418 __ push(v0); |
1366 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); | 1419 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); |
1367 // Make a2 the space we have left. The stack might already be overflowed | 1420 // End of stack check. |
1368 // here which will cause a2 to become negative. | |
1369 __ subu(a2, sp, a2); | |
1370 // Check if the arguments will overflow the stack. | |
1371 __ sll(t0, v0, kPointerSizeLog2 - kSmiTagSize); | |
1372 __ Branch(&okay, gt, a2, Operand(t0)); // Signed comparison. | |
1373 | 1421 |
1374 // Out of stack space. | 1422 // Push current limit and index. |
1375 __ lw(a1, MemOperand(fp, kFunctionOffset)); | 1423 __ bind(&okay); |
1376 __ push(a1); | 1424 __ push(v0); // Limit. |
1377 __ push(v0); | 1425 __ mov(a1, zero_reg); // Initial index. |
1378 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); | 1426 __ push(a1); |
1379 // End of stack check. | |
1380 | 1427 |
1381 // Push current limit and index. | 1428 // Get the receiver. |
1382 __ bind(&okay); | 1429 __ lw(a0, MemOperand(fp, kRecvOffset)); |
1383 __ push(v0); // Limit. | |
1384 __ mov(a1, zero_reg); // Initial index. | |
1385 __ push(a1); | |
1386 | 1430 |
1387 // Change context eagerly to get the right global object if necessary. | 1431 // Check that the function is a JS function (otherwise it must be a proxy). |
1388 __ lw(a0, MemOperand(fp, kFunctionOffset)); | 1432 Label push_receiver; |
1389 __ lw(cp, FieldMemOperand(a0, JSFunction::kContextOffset)); | 1433 __ lw(a1, MemOperand(fp, kFunctionOffset)); |
1390 // Load the shared function info while the function is still in a0. | 1434 __ GetObjectType(a1, a2, a2); |
1391 __ lw(a1, FieldMemOperand(a0, JSFunction::kSharedFunctionInfoOffset)); | 1435 __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE)); |
1392 | 1436 |
1393 // Compute the receiver. | 1437 // Change context eagerly to get the right global object if necessary. |
1394 Label call_to_object, use_global_receiver, push_receiver; | 1438 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); |
1395 __ lw(a0, MemOperand(fp, kRecvOffset)); | 1439 // Load the shared function info while the function is still in a1. |
| 1440 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
1396 | 1441 |
1397 // Do not transform the receiver for strict mode functions. | 1442 // Compute the receiver. |
1398 __ lw(a2, FieldMemOperand(a1, SharedFunctionInfo::kCompilerHintsOffset)); | 1443 // Do not transform the receiver for strict mode functions. |
1399 __ And(t0, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + | 1444 Label call_to_object, use_global_receiver; |
1400 kSmiTagSize))); | 1445 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset)); |
1401 __ Branch(&push_receiver, ne, t0, Operand(zero_reg)); | 1446 __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + |
| 1447 kSmiTagSize))); |
| 1448 __ Branch(&push_receiver, ne, t3, Operand(zero_reg)); |
1402 | 1449 |
1403 // Do not transform the receiver for native (Compilerhints already in a2). | 1450 // Do not transform the receiver for native (Compilerhints already in a2). |
1404 __ And(t0, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); | 1451 __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); |
1405 __ Branch(&push_receiver, ne, t0, Operand(zero_reg)); | 1452 __ Branch(&push_receiver, ne, t3, Operand(zero_reg)); |
1406 | 1453 |
1407 // Compute the receiver in non-strict mode. | 1454 // Compute the receiver in non-strict mode. |
1408 __ And(t0, a0, Operand(kSmiTagMask)); | 1455 __ And(t3, a0, Operand(kSmiTagMask)); |
1409 __ Branch(&call_to_object, eq, t0, Operand(zero_reg)); | 1456 __ Branch(&call_to_object, eq, t3, Operand(zero_reg)); |
1410 __ LoadRoot(a1, Heap::kNullValueRootIndex); | 1457 __ LoadRoot(a1, Heap::kNullValueRootIndex); |
1411 __ Branch(&use_global_receiver, eq, a0, Operand(a1)); | 1458 __ Branch(&use_global_receiver, eq, a0, Operand(a1)); |
1412 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | 1459 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); |
1413 __ Branch(&use_global_receiver, eq, a0, Operand(a2)); | 1460 __ Branch(&use_global_receiver, eq, a0, Operand(a2)); |
1414 | 1461 |
1415 // Check if the receiver is already a JavaScript object. | 1462 // Check if the receiver is already a JavaScript object. |
1416 // a0: receiver | 1463 // a0: receiver |
1417 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); | 1464 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
1418 __ GetObjectType(a0, a1, a1); | 1465 __ GetObjectType(a0, a1, a1); |
1419 __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE)); | 1466 __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE)); |
1420 | 1467 |
1421 // Convert the receiver to a regular object. | 1468 // Convert the receiver to a regular object. |
1422 // a0: receiver | 1469 // a0: receiver |
1423 __ bind(&call_to_object); | 1470 __ bind(&call_to_object); |
1424 __ push(a0); | 1471 __ push(a0); |
1425 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 1472 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
1426 __ mov(a0, v0); // Put object in a0 to match other paths to push_receiver. | 1473 __ mov(a0, v0); // Put object in a0 to match other paths to push_receiver. |
1427 __ Branch(&push_receiver); | 1474 __ Branch(&push_receiver); |
1428 | 1475 |
1429 // Use the current global receiver object as the receiver. | 1476 // Use the current global receiver object as the receiver. |
1430 __ bind(&use_global_receiver); | 1477 __ bind(&use_global_receiver); |
1431 const int kGlobalOffset = | 1478 const int kGlobalOffset = |
1432 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; | 1479 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; |
1433 __ lw(a0, FieldMemOperand(cp, kGlobalOffset)); | 1480 __ lw(a0, FieldMemOperand(cp, kGlobalOffset)); |
1434 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset)); | 1481 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset)); |
1435 __ lw(a0, FieldMemOperand(a0, kGlobalOffset)); | 1482 __ lw(a0, FieldMemOperand(a0, kGlobalOffset)); |
1436 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset)); | 1483 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset)); |
1437 | 1484 |
1438 // Push the receiver. | 1485 // Push the receiver. |
1439 // a0: receiver | 1486 // a0: receiver |
1440 __ bind(&push_receiver); | 1487 __ bind(&push_receiver); |
1441 __ push(a0); | 1488 __ push(a0); |
1442 | 1489 |
1443 // Copy all arguments from the array to the stack. | 1490 // Copy all arguments from the array to the stack. |
1444 Label entry, loop; | 1491 Label entry, loop; |
1445 __ lw(a0, MemOperand(fp, kIndexOffset)); | 1492 __ lw(a0, MemOperand(fp, kIndexOffset)); |
1446 __ Branch(&entry); | 1493 __ Branch(&entry); |
1447 | 1494 |
1448 // Load the current argument from the arguments array and push it to the | 1495 // Load the current argument from the arguments array and push it to the |
1449 // stack. | 1496 // stack. |
1450 // a0: current argument index | 1497 // a0: current argument index |
1451 __ bind(&loop); | 1498 __ bind(&loop); |
1452 __ lw(a1, MemOperand(fp, kArgsOffset)); | 1499 __ lw(a1, MemOperand(fp, kArgsOffset)); |
1453 __ push(a1); | 1500 __ push(a1); |
1454 __ push(a0); | 1501 __ push(a0); |
1455 | 1502 |
1456 // Call the runtime to access the property in the arguments array. | 1503 // Call the runtime to access the property in the arguments array. |
1457 __ CallRuntime(Runtime::kGetProperty, 2); | 1504 __ CallRuntime(Runtime::kGetProperty, 2); |
1458 __ push(v0); | 1505 __ push(v0); |
1459 | 1506 |
1460 // Use inline caching to access the arguments. | 1507 // Use inline caching to access the arguments. |
1461 __ lw(a0, MemOperand(fp, kIndexOffset)); | 1508 __ lw(a0, MemOperand(fp, kIndexOffset)); |
1462 __ Addu(a0, a0, Operand(1 << kSmiTagSize)); | 1509 __ Addu(a0, a0, Operand(1 << kSmiTagSize)); |
1463 __ sw(a0, MemOperand(fp, kIndexOffset)); | 1510 __ sw(a0, MemOperand(fp, kIndexOffset)); |
1464 | 1511 |
1465 // Test if the copy loop has finished copying all the elements from the | 1512 // Test if the copy loop has finished copying all the elements from the |
1466 // arguments object. | 1513 // arguments object. |
1467 __ bind(&entry); | 1514 __ bind(&entry); |
1468 __ lw(a1, MemOperand(fp, kLimitOffset)); | 1515 __ lw(a1, MemOperand(fp, kLimitOffset)); |
1469 __ Branch(&loop, ne, a0, Operand(a1)); | 1516 __ Branch(&loop, ne, a0, Operand(a1)); |
1470 // Invoke the function. | |
1471 ParameterCount actual(a0); | |
1472 __ sra(a0, a0, kSmiTagSize); | |
1473 __ lw(a1, MemOperand(fp, kFunctionOffset)); | |
1474 __ InvokeFunction(a1, actual, CALL_FUNCTION, | |
1475 NullCallWrapper(), CALL_AS_METHOD); | |
1476 | 1517 |
1477 // Tear down the internal frame and remove function, receiver and args. | 1518 // Invoke the function. |
1478 __ LeaveInternalFrame(); | 1519 Label call_proxy; |
1479 __ Addu(sp, sp, Operand(3 * kPointerSize)); | 1520 ParameterCount actual(a0); |
1480 __ Ret(); | 1521 __ sra(a0, a0, kSmiTagSize); |
| 1522 __ lw(a1, MemOperand(fp, kFunctionOffset)); |
| 1523 __ GetObjectType(a1, a2, a2); |
| 1524 __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE)); |
| 1525 |
| 1526 __ InvokeFunction(a1, actual, CALL_FUNCTION, |
| 1527 NullCallWrapper(), CALL_AS_METHOD); |
| 1528 |
| 1529 frame_scope.GenerateLeaveFrame(); |
| 1530 __ Ret(USE_DELAY_SLOT); |
| 1531 __ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot. |
| 1532 |
| 1533 // Invoke the function proxy. |
| 1534 __ bind(&call_proxy); |
| 1535 __ push(a1); // Add function proxy as last argument. |
| 1536 __ Addu(a0, a0, Operand(1)); |
| 1537 __ li(a2, Operand(0, RelocInfo::NONE)); |
| 1538 __ SetCallKind(t1, CALL_AS_METHOD); |
| 1539 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY); |
| 1540 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 1541 RelocInfo::CODE_TARGET); |
| 1542 |
| 1543 // Tear down the internal frame and remove function, receiver and args. |
| 1544 } |
| 1545 |
| 1546 __ Ret(USE_DELAY_SLOT); |
| 1547 __ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot. |
1481 } | 1548 } |
1482 | 1549 |
1483 | 1550 |
1484 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { | 1551 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { |
1485 __ sll(a0, a0, kSmiTagSize); | 1552 __ sll(a0, a0, kSmiTagSize); |
1486 __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 1553 __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
1487 __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit()); | 1554 __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit()); |
1488 __ Addu(fp, sp, Operand(3 * kPointerSize)); | 1555 __ Addu(fp, sp, Operand(3 * kPointerSize)); |
1489 } | 1556 } |
1490 | 1557 |
(...skipping 127 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1618 __ bind(&dont_adapt_arguments); | 1685 __ bind(&dont_adapt_arguments); |
1619 __ Jump(a3); | 1686 __ Jump(a3); |
1620 } | 1687 } |
1621 | 1688 |
1622 | 1689 |
1623 #undef __ | 1690 #undef __ |
1624 | 1691 |
1625 } } // namespace v8::internal | 1692 } } // namespace v8::internal |
1626 | 1693 |
1627 #endif // V8_TARGET_ARCH_MIPS | 1694 #endif // V8_TARGET_ARCH_MIPS |
OLD | NEW |