Chromium Code Reviews

Side by Side Diff: src/mips/builtins-mips.cc

Issue 7891042: Add asserts to ensure that we: (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 3 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 569 matching lines...)
580 __ And(t0, a3, Operand(kIsNotStringMask)); 580 __ And(t0, a3, Operand(kIsNotStringMask));
581 __ Branch(&convert_argument, ne, t0, Operand(zero_reg)); 581 __ Branch(&convert_argument, ne, t0, Operand(zero_reg));
582 __ mov(argument, a0); 582 __ mov(argument, a0);
583 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0); 583 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
584 __ Branch(&argument_is_string); 584 __ Branch(&argument_is_string);
585 585
586 // Invoke the conversion builtin and put the result into a2. 586 // Invoke the conversion builtin and put the result into a2.
587 __ bind(&convert_argument); 587 __ bind(&convert_argument);
588 __ push(function); // Preserve the function. 588 __ push(function); // Preserve the function.
589 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0); 589 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
590 __ EnterInternalFrame(); 590 {
591 __ push(v0); 591 FrameScope scope(masm, StackFrame::INTERNAL);
592 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); 592 __ push(v0);
593 __ LeaveInternalFrame(); 593 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
594 }
594 __ pop(function); 595 __ pop(function);
595 __ mov(argument, v0); 596 __ mov(argument, v0);
596 __ Branch(&argument_is_string); 597 __ Branch(&argument_is_string);
597 598
598 // Load the empty string into a2, remove the receiver from the 599 // Load the empty string into a2, remove the receiver from the
599 // stack, and jump back to the case where the argument is a string. 600 // stack, and jump back to the case where the argument is a string.
600 __ bind(&no_arguments); 601 __ bind(&no_arguments);
601 __ LoadRoot(argument, Heap::kEmptyStringRootIndex); 602 __ LoadRoot(argument, Heap::kEmptyStringRootIndex);
602 __ Drop(1); 603 __ Drop(1);
603 __ Branch(&argument_is_string); 604 __ Branch(&argument_is_string);
604 605
605 // At this point the argument is already a string. Call runtime to 606 // At this point the argument is already a string. Call runtime to
606 // create a string wrapper. 607 // create a string wrapper.
607 __ bind(&gc_required); 608 __ bind(&gc_required);
608 __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0); 609 __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0);
609 __ EnterInternalFrame(); 610 {
610 __ push(argument); 611 FrameScope scope(masm, StackFrame::INTERNAL);
611 __ CallRuntime(Runtime::kNewStringWrapper, 1); 612 __ push(argument);
612 __ LeaveInternalFrame(); 613 __ CallRuntime(Runtime::kNewStringWrapper, 1);
614 }
613 __ Ret(); 615 __ Ret();
614 } 616 }
615 617
616 618
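Note on the pattern above: the paired __ EnterInternalFrame() / __ LeaveInternalFrame() calls are replaced throughout this file by a block-scoped FrameScope, so the frame teardown is emitted where the scope closes instead of by an explicit call. The sketch below is a minimal, self-contained illustration of that RAII idea only; FakeAssembler and ScopedFrame are made-up stand-ins, not V8's MacroAssembler or FrameScope, and the real FrameScope presumably also records the frame on the assembler so that the new asserts in this issue can check for it.

#include <cstdio>

// Stand-in "assembler" so the sketch compiles on its own.
struct FakeAssembler {
  void EnterFrame(const char* type) { std::printf("enter %s frame\n", type); }
  void LeaveFrame(const char* type) { std::printf("leave %s frame\n", type); }
};

// Minimal RAII frame scope: entry is emitted in the constructor, exit in the
// destructor, so an epilogue can no longer be forgotten on any exit path.
class ScopedFrame {
 public:
  ScopedFrame(FakeAssembler* masm, const char* type) : masm_(masm), type_(type) {
    masm_->EnterFrame(type_);
  }
  ~ScopedFrame() { masm_->LeaveFrame(type_); }
 private:
  FakeAssembler* masm_;
  const char* type_;
};

int main() {
  FakeAssembler masm;
  {
    ScopedFrame scope(&masm, "INTERNAL");  // takes the place of __ EnterInternalFrame()
    // ... code generated inside the frame goes here ...
  }  // destructor runs here, taking the place of __ LeaveInternalFrame()
  return 0;
}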
617 void Builtins::Generate_JSConstructCall(MacroAssembler* masm) { 619 void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
618 // ----------- S t a t e ------------- 620 // ----------- S t a t e -------------
619 // -- a0 : number of arguments 621 // -- a0 : number of arguments
620 // -- a1 : constructor function 622 // -- a1 : constructor function
621 // -- ra : return address 623 // -- ra : return address
622 // -- sp[...]: constructor arguments 624 // -- sp[...]: constructor arguments
(...skipping 37 matching lines...)
660 Isolate* isolate = masm->isolate(); 662 Isolate* isolate = masm->isolate();
661 663
662 // ----------- S t a t e ------------- 664 // ----------- S t a t e -------------
663 // -- a0 : number of arguments 665 // -- a0 : number of arguments
664 // -- a1 : constructor function 666 // -- a1 : constructor function
665 // -- ra : return address 667 // -- ra : return address
666 // -- sp[...]: constructor arguments 668 // -- sp[...]: constructor arguments
667 // ----------------------------------- 669 // -----------------------------------
668 670
669 // Enter a construct frame. 671 // Enter a construct frame.
670 __ EnterConstructFrame(); 672 {
671 673 FrameScope scope(masm, StackFrame::CONSTRUCT);
672 // Preserve the two incoming parameters on the stack. 674
673 __ sll(a0, a0, kSmiTagSize); // Tag arguments count. 675 // Preserve the two incoming parameters on the stack.
674 __ MultiPushReversed(a0.bit() | a1.bit()); 676 __ sll(a0, a0, kSmiTagSize); // Tag arguments count.
675 677 __ MultiPushReversed(a0.bit() | a1.bit());
676 // Use t7 to hold undefined, which is used in several places below. 678
677 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex); 679 // Use t7 to hold undefined, which is used in several places below.
678 680 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
679 Label rt_call, allocated; 681
680 // Try to allocate the object without transitioning into C code. If any of the 682 Label rt_call, allocated;
681 // preconditions is not met, the code bails out to the runtime call. 683 // Try to allocate the object without transitioning into C code. If any of
682 if (FLAG_inline_new) { 684 // the preconditions is not met, the code bails out to the runtime call.
683 Label undo_allocation; 685 if (FLAG_inline_new) {
686 Label undo_allocation;
684 #ifdef ENABLE_DEBUGGER_SUPPORT 687 #ifdef ENABLE_DEBUGGER_SUPPORT
685 ExternalReference debug_step_in_fp = 688 ExternalReference debug_step_in_fp =
686 ExternalReference::debug_step_in_fp_address(isolate); 689 ExternalReference::debug_step_in_fp_address(isolate);
687 __ li(a2, Operand(debug_step_in_fp)); 690 __ li(a2, Operand(debug_step_in_fp));
688 __ lw(a2, MemOperand(a2)); 691 __ lw(a2, MemOperand(a2));
689 __ Branch(&rt_call, ne, a2, Operand(zero_reg)); 692 __ Branch(&rt_call, ne, a2, Operand(zero_reg));
690 #endif 693 #endif
691 694
692 // Load the initial map and verify that it is in fact a map. 695 // Load the initial map and verify that it is in fact a map.
696 // a1: constructor function
697 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
698 __ And(t0, a2, Operand(kSmiTagMask));
699 __ Branch(&rt_call, eq, t0, Operand(zero_reg));
700 __ GetObjectType(a2, a3, t4);
701 __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));
702
703 // Check that the constructor is not constructing a JSFunction (see
704 // comments in Runtime_NewObject in runtime.cc). In which case the
705 // initial map's instance type would be JS_FUNCTION_TYPE.
706 // a1: constructor function
707 // a2: initial map
708 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
709 __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE));
710
711 if (count_constructions) {
712 Label allocate;
713 // Decrease generous allocation count.
714 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
715 MemOperand constructor_count =
716 FieldMemOperand(a3, SharedFunctionInfo::kConstructionCountOffset);
717 __ lbu(t0, constructor_count);
718 __ Subu(t0, t0, Operand(1));
719 __ sb(t0, constructor_count);
720 __ Branch(&allocate, ne, t0, Operand(zero_reg));
721
722 __ Push(a1, a2);
723
724 __ push(a1); // Constructor.
725 // The call will replace the stub, so the countdown is only done once.
726 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
727
728 __ pop(a2);
729 __ pop(a1);
730
731 __ bind(&allocate);
732 }
733
734 // Now allocate the JSObject on the heap.
735 // a1: constructor function
736 // a2: initial map
737 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
738 __ AllocateInNewSpace(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS);
739
740 // Allocated the JSObject, now initialize the fields. Map is set to
741 // initial map and properties and elements are set to empty fixed array.
742 // a1: constructor function
743 // a2: initial map
744 // a3: object size
745 // t4: JSObject (not tagged)
746 __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
747 __ mov(t5, t4);
748 __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
749 __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
750 __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
751 __ Addu(t5, t5, Operand(3*kPointerSize));
752 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
753 ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
754 ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
755
756 // Fill all the in-object properties with appropriate filler.
757 // a1: constructor function
758 // a2: initial map
759 // a3: object size (in words)
760 // t4: JSObject (not tagged)
761 // t5: First in-object property of JSObject (not tagged)
762 __ sll(t0, a3, kPointerSizeLog2);
763 __ addu(t6, t4, t0); // End of object.
764 ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
765 { Label loop, entry;
766 if (count_constructions) {
767 // To allow for truncation.
768 __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
769 } else {
770 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
771 }
772 __ jmp(&entry);
773 __ bind(&loop);
774 __ sw(t7, MemOperand(t5, 0));
775 __ addiu(t5, t5, kPointerSize);
776 __ bind(&entry);
777 __ Branch(&loop, Uless, t5, Operand(t6));
778 }
779
780 // Add the object tag to make the JSObject real, so that we can continue
781 // and jump into the continuation code at any time from now on. Any
782 // failures need to undo the allocation, so that the heap is in a
783 // consistent state and verifiable.
784 __ Addu(t4, t4, Operand(kHeapObjectTag));
785
786 // Check if a non-empty properties array is needed. Continue with
787 // allocated object if not; fall through to runtime call if it is.
788 // a1: constructor function
789 // t4: JSObject
790 // t5: start of next object (not tagged)
791 __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
792 // The field instance sizes contains both pre-allocated property fields
793 // and in-object properties.
794 __ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset));
795 __ And(t6,
796 a0,
797 Operand(0x000000FF << Map::kPreAllocatedPropertyFieldsByte * 8));
798 __ srl(t0, t6, Map::kPreAllocatedPropertyFieldsByte * 8);
799 __ Addu(a3, a3, Operand(t0));
800 __ And(t6, a0, Operand(0x000000FF << Map::kInObjectPropertiesByte * 8));
801 __ srl(t0, t6, Map::kInObjectPropertiesByte * 8);
802 __ subu(a3, a3, t0);
803
804 // Done if no extra properties are to be allocated.
805 __ Branch(&allocated, eq, a3, Operand(zero_reg));
806 __ Assert(greater_equal, "Property allocation count failed.",
807 a3, Operand(zero_reg));
808
809 // Scale the number of elements by pointer size and add the header for
810 // FixedArrays to the start of the next object calculation from above.
811 // a1: constructor
812 // a3: number of elements in properties array
813 // t4: JSObject
814 // t5: start of next object
815 __ Addu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize));
816 __ AllocateInNewSpace(
817 a0,
818 t5,
819 t6,
820 a2,
821 &undo_allocation,
822 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
823
824 // Initialize the FixedArray.
825 // a1: constructor
826 // a3: number of elements in properties array (un-tagged)
827 // t4: JSObject
828 // t5: start of next object
829 __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex);
830 __ mov(a2, t5);
831 __ sw(t6, MemOperand(a2, JSObject::kMapOffset));
832 __ sll(a0, a3, kSmiTagSize);
833 __ sw(a0, MemOperand(a2, FixedArray::kLengthOffset));
834 __ Addu(a2, a2, Operand(2 * kPointerSize));
835
836 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
837 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
838
839 // Initialize the fields to undefined.
840 // a1: constructor
841 // a2: First element of FixedArray (not tagged)
842 // a3: number of elements in properties array
843 // t4: JSObject
844 // t5: FixedArray (not tagged)
845 __ sll(t3, a3, kPointerSizeLog2);
846 __ addu(t6, a2, t3); // End of object.
847 ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
848 { Label loop, entry;
849 if (count_constructions) {
850 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
851 } else if (FLAG_debug_code) {
852 __ LoadRoot(t8, Heap::kUndefinedValueRootIndex);
853 __ Assert(eq, "Undefined value not loaded.", t7, Operand(t8));
854 }
855 __ jmp(&entry);
856 __ bind(&loop);
857 __ sw(t7, MemOperand(a2));
858 __ addiu(a2, a2, kPointerSize);
859 __ bind(&entry);
860 __ Branch(&loop, less, a2, Operand(t6));
861 }
862
863 // Store the initialized FixedArray into the properties field of
864 // the JSObject.
865 // a1: constructor function
866 // t4: JSObject
867 // t5: FixedArray (not tagged)
868 __ Addu(t5, t5, Operand(kHeapObjectTag)); // Add the heap tag.
869 __ sw(t5, FieldMemOperand(t4, JSObject::kPropertiesOffset));
870
871 // Continue with JSObject being successfully allocated.
872 // a1: constructor function
873 // t4: JSObject
874 __ jmp(&allocated);
875
876 // Undo the setting of the new top so that the heap is verifiable. For
877 // example, the map's unused properties potentially do not match the
878 // allocated objects unused properties.
879 // t4: JSObject (previous new top)
880 __ bind(&undo_allocation);
881 __ UndoAllocationInNewSpace(t4, t5);
882 }
883
884 __ bind(&rt_call);
885 // Allocate the new receiver object using the runtime call.
693 // a1: constructor function 886 // a1: constructor function
694 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); 887 __ push(a1); // Argument for Runtime_NewObject.
695 __ And(t0, a2, Operand(kSmiTagMask)); 888 __ CallRuntime(Runtime::kNewObject, 1);
696 __ Branch(&rt_call, eq, t0, Operand(zero_reg)); 889 __ mov(t4, v0);
697 __ GetObjectType(a2, a3, t4); 890
698 __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE)); 891 // Receiver for constructor call allocated.
699 892 // t4: JSObject
700 // Check that the constructor is not constructing a JSFunction (see comments 893 __ bind(&allocated);
701 // in Runtime_NewObject in runtime.cc). In which case the initial map's 894 __ push(t4);
702 // instance type would be JS_FUNCTION_TYPE. 895
896 // Push the function and the allocated receiver from the stack.
897 // sp[0]: receiver (newly allocated object)
898 // sp[1]: constructor function
899 // sp[2]: number of arguments (smi-tagged)
900 __ lw(a1, MemOperand(sp, kPointerSize));
901 __ MultiPushReversed(a1.bit() | t4.bit());
902
903 // Reload the number of arguments from the stack.
703 // a1: constructor function 904 // a1: constructor function
704 // a2: initial map 905 // sp[0]: receiver
705 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset)); 906 // sp[1]: constructor function
706 __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE)); 907 // sp[2]: receiver
707 908 // sp[3]: constructor function
708 if (count_constructions) { 909 // sp[4]: number of arguments (smi-tagged)
709 Label allocate; 910 __ lw(a3, MemOperand(sp, 4 * kPointerSize));
710 // Decrease generous allocation count. 911
711 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); 912 // Setup pointer to last argument.
712 MemOperand constructor_count = 913 __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
713 FieldMemOperand(a3, SharedFunctionInfo::kConstructionCountOffset); 914
714 __ lbu(t0, constructor_count); 915 // Setup number of arguments for function call below.
715 __ Subu(t0, t0, Operand(1)); 916 __ srl(a0, a3, kSmiTagSize);
716 __ sb(t0, constructor_count); 917
717 __ Branch(&allocate, ne, t0, Operand(zero_reg)); 918 // Copy arguments and receiver to the expression stack.
718 919 // a0: number of arguments
719 __ Push(a1, a2); 920 // a1: constructor function
720 921 // a2: address of last argument (caller sp)
721 __ push(a1); // Constructor. 922 // a3: number of arguments (smi-tagged)
722 // The call will replace the stub, so the countdown is only done once. 923 // sp[0]: receiver
723 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); 924 // sp[1]: constructor function
724 925 // sp[2]: receiver
725 __ pop(a2); 926 // sp[3]: constructor function
726 __ pop(a1); 927 // sp[4]: number of arguments (smi-tagged)
727 928 Label loop, entry;
728 __ bind(&allocate); 929 __ jmp(&entry);
930 __ bind(&loop);
931 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
932 __ Addu(t0, a2, Operand(t0));
933 __ lw(t1, MemOperand(t0));
934 __ push(t1);
935 __ bind(&entry);
936 __ Addu(a3, a3, Operand(-2));
937 __ Branch(&loop, greater_equal, a3, Operand(zero_reg));
938
939 // Call the function.
940 // a0: number of arguments
941 // a1: constructor function
942 if (is_api_function) {
943 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
944 Handle<Code> code =
945 masm->isolate()->builtins()->HandleApiCallConstruct();
946 ParameterCount expected(0);
947 __ InvokeCode(code, expected, expected,
948 RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
949 } else {
950 ParameterCount actual(a0);
951 __ InvokeFunction(a1, actual, CALL_FUNCTION,
952 NullCallWrapper(), CALL_AS_METHOD);
729 } 953 }
730 954
731 // Now allocate the JSObject on the heap. 955 // Pop the function from the stack.
732 // a1: constructor function 956 // v0: result
733 // a2: initial map 957 // sp[0]: constructor function
734 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset)); 958 // sp[2]: receiver
735 __ AllocateInNewSpace(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS); 959 // sp[3]: constructor function
736 960 // sp[4]: number of arguments (smi-tagged)
737 // Allocated the JSObject, now initialize the fields. Map is set to initial 961 __ Pop();
738 // map and properties and elements are set to empty fixed array. 962
739 // a1: constructor function 963 // Restore context from the frame.
740 // a2: initial map 964 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
741 // a3: object size 965
742 // t4: JSObject (not tagged) 966 // If the result is an object (in the ECMA sense), we should get rid
743 __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex); 967 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
744 __ mov(t5, t4); 968 // on page 74.
745 __ sw(a2, MemOperand(t5, JSObject::kMapOffset)); 969 Label use_receiver, exit;
746 __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset)); 970
747 __ sw(t6, MemOperand(t5, JSObject::kElementsOffset)); 971 // If the result is a smi, it is *not* an object in the ECMA sense.
748 __ Addu(t5, t5, Operand(3*kPointerSize)); 972 // v0: result
749 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset); 973 // sp[0]: receiver (newly allocated object)
750 ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset); 974 // sp[1]: constructor function
751 ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset); 975 // sp[2]: number of arguments (smi-tagged)
752 976 __ And(t0, v0, Operand(kSmiTagMask));
753 // Fill all the in-object properties with appropriate filler. 977 __ Branch(&use_receiver, eq, t0, Operand(zero_reg));
754 // a1: constructor function 978
755 // a2: initial map 979 // If the type of the result (stored in its map) is less than
756 // a3: object size (in words) 980 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
757 // t4: JSObject (not tagged) 981 __ GetObjectType(v0, a3, a3);
758 // t5: First in-object property of JSObject (not tagged) 982 __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
759 __ sll(t0, a3, kPointerSizeLog2); 983
760 __ addu(t6, t4, t0); // End of object. 984 // Throw away the result of the constructor invocation and use the
761 ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize); 985 // on-stack receiver as the result.
762 { Label loop, entry; 986 __ bind(&use_receiver);
763 if (count_constructions) { 987 __ lw(v0, MemOperand(sp));
764 // To allow for truncation. 988
765 __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex); 989 // Remove receiver from the stack, remove caller arguments, and
766 } else { 990 // return.
767 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex); 991 __ bind(&exit);
768 } 992 // v0: result
769 __ jmp(&entry); 993 // sp[0]: receiver (newly allocated object)
770 __ bind(&loop); 994 // sp[1]: constructor function
771 __ sw(t7, MemOperand(t5, 0)); 995 // sp[2]: number of arguments (smi-tagged)
772 __ addiu(t5, t5, kPointerSize); 996 __ lw(a1, MemOperand(sp, 2 * kPointerSize));
773 __ bind(&entry); 997
774 __ Branch(&loop, Uless, t5, Operand(t6)); 998 // Leave construct frame.
775 }
776
777 // Add the object tag to make the JSObject real, so that we can continue and
778 // jump into the continuation code at any time from now on. Any failures
779 // need to undo the allocation, so that the heap is in a consistent state
780 // and verifiable.
781 __ Addu(t4, t4, Operand(kHeapObjectTag));
782
783 // Check if a non-empty properties array is needed. Continue with allocated
784 // object if not; fall through to runtime call if it is.
785 // a1: constructor function
786 // t4: JSObject
787 // t5: start of next object (not tagged)
788 __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
789 // The field instance sizes contains both pre-allocated property fields and
790 // in-object properties.
791 __ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset));
792 __ And(t6,
793 a0,
794 Operand(0x000000FF << Map::kPreAllocatedPropertyFieldsByte * 8));
795 __ srl(t0, t6, Map::kPreAllocatedPropertyFieldsByte * 8);
796 __ Addu(a3, a3, Operand(t0));
797 __ And(t6, a0, Operand(0x000000FF << Map::kInObjectPropertiesByte * 8));
798 __ srl(t0, t6, Map::kInObjectPropertiesByte * 8);
799 __ subu(a3, a3, t0);
800
801 // Done if no extra properties are to be allocated.
802 __ Branch(&allocated, eq, a3, Operand(zero_reg));
803 __ Assert(greater_equal, "Property allocation count failed.",
804 a3, Operand(zero_reg));
805
806 // Scale the number of elements by pointer size and add the header for
807 // FixedArrays to the start of the next object calculation from above.
808 // a1: constructor
809 // a3: number of elements in properties array
810 // t4: JSObject
811 // t5: start of next object
812 __ Addu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize));
813 __ AllocateInNewSpace(
814 a0,
815 t5,
816 t6,
817 a2,
818 &undo_allocation,
819 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
820
821 // Initialize the FixedArray.
822 // a1: constructor
823 // a3: number of elements in properties array (un-tagged)
824 // t4: JSObject
825 // t5: start of next object
826 __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex);
827 __ mov(a2, t5);
828 __ sw(t6, MemOperand(a2, JSObject::kMapOffset));
829 __ sll(a0, a3, kSmiTagSize);
830 __ sw(a0, MemOperand(a2, FixedArray::kLengthOffset));
831 __ Addu(a2, a2, Operand(2 * kPointerSize));
832
833 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
834 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
835
836 // Initialize the fields to undefined.
837 // a1: constructor
838 // a2: First element of FixedArray (not tagged)
839 // a3: number of elements in properties array
840 // t4: JSObject
841 // t5: FixedArray (not tagged)
842 __ sll(t3, a3, kPointerSizeLog2);
843 __ addu(t6, a2, t3); // End of object.
844 ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
845 { Label loop, entry;
846 if (count_constructions) {
847 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
848 } else if (FLAG_debug_code) {
849 __ LoadRoot(t8, Heap::kUndefinedValueRootIndex);
850 __ Assert(eq, "Undefined value not loaded.", t7, Operand(t8));
851 }
852 __ jmp(&entry);
853 __ bind(&loop);
854 __ sw(t7, MemOperand(a2));
855 __ addiu(a2, a2, kPointerSize);
856 __ bind(&entry);
857 __ Branch(&loop, less, a2, Operand(t6));
858 }
859
860 // Store the initialized FixedArray into the properties field of
861 // the JSObject.
862 // a1: constructor function
863 // t4: JSObject
864 // t5: FixedArray (not tagged)
865 __ Addu(t5, t5, Operand(kHeapObjectTag)); // Add the heap tag.
866 __ sw(t5, FieldMemOperand(t4, JSObject::kPropertiesOffset));
867
868 // Continue with JSObject being successfully allocated.
869 // a1: constructor function
870 // t4: JSObject
871 __ jmp(&allocated);
872
873 // Undo the setting of the new top so that the heap is verifiable. For
874 // example, the map's unused properties potentially do not match the
875 // allocated objects unused properties.
876 // t4: JSObject (previous new top)
877 __ bind(&undo_allocation);
878 __ UndoAllocationInNewSpace(t4, t5);
879 } 999 }
880 1000
881 __ bind(&rt_call);
882 // Allocate the new receiver object using the runtime call.
883 // a1: constructor function
884 __ push(a1); // Argument for Runtime_NewObject.
885 __ CallRuntime(Runtime::kNewObject, 1);
886 __ mov(t4, v0);
887
888 // Receiver for constructor call allocated.
889 // t4: JSObject
890 __ bind(&allocated);
891 __ push(t4);
892
893 // Push the function and the allocated receiver from the stack.
894 // sp[0]: receiver (newly allocated object)
895 // sp[1]: constructor function
896 // sp[2]: number of arguments (smi-tagged)
897 __ lw(a1, MemOperand(sp, kPointerSize));
898 __ MultiPushReversed(a1.bit() | t4.bit());
899
900 // Reload the number of arguments from the stack.
901 // a1: constructor function
902 // sp[0]: receiver
903 // sp[1]: constructor function
904 // sp[2]: receiver
905 // sp[3]: constructor function
906 // sp[4]: number of arguments (smi-tagged)
907 __ lw(a3, MemOperand(sp, 4 * kPointerSize));
908
909 // Setup pointer to last argument.
910 __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
911
912 // Setup number of arguments for function call below.
913 __ srl(a0, a3, kSmiTagSize);
914
915 // Copy arguments and receiver to the expression stack.
916 // a0: number of arguments
917 // a1: constructor function
918 // a2: address of last argument (caller sp)
919 // a3: number of arguments (smi-tagged)
920 // sp[0]: receiver
921 // sp[1]: constructor function
922 // sp[2]: receiver
923 // sp[3]: constructor function
924 // sp[4]: number of arguments (smi-tagged)
925 Label loop, entry;
926 __ jmp(&entry);
927 __ bind(&loop);
928 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
929 __ Addu(t0, a2, Operand(t0));
930 __ lw(t1, MemOperand(t0));
931 __ push(t1);
932 __ bind(&entry);
933 __ Addu(a3, a3, Operand(-2));
934 __ Branch(&loop, greater_equal, a3, Operand(zero_reg));
935
936 // Call the function.
937 // a0: number of arguments
938 // a1: constructor function
939 if (is_api_function) {
940 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
941 Handle<Code> code =
942 masm->isolate()->builtins()->HandleApiCallConstruct();
943 ParameterCount expected(0);
944 __ InvokeCode(code, expected, expected,
945 RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
946 } else {
947 ParameterCount actual(a0);
948 __ InvokeFunction(a1, actual, CALL_FUNCTION,
949 NullCallWrapper(), CALL_AS_METHOD);
950 }
951
952 // Pop the function from the stack.
953 // v0: result
954 // sp[0]: constructor function
955 // sp[2]: receiver
956 // sp[3]: constructor function
957 // sp[4]: number of arguments (smi-tagged)
958 __ Pop();
959
960 // Restore context from the frame.
961 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
962
963 // If the result is an object (in the ECMA sense), we should get rid
964 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
965 // on page 74.
966 Label use_receiver, exit;
967
968 // If the result is a smi, it is *not* an object in the ECMA sense.
969 // v0: result
970 // sp[0]: receiver (newly allocated object)
971 // sp[1]: constructor function
972 // sp[2]: number of arguments (smi-tagged)
973 __ And(t0, v0, Operand(kSmiTagMask));
974 __ Branch(&use_receiver, eq, t0, Operand(zero_reg));
975
976 // If the type of the result (stored in its map) is less than
977 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
978 __ GetObjectType(v0, a3, a3);
979 __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
980
981 // Throw away the result of the constructor invocation and use the
982 // on-stack receiver as the result.
983 __ bind(&use_receiver);
984 __ lw(v0, MemOperand(sp));
985
986 // Remove receiver from the stack, remove caller arguments, and
987 // return.
988 __ bind(&exit);
989 // v0: result
990 // sp[0]: receiver (newly allocated object)
991 // sp[1]: constructor function
992 // sp[2]: number of arguments (smi-tagged)
993 __ lw(a1, MemOperand(sp, 2 * kPointerSize));
994 __ LeaveConstructFrame();
995 __ sll(t0, a1, kPointerSizeLog2 - 1); 1001 __ sll(t0, a1, kPointerSizeLog2 - 1);
996 __ Addu(sp, sp, t0); 1002 __ Addu(sp, sp, t0);
997 __ Addu(sp, sp, kPointerSize); 1003 __ Addu(sp, sp, kPointerSize);
998 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2); 1004 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
999 __ Ret(); 1005 __ Ret();
1000 } 1006 }
1001 1007
1002 1008
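After the construct frame above is left, the stub drops the caller's arguments and the receiver. The count reloaded into a1 is smi-tagged, so shifting left by kPointerSizeLog2 - 1 turns it directly into a byte offset. A small sketch of that arithmetic, assuming the 32-bit MIPS values kPointerSize == 4, kPointerSizeLog2 == 2 and kSmiTagSize == 1 (the values themselves are not spelled out in the diff):

#include <cassert>
#include <cstdint>

int main() {
  // Assumed 32-bit MIPS constants:
  const uint32_t kPointerSizeLog2 = 2;             // 4-byte pointers
  const uint32_t kPointerSize = 1u << kPointerSizeLog2;
  const uint32_t kSmiTagSize = 1;                  // smis carry a 1-bit tag

  uint32_t argc = 3;                               // untagged argument count
  uint32_t a1 = argc << kSmiTagSize;               // smi-tagged count reloaded from the frame
  uint32_t sp = 0x7fff0000u;                       // arbitrary stack pointer

  uint32_t t0 = a1 << (kPointerSizeLog2 - 1);      // __ sll(t0, a1, kPointerSizeLog2 - 1);
  sp += t0;                                        // __ Addu(sp, sp, t0);           drop the arguments
  sp += kPointerSize;                              // __ Addu(sp, sp, kPointerSize); drop the receiver

  assert(sp == 0x7fff0000u + (argc + 1) * kPointerSize);
  return 0;
}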
1003 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { 1009 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
1004 Generate_JSConstructStubHelper(masm, false, true); 1010 Generate_JSConstructStubHelper(masm, false, true);
(...skipping 19 matching lines...)
1024 // -- a1: function 1030 // -- a1: function
1025 // -- a2: receiver_pointer 1031 // -- a2: receiver_pointer
1026 // -- a3: argc 1032 // -- a3: argc
1027 // -- s0: argv 1033 // -- s0: argv
1028 // ----------------------------------- 1034 // -----------------------------------
1029 1035
1030 // Clear the context before we push it when entering the JS frame. 1036 // Clear the context before we push it when entering the JS frame.
1031 __ mov(cp, zero_reg); 1037 __ mov(cp, zero_reg);
1032 1038
1033 // Enter an internal frame. 1039 // Enter an internal frame.
1034 __ EnterInternalFrame(); 1040 {
1041 FrameScope scope(masm, StackFrame::INTERNAL);
1035 1042
1036 // Set up the context from the function argument. 1043 // Set up the context from the function argument.
1037 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); 1044 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
1038 1045
1039 // Set up the roots register. 1046 // Set up the roots register.
1040 ExternalReference roots_address = 1047 ExternalReference roots_address =
1041 ExternalReference::roots_address(masm->isolate()); 1048 ExternalReference::roots_address(masm->isolate());
1042 __ li(s6, Operand(roots_address)); 1049 __ li(s6, Operand(roots_address));
1043 1050
1044 // Push the function and the receiver onto the stack. 1051 // Push the function and the receiver onto the stack.
1045 __ Push(a1, a2); 1052 __ Push(a1, a2);
1046 1053
1047 // Copy arguments to the stack in a loop. 1054 // Copy arguments to the stack in a loop.
1048 // a3: argc 1055 // a3: argc
1049 // s0: argv, ie points to first arg 1056 // s0: argv, ie points to first arg
1050 Label loop, entry; 1057 Label loop, entry;
1051 __ sll(t0, a3, kPointerSizeLog2); 1058 __ sll(t0, a3, kPointerSizeLog2);
1052 __ addu(t2, s0, t0); 1059 __ addu(t2, s0, t0);
1053 __ b(&entry); 1060 __ b(&entry);
1054 __ nop(); // Branch delay slot nop. 1061 __ nop(); // Branch delay slot nop.
1055 // t2 points past last arg. 1062 // t2 points past last arg.
1056 __ bind(&loop); 1063 __ bind(&loop);
1057 __ lw(t0, MemOperand(s0)); // Read next parameter. 1064 __ lw(t0, MemOperand(s0)); // Read next parameter.
1058 __ addiu(s0, s0, kPointerSize); 1065 __ addiu(s0, s0, kPointerSize);
1059 __ lw(t0, MemOperand(t0)); // Dereference handle. 1066 __ lw(t0, MemOperand(t0)); // Dereference handle.
1060 __ push(t0); // Push parameter. 1067 __ push(t0); // Push parameter.
1061 __ bind(&entry); 1068 __ bind(&entry);
1062 __ Branch(&loop, ne, s0, Operand(t2)); 1069 __ Branch(&loop, ne, s0, Operand(t2));
1063 1070
1064 // Initialize all JavaScript callee-saved registers, since they will be seen 1071 // Initialize all JavaScript callee-saved registers, since they will be seen
1065 // by the garbage collector as part of handlers. 1072 // by the garbage collector as part of handlers.
1066 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); 1073 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
1067 __ mov(s1, t0); 1074 __ mov(s1, t0);
1068 __ mov(s2, t0); 1075 __ mov(s2, t0);
1069 __ mov(s3, t0); 1076 __ mov(s3, t0);
1070 __ mov(s4, t0); 1077 __ mov(s4, t0);
1071 __ mov(s5, t0); 1078 __ mov(s5, t0);
1072 // s6 holds the root address. Do not clobber. 1079 // s6 holds the root address. Do not clobber.
1073 // s7 is cp. Do not init. 1080 // s7 is cp. Do not init.
1074 1081
1075 // Invoke the code and pass argc as a0. 1082 // Invoke the code and pass argc as a0.
1076 __ mov(a0, a3); 1083 __ mov(a0, a3);
1077 if (is_construct) { 1084 if (is_construct) {
1078 __ Call(masm->isolate()->builtins()->JSConstructCall()); 1085 __ Call(masm->isolate()->builtins()->JSConstructCall());
1079 } else { 1086 } else {
1080 ParameterCount actual(a0); 1087 ParameterCount actual(a0);
1081 __ InvokeFunction(a1, actual, CALL_FUNCTION, 1088 __ InvokeFunction(a1, actual, CALL_FUNCTION,
1082 NullCallWrapper(), CALL_AS_METHOD); 1089 NullCallWrapper(), CALL_AS_METHOD);
1090 }
1091
1092 // Leave internal frame.
1083 } 1093 }
1084 1094
1085 __ LeaveInternalFrame();
1086
1087 __ Jump(ra); 1095 __ Jump(ra);
1088 } 1096 }
1089 1097
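In the entry trampoline above, s0 (argv) holds handles rather than raw values, which is why each parameter is loaded twice: once to read the handle out of argv and once to dereference it before pushing. A rough model of that copy loop; Tagged, Handle and CopyArguments are illustrative names, not V8 API:

#include <cstdint>
#include <vector>

using Tagged = uintptr_t;
using Handle = Tagged*;             // each argv slot points at a cell holding the value

std::vector<Tagged> CopyArguments(Handle* argv, int argc) {
  std::vector<Tagged> stack;        // stands in for the expression stack being pushed to
  for (int i = 0; i < argc; ++i) {  // __ Branch(&loop, ne, s0, Operand(t2));
    Tagged value = *argv[i];        // __ lw(t0, MemOperand(s0)); __ lw(t0, MemOperand(t0));
    stack.push_back(value);         // __ push(t0);
  }
  return stack;
}

int main() {
  Tagged cell_a = 1, cell_b = 2;
  Handle argv[] = { &cell_a, &cell_b };
  std::vector<Tagged> stack = CopyArguments(argv, 2);
  return stack.size() == 2 ? 0 : 1;
}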
1090 1098
1091 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { 1099 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
1092 Generate_JSEntryTrampolineHelper(masm, false); 1100 Generate_JSEntryTrampolineHelper(masm, false);
1093 } 1101 }
1094 1102
1095 1103
1096 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { 1104 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
1097 Generate_JSEntryTrampolineHelper(masm, true); 1105 Generate_JSEntryTrampolineHelper(masm, true);
1098 } 1106 }
1099 1107
1100 1108
1101 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { 1109 void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
1102 // Enter an internal frame. 1110 // Enter an internal frame.
1103 __ EnterInternalFrame(); 1111 {
1112 FrameScope scope(masm, StackFrame::INTERNAL);
1104 1113
1105 // Preserve the function. 1114 // Preserve the function.
1106 __ push(a1); 1115 __ push(a1);
1107 // Push call kind information. 1116 // Push call kind information.
1108 __ push(t1); 1117 __ push(t1);
1109 1118
1110 // Push the function on the stack as the argument to the runtime function. 1119 // Push the function on the stack as the argument to the runtime function.
1111 __ push(a1); 1120 __ push(a1);
1112 // Call the runtime function. 1121 // Call the runtime function.
1113 __ CallRuntime(Runtime::kLazyCompile, 1); 1122 __ CallRuntime(Runtime::kLazyCompile, 1);
1114 // Calculate the entry point. 1123 // Calculate the entry point.
1115 __ addiu(t9, v0, Code::kHeaderSize - kHeapObjectTag); 1124 __ addiu(t9, v0, Code::kHeaderSize - kHeapObjectTag);
1116 1125
1117 // Restore call kind information. 1126 // Restore call kind information.
1118 __ pop(t1); 1127 __ pop(t1);
1119 // Restore saved function. 1128 // Restore saved function.
1120 __ pop(a1); 1129 __ pop(a1);
1121 1130
1122 // Tear down temporary frame. 1131 // Tear down temporary frame.
1123 __ LeaveInternalFrame(); 1132 }
1124 1133
1125 // Do a tail-call of the compiled function. 1134 // Do a tail-call of the compiled function.
1126 __ Jump(t9); 1135 __ Jump(t9);
1127 } 1136 }
1128 1137
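Generate_LazyCompile tail-calls the code object the runtime returns: the entry point is the tagged Code pointer plus Code::kHeaderSize minus kHeapObjectTag, i.e. the first instruction past the untagged header. A hedged sketch of that formula; only the formula itself comes from the code above, and the constant values used below are illustrative assumptions:

#include <cassert>
#include <cstdint>

uintptr_t CodeEntry(uintptr_t tagged_code, uintptr_t header_size, uintptr_t heap_tag) {
  return tagged_code + header_size - heap_tag;  // __ addiu(t9, v0, Code::kHeaderSize - kHeapObjectTag);
}

int main() {
  const uintptr_t kHeapObjectTag = 1;    // assumed: heap pointers carry a 1-bit tag
  const uintptr_t kCodeHeaderSize = 64;  // hypothetical header size, for illustration only
  uintptr_t code = 0x10000000u + kHeapObjectTag;  // a tagged Code pointer
  assert(CodeEntry(code, kCodeHeaderSize, kHeapObjectTag) == 0x10000000u + kCodeHeaderSize);
  return 0;
}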
1129 1138
1130 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { 1139 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
1131 // Enter an internal frame. 1140 // Enter an internal frame.
1132 __ EnterInternalFrame(); 1141 {
1142 FrameScope scope(masm, StackFrame::INTERNAL);
1133 1143
1134 // Preserve the function. 1144 // Preserve the function.
1135 __ push(a1); 1145 __ push(a1);
1136 // Push call kind information. 1146 // Push call kind information.
1137 __ push(t1); 1147 __ push(t1);
1138 1148
1139 // Push the function on the stack as the argument to the runtime function. 1149 // Push the function on the stack as the argument to the runtime function.
1140 __ push(a1); 1150 __ push(a1);
1141 __ CallRuntime(Runtime::kLazyRecompile, 1); 1151 __ CallRuntime(Runtime::kLazyRecompile, 1);
1142 // Calculate the entry point. 1152 // Calculate the entry point.
1143 __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); 1153 __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
1144 1154
1145 // Restore call kind information. 1155 // Restore call kind information.
1146 __ pop(t1); 1156 __ pop(t1);
1147 // Restore saved function. 1157 // Restore saved function.
1148 __ pop(a1); 1158 __ pop(a1);
1149 1159
1150 // Tear down temporary frame. 1160 // Tear down temporary frame.
1151 __ LeaveInternalFrame(); 1161 }
1152 1162
1153 // Do a tail-call of the compiled function. 1163 // Do a tail-call of the compiled function.
1154 __ Jump(t9); 1164 __ Jump(t9);
1155 } 1165 }
1156 1166
1157 1167
1158 // These functions are called from C++ but cannot be used in live code. 1168 // These functions are called from C++ but cannot be used in live code.
1159 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { 1169 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1160 __ Abort("Call to unimplemented function in builtins-mips.cc"); 1170 __ Abort("Call to unimplemented function in builtins-mips.cc");
1161 } 1171 }
(...skipping 69 matching lines...)
1231 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); 1241 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
1232 __ Branch(&use_global_receiver, eq, a2, Operand(a3)); 1242 __ Branch(&use_global_receiver, eq, a2, Operand(a3));
1233 __ LoadRoot(a3, Heap::kNullValueRootIndex); 1243 __ LoadRoot(a3, Heap::kNullValueRootIndex);
1234 __ Branch(&use_global_receiver, eq, a2, Operand(a3)); 1244 __ Branch(&use_global_receiver, eq, a2, Operand(a3));
1235 1245
1236 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); 1246 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1237 __ GetObjectType(a2, a3, a3); 1247 __ GetObjectType(a2, a3, a3);
1238 __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE)); 1248 __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
1239 1249
1240 __ bind(&convert_to_object); 1250 __ bind(&convert_to_object);
1241 __ EnterInternalFrame(); // In order to preserve argument count. 1251 // Enter an internal frame in order to preserve argument count.
1242 __ sll(a0, a0, kSmiTagSize); // Smi tagged. 1252 {
1243 __ push(a0); 1253 FrameScope scope(masm, StackFrame::INTERNAL);
1254 __ sll(a0, a0, kSmiTagSize); // Smi tagged.
1255 __ push(a0);
1244 1256
1245 __ push(a2); 1257 __ push(a2);
1246 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1258 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1247 __ mov(a2, v0); 1259 __ mov(a2, v0);
1248 1260
1249 __ pop(a0); 1261 __ pop(a0);
1250 __ sra(a0, a0, kSmiTagSize); // Un-tag. 1262 __ sra(a0, a0, kSmiTagSize); // Un-tag.
1251 __ LeaveInternalFrame(); 1263 // Leave internal frame.
1264 }
1252 // Restore the function to a1. 1265 // Restore the function to a1.
1253 __ sll(at, a0, kPointerSizeLog2); 1266 __ sll(at, a0, kPointerSizeLog2);
1254 __ addu(at, sp, at); 1267 __ addu(at, sp, at);
1255 __ lw(a1, MemOperand(at)); 1268 __ lw(a1, MemOperand(at));
1256 __ Branch(&patch_receiver); 1269 __ Branch(&patch_receiver);
1257 1270
1258 // Use the global receiver object from the called function as the 1271 // Use the global receiver object from the called function as the
1259 // receiver. 1272 // receiver.
1260 __ bind(&use_global_receiver); 1273 __ bind(&use_global_receiver);
1261 const int kGlobalIndex = 1274 const int kGlobalIndex =
(...skipping 81 matching lines...)
1343 } 1356 }
1344 1357
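In Generate_FunctionCall above, the TO_OBJECT conversion now runs inside an internal frame so the argument count survives the builtin call: a0 is smi-tagged and pushed before the call, then popped and shifted back afterwards. A minimal sketch of that round trip, assuming V8's 1-bit smi tag (kSmiTagSize == 1):

#include <cassert>
#include <cstdint>

int main() {
  const int kSmiTagSize = 1;                 // assumed: smis carry a 1-bit tag
  int32_t a0 = 5;                            // argument count before the builtin call

  int32_t saved = a0 << kSmiTagSize;         // __ sll(a0, a0, kSmiTagSize); __ push(a0);
  // ... __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION) clobbers a0 here ...
  int32_t restored = saved >> kSmiTagSize;   // __ pop(a0); __ sra(a0, a0, kSmiTagSize);

  assert(restored == 5);
  return 0;
}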
1345 1358
1346 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { 1359 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1347 const int kIndexOffset = -5 * kPointerSize; 1360 const int kIndexOffset = -5 * kPointerSize;
1348 const int kLimitOffset = -4 * kPointerSize; 1361 const int kLimitOffset = -4 * kPointerSize;
1349 const int kArgsOffset = 2 * kPointerSize; 1362 const int kArgsOffset = 2 * kPointerSize;
1350 const int kRecvOffset = 3 * kPointerSize; 1363 const int kRecvOffset = 3 * kPointerSize;
1351 const int kFunctionOffset = 4 * kPointerSize; 1364 const int kFunctionOffset = 4 * kPointerSize;
1352 1365
1353 __ EnterInternalFrame(); 1366 {
1367 FrameScope scope(masm, StackFrame::INTERNAL);
1354 1368
1355 __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function. 1369 __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function.
1356 __ push(a0); 1370 __ push(a0);
1357 __ lw(a0, MemOperand(fp, kArgsOffset)); // Get the args array. 1371 __ lw(a0, MemOperand(fp, kArgsOffset)); // Get the args array.
1358 __ push(a0); 1372 __ push(a0);
1359 // Returns (in v0) number of arguments to copy to stack as Smi. 1373 // Returns (in v0) number of arguments to copy to stack as Smi.
1360 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); 1374 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
1361 1375
1362 // Check the stack for overflow. We are not trying to catch 1376 // Check the stack for overflow. We are not trying to catch
1363 // interruptions (e.g. debug break and preemption) here, so the "real stack 1377 // interruptions (e.g. debug break and preemption) here, so the "real stack
1364 // limit" is checked. 1378 // limit" is checked.
1365 Label okay; 1379 Label okay;
1366 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); 1380 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
1367 // Make a2 the space we have left. The stack might already be overflowed 1381 // Make a2 the space we have left. The stack might already be overflowed
1368 // here which will cause a2 to become negative. 1382 // here which will cause a2 to become negative.
1369 __ subu(a2, sp, a2); 1383 __ subu(a2, sp, a2);
1370 // Check if the arguments will overflow the stack. 1384 // Check if the arguments will overflow the stack.
1371 __ sll(t0, v0, kPointerSizeLog2 - kSmiTagSize); 1385 __ sll(t0, v0, kPointerSizeLog2 - kSmiTagSize);
1372 __ Branch(&okay, gt, a2, Operand(t0)); // Signed comparison. 1386 __ Branch(&okay, gt, a2, Operand(t0)); // Signed comparison.
1373 1387
1374 // Out of stack space. 1388 // Out of stack space.
1375 __ lw(a1, MemOperand(fp, kFunctionOffset)); 1389 __ lw(a1, MemOperand(fp, kFunctionOffset));
1376 __ push(a1); 1390 __ push(a1);
1377 __ push(v0); 1391 __ push(v0);
1378 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); 1392 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
1379 // End of stack check. 1393 // End of stack check.
1380 1394
1381 // Push current limit and index. 1395 // Push current limit and index.
1382 __ bind(&okay); 1396 __ bind(&okay);
1383 __ push(v0); // Limit. 1397 __ push(v0); // Limit.
1384 __ mov(a1, zero_reg); // Initial index. 1398 __ mov(a1, zero_reg); // Initial index.
1385 __ push(a1); 1399 __ push(a1);
1386 1400
1387 // Change context eagerly to get the right global object if necessary. 1401 // Change context eagerly to get the right global object if necessary.
1388 __ lw(a0, MemOperand(fp, kFunctionOffset)); 1402 __ lw(a0, MemOperand(fp, kFunctionOffset));
1389 __ lw(cp, FieldMemOperand(a0, JSFunction::kContextOffset)); 1403 __ lw(cp, FieldMemOperand(a0, JSFunction::kContextOffset));
1390 // Load the shared function info while the function is still in a0. 1404 // Load the shared function info while the function is still in a0.
1391 __ lw(a1, FieldMemOperand(a0, JSFunction::kSharedFunctionInfoOffset)); 1405 __ lw(a1, FieldMemOperand(a0, JSFunction::kSharedFunctionInfoOffset));
1392 1406
1393 // Compute the receiver. 1407 // Compute the receiver.
1394 Label call_to_object, use_global_receiver, push_receiver; 1408 Label call_to_object, use_global_receiver, push_receiver;
1395 __ lw(a0, MemOperand(fp, kRecvOffset)); 1409 __ lw(a0, MemOperand(fp, kRecvOffset));
1396 1410
1397 // Do not transform the receiver for strict mode functions. 1411 // Do not transform the receiver for strict mode functions.
1398 __ lw(a2, FieldMemOperand(a1, SharedFunctionInfo::kCompilerHintsOffset)); 1412 __ lw(a2, FieldMemOperand(a1, SharedFunctionInfo::kCompilerHintsOffset));
1399 __ And(t0, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + 1413 __ And(t0, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1400 kSmiTagSize))); 1414 kSmiTagSize)));
1401 __ Branch(&push_receiver, ne, t0, Operand(zero_reg)); 1415 __ Branch(&push_receiver, ne, t0, Operand(zero_reg));
1402 1416
1403 // Do not transform the receiver for native (Compilerhints already in a2). 1417 // Do not transform the receiver for native (Compilerhints already in a2).
1404 __ And(t0, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); 1418 __ And(t0, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1405 __ Branch(&push_receiver, ne, t0, Operand(zero_reg)); 1419 __ Branch(&push_receiver, ne, t0, Operand(zero_reg));
1406 1420
1407 // Compute the receiver in non-strict mode. 1421 // Compute the receiver in non-strict mode.
1408 __ And(t0, a0, Operand(kSmiTagMask)); 1422 __ And(t0, a0, Operand(kSmiTagMask));
1409 __ Branch(&call_to_object, eq, t0, Operand(zero_reg)); 1423 __ Branch(&call_to_object, eq, t0, Operand(zero_reg));
1410 __ LoadRoot(a1, Heap::kNullValueRootIndex); 1424 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1411 __ Branch(&use_global_receiver, eq, a0, Operand(a1)); 1425 __ Branch(&use_global_receiver, eq, a0, Operand(a1));
1412 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); 1426 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
1413 __ Branch(&use_global_receiver, eq, a0, Operand(a2)); 1427 __ Branch(&use_global_receiver, eq, a0, Operand(a2));
1414 1428
1415 // Check if the receiver is already a JavaScript object. 1429 // Check if the receiver is already a JavaScript object.
1416 // a0: receiver 1430 // a0: receiver
1417 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); 1431 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1418 __ GetObjectType(a0, a1, a1); 1432 __ GetObjectType(a0, a1, a1);
1419 __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE)); 1433 __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1420 1434
1421 // Convert the receiver to a regular object. 1435 // Convert the receiver to a regular object.
1422 // a0: receiver 1436 // a0: receiver
1423 __ bind(&call_to_object); 1437 __ bind(&call_to_object);
1424 __ push(a0); 1438 __ push(a0);
1425 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1439 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1426 __ mov(a0, v0); // Put object in a0 to match other paths to push_receiver. 1440 __ mov(a0, v0); // Put object in a0 to match other paths to push_receiver.
1427 __ Branch(&push_receiver); 1441 __ Branch(&push_receiver);
1428 1442
1429 // Use the current global receiver object as the receiver. 1443 // Use the current global receiver object as the receiver.
1430 __ bind(&use_global_receiver); 1444 __ bind(&use_global_receiver);
1431 const int kGlobalOffset = 1445 const int kGlobalOffset =
1432 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; 1446 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
1433 __ lw(a0, FieldMemOperand(cp, kGlobalOffset)); 1447 __ lw(a0, FieldMemOperand(cp, kGlobalOffset));
1434 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset)); 1448 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset));
1435 __ lw(a0, FieldMemOperand(a0, kGlobalOffset)); 1449 __ lw(a0, FieldMemOperand(a0, kGlobalOffset));
1436 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset)); 1450 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
1437 1451
1438 // Push the receiver. 1452 // Push the receiver.
1439 // a0: receiver 1453 // a0: receiver
1440 __ bind(&push_receiver); 1454 __ bind(&push_receiver);
1441 __ push(a0); 1455 __ push(a0);
1442 1456
1443 // Copy all arguments from the array to the stack. 1457 // Copy all arguments from the array to the stack.
1444 Label entry, loop; 1458 Label entry, loop;
1445 __ lw(a0, MemOperand(fp, kIndexOffset)); 1459 __ lw(a0, MemOperand(fp, kIndexOffset));
1446 __ Branch(&entry); 1460 __ Branch(&entry);
1447 1461
1448 // Load the current argument from the arguments array and push it to the 1462 // Load the current argument from the arguments array and push it to the
1449 // stack. 1463 // stack.
1450 // a0: current argument index 1464 // a0: current argument index
1451 __ bind(&loop); 1465 __ bind(&loop);
1452 __ lw(a1, MemOperand(fp, kArgsOffset)); 1466 __ lw(a1, MemOperand(fp, kArgsOffset));
1453 __ push(a1); 1467 __ push(a1);
1454 __ push(a0); 1468 __ push(a0);
1455 1469
1456 // Call the runtime to access the property in the arguments array. 1470 // Call the runtime to access the property in the arguments array.
1457 __ CallRuntime(Runtime::kGetProperty, 2); 1471 __ CallRuntime(Runtime::kGetProperty, 2);
1458 __ push(v0); 1472 __ push(v0);
1459 1473
1460 // Use inline caching to access the arguments. 1474 // Use inline caching to access the arguments.
1461 __ lw(a0, MemOperand(fp, kIndexOffset)); 1475 __ lw(a0, MemOperand(fp, kIndexOffset));
1462 __ Addu(a0, a0, Operand(1 << kSmiTagSize)); 1476 __ Addu(a0, a0, Operand(1 << kSmiTagSize));
1463 __ sw(a0, MemOperand(fp, kIndexOffset)); 1477 __ sw(a0, MemOperand(fp, kIndexOffset));
1464 1478
1465 // Test if the copy loop has finished copying all the elements from the 1479 // Test if the copy loop has finished copying all the elements from the
1466 // arguments object. 1480 // arguments object.
1467 __ bind(&entry); 1481 __ bind(&entry);
1468 __ lw(a1, MemOperand(fp, kLimitOffset)); 1482 __ lw(a1, MemOperand(fp, kLimitOffset));
1469 __ Branch(&loop, ne, a0, Operand(a1)); 1483 __ Branch(&loop, ne, a0, Operand(a1));
1470 // Invoke the function. 1484 // Invoke the function.
1471 ParameterCount actual(a0); 1485 ParameterCount actual(a0);
1472 __ sra(a0, a0, kSmiTagSize); 1486 __ sra(a0, a0, kSmiTagSize);
1473 __ lw(a1, MemOperand(fp, kFunctionOffset)); 1487 __ lw(a1, MemOperand(fp, kFunctionOffset));
1474 __ InvokeFunction(a1, actual, CALL_FUNCTION, 1488 __ InvokeFunction(a1, actual, CALL_FUNCTION,
1475 NullCallWrapper(), CALL_AS_METHOD); 1489 NullCallWrapper(), CALL_AS_METHOD);
1476 1490
1477 // Tear down the internal frame and remove function, receiver and args. 1491 // Tear down the internal frame and remove function, receiver and args.
1478 __ LeaveInternalFrame(); 1492 }
1479 __ Addu(sp, sp, Operand(3 * kPointerSize)); 1493 __ Addu(sp, sp, Operand(3 * kPointerSize));
1480 __ Ret(); 1494 __ Ret();
1481 } 1495 }
1482 1496
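The stack check at the top of Generate_FunctionApply compares the space left below sp against the space the copied arguments will need, using a signed comparison so that a stack already past the real limit (negative space) also takes the overflow path. A sketch of that check, assuming 32-bit pointers (kPointerSizeLog2 == 2) and a 1-bit smi tag (kSmiTagSize == 1):

#include <cstdint>

bool EnoughStackSpace(int32_t sp, int32_t real_stack_limit, int32_t smi_argc) {
  int32_t space = sp - real_stack_limit;  // __ subu(a2, sp, a2);
  int32_t needed = smi_argc << (2 - 1);   // __ sll(t0, v0, kPointerSizeLog2 - kSmiTagSize);
  return space > needed;                  // __ Branch(&okay, gt, a2, Operand(t0));  signed compare
}

int main() {
  return EnoughStackSpace(0x7000, 0x1000, 8 << 1) ? 0 : 1;  // plenty of room -> ok
}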
1483 1497
1484 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { 1498 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1485 __ sll(a0, a0, kSmiTagSize); 1499 __ sll(a0, a0, kSmiTagSize);
1486 __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 1500 __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1487 __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit()); 1501 __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
1488 __ Addu(fp, sp, Operand(3 * kPointerSize)); 1502 __ Addu(fp, sp, Operand(3 * kPointerSize));
(...skipping 129 matching lines...)
1618 __ bind(&dont_adapt_arguments); 1632 __ bind(&dont_adapt_arguments);
1619 __ Jump(a3); 1633 __ Jump(a3);
1620 } 1634 }
1621 1635
1622 1636
1623 #undef __ 1637 #undef __
1624 1638
1625 } } // namespace v8::internal 1639 } } // namespace v8::internal
1626 1640
1627 #endif // V8_TARGET_ARCH_MIPS 1641 #endif // V8_TARGET_ARCH_MIPS