Chromium Code Reviews

Side by Side Diff: src/arm/builtins-arm.cc

Issue 7891042: Add asserts to ensure that we: (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 3 months ago
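
Summary of the change pattern in this file: the explicit EnterInternalFrame()/LeaveInternalFrame() (and EnterConstructFrame()/LeaveConstructFrame()) pairs are replaced by a block-scoped FrameScope, so frame entry and exit code is generated by the scope's constructor and destructor. A minimal sketch of the before/after shape, using only names that appear in this diff (FrameScope, StackFrame::INTERNAL, and the file's __ MacroAssembler shorthand); Generate_Example is a hypothetical helper, not part of the patch:

    // Before this patch:
    //   __ EnterInternalFrame();
    //   __ push(r0);
    //   __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
    //   __ LeaveInternalFrame();
    //
    // After this patch: the FrameScope constructor emits the frame entry code
    // and its destructor emits the frame exit code at the closing brace.
    static void Generate_Example(MacroAssembler* masm) {  // hypothetical helper
      {
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ push(r0);
        __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
      }
      __ Ret();
    }
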
OLD | NEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 564 matching lines...)
575 __ tst(r3, Operand(kIsNotStringMask)); 575 __ tst(r3, Operand(kIsNotStringMask));
576 __ b(ne, &convert_argument); 576 __ b(ne, &convert_argument);
577 __ mov(argument, r0); 577 __ mov(argument, r0);
578 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); 578 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
579 __ b(&argument_is_string); 579 __ b(&argument_is_string);
580 580
581 // Invoke the conversion builtin and put the result into r2. 581 // Invoke the conversion builtin and put the result into r2.
582 __ bind(&convert_argument); 582 __ bind(&convert_argument);
583 __ push(function); // Preserve the function. 583 __ push(function); // Preserve the function.
584 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); 584 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
585 __ EnterInternalFrame(); 585 {
586 __ push(r0); 586 FrameScope scope(masm, StackFrame::INTERNAL);
587 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); 587 __ push(r0);
588 __ LeaveInternalFrame(); 588 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
589 }
589 __ pop(function); 590 __ pop(function);
590 __ mov(argument, r0); 591 __ mov(argument, r0);
591 __ b(&argument_is_string); 592 __ b(&argument_is_string);
592 593
593 // Load the empty string into r2, remove the receiver from the 594 // Load the empty string into r2, remove the receiver from the
594 // stack, and jump back to the case where the argument is a string. 595 // stack, and jump back to the case where the argument is a string.
595 __ bind(&no_arguments); 596 __ bind(&no_arguments);
596 __ LoadRoot(argument, Heap::kEmptyStringRootIndex); 597 __ LoadRoot(argument, Heap::kEmptyStringRootIndex);
597 __ Drop(1); 598 __ Drop(1);
598 __ b(&argument_is_string); 599 __ b(&argument_is_string);
599 600
600 // At this point the argument is already a string. Call runtime to 601 // At this point the argument is already a string. Call runtime to
601 // create a string wrapper. 602 // create a string wrapper.
602 __ bind(&gc_required); 603 __ bind(&gc_required);
603 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4); 604 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
604 __ EnterInternalFrame(); 605 {
605 __ push(argument); 606 FrameScope scope(masm, StackFrame::INTERNAL);
606 __ CallRuntime(Runtime::kNewStringWrapper, 1); 607 __ push(argument);
607 __ LeaveInternalFrame(); 608 __ CallRuntime(Runtime::kNewStringWrapper, 1);
609 }
608 __ Ret(); 610 __ Ret();
609 } 611 }
610 612
611 613
612 void Builtins::Generate_JSConstructCall(MacroAssembler* masm) { 614 void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
613 // ----------- S t a t e ------------- 615 // ----------- S t a t e -------------
614 // -- r0 : number of arguments 616 // -- r0 : number of arguments
615 // -- r1 : constructor function 617 // -- r1 : constructor function
616 // -- lr : return address 618 // -- lr : return address
617 // -- sp[...]: constructor arguments 619 // -- sp[...]: constructor arguments
(...skipping 25 matching lines...)
643 645
644 static void Generate_JSConstructStubHelper(MacroAssembler* masm, 646 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
645 bool is_api_function, 647 bool is_api_function,
646 bool count_constructions) { 648 bool count_constructions) {
647 // Should never count constructions for api objects. 649 // Should never count constructions for api objects.
648 ASSERT(!is_api_function || !count_constructions); 650 ASSERT(!is_api_function || !count_constructions);
649 651
650 Isolate* isolate = masm->isolate(); 652 Isolate* isolate = masm->isolate();
651 653
652 // Enter a construct frame. 654 // Enter a construct frame.
653 __ EnterConstructFrame(); 655 {
654 656 FrameScope scope(masm, StackFrame::CONSTRUCT);
655 // Preserve the two incoming parameters on the stack. 657
656 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); 658 // Preserve the two incoming parameters on the stack.
657 __ push(r0); // Smi-tagged arguments count. 659 __ mov(r0, Operand(r0, LSL, kSmiTagSize));
658 __ push(r1); // Constructor function. 660 __ push(r0); // Smi-tagged arguments count.
659 661 __ push(r1); // Constructor function.
660 // Try to allocate the object without transitioning into C code. If any of the 662
661 // preconditions is not met, the code bails out to the runtime call. 663 // Try to allocate the object without transitioning into C code. If any of
662 Label rt_call, allocated; 664 // the preconditions is not met, the code bails out to the runtime call.
663 if (FLAG_inline_new) { 665 Label rt_call, allocated;
664 Label undo_allocation; 666 if (FLAG_inline_new) {
667 Label undo_allocation;
665 #ifdef ENABLE_DEBUGGER_SUPPORT 668 #ifdef ENABLE_DEBUGGER_SUPPORT
666 ExternalReference debug_step_in_fp = 669 ExternalReference debug_step_in_fp =
667 ExternalReference::debug_step_in_fp_address(isolate); 670 ExternalReference::debug_step_in_fp_address(isolate);
668 __ mov(r2, Operand(debug_step_in_fp)); 671 __ mov(r2, Operand(debug_step_in_fp));
669 __ ldr(r2, MemOperand(r2)); 672 __ ldr(r2, MemOperand(r2));
670 __ tst(r2, r2); 673 __ tst(r2, r2);
671 __ b(ne, &rt_call); 674 __ b(ne, &rt_call);
672 #endif 675 #endif
673 676
674 // Load the initial map and verify that it is in fact a map. 677 // Load the initial map and verify that it is in fact a map.
678 // r1: constructor function
679 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
680 __ JumpIfSmi(r2, &rt_call);
681 __ CompareObjectType(r2, r3, r4, MAP_TYPE);
682 __ b(ne, &rt_call);
683
684 // Check that the constructor is not constructing a JSFunction (see
685 // comments in Runtime_NewObject in runtime.cc). In which case the
686 // initial map's instance type would be JS_FUNCTION_TYPE.
687 // r1: constructor function
688 // r2: initial map
689 __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
690 __ b(eq, &rt_call);
691
692 if (count_constructions) {
693 Label allocate;
694 // Decrease generous allocation count.
695 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
696 MemOperand constructor_count =
697 FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
698 __ ldrb(r4, constructor_count);
699 __ sub(r4, r4, Operand(1), SetCC);
700 __ strb(r4, constructor_count);
701 __ b(ne, &allocate);
702
703 __ Push(r1, r2);
704
705 __ push(r1); // constructor
706 // The call will replace the stub, so the countdown is only done once.
707 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
708
709 __ pop(r2);
710 __ pop(r1);
711
712 __ bind(&allocate);
713 }
714
715 // Now allocate the JSObject on the heap.
716 // r1: constructor function
717 // r2: initial map
718 __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
719 __ AllocateInNewSpace(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);
720
721 // Allocated the JSObject, now initialize the fields. Map is set to
722 // initial map and properties and elements are set to empty fixed array.
723 // r1: constructor function
724 // r2: initial map
725 // r3: object size
726 // r4: JSObject (not tagged)
727 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
728 __ mov(r5, r4);
729 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
730 __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
731 ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
732 __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
733 ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
734 __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
735
736 // Fill all the in-object properties with the appropriate filler.
737 // r1: constructor function
738 // r2: initial map
739 // r3: object size (in words)
740 // r4: JSObject (not tagged)
741 // r5: First in-object property of JSObject (not tagged)
742 __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
743 ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
744 { Label loop, entry;
745 if (count_constructions) {
746 // To allow for truncation.
747 __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex);
748 } else {
749 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
750 }
751 __ b(&entry);
752 __ bind(&loop);
753 __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
754 __ bind(&entry);
755 __ cmp(r5, r6);
756 __ b(lt, &loop);
757 }
758
759 // Add the object tag to make the JSObject real, so that we can continue
760 // and jump into the continuation code at any time from now on. Any
761 // failures need to undo the allocation, so that the heap is in a
762 // consistent state and verifiable.
763 __ add(r4, r4, Operand(kHeapObjectTag));
764
765 // Check if a non-empty properties array is needed. Continue with
766 // allocated object if not fall through to runtime call if it is.
767 // r1: constructor function
768 // r4: JSObject
769 // r5: start of next object (not tagged)
770 __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
771 // The field instance sizes contains both pre-allocated property fields
772 // and in-object properties.
773 __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
774 __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * 8, 8);
775 __ add(r3, r3, Operand(r6));
776 __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * 8, 8);
777 __ sub(r3, r3, Operand(r6), SetCC);
778
779 // Done if no extra properties are to be allocated.
780 __ b(eq, &allocated);
781 __ Assert(pl, "Property allocation count failed.");
782
783 // Scale the number of elements by pointer size and add the header for
784 // FixedArrays to the start of the next object calculation from above.
785 // r1: constructor
786 // r3: number of elements in properties array
787 // r4: JSObject
788 // r5: start of next object
789 __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
790 __ AllocateInNewSpace(
791 r0,
792 r5,
793 r6,
794 r2,
795 &undo_allocation,
796 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
797
798 // Initialize the FixedArray.
799 // r1: constructor
800 // r3: number of elements in properties array
801 // r4: JSObject
802 // r5: FixedArray (not tagged)
803 __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
804 __ mov(r2, r5);
805 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
806 __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
807 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
808 __ mov(r0, Operand(r3, LSL, kSmiTagSize));
809 __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
810
811 // Initialize the fields to undefined.
812 // r1: constructor function
813 // r2: First element of FixedArray (not tagged)
814 // r3: number of elements in properties array
815 // r4: JSObject
816 // r5: FixedArray (not tagged)
817 __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
818 ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
819 { Label loop, entry;
820 if (count_constructions) {
821 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
822 } else if (FLAG_debug_code) {
823 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
824 __ cmp(r7, r8);
825 __ Assert(eq, "Undefined value not loaded.");
826 }
827 __ b(&entry);
828 __ bind(&loop);
829 __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
830 __ bind(&entry);
831 __ cmp(r2, r6);
832 __ b(lt, &loop);
833 }
834
835 // Store the initialized FixedArray into the properties field of
836 // the JSObject
837 // r1: constructor function
838 // r4: JSObject
839 // r5: FixedArray (not tagged)
840 __ add(r5, r5, Operand(kHeapObjectTag)); // Add the heap tag.
841 __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));
842
843 // Continue with JSObject being successfully allocated
844 // r1: constructor function
845 // r4: JSObject
846 __ jmp(&allocated);
847
848 // Undo the setting of the new top so that the heap is verifiable. For
849 // example, the map's unused properties potentially do not match the
850 // allocated objects unused properties.
851 // r4: JSObject (previous new top)
852 __ bind(&undo_allocation);
853 __ UndoAllocationInNewSpace(r4, r5);
854 }
855
856 // Allocate the new receiver object using the runtime call.
675 // r1: constructor function 857 // r1: constructor function
676 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); 858 __ bind(&rt_call);
677 __ JumpIfSmi(r2, &rt_call); 859 __ push(r1); // argument for Runtime_NewObject
678 __ CompareObjectType(r2, r3, r4, MAP_TYPE); 860 __ CallRuntime(Runtime::kNewObject, 1);
679 __ b(ne, &rt_call); 861 __ mov(r4, r0);
680 862
681 // Check that the constructor is not constructing a JSFunction (see comments 863 // Receiver for constructor call allocated.
682 // in Runtime_NewObject in runtime.cc). In which case the initial map's 864 // r4: JSObject
683 // instance type would be JS_FUNCTION_TYPE. 865 __ bind(&allocated);
866 __ push(r4);
867
868 // Push the function and the allocated receiver from the stack.
869 // sp[0]: receiver (newly allocated object)
870 // sp[1]: constructor function
871 // sp[2]: number of arguments (smi-tagged)
872 __ ldr(r1, MemOperand(sp, kPointerSize));
873 __ push(r1); // Constructor function.
874 __ push(r4); // Receiver.
875
876 // Reload the number of arguments from the stack.
684 // r1: constructor function 877 // r1: constructor function
685 // r2: initial map 878 // sp[0]: receiver
686 __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE); 879 // sp[1]: constructor function
687 __ b(eq, &rt_call); 880 // sp[2]: receiver
688 881 // sp[3]: constructor function
689 if (count_constructions) { 882 // sp[4]: number of arguments (smi-tagged)
690 Label allocate; 883 __ ldr(r3, MemOperand(sp, 4 * kPointerSize));
691 // Decrease generous allocation count. 884
692 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); 885 // Setup pointer to last argument.
693 MemOperand constructor_count = 886 __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
694 FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset); 887
695 __ ldrb(r4, constructor_count); 888 // Setup number of arguments for function call below
696 __ sub(r4, r4, Operand(1), SetCC); 889 __ mov(r0, Operand(r3, LSR, kSmiTagSize));
697 __ strb(r4, constructor_count); 890
698 __ b(ne, &allocate); 891 // Copy arguments and receiver to the expression stack.
699 892 // r0: number of arguments
700 __ Push(r1, r2); 893 // r2: address of last argument (caller sp)
701 894 // r1: constructor function
702 __ push(r1); // constructor 895 // r3: number of arguments (smi-tagged)
703 // The call will replace the stub, so the countdown is only done once. 896 // sp[0]: receiver
704 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); 897 // sp[1]: constructor function
705 898 // sp[2]: receiver
706 __ pop(r2); 899 // sp[3]: constructor function
707 __ pop(r1); 900 // sp[4]: number of arguments (smi-tagged)
708 901 Label loop, entry;
709 __ bind(&allocate); 902 __ b(&entry);
903 __ bind(&loop);
904 __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
905 __ push(ip);
906 __ bind(&entry);
907 __ sub(r3, r3, Operand(2), SetCC);
908 __ b(ge, &loop);
909
910 // Call the function.
911 // r0: number of arguments
912 // r1: constructor function
913 if (is_api_function) {
914 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
915 Handle<Code> code =
916 masm->isolate()->builtins()->HandleApiCallConstruct();
917 ParameterCount expected(0);
918 __ InvokeCode(code, expected, expected,
919 RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
920 } else {
921 ParameterCount actual(r0);
922 __ InvokeFunction(r1, actual, CALL_FUNCTION,
923 NullCallWrapper(), CALL_AS_METHOD);
710 } 924 }
711 925
712 // Now allocate the JSObject on the heap. 926 // Pop the function from the stack.
713 // r1: constructor function 927 // sp[0]: constructor function
714 // r2: initial map 928 // sp[2]: receiver
715 __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset)); 929 // sp[3]: constructor function
716 __ AllocateInNewSpace(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS); 930 // sp[4]: number of arguments (smi-tagged)
717 931 __ pop();
718 // Allocated the JSObject, now initialize the fields. Map is set to initial 932
719 // map and properties and elements are set to empty fixed array. 933 // Restore context from the frame.
720 // r1: constructor function 934 // r0: result
721 // r2: initial map 935 // sp[0]: receiver
722 // r3: object size 936 // sp[1]: constructor function
723 // r4: JSObject (not tagged) 937 // sp[2]: number of arguments (smi-tagged)
724 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex); 938 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
725 __ mov(r5, r4); 939
726 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset); 940 // If the result is an object (in the ECMA sense), we should get rid
727 __ str(r2, MemOperand(r5, kPointerSize, PostIndex)); 941 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
728 ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset); 942 // on page 74.
729 __ str(r6, MemOperand(r5, kPointerSize, PostIndex)); 943 Label use_receiver, exit;
730 ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset); 944
731 __ str(r6, MemOperand(r5, kPointerSize, PostIndex)); 945 // If the result is a smi, it is *not* an object in the ECMA sense.
732 946 // r0: result
733 // Fill all the in-object properties with the appropriate filler. 947 // sp[0]: receiver (newly allocated object)
734 // r1: constructor function 948 // sp[1]: constructor function
735 // r2: initial map 949 // sp[2]: number of arguments (smi-tagged)
736 // r3: object size (in words) 950 __ JumpIfSmi(r0, &use_receiver);
737 // r4: JSObject (not tagged) 951
738 // r5: First in-object property of JSObject (not tagged) 952 // If the type of the result (stored in its map) is less than
739 __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object. 953 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
740 ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize); 954 __ CompareObjectType(r0, r3, r3, FIRST_SPEC_OBJECT_TYPE);
741 { Label loop, entry; 955 __ b(ge, &exit);
742 if (count_constructions) { 956
743 // To allow for truncation. 957 // Throw away the result of the constructor invocation and use the
744 __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex); 958 // on-stack receiver as the result.
745 } else { 959 __ bind(&use_receiver);
746 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex); 960 __ ldr(r0, MemOperand(sp));
747 } 961
748 __ b(&entry); 962 // Remove receiver from the stack, remove caller arguments, and
749 __ bind(&loop); 963 // return.
750 __ str(r7, MemOperand(r5, kPointerSize, PostIndex)); 964 __ bind(&exit);
751 __ bind(&entry); 965 // r0: result
752 __ cmp(r5, r6); 966 // sp[0]: receiver (newly allocated object)
753 __ b(lt, &loop); 967 // sp[1]: constructor function
754 } 968 // sp[2]: number of arguments (smi-tagged)
755 969 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
756 // Add the object tag to make the JSObject real, so that we can continue and 970
757 // jump into the continuation code at any time from now on. Any failures 971 // Leave construct frame.
758 // need to undo the allocation, so that the heap is in a consistent state
759 // and verifiable.
760 __ add(r4, r4, Operand(kHeapObjectTag));
761
762 // Check if a non-empty properties array is needed. Continue with allocated
763 // object if not fall through to runtime call if it is.
764 // r1: constructor function
765 // r4: JSObject
766 // r5: start of next object (not tagged)
767 __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
768 // The field instance sizes contains both pre-allocated property fields and
769 // in-object properties.
770 __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
771 __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * 8, 8);
772 __ add(r3, r3, Operand(r6));
773 __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * 8, 8);
774 __ sub(r3, r3, Operand(r6), SetCC);
775
776 // Done if no extra properties are to be allocated.
777 __ b(eq, &allocated);
778 __ Assert(pl, "Property allocation count failed.");
779
780 // Scale the number of elements by pointer size and add the header for
781 // FixedArrays to the start of the next object calculation from above.
782 // r1: constructor
783 // r3: number of elements in properties array
784 // r4: JSObject
785 // r5: start of next object
786 __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
787 __ AllocateInNewSpace(
788 r0,
789 r5,
790 r6,
791 r2,
792 &undo_allocation,
793 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
794
795 // Initialize the FixedArray.
796 // r1: constructor
797 // r3: number of elements in properties array
798 // r4: JSObject
799 // r5: FixedArray (not tagged)
800 __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
801 __ mov(r2, r5);
802 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
803 __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
804 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
805 __ mov(r0, Operand(r3, LSL, kSmiTagSize));
806 __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
807
808 // Initialize the fields to undefined.
809 // r1: constructor function
810 // r2: First element of FixedArray (not tagged)
811 // r3: number of elements in properties array
812 // r4: JSObject
813 // r5: FixedArray (not tagged)
814 __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
815 ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
816 { Label loop, entry;
817 if (count_constructions) {
818 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
819 } else if (FLAG_debug_code) {
820 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
821 __ cmp(r7, r8);
822 __ Assert(eq, "Undefined value not loaded.");
823 }
824 __ b(&entry);
825 __ bind(&loop);
826 __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
827 __ bind(&entry);
828 __ cmp(r2, r6);
829 __ b(lt, &loop);
830 }
831
832 // Store the initialized FixedArray into the properties field of
833 // the JSObject
834 // r1: constructor function
835 // r4: JSObject
836 // r5: FixedArray (not tagged)
837 __ add(r5, r5, Operand(kHeapObjectTag)); // Add the heap tag.
838 __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));
839
840 // Continue with JSObject being successfully allocated
841 // r1: constructor function
842 // r4: JSObject
843 __ jmp(&allocated);
844
845 // Undo the setting of the new top so that the heap is verifiable. For
846 // example, the map's unused properties potentially do not match the
847 // allocated objects unused properties.
848 // r4: JSObject (previous new top)
849 __ bind(&undo_allocation);
850 __ UndoAllocationInNewSpace(r4, r5);
851 } 972 }
852 973
853 // Allocate the new receiver object using the runtime call.
854 // r1: constructor function
855 __ bind(&rt_call);
856 __ push(r1); // argument for Runtime_NewObject
857 __ CallRuntime(Runtime::kNewObject, 1);
858 __ mov(r4, r0);
859
860 // Receiver for constructor call allocated.
861 // r4: JSObject
862 __ bind(&allocated);
863 __ push(r4);
864
865 // Push the function and the allocated receiver from the stack.
866 // sp[0]: receiver (newly allocated object)
867 // sp[1]: constructor function
868 // sp[2]: number of arguments (smi-tagged)
869 __ ldr(r1, MemOperand(sp, kPointerSize));
870 __ push(r1); // Constructor function.
871 __ push(r4); // Receiver.
872
873 // Reload the number of arguments from the stack.
874 // r1: constructor function
875 // sp[0]: receiver
876 // sp[1]: constructor function
877 // sp[2]: receiver
878 // sp[3]: constructor function
879 // sp[4]: number of arguments (smi-tagged)
880 __ ldr(r3, MemOperand(sp, 4 * kPointerSize));
881
882 // Setup pointer to last argument.
883 __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
884
885 // Setup number of arguments for function call below
886 __ mov(r0, Operand(r3, LSR, kSmiTagSize));
887
888 // Copy arguments and receiver to the expression stack.
889 // r0: number of arguments
890 // r2: address of last argument (caller sp)
891 // r1: constructor function
892 // r3: number of arguments (smi-tagged)
893 // sp[0]: receiver
894 // sp[1]: constructor function
895 // sp[2]: receiver
896 // sp[3]: constructor function
897 // sp[4]: number of arguments (smi-tagged)
898 Label loop, entry;
899 __ b(&entry);
900 __ bind(&loop);
901 __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
902 __ push(ip);
903 __ bind(&entry);
904 __ sub(r3, r3, Operand(2), SetCC);
905 __ b(ge, &loop);
906
907 // Call the function.
908 // r0: number of arguments
909 // r1: constructor function
910 if (is_api_function) {
911 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
912 Handle<Code> code =
913 masm->isolate()->builtins()->HandleApiCallConstruct();
914 ParameterCount expected(0);
915 __ InvokeCode(code, expected, expected,
916 RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
917 } else {
918 ParameterCount actual(r0);
919 __ InvokeFunction(r1, actual, CALL_FUNCTION,
920 NullCallWrapper(), CALL_AS_METHOD);
921 }
922
923 // Pop the function from the stack.
924 // sp[0]: constructor function
925 // sp[2]: receiver
926 // sp[3]: constructor function
927 // sp[4]: number of arguments (smi-tagged)
928 __ pop();
929
930 // Restore context from the frame.
931 // r0: result
932 // sp[0]: receiver
933 // sp[1]: constructor function
934 // sp[2]: number of arguments (smi-tagged)
935 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
936
937 // If the result is an object (in the ECMA sense), we should get rid
938 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
939 // on page 74.
940 Label use_receiver, exit;
941
942 // If the result is a smi, it is *not* an object in the ECMA sense.
943 // r0: result
944 // sp[0]: receiver (newly allocated object)
945 // sp[1]: constructor function
946 // sp[2]: number of arguments (smi-tagged)
947 __ JumpIfSmi(r0, &use_receiver);
948
949 // If the type of the result (stored in its map) is less than
950 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
951 __ CompareObjectType(r0, r3, r3, FIRST_SPEC_OBJECT_TYPE);
952 __ b(ge, &exit);
953
954 // Throw away the result of the constructor invocation and use the
955 // on-stack receiver as the result.
956 __ bind(&use_receiver);
957 __ ldr(r0, MemOperand(sp));
958
959 // Remove receiver from the stack, remove caller arguments, and
960 // return.
961 __ bind(&exit);
962 // r0: result
963 // sp[0]: receiver (newly allocated object)
964 // sp[1]: constructor function
965 // sp[2]: number of arguments (smi-tagged)
966 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
967 __ LeaveConstructFrame();
968 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1)); 974 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
969 __ add(sp, sp, Operand(kPointerSize)); 975 __ add(sp, sp, Operand(kPointerSize));
970 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2); 976 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
971 __ Jump(lr); 977 __ Jump(lr);
972 } 978 }
973 979
974 980
975 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { 981 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
976 Generate_JSConstructStubHelper(masm, false, true); 982 Generate_JSConstructStubHelper(masm, false, true);
977 } 983 }
(...skipping 12 matching lines...)
990 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, 996 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
991 bool is_construct) { 997 bool is_construct) {
992 // Called from Generate_JS_Entry 998 // Called from Generate_JS_Entry
993 // r0: code entry 999 // r0: code entry
994 // r1: function 1000 // r1: function
995 // r2: receiver 1001 // r2: receiver
996 // r3: argc 1002 // r3: argc
997 // r4: argv 1003 // r4: argv
998 // r5-r7, cp may be clobbered 1004 // r5-r7, cp may be clobbered
999 1005
1000 // Clear the context before we push it when entering the JS frame. 1006 // Clear the context before we push it when entering the internal frame.
1001 __ mov(cp, Operand(0, RelocInfo::NONE)); 1007 __ mov(cp, Operand(0, RelocInfo::NONE));
1002 1008
1003 // Enter an internal frame. 1009 // Enter an internal frame.
1004 __ EnterInternalFrame(); 1010 {
1011 FrameScope scope(masm, StackFrame::INTERNAL);
1005 1012
1006 // Set up the context from the function argument. 1013 // Set up the context from the function argument.
1007 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); 1014 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1008 1015
1009 // Set up the roots register. 1016 // Set up the roots register.
1010 ExternalReference roots_address = 1017 ExternalReference roots_address =
1011 ExternalReference::roots_address(masm->isolate()); 1018 ExternalReference::roots_address(masm->isolate());
1012 __ mov(r10, Operand(roots_address)); 1019 __ mov(r10, Operand(roots_address));
1013 1020
1014 // Push the function and the receiver onto the stack. 1021 // Push the function and the receiver onto the stack.
1015 __ push(r1); 1022 __ push(r1);
1016 __ push(r2); 1023 __ push(r2);
1017 1024
1018 // Copy arguments to the stack in a loop. 1025 // Copy arguments to the stack in a loop.
1019 // r1: function 1026 // r1: function
1020 // r3: argc 1027 // r3: argc
1021 // r4: argv, i.e. points to first arg 1028 // r4: argv, i.e. points to first arg
1022 Label loop, entry; 1029 Label loop, entry;
1023 __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2)); 1030 __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
1024 // r2 points past last arg. 1031 // r2 points past last arg.
1025 __ b(&entry); 1032 __ b(&entry);
1026 __ bind(&loop); 1033 __ bind(&loop);
1027 __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex)); // read next parameter 1034 __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex)); // read next parameter
1028 __ ldr(r0, MemOperand(r0)); // dereference handle 1035 __ ldr(r0, MemOperand(r0)); // dereference handle
1029 __ push(r0); // push parameter 1036 __ push(r0); // push parameter
1030 __ bind(&entry); 1037 __ bind(&entry);
1031 __ cmp(r4, r2); 1038 __ cmp(r4, r2);
1032 __ b(ne, &loop); 1039 __ b(ne, &loop);
1033 1040
1034 // Initialize all JavaScript callee-saved registers, since they will be seen 1041 // Initialize all JavaScript callee-saved registers, since they will be seen
1035 // by the garbage collector as part of handlers. 1042 // by the garbage collector as part of handlers.
1036 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); 1043 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
1037 __ mov(r5, Operand(r4)); 1044 __ mov(r5, Operand(r4));
1038 __ mov(r6, Operand(r4)); 1045 __ mov(r6, Operand(r4));
1039 __ mov(r7, Operand(r4)); 1046 __ mov(r7, Operand(r4));
1040 if (kR9Available == 1) { 1047 if (kR9Available == 1) {
1041 __ mov(r9, Operand(r4)); 1048 __ mov(r9, Operand(r4));
1049 }
1050
1051 // Invoke the code and pass argc as r0.
1052 __ mov(r0, Operand(r3));
1053 if (is_construct) {
1054 __ Call(masm->isolate()->builtins()->JSConstructCall());
1055 } else {
1056 ParameterCount actual(r0);
1057 __ InvokeFunction(r1, actual, CALL_FUNCTION,
1058 NullCallWrapper(), CALL_AS_METHOD);
1059 }
1060 // Exit the JS frame and remove the parameters (except function), and
1061 // return.
1062 // Respect ABI stack constraint.
1042 } 1063 }
1043
1044 // Invoke the code and pass argc as r0.
1045 __ mov(r0, Operand(r3));
1046 if (is_construct) {
1047 __ Call(masm->isolate()->builtins()->JSConstructCall());
1048 } else {
1049 ParameterCount actual(r0);
1050 __ InvokeFunction(r1, actual, CALL_FUNCTION,
1051 NullCallWrapper(), CALL_AS_METHOD);
1052 }
1053
1054 // Exit the JS frame and remove the parameters (except function), and return.
1055 // Respect ABI stack constraint.
1056 __ LeaveInternalFrame();
1057 __ Jump(lr); 1064 __ Jump(lr);
1058 1065
1059 // r0: result 1066 // r0: result
1060 } 1067 }
1061 1068
1062 1069
1063 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { 1070 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
1064 Generate_JSEntryTrampolineHelper(masm, false); 1071 Generate_JSEntryTrampolineHelper(masm, false);
1065 } 1072 }
1066 1073
1067 1074
1068 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { 1075 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
1069 Generate_JSEntryTrampolineHelper(masm, true); 1076 Generate_JSEntryTrampolineHelper(masm, true);
1070 } 1077 }
1071 1078
1072 1079
1073 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { 1080 void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
1074 // Enter an internal frame. 1081 // Enter an internal frame.
1075 __ EnterInternalFrame(); 1082 {
1083 FrameScope scope(masm, StackFrame::INTERNAL);
1076 1084
1077 // Preserve the function. 1085 // Preserve the function.
1078 __ push(r1); 1086 __ push(r1);
1079 // Push call kind information. 1087 // Push call kind information.
1080 __ push(r5); 1088 __ push(r5);
1081 1089
1082 // Push the function on the stack as the argument to the runtime function. 1090 // Push the function on the stack as the argument to the runtime function.
1083 __ push(r1); 1091 __ push(r1);
1084 __ CallRuntime(Runtime::kLazyCompile, 1); 1092 __ CallRuntime(Runtime::kLazyCompile, 1);
1085 // Calculate the entry point. 1093 // Calculate the entry point.
1086 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); 1094 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
1087 1095
1088 // Restore call kind information. 1096 // Restore call kind information.
1089 __ pop(r5); 1097 __ pop(r5);
1090 // Restore saved function. 1098 // Restore saved function.
1091 __ pop(r1); 1099 __ pop(r1);
1092 1100
1093 // Tear down temporary frame. 1101 // Tear down internal frame.
1094 __ LeaveInternalFrame(); 1102 }
1095 1103
1096 // Do a tail-call of the compiled function. 1104 // Do a tail-call of the compiled function.
1097 __ Jump(r2); 1105 __ Jump(r2);
1098 } 1106 }
1099 1107
1100 1108
1101 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { 1109 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
1102 // Enter an internal frame. 1110 // Enter an internal frame.
1103 __ EnterInternalFrame(); 1111 {
1112 FrameScope scope(masm, StackFrame::INTERNAL);
1104 1113
1105 // Preserve the function. 1114 // Preserve the function.
1106 __ push(r1); 1115 __ push(r1);
1107 // Push call kind information. 1116 // Push call kind information.
1108 __ push(r5); 1117 __ push(r5);
1109 1118
1110 // Push the function on the stack as the argument to the runtime function. 1119 // Push the function on the stack as the argument to the runtime function.
1111 __ push(r1); 1120 __ push(r1);
1112 __ CallRuntime(Runtime::kLazyRecompile, 1); 1121 __ CallRuntime(Runtime::kLazyRecompile, 1);
1113 // Calculate the entry point. 1122 // Calculate the entry point.
1114 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); 1123 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
1115 1124
1116 // Restore call kind information. 1125 // Restore call kind information.
1117 __ pop(r5); 1126 __ pop(r5);
1118 // Restore saved function. 1127 // Restore saved function.
1119 __ pop(r1); 1128 __ pop(r1);
1120 1129
1121 // Tear down temporary frame. 1130 // Tear down internal frame.
1122 __ LeaveInternalFrame(); 1131 }
1123 1132
1124 // Do a tail-call of the compiled function. 1133 // Do a tail-call of the compiled function.
1125 __ Jump(r2); 1134 __ Jump(r2);
1126 } 1135 }
1127 1136
1128 1137
1129 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, 1138 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1130 Deoptimizer::BailoutType type) { 1139 Deoptimizer::BailoutType type) {
1131 __ EnterInternalFrame(); 1140 {
1132 // Pass the function and deoptimization type to the runtime system. 1141 FrameScope scope(masm, StackFrame::INTERNAL);
1133 __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type)))); 1142 // Pass the function and deoptimization type to the runtime system.
1134 __ push(r0); 1143 __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
1135 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); 1144 __ push(r0);
1136 __ LeaveInternalFrame(); 1145 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
1146 }
1137 1147
1138 // Get the full codegen state from the stack and untag it -> r6. 1148 // Get the full codegen state from the stack and untag it -> r6.
1139 __ ldr(r6, MemOperand(sp, 0 * kPointerSize)); 1149 __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
1140 __ SmiUntag(r6); 1150 __ SmiUntag(r6);
1141 // Switch on the state. 1151 // Switch on the state.
1142 Label with_tos_register, unknown_state; 1152 Label with_tos_register, unknown_state;
1143 __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS)); 1153 __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
1144 __ b(ne, &with_tos_register); 1154 __ b(ne, &with_tos_register);
1145 __ add(sp, sp, Operand(1 * kPointerSize)); // Remove state. 1155 __ add(sp, sp, Operand(1 * kPointerSize)); // Remove state.
1146 __ Ret(); 1156 __ Ret();
(...skipping 19 matching lines...)
1166 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); 1176 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1167 } 1177 }
1168 1178
1169 1179
1170 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) { 1180 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
1171 // For now, we are relying on the fact that Runtime::NotifyOSR 1181 // For now, we are relying on the fact that Runtime::NotifyOSR
1172 // doesn't do any garbage collection which allows us to save/restore 1182 // doesn't do any garbage collection which allows us to save/restore
1173 // the registers without worrying about which of them contain 1183 // the registers without worrying about which of them contain
1174 // pointers. This seems a bit fragile. 1184 // pointers. This seems a bit fragile.
1175 __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit()); 1185 __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
1176 __ EnterInternalFrame(); 1186 {
1177 __ CallRuntime(Runtime::kNotifyOSR, 0); 1187 FrameScope scope(masm, StackFrame::INTERNAL);
1178 __ LeaveInternalFrame(); 1188 __ CallRuntime(Runtime::kNotifyOSR, 0);
1189 }
1179 __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit()); 1190 __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
1180 __ Ret(); 1191 __ Ret();
1181 } 1192 }
1182 1193
1183 1194
1184 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { 1195 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1185 CpuFeatures::TryForceFeatureScope scope(VFP3); 1196 CpuFeatures::TryForceFeatureScope scope(VFP3);
1186 if (!CpuFeatures::IsSupported(VFP3)) { 1197 if (!CpuFeatures::IsSupported(VFP3)) {
1187 __ Abort("Unreachable code: Cannot optimize without VFP3 support."); 1198 __ Abort("Unreachable code: Cannot optimize without VFP3 support.");
1188 return; 1199 return;
1189 } 1200 }
1190 1201
1191 // Lookup the function in the JavaScript frame and push it as an 1202 // Lookup the function in the JavaScript frame and push it as an
1192 // argument to the on-stack replacement function. 1203 // argument to the on-stack replacement function.
1193 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1204 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1194 __ EnterInternalFrame(); 1205 {
1195 __ push(r0); 1206 FrameScope scope(masm, StackFrame::INTERNAL);
1196 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); 1207 __ push(r0);
1197 __ LeaveInternalFrame(); 1208 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1209 }
1198 1210
1199 // If the result was -1 it means that we couldn't optimize the 1211 // If the result was -1 it means that we couldn't optimize the
1200 // function. Just return and continue in the unoptimized version. 1212 // function. Just return and continue in the unoptimized version.
1201 Label skip; 1213 Label skip;
1202 __ cmp(r0, Operand(Smi::FromInt(-1))); 1214 __ cmp(r0, Operand(Smi::FromInt(-1)));
1203 __ b(ne, &skip); 1215 __ b(ne, &skip);
1204 __ Ret(); 1216 __ Ret();
1205 1217
1206 __ bind(&skip); 1218 __ bind(&skip);
1207 // Untag the AST id and push it on the stack. 1219 // Untag the AST id and push it on the stack.
(...skipping 61 matching lines...)
1269 __ b(eq, &use_global_receiver); 1281 __ b(eq, &use_global_receiver);
1270 __ LoadRoot(r3, Heap::kNullValueRootIndex); 1282 __ LoadRoot(r3, Heap::kNullValueRootIndex);
1271 __ cmp(r2, r3); 1283 __ cmp(r2, r3);
1272 __ b(eq, &use_global_receiver); 1284 __ b(eq, &use_global_receiver);
1273 1285
1274 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); 1286 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1275 __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE); 1287 __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE);
1276 __ b(ge, &shift_arguments); 1288 __ b(ge, &shift_arguments);
1277 1289
1278 __ bind(&convert_to_object); 1290 __ bind(&convert_to_object);
1279 __ EnterInternalFrame(); // In order to preserve argument count.
1280 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); // Smi-tagged.
1281 __ push(r0);
1282 1291
1283 __ push(r2); 1292 {
1284 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1293 // Enter an internal frame in order to preserve argument count.
1285 __ mov(r2, r0); 1294 FrameScope scope(masm, StackFrame::INTERNAL);
1295 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); // Smi-tagged.
1296 __ push(r0);
1286 1297
1287 __ pop(r0); 1298 __ push(r2);
1288 __ mov(r0, Operand(r0, ASR, kSmiTagSize)); 1299 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1289 __ LeaveInternalFrame(); 1300 __ mov(r2, r0);
1301
1302 __ pop(r0);
1303 __ mov(r0, Operand(r0, ASR, kSmiTagSize));
1304
1305 // Exit the internal frame.
1306 }
1307
1290 // Restore the function to r1, and the flag to r4. 1308 // Restore the function to r1, and the flag to r4.
1291 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); 1309 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1292 __ mov(r4, Operand(0, RelocInfo::NONE)); 1310 __ mov(r4, Operand(0, RelocInfo::NONE));
1293 __ jmp(&patch_receiver); 1311 __ jmp(&patch_receiver);
1294 1312
1295 // Use the global receiver object from the called function as the 1313 // Use the global receiver object from the called function as the
1296 // receiver. 1314 // receiver.
1297 __ bind(&use_global_receiver); 1315 __ bind(&use_global_receiver);
1298 const int kGlobalIndex = 1316 const int kGlobalIndex =
1299 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; 1317 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
(...skipping 99 matching lines...)
1399 } 1417 }
1400 1418
1401 1419
1402 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { 1420 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1403 const int kIndexOffset = -5 * kPointerSize; 1421 const int kIndexOffset = -5 * kPointerSize;
1404 const int kLimitOffset = -4 * kPointerSize; 1422 const int kLimitOffset = -4 * kPointerSize;
1405 const int kArgsOffset = 2 * kPointerSize; 1423 const int kArgsOffset = 2 * kPointerSize;
1406 const int kRecvOffset = 3 * kPointerSize; 1424 const int kRecvOffset = 3 * kPointerSize;
1407 const int kFunctionOffset = 4 * kPointerSize; 1425 const int kFunctionOffset = 4 * kPointerSize;
1408 1426
1409 __ EnterInternalFrame(); 1427 {
1428 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1410 1429
1411 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function 1430 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function
1412 __ push(r0); 1431 __ push(r0);
1413 __ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array 1432 __ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array
1414 __ push(r0); 1433 __ push(r0);
1415 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); 1434 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
1416 1435
1417 // Check the stack for overflow. We are not trying to catch 1436 // Check the stack for overflow. We are not trying to catch
1418 // interruptions (e.g. debug break and preemption) here, so the "real stack 1437 // interruptions (e.g. debug break and preemption) here, so the "real stack
1419 // limit" is checked. 1438 // limit" is checked.
1420 Label okay; 1439 Label okay;
1421 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex); 1440 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
1422 // Make r2 the space we have left. The stack might already be overflowed 1441 // Make r2 the space we have left. The stack might already be overflowed
1423 // here which will cause r2 to become negative. 1442 // here which will cause r2 to become negative.
1424 __ sub(r2, sp, r2); 1443 __ sub(r2, sp, r2);
1425 // Check if the arguments will overflow the stack. 1444 // Check if the arguments will overflow the stack.
1426 __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize)); 1445 __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1427 __ b(gt, &okay); // Signed comparison. 1446 __ b(gt, &okay); // Signed comparison.
1428 1447
1429 // Out of stack space. 1448 // Out of stack space.
1430 __ ldr(r1, MemOperand(fp, kFunctionOffset)); 1449 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1431 __ push(r1); 1450 __ push(r1);
1432 __ push(r0); 1451 __ push(r0);
1433 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); 1452 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
1434 // End of stack check. 1453 // End of stack check.
1435 1454
1436 // Push current limit and index. 1455 // Push current limit and index.
1437 __ bind(&okay); 1456 __ bind(&okay);
1438 __ push(r0); // limit 1457 __ push(r0); // limit
1439 __ mov(r1, Operand(0, RelocInfo::NONE)); // initial index 1458 __ mov(r1, Operand(0, RelocInfo::NONE)); // initial index
1440 __ push(r1); 1459 __ push(r1);
1441 1460
1442 // Get the receiver. 1461 // Get the receiver.
1443 __ ldr(r0, MemOperand(fp, kRecvOffset)); 1462 __ ldr(r0, MemOperand(fp, kRecvOffset));
1444 1463
1445 // Check that the function is a JS function (otherwise it must be a proxy). 1464 // Check that the function is a JS function (otherwise it must be a proxy).
1446 Label push_receiver; 1465 Label push_receiver;
1447 __ ldr(r1, MemOperand(fp, kFunctionOffset)); 1466 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1448 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); 1467 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
1449 __ b(ne, &push_receiver); 1468 __ b(ne, &push_receiver);
1450 1469
1451 // Change context eagerly to get the right global object if necessary. 1470 // Change context eagerly to get the right global object if necessary.
1452 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); 1471 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1453 // Load the shared function info while the function is still in r1. 1472 // Load the shared function info while the function is still in r1.
1454 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); 1473 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1455 1474
1456 // Compute the receiver. 1475 // Compute the receiver.
1457 // Do not transform the receiver for strict mode functions. 1476 // Do not transform the receiver for strict mode functions.
1458 Label call_to_object, use_global_receiver; 1477 Label call_to_object, use_global_receiver;
1459 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset)); 1478 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
1460 __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + 1479 __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1461 kSmiTagSize))); 1480 kSmiTagSize)));
1462 __ b(ne, &push_receiver); 1481 __ b(ne, &push_receiver);
1463 1482
1464 // Do not transform the receiver for strict mode functions. 1483 // Do not transform the receiver for strict mode functions.
1465 __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); 1484 __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1466 __ b(ne, &push_receiver); 1485 __ b(ne, &push_receiver);
1467 1486
1468 // Compute the receiver in non-strict mode. 1487 // Compute the receiver in non-strict mode.
1469 __ JumpIfSmi(r0, &call_to_object); 1488 __ JumpIfSmi(r0, &call_to_object);
1470 __ LoadRoot(r1, Heap::kNullValueRootIndex); 1489 __ LoadRoot(r1, Heap::kNullValueRootIndex);
1471 __ cmp(r0, r1); 1490 __ cmp(r0, r1);
1472 __ b(eq, &use_global_receiver); 1491 __ b(eq, &use_global_receiver);
1473 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); 1492 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
1474 __ cmp(r0, r1); 1493 __ cmp(r0, r1);
1475 __ b(eq, &use_global_receiver); 1494 __ b(eq, &use_global_receiver);
1476 1495
1477 // Check if the receiver is already a JavaScript object. 1496 // Check if the receiver is already a JavaScript object.
1478 // r0: receiver 1497 // r0: receiver
1479 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); 1498 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1480 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); 1499 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
1481 __ b(ge, &push_receiver); 1500 __ b(ge, &push_receiver);
1482 1501
1483 // Convert the receiver to a regular object. 1502 // Convert the receiver to a regular object.
1484 // r0: receiver 1503 // r0: receiver
1485 __ bind(&call_to_object); 1504 __ bind(&call_to_object);
1486 __ push(r0); 1505 __ push(r0);
1487 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1506 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1488 __ b(&push_receiver); 1507 __ b(&push_receiver);
1489 1508
1490 // Use the current global receiver object as the receiver. 1509 // Use the current global receiver object as the receiver.
1491 __ bind(&use_global_receiver); 1510 __ bind(&use_global_receiver);
1492 const int kGlobalOffset = 1511 const int kGlobalOffset =
1493 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; 1512 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
1494 __ ldr(r0, FieldMemOperand(cp, kGlobalOffset)); 1513 __ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
1495 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset)); 1514 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
1496 __ ldr(r0, FieldMemOperand(r0, kGlobalOffset)); 1515 __ ldr(r0, FieldMemOperand(r0, kGlobalOffset));
1497 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset)); 1516 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
1498 1517
1499 // Push the receiver. 1518 // Push the receiver.
1500 // r0: receiver 1519 // r0: receiver
1501 __ bind(&push_receiver); 1520 __ bind(&push_receiver);
1502 __ push(r0); 1521 __ push(r0);
1503 1522
1504 // Copy all arguments from the array to the stack. 1523 // Copy all arguments from the array to the stack.
1505 Label entry, loop; 1524 Label entry, loop;
1506 __ ldr(r0, MemOperand(fp, kIndexOffset)); 1525 __ ldr(r0, MemOperand(fp, kIndexOffset));
1507 __ b(&entry); 1526 __ b(&entry);
1508 1527
1509 // Load the current argument from the arguments array and push it to the 1528 // Load the current argument from the arguments array and push it to the
1510 // stack. 1529 // stack.
1511 // r0: current argument index 1530 // r0: current argument index
1512 __ bind(&loop); 1531 __ bind(&loop);
1513 __ ldr(r1, MemOperand(fp, kArgsOffset)); 1532 __ ldr(r1, MemOperand(fp, kArgsOffset));
1514 __ push(r1); 1533 __ push(r1);
1515 __ push(r0); 1534 __ push(r0);
1516 1535
1517 // Call the runtime to access the property in the arguments array. 1536 // Call the runtime to access the property in the arguments array.
1518 __ CallRuntime(Runtime::kGetProperty, 2); 1537 __ CallRuntime(Runtime::kGetProperty, 2);
1519 __ push(r0); 1538 __ push(r0);
1520 1539
1521 // Use inline caching to access the arguments. 1540 // Use inline caching to access the arguments.
1522 __ ldr(r0, MemOperand(fp, kIndexOffset)); 1541 __ ldr(r0, MemOperand(fp, kIndexOffset));
1523 __ add(r0, r0, Operand(1 << kSmiTagSize)); 1542 __ add(r0, r0, Operand(1 << kSmiTagSize));
1524 __ str(r0, MemOperand(fp, kIndexOffset)); 1543 __ str(r0, MemOperand(fp, kIndexOffset));
1525 1544
1526 // Test if the copy loop has finished copying all the elements from the 1545 // Test if the copy loop has finished copying all the elements from the
1527 // arguments object. 1546 // arguments object.
1528 __ bind(&entry); 1547 __ bind(&entry);
1529 __ ldr(r1, MemOperand(fp, kLimitOffset)); 1548 __ ldr(r1, MemOperand(fp, kLimitOffset));
1530 __ cmp(r0, r1); 1549 __ cmp(r0, r1);
1531 __ b(ne, &loop); 1550 __ b(ne, &loop);
1532 1551
1533 // Invoke the function. 1552 // Invoke the function.
1534 Label call_proxy; 1553 Label call_proxy;
1535 ParameterCount actual(r0); 1554 ParameterCount actual(r0);
1536 __ mov(r0, Operand(r0, ASR, kSmiTagSize)); 1555 __ mov(r0, Operand(r0, ASR, kSmiTagSize));
1537 __ ldr(r1, MemOperand(fp, kFunctionOffset)); 1556 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1538 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); 1557 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
1539 __ b(ne, &call_proxy); 1558 __ b(ne, &call_proxy);
1540 __ InvokeFunction(r1, actual, CALL_FUNCTION, 1559 __ InvokeFunction(r1, actual, CALL_FUNCTION,
1541 NullCallWrapper(), CALL_AS_METHOD); 1560 NullCallWrapper(), CALL_AS_METHOD);
1542 1561
1543 // Tear down the internal frame and remove function, receiver and args. 1562 frame_scope.GenerateLeaveFrame();
1544 __ LeaveInternalFrame(); 1563 __ add(sp, sp, Operand(3 * kPointerSize));
1545 __ add(sp, sp, Operand(3 * kPointerSize)); 1564 __ Jump(lr);
1546 __ Jump(lr);
1547 1565
1548 // Invoke the function proxy. 1566 // Invoke the function proxy.
1549 __ bind(&call_proxy); 1567 __ bind(&call_proxy);
1550 __ push(r1); // add function proxy as last argument 1568 __ push(r1); // add function proxy as last argument
1551 __ add(r0, r0, Operand(1)); 1569 __ add(r0, r0, Operand(1));
1552 __ mov(r2, Operand(0, RelocInfo::NONE)); 1570 __ mov(r2, Operand(0, RelocInfo::NONE));
1553 __ SetCallKind(r5, CALL_AS_METHOD); 1571 __ SetCallKind(r5, CALL_AS_METHOD);
1554 __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY); 1572 __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
1555 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 1573 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1556 RelocInfo::CODE_TARGET); 1574 RelocInfo::CODE_TARGET);
1557 1575
1558 __ LeaveInternalFrame(); 1576 // Tear down the internal frame and remove function, receiver and args.
1577 }
1559 __ add(sp, sp, Operand(3 * kPointerSize)); 1578 __ add(sp, sp, Operand(3 * kPointerSize));
1560 __ Jump(lr); 1579 __ Jump(lr);
1561 } 1580 }
1562 1581
1563 1582
1564 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { 1583 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1565 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); 1584 __ mov(r0, Operand(r0, LSL, kSmiTagSize));
1566 __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 1585 __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1567 __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit()); 1586 __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
1568 __ add(fp, sp, Operand(3 * kPointerSize)); 1587 __ add(fp, sp, Operand(3 * kPointerSize));
(...skipping 117 matching lines...)
1686 __ bind(&dont_adapt_arguments); 1705 __ bind(&dont_adapt_arguments);
1687 __ Jump(r3); 1706 __ Jump(r3);
1688 } 1707 }
1689 1708
1690 1709
1691 #undef __ 1710 #undef __
1692 1711
1693 } } // namespace v8::internal 1712 } } // namespace v8::internal
1694 1713
1695 #endif // V8_TARGET_ARCH_ARM 1714 #endif // V8_TARGET_ARCH_ARM
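
One detail in Generate_FunctionApply above: the regular call path leaves the internal frame early with frame_scope.GenerateLeaveFrame() and returns before the scope closes, while the proxy path falls through to the closing brace, where (assuming the destructor behaves as the rest of the patch implies) the frame teardown is emitted again for that path. A simplified sketch of the two exits, with names taken from the diff:

    {
      FrameScope frame_scope(masm, StackFrame::INTERNAL);
      // ... prepare the arguments and invoke the function ...
      frame_scope.GenerateLeaveFrame();           // direct path: emit teardown here,
      __ add(sp, sp, Operand(3 * kPointerSize));  // drop function, receiver and args,
      __ Jump(lr);                                // and return before the scope ends.

      __ bind(&call_proxy);
      // ... call the function proxy through the arguments adaptor ...
    }                                             // proxy path: the destructor emits
    __ add(sp, sp, Operand(3 * kPointerSize));    //   the frame teardown at this brace.
    __ Jump(lr);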