Chromium Code Reviews

Side by Side Diff: src/arm/builtins-arm.cc

Issue 7050039: Revert 8122 (stub call asserts) while test failures are investigated. (Closed)
Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 9 years, 6 months ago
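
Context for the diff below: the old (left) column wraps each internal-frame region in a scoped FrameScope(masm, StackFrame::INTERNAL) object, while the reverted new (right) column restores explicit __ EnterInternalFrame() / __ LeaveInternalFrame() calls. The following is a minimal, hypothetical C++ sketch contrasting the two bracketing styles; the Assembler and FrameScope classes here are illustrative stand-ins with simplified signatures, not V8's actual types.

#include <cstdio>

// Hypothetical stand-ins (not V8's real MacroAssembler or FrameScope):
// just enough structure to contrast the two frame-management styles.
struct Assembler {
  void EnterInternalFrame() { std::printf("enter internal frame\n"); }
  void LeaveInternalFrame() { std::printf("leave internal frame\n"); }
};

// RAII guard: entering the frame in the constructor and leaving it in the
// destructor means every exit path out of the braced block emits the epilogue.
class FrameScope {
 public:
  explicit FrameScope(Assembler* masm) : masm_(masm) { masm_->EnterInternalFrame(); }
  ~FrameScope() { masm_->LeaveInternalFrame(); }
 private:
  Assembler* masm_;
};

int main() {
  Assembler masm;

  // Style restored by this revert: explicit bracketing calls.
  masm.EnterInternalFrame();
  std::printf("  ... emit code that runs inside the frame ...\n");
  masm.LeaveInternalFrame();

  // Style being reverted: a braced scope delimits the frame.
  {
    FrameScope scope(&masm);
    std::printf("  ... emit code that runs inside the frame ...\n");
  }
  return 0;
}

The practical difference is only which construct marks the end of the frame; the patch swaps one bracketing style for the other while the r8122 test failures are investigated.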
OLD | NEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 564 matching lines...)
575 __ tst(r3, Operand(kIsNotStringMask)); 575 __ tst(r3, Operand(kIsNotStringMask));
576 __ b(ne, &convert_argument); 576 __ b(ne, &convert_argument);
577 __ mov(argument, r0); 577 __ mov(argument, r0);
578 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); 578 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
579 __ b(&argument_is_string); 579 __ b(&argument_is_string);
580 580
581 // Invoke the conversion builtin and put the result into r2. 581 // Invoke the conversion builtin and put the result into r2.
582 __ bind(&convert_argument); 582 __ bind(&convert_argument);
583 __ push(function); // Preserve the function. 583 __ push(function); // Preserve the function.
584 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); 584 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
585 { 585 __ EnterInternalFrame();
586 FrameScope scope(masm, StackFrame::INTERNAL); 586 __ push(r0);
587 __ push(r0); 587 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
588 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); 588 __ LeaveInternalFrame();
589 }
590 __ pop(function); 589 __ pop(function);
591 __ mov(argument, r0); 590 __ mov(argument, r0);
592 __ b(&argument_is_string); 591 __ b(&argument_is_string);
593 592
594 // Load the empty string into r2, remove the receiver from the 593 // Load the empty string into r2, remove the receiver from the
595 // stack, and jump back to the case where the argument is a string. 594 // stack, and jump back to the case where the argument is a string.
596 __ bind(&no_arguments); 595 __ bind(&no_arguments);
597 __ LoadRoot(argument, Heap::kEmptyStringRootIndex); 596 __ LoadRoot(argument, Heap::kEmptyStringRootIndex);
598 __ Drop(1); 597 __ Drop(1);
599 __ b(&argument_is_string); 598 __ b(&argument_is_string);
600 599
601 // At this point the argument is already a string. Call runtime to 600 // At this point the argument is already a string. Call runtime to
602 // create a string wrapper. 601 // create a string wrapper.
603 __ bind(&gc_required); 602 __ bind(&gc_required);
604 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4); 603 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
605 { 604 __ EnterInternalFrame();
606 FrameScope scope(masm, StackFrame::INTERNAL); 605 __ push(argument);
607 __ push(argument); 606 __ CallRuntime(Runtime::kNewStringWrapper, 1);
608 __ CallRuntime(Runtime::kNewStringWrapper, 1); 607 __ LeaveInternalFrame();
609 }
610 __ Ret(); 608 __ Ret();
611 } 609 }
612 610
613 611
614 void Builtins::Generate_JSConstructCall(MacroAssembler* masm) { 612 void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
615 // ----------- S t a t e ------------- 613 // ----------- S t a t e -------------
616 // -- r0 : number of arguments 614 // -- r0 : number of arguments
617 // -- r1 : constructor function 615 // -- r1 : constructor function
618 // -- lr : return address 616 // -- lr : return address
619 // -- sp[...]: constructor arguments 617 // -- sp[...]: constructor arguments
(...skipping 26 matching lines...)
646 644
647 static void Generate_JSConstructStubHelper(MacroAssembler* masm, 645 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
648 bool is_api_function, 646 bool is_api_function,
649 bool count_constructions) { 647 bool count_constructions) {
650 // Should never count constructions for api objects. 648 // Should never count constructions for api objects.
651 ASSERT(!is_api_function || !count_constructions); 649 ASSERT(!is_api_function || !count_constructions);
652 650
653 Isolate* isolate = masm->isolate(); 651 Isolate* isolate = masm->isolate();
654 652
655 // Enter a construct frame. 653 // Enter a construct frame.
656 { 654 __ EnterConstructFrame();
657 FrameScope scope(masm, StackFrame::CONSTRUCT); 655
658 656 // Preserve the two incoming parameters on the stack.
659 // Preserve the two incoming parameters on the stack. 657 __ mov(r0, Operand(r0, LSL, kSmiTagSize));
660 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); 658 __ push(r0); // Smi-tagged arguments count.
661 __ push(r0); // Smi-tagged arguments count. 659 __ push(r1); // Constructor function.
662 __ push(r1); // Constructor function. 660
663 661 // Try to allocate the object without transitioning into C code. If any of the
664 // Try to allocate the object without transitioning into C code. If any of 662 // preconditions is not met, the code bails out to the runtime call.
665 // the preconditions is not met, the code bails out to the runtime call. 663 Label rt_call, allocated;
666 Label rt_call, allocated; 664 if (FLAG_inline_new) {
667 if (FLAG_inline_new) { 665 Label undo_allocation;
668 Label undo_allocation;
669 #ifdef ENABLE_DEBUGGER_SUPPORT 666 #ifdef ENABLE_DEBUGGER_SUPPORT
670 ExternalReference debug_step_in_fp = 667 ExternalReference debug_step_in_fp =
671 ExternalReference::debug_step_in_fp_address(isolate); 668 ExternalReference::debug_step_in_fp_address(isolate);
672 __ mov(r2, Operand(debug_step_in_fp)); 669 __ mov(r2, Operand(debug_step_in_fp));
673 __ ldr(r2, MemOperand(r2)); 670 __ ldr(r2, MemOperand(r2));
674 __ tst(r2, r2); 671 __ tst(r2, r2);
675 __ b(ne, &rt_call); 672 __ b(ne, &rt_call);
676 #endif 673 #endif
677 674
678 // Load the initial map and verify that it is in fact a map. 675 // Load the initial map and verify that it is in fact a map.
679 // r1: constructor function 676 // r1: constructor function
680 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); 677 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
681 __ tst(r2, Operand(kSmiTagMask)); 678 __ tst(r2, Operand(kSmiTagMask));
682 __ b(eq, &rt_call); 679 __ b(eq, &rt_call);
683 __ CompareObjectType(r2, r3, r4, MAP_TYPE); 680 __ CompareObjectType(r2, r3, r4, MAP_TYPE);
684 __ b(ne, &rt_call); 681 __ b(ne, &rt_call);
685 682
686 // Check that the constructor is not constructing a JSFunction (see 683 // Check that the constructor is not constructing a JSFunction (see comments
687 // comments in Runtime_NewObject in runtime.cc). In which case the 684 // in Runtime_NewObject in runtime.cc). In which case the initial map's
688 // initial map's instance type would be JS_FUNCTION_TYPE. 685 // instance type would be JS_FUNCTION_TYPE.
689 // r1: constructor function 686 // r1: constructor function
690 // r2: initial map 687 // r2: initial map
691 __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE); 688 __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
692 __ b(eq, &rt_call); 689 __ b(eq, &rt_call);
693 690
691 if (count_constructions) {
692 Label allocate;
693 // Decrease generous allocation count.
694 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
695 MemOperand constructor_count =
696 FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
697 __ ldrb(r4, constructor_count);
698 __ sub(r4, r4, Operand(1), SetCC);
699 __ strb(r4, constructor_count);
700 __ b(ne, &allocate);
701
702 __ Push(r1, r2);
703
704 __ push(r1); // constructor
705 // The call will replace the stub, so the countdown is only done once.
706 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
707
708 __ pop(r2);
709 __ pop(r1);
710
711 __ bind(&allocate);
712 }
713
714 // Now allocate the JSObject on the heap.
715 // r1: constructor function
716 // r2: initial map
717 __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
718 __ AllocateInNewSpace(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);
719
720 // Allocated the JSObject, now initialize the fields. Map is set to initial
721 // map and properties and elements are set to empty fixed array.
722 // r1: constructor function
723 // r2: initial map
724 // r3: object size
725 // r4: JSObject (not tagged)
726 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
727 __ mov(r5, r4);
728 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
729 __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
730 ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
731 __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
732 ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
733 __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
734
735 // Fill all the in-object properties with the appropriate filler.
736 // r1: constructor function
737 // r2: initial map
738 // r3: object size (in words)
739 // r4: JSObject (not tagged)
740 // r5: First in-object property of JSObject (not tagged)
741 __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
742 ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
743 { Label loop, entry;
694 if (count_constructions) { 744 if (count_constructions) {
695 Label allocate; 745 // To allow for truncation.
696 // Decrease generous allocation count. 746 __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex);
697 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); 747 } else {
698 MemOperand constructor_count = 748 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
699 FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
700 __ ldrb(r4, constructor_count);
701 __ sub(r4, r4, Operand(1), SetCC);
702 __ strb(r4, constructor_count);
703 __ b(ne, &allocate);
704
705 __ Push(r1, r2);
706
707 __ push(r1); // constructor
708 // The call will replace the stub, so the countdown is only done once.
709 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
710
711 __ pop(r2);
712 __ pop(r1);
713
714 __ bind(&allocate);
715 } 749 }
716 750 __ b(&entry);
717 // Now allocate the JSObject on the heap. 751 __ bind(&loop);
718 // r1: constructor function 752 __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
719 // r2: initial map 753 __ bind(&entry);
720 __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset)); 754 __ cmp(r5, r6);
721 __ AllocateInNewSpace(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS); 755 __ b(lt, &loop);
722 756 }
723 // Allocated the JSObject, now initialize the fields. Map is set to 757
724 // initial map and properties and elements are set to empty fixed array. 758 // Add the object tag to make the JSObject real, so that we can continue and
725 // r1: constructor function 759 // jump into the continuation code at any time from now on. Any failures
726 // r2: initial map 760 // need to undo the allocation, so that the heap is in a consistent state
727 // r3: object size 761 // and verifiable.
728 // r4: JSObject (not tagged) 762 __ add(r4, r4, Operand(kHeapObjectTag));
729 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex); 763
730 __ mov(r5, r4); 764 // Check if a non-empty properties array is needed. Continue with allocated
731 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset); 765 // object if not fall through to runtime call if it is.
732 __ str(r2, MemOperand(r5, kPointerSize, PostIndex)); 766 // r1: constructor function
733 ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset); 767 // r4: JSObject
734 __ str(r6, MemOperand(r5, kPointerSize, PostIndex)); 768 // r5: start of next object (not tagged)
735 ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset); 769 __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
736 __ str(r6, MemOperand(r5, kPointerSize, PostIndex)); 770 // The field instance sizes contains both pre-allocated property fields and
737 771 // in-object properties.
738 // Fill all the in-object properties with the appropriate filler. 772 __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
739 // r1: constructor function 773 __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * 8, 8);
740 // r2: initial map 774 __ add(r3, r3, Operand(r6));
741 // r3: object size (in words) 775 __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * 8, 8);
742 // r4: JSObject (not tagged) 776 __ sub(r3, r3, Operand(r6), SetCC);
743 // r5: First in-object property of JSObject (not tagged) 777
744 __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object. 778 // Done if no extra properties are to be allocated.
745 ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize); 779 __ b(eq, &allocated);
746 { Label loop, entry; 780 __ Assert(pl, "Property allocation count failed.");
747 if (count_constructions) { 781
748 // To allow for truncation. 782 // Scale the number of elements by pointer size and add the header for
749 __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex); 783 // FixedArrays to the start of the next object calculation from above.
750 } else { 784 // r1: constructor
751 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex); 785 // r3: number of elements in properties array
752 } 786 // r4: JSObject
753 __ b(&entry); 787 // r5: start of next object
754 __ bind(&loop); 788 __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
755 __ str(r7, MemOperand(r5, kPointerSize, PostIndex)); 789 __ AllocateInNewSpace(
756 __ bind(&entry); 790 r0,
757 __ cmp(r5, r6); 791 r5,
758 __ b(lt, &loop); 792 r6,
793 r2,
794 &undo_allocation,
795 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
796
797 // Initialize the FixedArray.
798 // r1: constructor
799 // r3: number of elements in properties array
800 // r4: JSObject
801 // r5: FixedArray (not tagged)
802 __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
803 __ mov(r2, r5);
804 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
805 __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
806 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
807 __ mov(r0, Operand(r3, LSL, kSmiTagSize));
808 __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
809
810 // Initialize the fields to undefined.
811 // r1: constructor function
812 // r2: First element of FixedArray (not tagged)
813 // r3: number of elements in properties array
814 // r4: JSObject
815 // r5: FixedArray (not tagged)
816 __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
817 ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
818 { Label loop, entry;
819 if (count_constructions) {
820 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
821 } else if (FLAG_debug_code) {
822 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
823 __ cmp(r7, r8);
824 __ Assert(eq, "Undefined value not loaded.");
759 } 825 }
760 826 __ b(&entry);
761 // Add the object tag to make the JSObject real, so that we can continue 827 __ bind(&loop);
762 // and jump into the continuation code at any time from now on. Any 828 __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
763 // failures need to undo the allocation, so that the heap is in a 829 __ bind(&entry);
764 // consistent state and verifiable. 830 __ cmp(r2, r6);
765 __ add(r4, r4, Operand(kHeapObjectTag)); 831 __ b(lt, &loop);
766
767 // Check if a non-empty properties array is needed. Continue with
768 // allocated object if not fall through to runtime call if it is.
769 // r1: constructor function
770 // r4: JSObject
771 // r5: start of next object (not tagged)
772 __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
773 // The field instance sizes contains both pre-allocated property fields
774 // and in-object properties.
775 __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
776 __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * 8, 8);
777 __ add(r3, r3, Operand(r6));
778 __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * 8, 8);
779 __ sub(r3, r3, Operand(r6), SetCC);
780
781 // Done if no extra properties are to be allocated.
782 __ b(eq, &allocated);
783 __ Assert(pl, "Property allocation count failed.");
784
785 // Scale the number of elements by pointer size and add the header for
786 // FixedArrays to the start of the next object calculation from above.
787 // r1: constructor
788 // r3: number of elements in properties array
789 // r4: JSObject
790 // r5: start of next object
791 __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
792 __ AllocateInNewSpace(
793 r0,
794 r5,
795 r6,
796 r2,
797 &undo_allocation,
798 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
799
800 // Initialize the FixedArray.
801 // r1: constructor
802 // r3: number of elements in properties array
803 // r4: JSObject
804 // r5: FixedArray (not tagged)
805 __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
806 __ mov(r2, r5);
807 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
808 __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
809 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
810 __ mov(r0, Operand(r3, LSL, kSmiTagSize));
811 __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
812
813 // Initialize the fields to undefined.
814 // r1: constructor function
815 // r2: First element of FixedArray (not tagged)
816 // r3: number of elements in properties array
817 // r4: JSObject
818 // r5: FixedArray (not tagged)
819 __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
820 ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
821 { Label loop, entry;
822 if (count_constructions) {
823 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
824 } else if (FLAG_debug_code) {
825 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
826 __ cmp(r7, r8);
827 __ Assert(eq, "Undefined value not loaded.");
828 }
829 __ b(&entry);
830 __ bind(&loop);
831 __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
832 __ bind(&entry);
833 __ cmp(r2, r6);
834 __ b(lt, &loop);
835 }
836
837 // Store the initialized FixedArray into the properties field of
838 // the JSObject
839 // r1: constructor function
840 // r4: JSObject
841 // r5: FixedArray (not tagged)
842 __ add(r5, r5, Operand(kHeapObjectTag)); // Add the heap tag.
843 __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));
844
845 // Continue with JSObject being successfully allocated
846 // r1: constructor function
847 // r4: JSObject
848 __ jmp(&allocated);
849
850 // Undo the setting of the new top so that the heap is verifiable. For
851 // example, the map's unused properties potentially do not match the
852 // allocated objects unused properties.
853 // r4: JSObject (previous new top)
854 __ bind(&undo_allocation);
855 __ UndoAllocationInNewSpace(r4, r5);
856 } 832 }
857 833
858 // Allocate the new receiver object using the runtime call. 834 // Store the initialized FixedArray into the properties field of
859 // r1: constructor function 835 // the JSObject
860 __ bind(&rt_call); 836 // r1: constructor function
861 __ push(r1); // argument for Runtime_NewObject 837 // r4: JSObject
862 __ CallRuntime(Runtime::kNewObject, 1); 838 // r5: FixedArray (not tagged)
863 __ mov(r4, r0); 839 __ add(r5, r5, Operand(kHeapObjectTag)); // Add the heap tag.
864 840 __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));
865 // Receiver for constructor call allocated. 841
866 // r4: JSObject 842 // Continue with JSObject being successfully allocated
867 __ bind(&allocated); 843 // r1: constructor function
868 __ push(r4); 844 // r4: JSObject
869 845 __ jmp(&allocated);
870 // Push the function and the allocated receiver from the stack. 846
871 // sp[0]: receiver (newly allocated object) 847 // Undo the setting of the new top so that the heap is verifiable. For
872 // sp[1]: constructor function 848 // example, the map's unused properties potentially do not match the
873 // sp[2]: number of arguments (smi-tagged) 849 // allocated objects unused properties.
874 __ ldr(r1, MemOperand(sp, kPointerSize)); 850 // r4: JSObject (previous new top)
875 __ push(r1); // Constructor function. 851 __ bind(&undo_allocation);
876 __ push(r4); // Receiver. 852 __ UndoAllocationInNewSpace(r4, r5);
877
878 // Reload the number of arguments from the stack.
879 // r1: constructor function
880 // sp[0]: receiver
881 // sp[1]: constructor function
882 // sp[2]: receiver
883 // sp[3]: constructor function
884 // sp[4]: number of arguments (smi-tagged)
885 __ ldr(r3, MemOperand(sp, 4 * kPointerSize));
886
887 // Setup pointer to last argument.
888 __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
889
890 // Setup number of arguments for function call below
891 __ mov(r0, Operand(r3, LSR, kSmiTagSize));
892
893 // Copy arguments and receiver to the expression stack.
894 // r0: number of arguments
895 // r2: address of last argument (caller sp)
896 // r1: constructor function
897 // r3: number of arguments (smi-tagged)
898 // sp[0]: receiver
899 // sp[1]: constructor function
900 // sp[2]: receiver
901 // sp[3]: constructor function
902 // sp[4]: number of arguments (smi-tagged)
903 Label loop, entry;
904 __ b(&entry);
905 __ bind(&loop);
906 __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
907 __ push(ip);
908 __ bind(&entry);
909 __ sub(r3, r3, Operand(2), SetCC);
910 __ b(ge, &loop);
911
912 // Call the function.
913 // r0: number of arguments
914 // r1: constructor function
915 if (is_api_function) {
916 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
917 Handle<Code> code =
918 masm->isolate()->builtins()->HandleApiCallConstruct();
919 ParameterCount expected(0);
920 __ InvokeCode(code, expected, expected,
921 RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
922 } else {
923 ParameterCount actual(r0);
924 __ InvokeFunction(r1, actual, CALL_FUNCTION,
925 NullCallWrapper(), CALL_AS_METHOD);
926 }
927
928 // Pop the function from the stack.
929 // sp[0]: constructor function
930 // sp[2]: receiver
931 // sp[3]: constructor function
932 // sp[4]: number of arguments (smi-tagged)
933 __ pop();
934
935 // Restore context from the frame.
936 // r0: result
937 // sp[0]: receiver
938 // sp[1]: constructor function
939 // sp[2]: number of arguments (smi-tagged)
940 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
941
942 // If the result is an object (in the ECMA sense), we should get rid
943 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
944 // on page 74.
945 Label use_receiver, exit;
946
947 // If the result is a smi, it is *not* an object in the ECMA sense.
948 // r0: result
949 // sp[0]: receiver (newly allocated object)
950 // sp[1]: constructor function
951 // sp[2]: number of arguments (smi-tagged)
952 __ tst(r0, Operand(kSmiTagMask));
953 __ b(eq, &use_receiver);
954
955 // If the type of the result (stored in its map) is less than
956 // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
957 __ CompareObjectType(r0, r3, r3, FIRST_JS_OBJECT_TYPE);
958 __ b(ge, &exit);
959
960 // Throw away the result of the constructor invocation and use the
961 // on-stack receiver as the result.
962 __ bind(&use_receiver);
963 __ ldr(r0, MemOperand(sp));
964
965 // Remove receiver from the stack, remove caller arguments, and
966 // return.
967 __ bind(&exit);
968 // r0: result
969 // sp[0]: receiver (newly allocated object)
970 // sp[1]: constructor function
971 // sp[2]: number of arguments (smi-tagged)
972 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
973
974 // Leave construct frame.
975 } 853 }
976 854
855 // Allocate the new receiver object using the runtime call.
856 // r1: constructor function
857 __ bind(&rt_call);
858 __ push(r1); // argument for Runtime_NewObject
859 __ CallRuntime(Runtime::kNewObject, 1);
860 __ mov(r4, r0);
861
862 // Receiver for constructor call allocated.
863 // r4: JSObject
864 __ bind(&allocated);
865 __ push(r4);
866
867 // Push the function and the allocated receiver from the stack.
868 // sp[0]: receiver (newly allocated object)
869 // sp[1]: constructor function
870 // sp[2]: number of arguments (smi-tagged)
871 __ ldr(r1, MemOperand(sp, kPointerSize));
872 __ push(r1); // Constructor function.
873 __ push(r4); // Receiver.
874
875 // Reload the number of arguments from the stack.
876 // r1: constructor function
877 // sp[0]: receiver
878 // sp[1]: constructor function
879 // sp[2]: receiver
880 // sp[3]: constructor function
881 // sp[4]: number of arguments (smi-tagged)
882 __ ldr(r3, MemOperand(sp, 4 * kPointerSize));
883
884 // Setup pointer to last argument.
885 __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
886
887 // Setup number of arguments for function call below
888 __ mov(r0, Operand(r3, LSR, kSmiTagSize));
889
890 // Copy arguments and receiver to the expression stack.
891 // r0: number of arguments
892 // r2: address of last argument (caller sp)
893 // r1: constructor function
894 // r3: number of arguments (smi-tagged)
895 // sp[0]: receiver
896 // sp[1]: constructor function
897 // sp[2]: receiver
898 // sp[3]: constructor function
899 // sp[4]: number of arguments (smi-tagged)
900 Label loop, entry;
901 __ b(&entry);
902 __ bind(&loop);
903 __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
904 __ push(ip);
905 __ bind(&entry);
906 __ sub(r3, r3, Operand(2), SetCC);
907 __ b(ge, &loop);
908
909 // Call the function.
910 // r0: number of arguments
911 // r1: constructor function
912 if (is_api_function) {
913 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
914 Handle<Code> code =
915 masm->isolate()->builtins()->HandleApiCallConstruct();
916 ParameterCount expected(0);
917 __ InvokeCode(code, expected, expected,
918 RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
919 } else {
920 ParameterCount actual(r0);
921 __ InvokeFunction(r1, actual, CALL_FUNCTION,
922 NullCallWrapper(), CALL_AS_METHOD);
923 }
924
925 // Pop the function from the stack.
926 // sp[0]: constructor function
927 // sp[2]: receiver
928 // sp[3]: constructor function
929 // sp[4]: number of arguments (smi-tagged)
930 __ pop();
931
932 // Restore context from the frame.
933 // r0: result
934 // sp[0]: receiver
935 // sp[1]: constructor function
936 // sp[2]: number of arguments (smi-tagged)
937 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
938
939 // If the result is an object (in the ECMA sense), we should get rid
940 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
941 // on page 74.
942 Label use_receiver, exit;
943
944 // If the result is a smi, it is *not* an object in the ECMA sense.
945 // r0: result
946 // sp[0]: receiver (newly allocated object)
947 // sp[1]: constructor function
948 // sp[2]: number of arguments (smi-tagged)
949 __ tst(r0, Operand(kSmiTagMask));
950 __ b(eq, &use_receiver);
951
952 // If the type of the result (stored in its map) is less than
953 // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
954 __ CompareObjectType(r0, r3, r3, FIRST_JS_OBJECT_TYPE);
955 __ b(ge, &exit);
956
957 // Throw away the result of the constructor invocation and use the
958 // on-stack receiver as the result.
959 __ bind(&use_receiver);
960 __ ldr(r0, MemOperand(sp));
961
962 // Remove receiver from the stack, remove caller arguments, and
963 // return.
964 __ bind(&exit);
965 // r0: result
966 // sp[0]: receiver (newly allocated object)
967 // sp[1]: constructor function
968 // sp[2]: number of arguments (smi-tagged)
969 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
970 __ LeaveConstructFrame();
977 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1)); 971 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
978 __ add(sp, sp, Operand(kPointerSize)); 972 __ add(sp, sp, Operand(kPointerSize));
979 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2); 973 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
980 __ Jump(lr); 974 __ Jump(lr);
981 } 975 }
982 976
983 977
984 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { 978 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
985 Generate_JSConstructStubHelper(masm, false, true); 979 Generate_JSConstructStubHelper(masm, false, true);
986 } 980 }
(...skipping 12 matching lines...)
999 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, 993 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
1000 bool is_construct) { 994 bool is_construct) {
1001 // Called from Generate_JS_Entry 995 // Called from Generate_JS_Entry
1002 // r0: code entry 996 // r0: code entry
1003 // r1: function 997 // r1: function
1004 // r2: receiver 998 // r2: receiver
1005 // r3: argc 999 // r3: argc
1006 // r4: argv 1000 // r4: argv
1007 // r5-r7, cp may be clobbered 1001 // r5-r7, cp may be clobbered
1008 1002
1009 // Clear the context before we push it when entering the internal frame. 1003 // Clear the context before we push it when entering the JS frame.
1010 __ mov(cp, Operand(0, RelocInfo::NONE)); 1004 __ mov(cp, Operand(0, RelocInfo::NONE));
1011 1005
1012 // Enter an internal frame. 1006 // Enter an internal frame.
1013 { 1007 __ EnterInternalFrame();
1014 FrameScope scope(masm, StackFrame::INTERNAL);
1015 1008
1016 // Set up the context from the function argument. 1009 // Set up the context from the function argument.
1017 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); 1010 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1018 1011
1019 // Set up the roots register. 1012 // Set up the roots register.
1020 ExternalReference roots_address = 1013 ExternalReference roots_address =
1021 ExternalReference::roots_address(masm->isolate()); 1014 ExternalReference::roots_address(masm->isolate());
1022 __ mov(r10, Operand(roots_address)); 1015 __ mov(r10, Operand(roots_address));
1023 1016
1024 // Push the function and the receiver onto the stack. 1017 // Push the function and the receiver onto the stack.
1025 __ push(r1); 1018 __ push(r1);
1026 __ push(r2); 1019 __ push(r2);
1027 1020
1028 // Copy arguments to the stack in a loop. 1021 // Copy arguments to the stack in a loop.
1029 // r1: function 1022 // r1: function
1030 // r3: argc 1023 // r3: argc
1031 // r4: argv, i.e. points to first arg 1024 // r4: argv, i.e. points to first arg
1032 Label loop, entry; 1025 Label loop, entry;
1033 __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2)); 1026 __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
1034 // r2 points past last arg. 1027 // r2 points past last arg.
1035 __ b(&entry); 1028 __ b(&entry);
1036 __ bind(&loop); 1029 __ bind(&loop);
1037 __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex)); // read next parameter 1030 __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex)); // read next parameter
1038 __ ldr(r0, MemOperand(r0)); // dereference handle 1031 __ ldr(r0, MemOperand(r0)); // dereference handle
1039 __ push(r0); // push parameter 1032 __ push(r0); // push parameter
1040 __ bind(&entry); 1033 __ bind(&entry);
1041 __ cmp(r4, r2); 1034 __ cmp(r4, r2);
1042 __ b(ne, &loop); 1035 __ b(ne, &loop);
1043 1036
1044 // Initialize all JavaScript callee-saved registers, since they will be seen 1037 // Initialize all JavaScript callee-saved registers, since they will be seen
1045 // by the garbage collector as part of handlers. 1038 // by the garbage collector as part of handlers.
1046 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); 1039 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
1047 __ mov(r5, Operand(r4)); 1040 __ mov(r5, Operand(r4));
1048 __ mov(r6, Operand(r4)); 1041 __ mov(r6, Operand(r4));
1049 __ mov(r7, Operand(r4)); 1042 __ mov(r7, Operand(r4));
1050 if (kR9Available == 1) { 1043 if (kR9Available == 1) {
1051 __ mov(r9, Operand(r4)); 1044 __ mov(r9, Operand(r4));
1052 } 1045 }
1053 1046
1054 // Invoke the code and pass argc as r0. 1047 // Invoke the code and pass argc as r0.
1055 __ mov(r0, Operand(r3)); 1048 __ mov(r0, Operand(r3));
1056 if (is_construct) { 1049 if (is_construct) {
1057 __ Call(masm->isolate()->builtins()->JSConstructCall(), 1050 __ Call(masm->isolate()->builtins()->JSConstructCall(),
1058 RelocInfo::CODE_TARGET); 1051 RelocInfo::CODE_TARGET);
1059 } else { 1052 } else {
1060 ParameterCount actual(r0); 1053 ParameterCount actual(r0);
1061 __ InvokeFunction(r1, actual, CALL_FUNCTION, 1054 __ InvokeFunction(r1, actual, CALL_FUNCTION,
1062 NullCallWrapper(), CALL_AS_METHOD); 1055 NullCallWrapper(), CALL_AS_METHOD);
1063 } 1056 }
1064 1057
1065 // Exit the JS frame and remove the parameters (except function), and 1058 // Exit the JS frame and remove the parameters (except function), and return.
1066 // return. 1059 // Respect ABI stack constraint.
1067 // Respect ABI stack constraint. 1060 __ LeaveInternalFrame();
1068 }
1069 __ Jump(lr); 1061 __ Jump(lr);
1070 1062
1071 // r0: result 1063 // r0: result
1072 } 1064 }
1073 1065
1074 1066
1075 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { 1067 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
1076 Generate_JSEntryTrampolineHelper(masm, false); 1068 Generate_JSEntryTrampolineHelper(masm, false);
1077 } 1069 }
1078 1070
1079 1071
1080 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { 1072 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
1081 Generate_JSEntryTrampolineHelper(masm, true); 1073 Generate_JSEntryTrampolineHelper(masm, true);
1082 } 1074 }
1083 1075
1084 1076
1085 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { 1077 void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
1086 // Enter an internal frame. 1078 // Enter an internal frame.
1087 { 1079 __ EnterInternalFrame();
1088 FrameScope scope(masm, StackFrame::INTERNAL);
1089 1080
1090 // Preserve the function. 1081 // Preserve the function.
1091 __ push(r1); 1082 __ push(r1);
1092 // Push call kind information. 1083 // Push call kind information.
1093 __ push(r5); 1084 __ push(r5);
1094 1085
1095 // Push the function on the stack as the argument to the runtime function. 1086 // Push the function on the stack as the argument to the runtime function.
1096 __ push(r1); 1087 __ push(r1);
1097 __ CallRuntime(Runtime::kLazyCompile, 1); 1088 __ CallRuntime(Runtime::kLazyCompile, 1);
1098 // Calculate the entry point. 1089 // Calculate the entry point.
1099 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); 1090 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
1100 1091
1101 // Restore call kind information. 1092 // Restore call kind information.
1102 __ pop(r5); 1093 __ pop(r5);
1103 // Restore saved function. 1094 // Restore saved function.
1104 __ pop(r1); 1095 __ pop(r1);
1105 1096
1106 // Tear down internal frame. 1097 // Tear down temporary frame.
1107 } 1098 __ LeaveInternalFrame();
1108 1099
1109 // Do a tail-call of the compiled function. 1100 // Do a tail-call of the compiled function.
1110 __ Jump(r2); 1101 __ Jump(r2);
1111 } 1102 }
1112 1103
1113 1104
1114 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { 1105 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
1115 // Enter an internal frame. 1106 // Enter an internal frame.
1116 { 1107 __ EnterInternalFrame();
1117 FrameScope scope(masm, StackFrame::INTERNAL);
1118 1108
1119 // Preserve the function. 1109 // Preserve the function.
1120 __ push(r1); 1110 __ push(r1);
1121 // Push call kind information. 1111 // Push call kind information.
1122 __ push(r5); 1112 __ push(r5);
1123 1113
1124 // Push the function on the stack as the argument to the runtime function. 1114 // Push the function on the stack as the argument to the runtime function.
1125 __ push(r1); 1115 __ push(r1);
1126 __ CallRuntime(Runtime::kLazyRecompile, 1); 1116 __ CallRuntime(Runtime::kLazyRecompile, 1);
1127 // Calculate the entry point. 1117 // Calculate the entry point.
1128 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); 1118 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
1129 1119
1130 // Restore call kind information. 1120 // Restore call kind information.
1131 __ pop(r5); 1121 __ pop(r5);
1132 // Restore saved function. 1122 // Restore saved function.
1133 __ pop(r1); 1123 __ pop(r1);
1134 1124
1135 // Tear down internal frame. 1125 // Tear down temporary frame.
1136 } 1126 __ LeaveInternalFrame();
1137 1127
1138 // Do a tail-call of the compiled function. 1128 // Do a tail-call of the compiled function.
1139 __ Jump(r2); 1129 __ Jump(r2);
1140 } 1130 }
1141 1131
1142 1132
1143 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, 1133 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1144 Deoptimizer::BailoutType type) { 1134 Deoptimizer::BailoutType type) {
1145 { 1135 __ EnterInternalFrame();
1146 FrameScope scope(masm, StackFrame::INTERNAL); 1136 // Pass the function and deoptimization type to the runtime system.
1147 // Pass the function and deoptimization type to the runtime system. 1137 __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
1148 __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type)))); 1138 __ push(r0);
1149 __ push(r0); 1139 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
1150 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); 1140 __ LeaveInternalFrame();
1151 }
1152 1141
1153 // Get the full codegen state from the stack and untag it -> r6. 1142 // Get the full codegen state from the stack and untag it -> r6.
1154 __ ldr(r6, MemOperand(sp, 0 * kPointerSize)); 1143 __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
1155 __ SmiUntag(r6); 1144 __ SmiUntag(r6);
1156 // Switch on the state. 1145 // Switch on the state.
1157 Label with_tos_register, unknown_state; 1146 Label with_tos_register, unknown_state;
1158 __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS)); 1147 __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
1159 __ b(ne, &with_tos_register); 1148 __ b(ne, &with_tos_register);
1160 __ add(sp, sp, Operand(1 * kPointerSize)); // Remove state. 1149 __ add(sp, sp, Operand(1 * kPointerSize)); // Remove state.
1161 __ Ret(); 1150 __ Ret();
(...skipping 19 matching lines...)
1181 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); 1170 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1182 } 1171 }
1183 1172
1184 1173
1185 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) { 1174 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
1186 // For now, we are relying on the fact that Runtime::NotifyOSR 1175 // For now, we are relying on the fact that Runtime::NotifyOSR
1187 // doesn't do any garbage collection which allows us to save/restore 1176 // doesn't do any garbage collection which allows us to save/restore
1188 // the registers without worrying about which of them contain 1177 // the registers without worrying about which of them contain
1189 // pointers. This seems a bit fragile. 1178 // pointers. This seems a bit fragile.
1190 __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit()); 1179 __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
1191 { 1180 __ EnterInternalFrame();
1192 FrameScope scope(masm, StackFrame::INTERNAL); 1181 __ CallRuntime(Runtime::kNotifyOSR, 0);
1193 __ CallRuntime(Runtime::kNotifyOSR, 0); 1182 __ LeaveInternalFrame();
1194 }
1195 __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit()); 1183 __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
1196 __ Ret(); 1184 __ Ret();
1197 } 1185 }
1198 1186
1199 1187
1200 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { 1188 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1201 CpuFeatures::TryForceFeatureScope scope(VFP3); 1189 CpuFeatures::TryForceFeatureScope scope(VFP3);
1202 if (!CpuFeatures::IsSupported(VFP3)) { 1190 if (!CpuFeatures::IsSupported(VFP3)) {
1203 __ Abort("Unreachable code: Cannot optimize without VFP3 support."); 1191 __ Abort("Unreachable code: Cannot optimize without VFP3 support.");
1204 return; 1192 return;
1205 } 1193 }
1206 1194
1207 // Lookup the function in the JavaScript frame and push it as an 1195 // Lookup the function in the JavaScript frame and push it as an
1208 // argument to the on-stack replacement function. 1196 // argument to the on-stack replacement function.
1209 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1197 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1210 { 1198 __ EnterInternalFrame();
1211 FrameScope scope(masm, StackFrame::INTERNAL); 1199 __ push(r0);
1212 __ push(r0); 1200 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1213 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); 1201 __ LeaveInternalFrame();
1214 }
1215 1202
1216 // If the result was -1 it means that we couldn't optimize the 1203 // If the result was -1 it means that we couldn't optimize the
1217 // function. Just return and continue in the unoptimized version. 1204 // function. Just return and continue in the unoptimized version.
1218 Label skip; 1205 Label skip;
1219 __ cmp(r0, Operand(Smi::FromInt(-1))); 1206 __ cmp(r0, Operand(Smi::FromInt(-1)));
1220 __ b(ne, &skip); 1207 __ b(ne, &skip);
1221 __ Ret(); 1208 __ Ret();
1222 1209
1223 __ bind(&skip); 1210 __ bind(&skip);
1224 // Untag the AST id and push it on the stack. 1211 // Untag the AST id and push it on the stack.
(...skipping 63 matching lines...)
1288 __ LoadRoot(r3, Heap::kNullValueRootIndex); 1275 __ LoadRoot(r3, Heap::kNullValueRootIndex);
1289 __ cmp(r2, r3); 1276 __ cmp(r2, r3);
1290 __ b(eq, &use_global_receiver); 1277 __ b(eq, &use_global_receiver);
1291 1278
1292 STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE); 1279 STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE);
1293 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); 1280 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1294 __ CompareObjectType(r2, r3, r3, FIRST_JS_OBJECT_TYPE); 1281 __ CompareObjectType(r2, r3, r3, FIRST_JS_OBJECT_TYPE);
1295 __ b(ge, &shift_arguments); 1282 __ b(ge, &shift_arguments);
1296 1283
1297 __ bind(&convert_to_object); 1284 __ bind(&convert_to_object);
1285 __ EnterInternalFrame(); // In order to preserve argument count.
1286 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); // Smi-tagged.
1287 __ push(r0);
1298 1288
1299 { 1289 __ push(r2);
1300 // Enter an internal frame in order to preserve argument count. 1290 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1301 FrameScope scope(masm, StackFrame::INTERNAL); 1291 __ mov(r2, r0);
1302 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); // Smi-tagged.
1303 __ push(r0);
1304 1292
1305 __ push(r2); 1293 __ pop(r0);
1306 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1294 __ mov(r0, Operand(r0, ASR, kSmiTagSize));
1307 __ mov(r2, r0); 1295 __ LeaveInternalFrame();
1308
1309 __ pop(r0);
1310 __ mov(r0, Operand(r0, ASR, kSmiTagSize));
1311
1312 // Exit the internal frame.
1313 }
1314
1315 // Restore the function to r1. 1296 // Restore the function to r1.
1316 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); 1297 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1317 __ jmp(&patch_receiver); 1298 __ jmp(&patch_receiver);
1318 1299
1319 // Use the global receiver object from the called function as the 1300 // Use the global receiver object from the called function as the
1320 // receiver. 1301 // receiver.
1321 __ bind(&use_global_receiver); 1302 __ bind(&use_global_receiver);
1322 const int kGlobalIndex = 1303 const int kGlobalIndex =
1323 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; 1304 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
1324 __ ldr(r2, FieldMemOperand(cp, kGlobalIndex)); 1305 __ ldr(r2, FieldMemOperand(cp, kGlobalIndex));
(...skipping 79 matching lines...)
1404 } 1385 }
1405 1386
1406 1387
1407 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { 1388 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1408 const int kIndexOffset = -5 * kPointerSize; 1389 const int kIndexOffset = -5 * kPointerSize;
1409 const int kLimitOffset = -4 * kPointerSize; 1390 const int kLimitOffset = -4 * kPointerSize;
1410 const int kArgsOffset = 2 * kPointerSize; 1391 const int kArgsOffset = 2 * kPointerSize;
1411 const int kRecvOffset = 3 * kPointerSize; 1392 const int kRecvOffset = 3 * kPointerSize;
1412 const int kFunctionOffset = 4 * kPointerSize; 1393 const int kFunctionOffset = 4 * kPointerSize;
1413 1394
1414 { 1395 __ EnterInternalFrame();
1415 FrameScope scope(masm, StackFrame::INTERNAL);
1416 1396
1417 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function 1397 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function
1418 __ push(r0); 1398 __ push(r0);
1419 __ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array 1399 __ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array
1420 __ push(r0); 1400 __ push(r0);
1421 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); 1401 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
1422 1402
1423 // Check the stack for overflow. We are not trying need to catch 1403 // Check the stack for overflow. We are not trying need to catch
1424 // interruptions (e.g. debug break and preemption) here, so the "real stack 1404 // interruptions (e.g. debug break and preemption) here, so the "real stack
1425 // limit" is checked. 1405 // limit" is checked.
1426 Label okay; 1406 Label okay;
1427 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex); 1407 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
1428 // Make r2 the space we have left. The stack might already be overflowed 1408 // Make r2 the space we have left. The stack might already be overflowed
1429 // here which will cause r2 to become negative. 1409 // here which will cause r2 to become negative.
1430 __ sub(r2, sp, r2); 1410 __ sub(r2, sp, r2);
1431 // Check if the arguments will overflow the stack. 1411 // Check if the arguments will overflow the stack.
1432 __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize)); 1412 __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1433 __ b(gt, &okay); // Signed comparison. 1413 __ b(gt, &okay); // Signed comparison.
1434 1414
1435 // Out of stack space. 1415 // Out of stack space.
1436 __ ldr(r1, MemOperand(fp, kFunctionOffset)); 1416 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1437 __ push(r1); 1417 __ push(r1);
1438 __ push(r0); 1418 __ push(r0);
1439 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); 1419 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
1440 // End of stack check. 1420 // End of stack check.
1441 1421
1442 // Push current limit and index. 1422 // Push current limit and index.
1443 __ bind(&okay); 1423 __ bind(&okay);
1444 __ push(r0); // limit 1424 __ push(r0); // limit
1445 __ mov(r1, Operand(0, RelocInfo::NONE)); // initial index 1425 __ mov(r1, Operand(0, RelocInfo::NONE)); // initial index
1446 __ push(r1); 1426 __ push(r1);
1447 1427
1448 // Change context eagerly to get the right global object if necessary. 1428 // Change context eagerly to get the right global object if necessary.
1449 __ ldr(r0, MemOperand(fp, kFunctionOffset)); 1429 __ ldr(r0, MemOperand(fp, kFunctionOffset));
1450 __ ldr(cp, FieldMemOperand(r0, JSFunction::kContextOffset)); 1430 __ ldr(cp, FieldMemOperand(r0, JSFunction::kContextOffset));
1451 // Load the shared function info while the function is still in r0. 1431 // Load the shared function info while the function is still in r0.
1452 __ ldr(r1, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); 1432 __ ldr(r1, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
1453 1433
1454 // Compute the receiver. 1434 // Compute the receiver.
1455 Label call_to_object, use_global_receiver, push_receiver; 1435 Label call_to_object, use_global_receiver, push_receiver;
1456 __ ldr(r0, MemOperand(fp, kRecvOffset)); 1436 __ ldr(r0, MemOperand(fp, kRecvOffset));
1457 1437
1458 // Do not transform the receiver for strict mode functions. 1438 // Do not transform the receiver for strict mode functions.
1459 __ ldr(r2, FieldMemOperand(r1, SharedFunctionInfo::kCompilerHintsOffset)); 1439 __ ldr(r2, FieldMemOperand(r1, SharedFunctionInfo::kCompilerHintsOffset));
1460 __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + 1440 __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1461 kSmiTagSize))); 1441 kSmiTagSize)));
1462 __ b(ne, &push_receiver); 1442 __ b(ne, &push_receiver);
1463 1443
1464 // Do not transform the receiver for strict mode functions. 1444 // Do not transform the receiver for strict mode functions.
1465 __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); 1445 __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1466 __ b(ne, &push_receiver); 1446 __ b(ne, &push_receiver);
1467 1447
1468 // Compute the receiver in non-strict mode. 1448 // Compute the receiver in non-strict mode.
1469 __ tst(r0, Operand(kSmiTagMask)); 1449 __ tst(r0, Operand(kSmiTagMask));
1470 __ b(eq, &call_to_object); 1450 __ b(eq, &call_to_object);
1471 __ LoadRoot(r1, Heap::kNullValueRootIndex); 1451 __ LoadRoot(r1, Heap::kNullValueRootIndex);
1472 __ cmp(r0, r1); 1452 __ cmp(r0, r1);
1473 __ b(eq, &use_global_receiver); 1453 __ b(eq, &use_global_receiver);
1474 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); 1454 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
1475 __ cmp(r0, r1); 1455 __ cmp(r0, r1);
1476 __ b(eq, &use_global_receiver); 1456 __ b(eq, &use_global_receiver);
1477 1457
1478 // Check if the receiver is already a JavaScript object. 1458 // Check if the receiver is already a JavaScript object.
1479 // r0: receiver 1459 // r0: receiver
1480 STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE); 1460 STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE);
1481 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); 1461 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1482 __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE); 1462 __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
1483 __ b(ge, &push_receiver); 1463 __ b(ge, &push_receiver);
1484 1464
1485 // Convert the receiver to a regular object. 1465 // Convert the receiver to a regular object.
1486 // r0: receiver 1466 // r0: receiver
1487 __ bind(&call_to_object); 1467 __ bind(&call_to_object);
1488 __ push(r0); 1468 __ push(r0);
1489 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1469 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1490 __ b(&push_receiver); 1470 __ b(&push_receiver);
1491 1471
1492 // Use the current global receiver object as the receiver. 1472 // Use the current global receiver object as the receiver.
1493 __ bind(&use_global_receiver); 1473 __ bind(&use_global_receiver);
1494 const int kGlobalOffset = 1474 const int kGlobalOffset =
1495 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; 1475 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
1496 __ ldr(r0, FieldMemOperand(cp, kGlobalOffset)); 1476 __ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
1497 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset)); 1477 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
1498 __ ldr(r0, FieldMemOperand(r0, kGlobalOffset)); 1478 __ ldr(r0, FieldMemOperand(r0, kGlobalOffset));
1499 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset)); 1479 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
1500 1480
1501 // Push the receiver. 1481 // Push the receiver.
1502 // r0: receiver 1482 // r0: receiver
1503 __ bind(&push_receiver); 1483 __ bind(&push_receiver);
1504 __ push(r0); 1484 __ push(r0);
1505 1485
1506 // Copy all arguments from the array to the stack. 1486 // Copy all arguments from the array to the stack.
1507 Label entry, loop; 1487 Label entry, loop;
1508 __ ldr(r0, MemOperand(fp, kIndexOffset)); 1488 __ ldr(r0, MemOperand(fp, kIndexOffset));
1509 __ b(&entry); 1489 __ b(&entry);
1510 1490
1511 // Load the current argument from the arguments array and push it to the 1491 // Load the current argument from the arguments array and push it to the
1512 // stack. 1492 // stack.
1513 // r0: current argument index 1493 // r0: current argument index
1514 __ bind(&loop); 1494 __ bind(&loop);
1515 __ ldr(r1, MemOperand(fp, kArgsOffset)); 1495 __ ldr(r1, MemOperand(fp, kArgsOffset));
1516 __ push(r1); 1496 __ push(r1);
1517 __ push(r0); 1497 __ push(r0);
1518 1498
1519 // Call the runtime to access the property in the arguments array. 1499 // Call the runtime to access the property in the arguments array.
1520 __ CallRuntime(Runtime::kGetProperty, 2); 1500 __ CallRuntime(Runtime::kGetProperty, 2);
1521 __ push(r0); 1501 __ push(r0);
1522 1502
1523 // Use inline caching to access the arguments. 1503 // Use inline caching to access the arguments.
1524 __ ldr(r0, MemOperand(fp, kIndexOffset)); 1504 __ ldr(r0, MemOperand(fp, kIndexOffset));
1525 __ add(r0, r0, Operand(1 << kSmiTagSize)); 1505 __ add(r0, r0, Operand(1 << kSmiTagSize));
1526 __ str(r0, MemOperand(fp, kIndexOffset)); 1506 __ str(r0, MemOperand(fp, kIndexOffset));
1527 1507
1528 // Test if the copy loop has finished copying all the elements from the 1508 // Test if the copy loop has finished copying all the elements from the
1529 // arguments object. 1509 // arguments object.
1530 __ bind(&entry); 1510 __ bind(&entry);
1531 __ ldr(r1, MemOperand(fp, kLimitOffset)); 1511 __ ldr(r1, MemOperand(fp, kLimitOffset));
1532 __ cmp(r0, r1); 1512 __ cmp(r0, r1);
1533 __ b(ne, &loop); 1513 __ b(ne, &loop);
1534 1514
1535 // Invoke the function. 1515 // Invoke the function.
1536 ParameterCount actual(r0); 1516 ParameterCount actual(r0);
1537 __ mov(r0, Operand(r0, ASR, kSmiTagSize)); 1517 __ mov(r0, Operand(r0, ASR, kSmiTagSize));
1538 __ ldr(r1, MemOperand(fp, kFunctionOffset)); 1518 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1539 __ InvokeFunction(r1, actual, CALL_FUNCTION, 1519 __ InvokeFunction(r1, actual, CALL_FUNCTION,
1540 NullCallWrapper(), CALL_AS_METHOD); 1520 NullCallWrapper(), CALL_AS_METHOD);
1541 1521
1542 // Tear down the internal frame and remove function, receiver and args. 1522 // Tear down the internal frame and remove function, receiver and args.
1543 } 1523 __ LeaveInternalFrame();
1544
1545 __ add(sp, sp, Operand(3 * kPointerSize)); 1524 __ add(sp, sp, Operand(3 * kPointerSize));
1546 __ Jump(lr); 1525 __ Jump(lr);
1547 } 1526 }
1548 1527
1549 1528
1550 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { 1529 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1551 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); 1530 __ mov(r0, Operand(r0, LSL, kSmiTagSize));
1552 __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 1531 __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1553 __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit()); 1532 __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
1554 __ add(fp, sp, Operand(3 * kPointerSize)); 1533 __ add(fp, sp, Operand(3 * kPointerSize));
(...skipping 117 matching lines...)
1672 __ bind(&dont_adapt_arguments); 1651 __ bind(&dont_adapt_arguments);
1673 __ Jump(r3); 1652 __ Jump(r3);
1674 } 1653 }
1675 1654
1676 1655
1677 #undef __ 1656 #undef __
1678 1657
1679 } } // namespace v8::internal 1658 } } // namespace v8::internal
1680 1659
1681 #endif // V8_TARGET_ARCH_ARM 1660 #endif // V8_TARGET_ARCH_ARM
