OLD | NEW |
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 156 matching lines...)
167 LSL, | 167 LSL, |
168 assembler::arm::Instr::kInstrSizeLog2 - kSmiTagSize)); | 168 assembler::arm::Instr::kInstrSizeLog2 - kSmiTagSize)); |
169 BlockConstPoolBefore(pc_offset() + (targets.length() + 1) * sizeof(Instr)); | 169 BlockConstPoolBefore(pc_offset() + (targets.length() + 1) * sizeof(Instr)); |
170 nop(); // Jump table alignment. | 170 nop(); // Jump table alignment. |
171 for (int i = 0; i < targets.length(); i++) { | 171 for (int i = 0; i < targets.length(); i++) { |
172 b(targets[i]); | 172 b(targets[i]); |
173 } | 173 } |
174 } | 174 } |
175 | 175 |
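Note on the jump-table hunk above: the LSL amount kInstrSizeLog2 - kSmiTagSize converts a Smi-tagged index into a byte offset into the table of branch instructions, and BlockConstPoolBefore reserves targets.length() + 1 instruction slots (the alignment nop plus one b() per target) so the assembler cannot emit its constant pool in the middle of the table. A minimal sanity check of the shift arithmetic, assuming the usual V8 values kSmiTagSize == 1 and kInstrSizeLog2 == 2 (4-byte ARM instructions); these constants are not shown in this hunk:

  #include <cassert>

  int main() {
    const int kSmiTagSize = 1;      // assumed value, not shown in this hunk
    const int kInstrSizeLog2 = 2;   // assumed: ARM instructions are 4 bytes
    int index = 3;                              // untagged jump-table index
    int smi = index << kSmiTagSize;             // index as it arrives, Smi-tagged
    // LSL by (kInstrSizeLog2 - kSmiTagSize) yields the byte offset of entry 'index'.
    assert((smi << (kInstrSizeLog2 - kSmiTagSize)) == index * (1 << kInstrSizeLog2));
    return 0;
  }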
176 | 176 |
177 void MacroAssembler::LoadRoot(Register destination, | |
178 Heap::RootListIndex index, | |
179 Condition cond) { | |
180 ldr(destination, MemOperand(r10, index << kPointerSizeLog2), cond); | |
181 } | |
182 | |
183 | |
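The substantive change in this patch is the removal of the r10-relative LoadRoot helper above and, in the hunks further down, a return to comparing against Factory handles directly. A side-by-side sketch of the two patterns, lifted from those hunks (quoted for contrast, not new code):

  // Left column (removed): fetch the root via the roots register r10, then compare.
  LoadRoot(ip, Heap::kGlobalContextMapRootIndex);  // ldr ip, [r10, index << kPointerSizeLog2]
  cmp(holder_reg, ip);

  // Right column (restored): compare against the handle; Operand(Handle) is
  // materialized through ip, which is why the debug checks below push/pop
  // holder_reg instead of using ip as a temporary.
  cmp(holder_reg, Operand(Factory::global_context_map()));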
184 // Will clobber 4 registers: object, offset, scratch, ip. The | 177 // Will clobber 4 registers: object, offset, scratch, ip. The |
185 // register 'object' contains a heap object pointer. The heap object | 178 // register 'object' contains a heap object pointer. The heap object |
186 // tag is shifted away. | 179 // tag is shifted away. |
187 void MacroAssembler::RecordWrite(Register object, Register offset, | 180 void MacroAssembler::RecordWrite(Register object, Register offset, |
188 Register scratch) { | 181 Register scratch) { |
189 // This is how much we shift the remembered set bit offset to get the | 182 // This is how much we shift the remembered set bit offset to get the |
190 // offset of the word in the remembered set. We divide by kBitsPerInt (32, | 183 // offset of the word in the remembered set. We divide by kBitsPerInt (32, |
191 // shift right 5) and then multiply by kIntSize (4, shift left 2). | 184 // shift right 5) and then multiply by kIntSize (4, shift left 2). |
192 const int kRSetWordShift = 3; | 185 const int kRSetWordShift = 3; |
193 | 186 |
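As a quick check of the arithmetic in the comment above (using only the values it quotes: kBitsPerInt == 32, kIntSize == 4):

  #include <cassert>

  int main() {
    const int kBitsPerInt = 32;
    const int kIntSize = 4;
    const int kRSetWordShift = 3;                             // 5 - 2
    int bit_offset = 96;                                      // example remembered-set bit offset
    int byte_offset = (bit_offset / kBitsPerInt) * kIntSize;  // >> 5, then << 2
    assert(byte_offset == (bit_offset >> kRSetWordShift));    // net shift right by 3
    return 0;
  }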
(...skipping 520 matching lines...)
714 ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset)); | 707 ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset)); |
715 | 708 |
716 // Check the context is a global context. | 709 // Check the context is a global context. |
717 if (FLAG_debug_code) { | 710 if (FLAG_debug_code) { |
718 // TODO(119): avoid push(holder_reg)/pop(holder_reg) | 711 // TODO(119): avoid push(holder_reg)/pop(holder_reg) |
719 // Cannot use ip as a temporary in this verification code, because ip is | 712 // Cannot use ip as a temporary in this verification code, because ip is |
720 // clobbered as part of cmp with an object Operand. | 713 // clobbered as part of cmp with an object Operand. |
721 push(holder_reg); // Temporarily save holder on the stack. | 714 push(holder_reg); // Temporarily save holder on the stack. |
722 // Read the first word and compare to the global_context_map. | 715 // Read the first word and compare to the global_context_map. |
723 ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset)); | 716 ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset)); |
724 LoadRoot(ip, Heap::kGlobalContextMapRootIndex); | 717 cmp(holder_reg, Operand(Factory::global_context_map())); |
725 cmp(holder_reg, ip); | |
726 Check(eq, "JSGlobalObject::global_context should be a global context."); | 718 Check(eq, "JSGlobalObject::global_context should be a global context."); |
727 pop(holder_reg); // Restore holder. | 719 pop(holder_reg); // Restore holder. |
728 } | 720 } |
729 | 721 |
730 // Check if both contexts are the same. | 722 // Check if both contexts are the same. |
731 ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset)); | 723 ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset)); |
732 cmp(scratch, Operand(ip)); | 724 cmp(scratch, Operand(ip)); |
733 b(eq, &same_contexts); | 725 b(eq, &same_contexts); |
734 | 726 |
735 // Check the context is a global context. | 727 // Check the context is a global context. |
736 if (FLAG_debug_code) { | 728 if (FLAG_debug_code) { |
737 // TODO(119): avoid push(holder_reg)/pop(holder_reg) | 729 // TODO(119): avoid push(holder_reg)/pop(holder_reg) |
738 // Cannot use ip as a temporary in this verification code, because ip is | 730 // Cannot use ip as a temporary in this verification code, because ip is |
739 // clobbered as part of cmp with an object Operand. | 731 // clobbered as part of cmp with an object Operand. |
740 push(holder_reg); // Temporarily save holder on the stack. | 732 push(holder_reg); // Temporarily save holder on the stack. |
741 mov(holder_reg, ip); // Move ip to its holding place. | 733 mov(holder_reg, ip); // Move ip to its holding place. |
742 LoadRoot(ip, Heap::kNullValueRootIndex); | 734 cmp(holder_reg, Operand(Factory::null_value())); |
743 cmp(holder_reg, ip); | |
744 Check(ne, "JSGlobalProxy::context() should not be null."); | 735 Check(ne, "JSGlobalProxy::context() should not be null."); |
745 | 736 |
746 ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset)); | 737 ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset)); |
747 LoadRoot(ip, Heap::kGlobalContextMapRootIndex); | 738 cmp(holder_reg, Operand(Factory::global_context_map())); |
748 cmp(holder_reg, ip); | |
749 Check(eq, "JSGlobalObject::global_context should be a global context."); | 739 Check(eq, "JSGlobalObject::global_context should be a global context."); |
750 // Restoring ip is not needed; ip is reloaded below. | 740 // Restoring ip is not needed; ip is reloaded below. |
751 pop(holder_reg); // Restore holder. | 741 pop(holder_reg); // Restore holder. |
752 // Restore ip to holder's context. | 742 // Restore ip to holder's context. |
753 ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset)); | 743 ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset)); |
754 } | 744 } |
755 | 745 |
756 // Check that the security token in the calling global object is | 746 // Check that the security token in the calling global object is |
757 // compatible with the security token in the receiving global | 747 // compatible with the security token in the receiving global |
758 // object. | 748 // object. |
(...skipping 36 matching lines...)
795 tst(scratch, Operand(1 << Map::kHasNonInstancePrototype)); | 785 tst(scratch, Operand(1 << Map::kHasNonInstancePrototype)); |
796 b(ne, &non_instance); | 786 b(ne, &non_instance); |
797 | 787 |
798 // Get the prototype or initial map from the function. | 788 // Get the prototype or initial map from the function. |
799 ldr(result, | 789 ldr(result, |
800 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 790 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
801 | 791 |
802 // If the prototype or initial map is the hole, don't return it and | 792 // If the prototype or initial map is the hole, don't return it and |
803 // simply miss the cache instead. This will allow us to allocate a | 793 // simply miss the cache instead. This will allow us to allocate a |
804 // prototype object on-demand in the runtime system. | 794 // prototype object on-demand in the runtime system. |
805 LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 795 cmp(result, Operand(Factory::the_hole_value())); |
806 cmp(result, ip); | |
807 b(eq, miss); | 796 b(eq, miss); |
808 | 797 |
809 // If the function does not have an initial map, we're done. | 798 // If the function does not have an initial map, we're done. |
810 Label done; | 799 Label done; |
811 CompareObjectType(result, scratch, scratch, MAP_TYPE); | 800 CompareObjectType(result, scratch, scratch, MAP_TYPE); |
812 b(ne, &done); | 801 b(ne, &done); |
813 | 802 |
814 // Get the prototype from the initial map. | 803 // Get the prototype from the initial map. |
815 ldr(result, FieldMemOperand(result, Map::kPrototypeOffset)); | 804 ldr(result, FieldMemOperand(result, Map::kPrototypeOffset)); |
816 jmp(&done); | 805 jmp(&done); |
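A rough C++-level sketch of the lookup in this hunk, to make the branches easier to follow (names are illustrative, not the actual V8 API; the non-instance-prototype path taken at b(ne, &non_instance) is handled in the elided lines):

  // Object* slot = function->prototype_or_initial_map();
  // if (slot == the_hole) goto miss;             // let the runtime allocate it on demand
  // if (!slot->IsMap()) result = slot;           // the function already has a prototype object
  // else result = Map::cast(slot)->prototype();  // otherwise read it off the initial map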
(...skipping 19 matching lines...)
836 if (argc > 1) | 825 if (argc > 1) |
837 add(sp, sp, Operand((argc - 1) * kPointerSize)); | 826 add(sp, sp, Operand((argc - 1) * kPointerSize)); |
838 Ret(); | 827 Ret(); |
839 } | 828 } |
840 | 829 |
841 | 830 |
842 void MacroAssembler::IllegalOperation(int num_arguments) { | 831 void MacroAssembler::IllegalOperation(int num_arguments) { |
843 if (num_arguments > 0) { | 832 if (num_arguments > 0) { |
844 add(sp, sp, Operand(num_arguments * kPointerSize)); | 833 add(sp, sp, Operand(num_arguments * kPointerSize)); |
845 } | 834 } |
846 LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 835 mov(r0, Operand(Factory::undefined_value())); |
847 } | 836 } |
848 | 837 |
849 | 838 |
850 void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) { | 839 void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) { |
851 // All parameters are on the stack. r0 has the return value after call. | 840 // All parameters are on the stack. r0 has the return value after call. |
852 | 841 |
853 // If the expected number of arguments of the runtime function is | 842 // If the expected number of arguments of the runtime function is |
854 // constant, we check that the actual number of arguments matches the | 843 // constant, we check that the actual number of arguments matches the |
855 // expectation. | 844 // expectation. |
856 if (f->nargs >= 0 && f->nargs != num_arguments) { | 845 if (f->nargs >= 0 && f->nargs != num_arguments) { |
(...skipping 159 matching lines...)
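On the f->nargs guard in the CallRuntime hunk above (before the skipped lines): a negative nargs marks a runtime function whose argument count is not constant, so only fixed-arity entries are verified. Illustrative cases, assuming that convention; the mismatch handling itself is in the elided body:

  // f->nargs == -1               -> variable arity, guard skipped
  // f->nargs == 2, 2 args passed -> counts agree, guard skipped
  // f->nargs == 2, 3 args passed -> mismatch; handled by the elided body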
1016 #endif | 1005 #endif |
1017 mov(r0, Operand(p0)); | 1006 mov(r0, Operand(p0)); |
1018 push(r0); | 1007 push(r0); |
1019 mov(r0, Operand(Smi::FromInt(p1 - p0))); | 1008 mov(r0, Operand(Smi::FromInt(p1 - p0))); |
1020 push(r0); | 1009 push(r0); |
1021 CallRuntime(Runtime::kAbort, 2); | 1010 CallRuntime(Runtime::kAbort, 2); |
1022 // will not return here | 1011 // will not return here |
1023 } | 1012 } |
1024 | 1013 |
1025 } } // namespace v8::internal | 1014 } } // namespace v8::internal |