Chromium Code Reviews

Side by Side Diff: src/codegen-arm.cc

Issue 6075: Move code for code generator static member functions, code generation... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 12 years, 2 months ago
1 // Copyright 2006-2008 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
(...skipping 653 matching lines...)
664 __ add(sp, sp, Operand((scope_->num_parameters() + 1) * kPointerSize));
665 __ mov(pc, lr);
666
667 // Code generation state must be reset.
668 scope_ = NULL;
669 ASSERT(!has_cc());
670 ASSERT(state_ == NULL);
671 }
672
673
674 #undef __
675 #define __ masm->
676
677 MemOperand ArmCodeGenerator::SlotOperand(CodeGenerator* cgen,
678 Slot* slot,
679 Register tmp) {
680 // Currently, this assertion will fail if we try to assign to
681 // a constant variable that is constant because it is read-only
682 // (such as the variable referring to a named function expression).
683 // We need to implement assignments to read-only variables.
684 // Ideally, we should do this during AST generation (by converting
685 // such assignments into expression statements); however, in general
686 // we may not be able to make the decision until past AST generation,
687 // that is, when the entire program is known.
688 ASSERT(slot != NULL);
689 int index = slot->index();
690 switch (slot->type()) {
691 case Slot::PARAMETER:
692 return ParameterOperand(cgen, index);
693
694 case Slot::LOCAL: {
695 ASSERT(0 <= index &&
696 index < cgen->scope()->num_stack_slots() &&
697 index >= 0);
698 int local_offset = JavaScriptFrameConstants::kLocal0Offset -
699 index * kPointerSize;
700 return MemOperand(fp, local_offset);
701 }
702
703 case Slot::CONTEXT: {
704 MacroAssembler* masm = cgen->masm();
705 // Follow the context chain if necessary.
706 ASSERT(!tmp.is(cp)); // do not overwrite context register
707 Register context = cp;
708 int chain_length =
709 cgen->scope()->ContextChainLength(slot->var()->scope());
710 for (int i = chain_length; i-- > 0;) {
711 // Load the closure.
712 // (All contexts, even 'with' contexts, have a closure,
713 // and it is the same for all contexts inside a function.
714 // There is no need to go to the function context first.)
715 __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
716 // Load the function context (which is the incoming, outer context).
717 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
718 context = tmp;
719 }
720 // We may have a 'with' context now. Get the function context.
721 // (In fact this mov may never be needed, since the scope analysis
722 // may not permit a direct context access in this case and thus we are
723 // always at a function context. However it is safe to dereference be-
724 // cause the function context of a function context is itself. Before
725 // deleting this mov we should try to create a counter-example first,
726 // though...)
727 __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
728 return ContextOperand(tmp, index);
729 }
730
731 default:
732 UNREACHABLE();
733 return MemOperand(r0, 0);
734 }
735 }
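
The CONTEXT case above compiles a lexical slot access by hopping ContextChainLength links and then reading the slot, one ldr per hop. A minimal C++ sketch of the same walk, using illustrative stand-in types (Context and LoadContextSlot are not V8 names):

#include <cassert>
#include <vector>

struct Context {
  Context* outer;          // the link loaded via CLOSURE_INDEX/kContextOffset
  std::vector<int> slots;  // slot storage read via ContextOperand(ctx, index)
};

// Hop 'chain_length' links outward, then read the slot, mirroring the loop
// emitted above.
int LoadContextSlot(Context* ctx, int chain_length, int index) {
  for (int i = chain_length; i-- > 0;) ctx = ctx->outer;
  assert(index >= 0 && index < static_cast<int>(ctx->slots.size()));
  return ctx->slots[index];
}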
736
737
738 #undef __
739 #define __ masm_->
740
741 // Loads a value on the stack. If it is a boolean value, the result may have
742 // been (partially) translated into branches, or it may have set the condition
743 // code register. If force_cc is set, the value is forced to set the condition
744 // code register and no value is pushed. If the condition code register was set,
745 // has_cc() is true and cc_reg_ contains the condition to test for 'true'.
746 void ArmCodeGenerator::LoadCondition(Expression* x,
747 CodeGenState::AccessType access,
748 Label* true_target,
749 Label* false_target,
750 bool force_cc) {
(...skipping 144 matching lines...)
895 if (size <= 0) {
896 // Do nothing. No popping is necessary.
897 } else {
898 __ pop(r0);
899 __ add(sp, sp, Operand(size * kPointerSize));
900 __ push(r0);
901 }
902 }
903
904
905 #undef __
906 #define __ masm->
907
908 void Property::GenerateStoreCode(CodeGenerator* cgen,
909 Reference* ref,
910 InitState init_state) {
911 MacroAssembler* masm = cgen->masm();
912 Comment cmnt(masm, "[ Store to Property");
913 __ RecordPosition(position());
914 ArmCodeGenerator::SetReferenceProperty(cgen, ref, key());
915 }
916
917
918 void VariableProxy::GenerateStoreCode(CodeGenerator* cgen,
919 Reference* ref,
920 InitState init_state) {
921 MacroAssembler* masm = cgen->masm();
922 Comment cmnt(masm, "[ Store to VariableProxy");
923 Variable* node = var();
924
925 Expression* expr = node->rewrite();
926 if (expr != NULL) {
927 expr->GenerateStoreCode(cgen, ref, init_state);
928 } else {
929 ASSERT(node->is_global());
930 if (node->AsProperty() != NULL) {
931 __ RecordPosition(node->AsProperty()->position());
932 }
933 Expression* key = new Literal(node->name());
934 ArmCodeGenerator::SetReferenceProperty(cgen, ref, key);
935 }
936 }
937
938
939 void Slot::GenerateStoreCode(CodeGenerator* cgen,
940 Reference* ref,
941 InitState init_state) {
942 MacroAssembler* masm = cgen->masm();
943 Comment cmnt(masm, "[ Store to Slot");
944
945 if (type() == Slot::LOOKUP) {
946 ASSERT(var()->mode() == Variable::DYNAMIC);
947
948 // For now, just do a runtime call.
949 __ push(cp);
950 __ mov(r0, Operand(var()->name()));
951 __ push(r0);
952
953 if (init_state == CONST_INIT) {
954 // Same as the case for a normal store, but ignores attribute
955 // (e.g. READ_ONLY) of context slot so that we can initialize const
956 // properties (introduced via eval("const foo = (some expr);")). Also,
957 // uses the current function context instead of the top context.
958 //
959 // Note that we must declare the foo upon entry of eval(), via a
960 // context slot declaration, but we cannot initialize it at the same
961 // time, because the const declaration may be at the end of the eval
962 // code (sigh...) and the const variable may have been used before
963 // (where its value is 'undefined'). Thus, we can only do the
964 // initialization when we actually encounter the expression and when
965 // the expression operands are defined and valid, and thus we need the
966 // split into 2 operations: declaration of the context slot followed
967 // by initialization.
968 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
969 } else {
970 __ CallRuntime(Runtime::kStoreContextSlot, 3);
971 }
972 // Storing a variable must keep the (new) value on the expression
973 // stack. This is necessary for compiling assignment expressions.
974 __ push(r0);
975
976 } else {
977 ASSERT(var()->mode() != Variable::DYNAMIC);
978
979 Label exit;
980 if (init_state == CONST_INIT) {
981 ASSERT(var()->mode() == Variable::CONST);
982 // Only the first const initialization must be executed (the slot
983 // still contains 'the hole' value). When the assignment is executed,
984 // the code is identical to a normal store (see below).
985 Comment cmnt(masm, "[ Init const");
986 __ ldr(r2, ArmCodeGenerator::SlotOperand(cgen, this, r2));
987 __ cmp(r2, Operand(Factory::the_hole_value()));
988 __ b(ne, &exit);
989 }
990
991 // We must execute the store.
992 // r2 may be loaded with context; used below in RecordWrite.
993 // Storing a variable must keep the (new) value on the stack. This is
994 // necessary for compiling assignment expressions.
995 //
996 // Note: We will reach here even with var()->mode() == Variable::CONST
997 // because of const declarations which will initialize consts to 'the
998 // hole' value and by doing so, end up calling this code. r2 may be
999 // loaded with context; used below in RecordWrite.
1000 __ pop(r0);
1001 __ str(r0, ArmCodeGenerator::SlotOperand(cgen, this, r2));
1002 __ push(r0);
1003
1004 if (type() == Slot::CONTEXT) {
1005 // Skip write barrier if the written value is a smi.
1006 __ tst(r0, Operand(kSmiTagMask));
1007 __ b(eq, &exit);
1008 // r2 is loaded with context when calling SlotOperand above.
1009 int offset = FixedArray::kHeaderSize + index() * kPointerSize;
1010 __ mov(r3, Operand(offset));
1011 __ RecordWrite(r2, r3, r1);
1012 }
1013 // If we definitely did not jump over the assignment, we do not need to
1014 // bind the exit label. Doing so can defeat peephole optimization.
1015 if (init_state == CONST_INIT || type() == Slot::CONTEXT) {
1016 __ bind(&exit);
1017 }
1018 }
1019 }
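
The CONST_INIT branch above turns the second and later initializations into no-ops by testing for 'the hole'. A hedged sketch of that guard, with std::optional standing in for the hole sentinel (not how V8 actually represents it):

#include <optional>

// An empty optional plays the role of 'the hole': not yet initialized.
void InitConst(std::optional<int>& slot, int value) {
  if (slot.has_value()) return;  // not the first init: take the &exit branch
  slot = value;                  // first init: perform the normal store
}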
1020
1021
1022 #undef __
1023 #define __ masm_->
1024
1025 // ECMA-262, section 9.2, page 30: ToBoolean(). Convert the given
1026 // register to a boolean in the condition code register. The code
1027 // may jump to 'false_target' in case the register converts to 'false'.
1028 void ArmCodeGenerator::ToBoolean(Label* true_target,
1029 Label* false_target) {
1030 // Note: The generated code snippet does not change stack variables.
1031 // Only the condition code should be set.
1032 __ pop(r0);
1033
1034 // Fast case checks
(...skipping 20 matching lines...)
1055 __ push(r0);
1056 __ CallRuntime(Runtime::kToBool, 1);
1057
1058 // Convert result (r0) to condition code
1059 __ cmp(r0, Operand(Factory::false_value()));
1060
1061 cc_reg_ = ne;
1062 }
1063
1064
1065 #undef __
1066 #define __ masm->
1067
1068 class GetPropertyStub : public CodeStub {
1069 public:
1070 GetPropertyStub() { }
1071
1072 private:
1073 Major MajorKey() { return GetProperty; }
1074 int MinorKey() { return 0; }
1075 void Generate(MacroAssembler* masm);
1076
1077 const char* GetName() { return "GetPropertyStub"; }
1078 };
1079
1080
1081 void GetPropertyStub::Generate(MacroAssembler* masm) {
1082 // sp[0]: key
1083 // sp[1]: receiver
1084 Label slow, fast;
1085 // Get the key and receiver object from the stack.
1086 __ ldm(ia, sp, r0.bit() | r1.bit());
1087 // Check that the key is a smi.
1088 __ tst(r0, Operand(kSmiTagMask));
1089 __ b(ne, &slow);
1090 __ mov(r0, Operand(r0, ASR, kSmiTagSize));
1091 // Check that the object isn't a smi.
1092 __ tst(r1, Operand(kSmiTagMask));
1093 __ b(eq, &slow);
1094
1095 // Check that the object is some kind of JS object EXCEPT JS Value type.
1096 // In the case that the object is a value-wrapper object,
1097 // we enter the runtime system to make sure that indexing into string
1098 // objects works as intended.
1099 ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
1100 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
1101 __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
1102 __ cmp(r2, Operand(JS_OBJECT_TYPE));
1103 __ b(lt, &slow);
1104
1105 // Get the elements array of the object.
1106 __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
1107 // Check that the object is in fast mode (not dictionary).
1108 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
1109 __ cmp(r3, Operand(Factory::hash_table_map()));
1110 __ b(eq, &slow);
1111 // Check that the key (index) is within bounds.
1112 __ ldr(r3, FieldMemOperand(r1, Array::kLengthOffset));
1113 __ cmp(r0, Operand(r3));
1114 __ b(lo, &fast);
1115
1116 // Slow case: Push extra copies of the arguments (2).
1117 __ bind(&slow);
1118 __ ldm(ia, sp, r0.bit() | r1.bit());
1119 __ stm(db_w, sp, r0.bit() | r1.bit());
1120 // Do tail-call to runtime routine.
1121 __ TailCallRuntime(ExternalReference(Runtime::kGetProperty), 2);
1122
1123 // Fast case: Do the load.
1124 __ bind(&fast);
1125 __ add(r3, r1, Operand(Array::kHeaderSize - kHeapObjectTag));
1126 __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2));
1127 __ cmp(r0, Operand(Factory::the_hole_value()));
1128 // In case the loaded value is the_hole we have to consult GetProperty
1129 // to ensure the prototype chain is searched.
1130 __ b(eq, &slow);
1131
1132 masm->StubReturn(1);
1133 }
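
Summed up, the stub above takes the fast path only for a smi key into a fast-mode (non-dictionary) elements array, and defers to Runtime::kGetProperty on a bounds miss or a hole. A rough C++ model of that decision; FastKeyedLoad and kHole are illustrative names, not V8 API:

#include <cstdint>
#include <vector>

const int32_t kHole = INT32_MIN;  // stand-in for Factory::the_hole_value()

// Returns true and fills *result on the fast path; false means 'go slow'.
bool FastKeyedLoad(const std::vector<int32_t>& elements,
                   uint32_t index, int32_t* result) {
  if (index >= elements.size()) return false;  // bounds check failed
  if (elements[index] == kHole) return false;  // hole: search the prototype chain
  *result = elements[index];
  return true;
}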
1134
1135
1136 class SetPropertyStub : public CodeStub {
1137 public:
1138 SetPropertyStub() { }
1139
1140 private:
1141 Major MajorKey() { return SetProperty; }
1142 int MinorKey() { return 0; }
1143 void Generate(MacroAssembler* masm);
1144
1145 const char* GetName() { return "SetPropertyStub"; }
1146 };
1147
1148
1149
1150 void SetPropertyStub::Generate(MacroAssembler* masm) {
1151 // r0 : value
1152 // sp[0] : key
1153 // sp[1] : receiver
1154
1155 Label slow, fast, array, extra, exit;
1156 // Get the key and the object from the stack.
1157 __ ldm(ia, sp, r1.bit() | r3.bit()); // r1 = key, r3 = receiver
1158 // Check that the key is a smi.
1159 __ tst(r1, Operand(kSmiTagMask));
1160 __ b(ne, &slow);
1161 // Check that the object isn't a smi.
1162 __ tst(r3, Operand(kSmiTagMask));
1163 __ b(eq, &slow);
1164 // Get the type of the object from its map.
1165 __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
1166 __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
1167 // Check if the object is a JS array or not.
1168 __ cmp(r2, Operand(JS_ARRAY_TYPE));
1169 __ b(eq, &array);
1170 // Check that the object is some kind of JS object.
1171 __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE));
1172 __ b(lt, &slow);
1173
1174
1175 // Object case: Check key against length in the elements array.
1176 __ ldr(r3, FieldMemOperand(r3, JSObject::kElementsOffset));
1177 // Check that the object is in fast mode (not dictionary).
1178 __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
1179 __ cmp(r2, Operand(Factory::hash_table_map()));
1180 __ b(eq, &slow);
1181 // Untag the key (for checking against untagged length in the fixed array).
1182 __ mov(r1, Operand(r1, ASR, kSmiTagSize));
1183 // Compute address to store into and check array bounds.
1184 __ add(r2, r3, Operand(Array::kHeaderSize - kHeapObjectTag));
1185 __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
1186 __ ldr(ip, FieldMemOperand(r3, Array::kLengthOffset));
1187 __ cmp(r1, Operand(ip));
1188 __ b(lo, &fast);
1189
1190
1191 // Slow case: Push extra copies of the arguments (3).
1192 __ bind(&slow);
1193 __ ldm(ia, sp, r1.bit() | r3.bit()); // r0 == value, r1 == key, r3 == object
1194 __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit());
1195 // Do tail-call to runtime routine.
1196 __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3);
1197
1198
1199 // Extra capacity case: Check if there is extra capacity to
1200 // perform the store and update the length. Used for adding one
1201 // element to the array by writing to array[array.length].
1202 // r0 == value, r1 == key, r2 == elements, r3 == object
1203 __ bind(&extra);
1204 __ b(ne, &slow); // do not leave holes in the array
1205 __ mov(r1, Operand(r1, ASR, kSmiTagSize)); // untag
1206 __ ldr(ip, FieldMemOperand(r2, Array::kLengthOffset));
1207 __ cmp(r1, Operand(ip));
1208 __ b(hs, &slow);
1209 __ mov(r1, Operand(r1, LSL, kSmiTagSize)); // restore tag
1210 __ add(r1, r1, Operand(1 << kSmiTagSize)); // and increment
1211 __ str(r1, FieldMemOperand(r3, JSArray::kLengthOffset));
1212 __ mov(r3, Operand(r2));
1213 // NOTE: Computing the address to store into must take the fact
1214 // that the key has been incremented into account.
1215 int displacement = Array::kHeaderSize - kHeapObjectTag -
1216 ((1 << kSmiTagSize) * 2);
1217 __ add(r2, r2, Operand(displacement));
1218 __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
1219 __ b(&fast);
1220
1221
1222 // Array case: Get the length and the elements array from the JS
1223 // array. Check that the array is in fast mode; if it is the
1224 // length is always a smi.
1225 // r0 == value, r3 == object
1226 __ bind(&array);
1227 __ ldr(r2, FieldMemOperand(r3, JSObject::kElementsOffset));
1228 __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
1229 __ cmp(r1, Operand(Factory::hash_table_map()));
1230 __ b(eq, &slow);
1231
1232 // Check the key against the length in the array, compute the
1233 // address to store into and fall through to fast case.
1234 __ ldr(r1, MemOperand(sp));
1235 // r0 == value, r1 == key, r2 == elements, r3 == object.
1236 __ ldr(ip, FieldMemOperand(r3, JSArray::kLengthOffset));
1237 __ cmp(r1, Operand(ip));
1238 __ b(hs, &extra);
1239 __ mov(r3, Operand(r2));
1240 __ add(r2, r2, Operand(Array::kHeaderSize - kHeapObjectTag));
1241 __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
1242
1243
1244 // Fast case: Do the store.
1245 // r0 == value, r2 == address to store into, r3 == elements
1246 __ bind(&fast);
1247 __ str(r0, MemOperand(r2));
1248 // Skip write barrier if the written value is a smi.
1249 __ tst(r0, Operand(kSmiTagMask));
1250 __ b(eq, &exit);
1251 // Update write barrier for the elements array address.
1252 __ sub(r1, r2, Operand(r3));
1253 __ RecordWrite(r3, r1, r2);
1254 __ bind(&exit);
1255 masm->StubReturn(1);
1256 }
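
The store stub above distinguishes three outcomes: an in-bounds fast store, the 'extra' case that appends at exactly array.length while capacity remains, and the runtime fallback. A hedged sketch, where elements.size() models the backing-store capacity and FastKeyedStore is an illustrative name:

#include <cstddef>
#include <vector>

bool FastKeyedStore(std::vector<int>& elements, size_t* length,
                    size_t index, int value) {
  if (index < *length) {                          // fast case
    elements[index] = value;
    return true;
  }
  if (index == *length && index < elements.size()) {
    *length = index + 1;                          // 'extra' case: grow by one
    elements[index] = value;
    return true;
  }
  return false;  // slow case: Runtime::kSetProperty
}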
1257
1258
1259 class GenericBinaryOpStub : public CodeStub {
1260 public:
1261 explicit GenericBinaryOpStub(Token::Value op) : op_(op) { }
1262
1263 private:
1264 Token::Value op_;
1265
1266 Major MajorKey() { return GenericBinaryOp; }
1267 int MinorKey() { return static_cast<int>(op_); }
1268 void Generate(MacroAssembler* masm);
(...skipping 13 matching lines...)
1282 default: return "GenericBinaryOpStub";
1283 }
1284 }
1285
1286 #ifdef DEBUG
1287 void Print() { PrintF("GenericBinaryOpStub (%s)\n", Token::String(op_)); }
1288 #endif
1289 };
1290
1291
1292 void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
1293 // r1 : x
1294 // r0 : y
1295 // result : r0
1296
1297 switch (op_) {
1298 case Token::ADD: {
1299 Label slow, exit;
1300 // fast path
1301 __ orr(r2, r1, Operand(r0)); // r2 = x | y;
1302 __ add(r0, r1, Operand(r0), SetCC); // add y optimistically
1303 // go slow-path in case of overflow
1304 __ b(vs, &slow);
1305 // go slow-path in case of non-smi operands
1306 ASSERT(kSmiTag == 0); // adjust code below
1307 __ tst(r2, Operand(kSmiTagMask));
1308 __ b(eq, &exit);
1309 // slow path
1310 __ bind(&slow);
1311 __ sub(r0, r0, Operand(r1)); // revert optimistic add
1312 __ push(r1);
1313 __ push(r0);
1314 __ mov(r0, Operand(1)); // set number of arguments
1315 __ InvokeBuiltin(Builtins::ADD, JUMP_JS);
1316 // done
1317 __ bind(&exit);
1318 break;
1319 }
1320
1321 case Token::SUB: {
1322 Label slow, exit;
1323 // fast path
1324 __ orr(r2, r1, Operand(r0)); // r2 = x | y;
1325 __ sub(r3, r1, Operand(r0), SetCC); // subtract y optimistically
1326 // go slow-path in case of overflow
1327 __ b(vs, &slow);
1328 // go slow-path in case of non-smi operands
1329 ASSERT(kSmiTag == 0); // adjust code below
1330 __ tst(r2, Operand(kSmiTagMask));
1331 __ mov(r0, Operand(r3), LeaveCC, eq); // conditionally set r0 to result
1332 __ b(eq, &exit);
1333 // slow path
1334 __ bind(&slow);
1335 __ push(r1);
1336 __ push(r0);
1337 __ mov(r0, Operand(1)); // set number of arguments
1338 __ InvokeBuiltin(Builtins::SUB, JUMP_JS);
1339 // done
1340 __ bind(&exit);
1341 break;
1342 }
1343
1344 case Token::MUL: {
1345 Label slow, exit;
1346 // tag check
1347 __ orr(r2, r1, Operand(r0)); // r2 = x | y;
1348 ASSERT(kSmiTag == 0); // adjust code below
1349 __ tst(r2, Operand(kSmiTagMask));
1350 __ b(ne, &slow);
1351 // remove tag from one operand (but keep sign), so that result is smi
1352 __ mov(ip, Operand(r0, ASR, kSmiTagSize));
1353 // do multiplication
1354 __ smull(r3, r2, r1, ip); // r3 = lower 32 bits of ip*r1
1355 // go slow on overflows (overflow bit is not set)
1356 __ mov(ip, Operand(r3, ASR, 31));
1357 __ cmp(ip, Operand(r2)); // no overflow if higher 33 bits are identical
1358 __ b(ne, &slow);
1359 // go slow on zero result to handle -0
1360 __ tst(r3, Operand(r3));
1361 __ mov(r0, Operand(r3), LeaveCC, ne);
1362 __ b(ne, &exit);
1363 // slow case
1364 __ bind(&slow);
1365 __ push(r1);
1366 __ push(r0);
1367 __ mov(r0, Operand(1)); // set number of arguments
1368 __ InvokeBuiltin(Builtins::MUL, JUMP_JS);
1369 // done
1370 __ bind(&exit);
1371 break;
1372 }
1373
1374 case Token::BIT_OR:
1375 case Token::BIT_AND:
1376 case Token::BIT_XOR: {
1377 Label slow, exit;
1378 // tag check
1379 __ orr(r2, r1, Operand(r0)); // r2 = x | y;
1380 ASSERT(kSmiTag == 0); // adjust code below
1381 __ tst(r2, Operand(kSmiTagMask));
1382 __ b(ne, &slow);
1383 switch (op_) {
1384 case Token::BIT_OR: __ orr(r0, r0, Operand(r1)); break;
1385 case Token::BIT_AND: __ and_(r0, r0, Operand(r1)); break;
1386 case Token::BIT_XOR: __ eor(r0, r0, Operand(r1)); break;
1387 default: UNREACHABLE();
1388 }
1389 __ b(&exit);
1390 __ bind(&slow);
1391 __ push(r1); // restore stack
1392 __ push(r0);
1393 __ mov(r0, Operand(1)); // 1 argument (not counting receiver).
1394 switch (op_) {
1395 case Token::BIT_OR:
1396 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_JS);
1397 break;
1398 case Token::BIT_AND:
1399 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_JS);
1400 break;
1401 case Token::BIT_XOR:
1402 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_JS);
1403 break;
1404 default:
1405 UNREACHABLE();
1406 }
1407 __ bind(&exit);
1408 break;
1409 }
1410
1411 case Token::SHL:
1412 case Token::SHR:
1413 case Token::SAR: {
1414 Label slow, exit;
1415 // tag check
1416 __ orr(r2, r1, Operand(r0)); // r2 = x | y;
1417 ASSERT(kSmiTag == 0); // adjust code below
1418 __ tst(r2, Operand(kSmiTagMask));
1419 __ b(ne, &slow);
1420 // remove tags from operands (but keep sign)
1421 __ mov(r3, Operand(r1, ASR, kSmiTagSize)); // x
1422 __ mov(r2, Operand(r0, ASR, kSmiTagSize)); // y
1423 // use only the 5 least significant bits of the shift count
1424 __ and_(r2, r2, Operand(0x1f));
1425 // perform operation
1426 switch (op_) {
1427 case Token::SAR:
1428 __ mov(r3, Operand(r3, ASR, r2));
1429 // no checks of result necessary
1430 break;
1431
1432 case Token::SHR:
1433 __ mov(r3, Operand(r3, LSR, r2));
1434 // check that the *unsigned* result fits in a smi
1435 // neither of the two high-order bits can be set:
1436 // - 0x80000000: high bit would be lost when smi tagging
1437 // - 0x40000000: this number would convert to negative when
1438 // smi tagging. These two cases can only happen with shifts
1439 // by 0 or 1 when handed a valid smi.
1440 __ and_(r2, r3, Operand(0xc0000000), SetCC);
1441 __ b(ne, &slow);
1442 break;
1443
1444 case Token::SHL:
1445 __ mov(r3, Operand(r3, LSL, r2));
1446 // check that the *signed* result fits in a smi
1447 __ add(r2, r3, Operand(0x40000000), SetCC);
1448 __ b(mi, &slow);
1449 break;
1450
1451 default: UNREACHABLE();
1452 }
1453 // tag result and store it in r0
1454 ASSERT(kSmiTag == 0); // adjust code below
1455 __ mov(r0, Operand(r3, LSL, kSmiTagSize));
1456 __ b(&exit);
1457 // slow case
1458 __ bind(&slow);
1459 __ push(r1); // restore stack
1460 __ push(r0);
1461 __ mov(r0, Operand(1)); // 1 argument (not counting receiver).
1462 switch (op_) {
1463 case Token::SAR: __ InvokeBuiltin(Builtins::SAR, JUMP_JS); break;
1464 case Token::SHR: __ InvokeBuiltin(Builtins::SHR, JUMP_JS); break;
1465 case Token::SHL: __ InvokeBuiltin(Builtins::SHL, JUMP_JS); break;
1466 default: UNREACHABLE();
1467 }
1468 __ bind(&exit);
1469 break;
1470 }
1471
1472 default: UNREACHABLE();
1473 }
1474 __ Ret();
1475 }
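
Every fast path above leans on the smi encoding (kSmiTag == 0, one tag bit): a smi is the integer shifted left once, so or-ing both operands and testing the low bit checks the two tags at once; tagged addition is plain addition with the V flag catching overflow; and a product fits iff the smull high word equals the sign-extension of the low word. A compilable sketch of those checks, with illustrative function names:

#include <cstdint>

// Low bit clear on both operands <=> both are smis (the orr/tst idiom).
bool BothSmis(int32_t x, int32_t y) { return ((x | y) & 1) == 0; }

// Tagged multiply: untag one operand so the 64-bit product is itself tagged,
// then require the high bits to be the sign-extension of the low word (the
// smull + 'ASR 31' comparison above). A zero result goes slow to handle -0.
bool SmiMul(int32_t x_tagged, int32_t y_tagged, int32_t* result) {
  if (!BothSmis(x_tagged, y_tagged)) return false;
  int64_t product = static_cast<int64_t>(x_tagged) * (y_tagged >> 1);
  int32_t lo = static_cast<int32_t>(product);
  if ((product >> 32) != (lo >> 31)) return false;  // overflow: go slow
  if (lo == 0) return false;                        // possible -0: go slow
  *result = lo;
  return true;
}

// SHR fast path: an unsigned result fits in a smi only if neither of the two
// top bits is set, hence the 0xc0000000 mask above.
bool UnsignedFitsSmi(uint32_t v) { return (v & 0xc0000000u) == 0; }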
1476
1477
1478 void StackCheckStub::Generate(MacroAssembler* masm) {
1479 Label within_limit;
1480 __ mov(ip, Operand(ExternalReference::address_of_stack_guard_limit()));
1481 __ ldr(ip, MemOperand(ip));
1482 __ cmp(sp, Operand(ip));
1483 __ b(hs, &within_limit);
1484 // Do tail-call to runtime routine.
1485 __ push(r0);
1486 __ TailCallRuntime(ExternalReference(Runtime::kStackGuard), 1);
1487 __ bind(&within_limit);
1488
1489 masm->StubReturn(1);
1490 }
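
The stack check is a single compare against a limit kept at a well-known address; only when sp has dipped below it does Runtime::kStackGuard run. A trivial model (the global here stands in for the cell behind ExternalReference::address_of_stack_guard_limit()):

#include <cstdint>

uintptr_t stack_guard_limit = 0;  // illustrative stand-in for the real cell

// True when the guard has been hit and the runtime must be called.
bool StackGuardHit(uintptr_t sp) { return sp < stack_guard_limit; }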
1491
1492
1493 void UnarySubStub::Generate(MacroAssembler* masm) {
1494 Label undo;
1495 Label slow;
1496 Label done;
1497
1498 // Enter runtime system if the value is not a smi.
1499 __ tst(r0, Operand(kSmiTagMask));
1500 __ b(ne, &slow);
1501
1502 // Enter runtime system if the value of the expression is zero
1503 // to make sure that we switch between 0 and -0.
1504 __ cmp(r0, Operand(0));
1505 __ b(eq, &slow);
1506
1507 // The value of the expression is a smi that is not zero. Try
1508 // optimistic subtraction '0 - value'.
1509 __ rsb(r1, r0, Operand(0), SetCC);
1510 __ b(vs, &slow);
1511
1512 // If result is a smi we are done.
1513 __ tst(r1, Operand(kSmiTagMask));
1514 __ mov(r0, Operand(r1), LeaveCC, eq); // conditionally set r0 to result
1515 __ b(eq, &done);
1516
1517 // Enter runtime system.
1518 __ bind(&slow);
1519 __ push(r0);
1520 __ mov(r0, Operand(0)); // set number of arguments
1521 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_JS);
1522
1523 __ bind(&done);
1524 masm->StubReturn(1);
1525 }
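
The fast path negates optimistically and bails out on exactly the two smis whose negation is not a smi: 0 (negation is -0) and the most negative value (negation overflows, the 'vs' branch). A hedged C++ rendering with an illustrative name:

#include <cstdint>

bool SmiNegate(int32_t tagged, int32_t* result) {
  if ((tagged & 1) != 0) return false;    // heap object, not a smi: go slow
  if (tagged == 0) return false;          // would be -0: go slow
  if (tagged == INT32_MIN) return false;  // negation overflows: go slow
  *result = -tagged;
  return true;
}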
1526
1527
1528 class InvokeBuiltinStub : public CodeStub {
1529 public:
1530 enum Kind { Inc, Dec, ToNumber };
1531 InvokeBuiltinStub(Kind kind, int argc) : kind_(kind), argc_(argc) { }
1532
1533 private:
1534 Kind kind_;
1535 int argc_;
1536
1537 Major MajorKey() { return InvokeBuiltin; }
1538 int MinorKey() { return (argc_ << 3) | static_cast<int>(kind_); }
1539 void Generate(MacroAssembler* masm);
1540
1541 const char* GetName() { return "InvokeBuiltinStub"; }
1542
1543 #ifdef DEBUG
1544 void Print() {
1545 PrintF("InvokeBuiltinStub (kind %d, argc %d)\n",
1546 static_cast<int>(kind_),
1547 argc_);
1548 }
1549 #endif
1550 };
1551
1552
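
MinorKey packs both constructor arguments into a single stub-cache key, kind in the low three bits and argc above them, so distinct (kind, argc) pairs never collide. Sketch with illustrative names:

int MinorKeyFor(int kind, int argc) { return (argc << 3) | kind; }
int KindOfKey(int key) { return key & 7; }   // low 3 bits
int ArgcOfKey(int key) { return key >> 3; }  // remaining bits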
1553 void InvokeBuiltinStub::Generate(MacroAssembler* masm) {
1554 __ push(r0);
1555 __ mov(r0, Operand(0)); // set number of arguments
1556 switch (kind_) {
1557 case ToNumber: __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_JS); break;
1558 case Inc: __ InvokeBuiltin(Builtins::INC, JUMP_JS); break;
1559 case Dec: __ InvokeBuiltin(Builtins::DEC, JUMP_JS); break;
1560 default: UNREACHABLE();
1561 }
1562 masm->StubReturn(argc_);
1563 }
1564
1565
1566 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
1567 // r0 holds exception
1568 ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize); // adjust this code
1569 __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
1570 __ ldr(sp, MemOperand(r3));
1571 __ pop(r2); // pop next in chain
1572 __ str(r2, MemOperand(r3));
1573 // restore parameter- and frame-pointer and pop state.
1574 __ ldm(ia_w, sp, r3.bit() | pp.bit() | fp.bit());
1575 // Before returning we restore the context from the frame pointer if not NULL.
1576 // The frame pointer is NULL in the exception handler of a JS entry frame.
1577 __ cmp(fp, Operand(0));
1578 // Set cp to NULL if fp is NULL.
1579 __ mov(cp, Operand(0), LeaveCC, eq);
1580 // Restore cp otherwise.
1581 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
1582 if (kDebug && FLAG_debug_code) __ mov(lr, Operand(pc));
1583 __ pop(pc);
1584 }
1585
1586
1587 void CEntryStub::GenerateThrowOutOfMemory(MacroAssembler* masm) {
1588 // Fetch top stack handler.
1589 __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
1590 __ ldr(r3, MemOperand(r3));
1591
1592 // Unwind the handlers until the ENTRY handler is found.
1593 Label loop, done;
1594 __ bind(&loop);
1595 // Load the type of the current stack handler.
1596 const int kStateOffset = StackHandlerConstants::kAddressDisplacement +
1597 StackHandlerConstants::kStateOffset;
1598 __ ldr(r2, MemOperand(r3, kStateOffset));
1599 __ cmp(r2, Operand(StackHandler::ENTRY));
1600 __ b(eq, &done);
1601 // Fetch the next handler in the list.
1602 const int kNextOffset = StackHandlerConstants::kAddressDisplacement +
1603 StackHandlerConstants::kNextOffset;
1604 __ ldr(r3, MemOperand(r3, kNextOffset));
1605 __ jmp(&loop);
1606 __ bind(&done);
1607
1608 // Set the top handler address to next handler past the current ENTRY handler.
1609 __ ldr(r0, MemOperand(r3, kNextOffset));
1610 __ mov(r2, Operand(ExternalReference(Top::k_handler_address)));
1611 __ str(r0, MemOperand(r2));
1612
1613 // Set external caught exception to false.
1614 __ mov(r0, Operand(false));
1615 ExternalReference external_caught(Top::k_external_caught_exception_address);
1616 __ mov(r2, Operand(external_caught));
1617 __ str(r0, MemOperand(r2));
1618
1619 // Set pending exception and r0 to out of memory exception.
1620 Failure* out_of_memory = Failure::OutOfMemoryException();
1621 __ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
1622 __ mov(r2, Operand(ExternalReference(Top::k_pending_exception_address)));
1623 __ str(r0, MemOperand(r2));
1624
1625 // Restore the stack to the address of the ENTRY handler
1626 __ mov(sp, Operand(r3));
1627
1628 // restore parameter- and frame-pointer and pop state.
1629 __ ldm(ia_w, sp, r3.bit() | pp.bit() | fp.bit());
1630 // Before returning we restore the context from the frame pointer if not NULL.
1631 // The frame pointer is NULL in the exception handler of a JS entry frame.
1632 __ cmp(fp, Operand(0));
1633 // Set cp to NULL if fp is NULL.
1634 __ mov(cp, Operand(0), LeaveCC, eq);
1635 // Restore cp otherwise.
1636 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
1637 if (kDebug && FLAG_debug_code) __ mov(lr, Operand(pc));
1638 __ pop(pc);
1639 }
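
The unwind loop above is a linked-list search: follow the kNextOffset links until a handler whose state field says ENTRY, then make its successor the new top of chain. Modeled in C++ with stand-in types:

struct StackHandler {
  enum State { ENTRY, TRY_CATCH };  // illustrative subset of handler states
  State state;
  StackHandler* next;
};

// Find the innermost ENTRY handler, as the ldr/cmp/b loop above does.
StackHandler* UnwindToEntry(StackHandler* top) {
  while (top->state != StackHandler::ENTRY) top = top->next;
  return top;
}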
1640
1641
1642 void CEntryStub::GenerateCore(MacroAssembler* masm,
1643 Label* throw_normal_exception,
1644 Label* throw_out_of_memory_exception,
1645 StackFrame::Type frame_type,
1646 bool do_gc) {
1647 // r0: result parameter for PerformGC, if any
1648 // r4: number of arguments including receiver (C callee-saved)
1649 // r5: pointer to builtin function (C callee-saved)
1650 // r6: pointer to the first argument (C callee-saved)
1651
1652 if (do_gc) {
1653 // Passing r0.
1654 __ Call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
1655 }
1656
1657 // Call C built-in.
1658 // r0 = argc, r1 = argv
1659 __ mov(r0, Operand(r4));
1660 __ mov(r1, Operand(r6));
1661
1662 // TODO(1242173): To let the GC traverse the return address of the exit
1663 // frames, we need to know where the return address is. Right now,
1664 // we push it on the stack to be able to find it again, but we never
1665 // restore from it in case of changes, which makes it impossible to
1666 // support moving the C entry code stub. This should be fixed, but currently
1667 // this is OK because the CEntryStub gets generated so early in the V8 boot
1668 // sequence that it never moves.
1669 __ add(lr, pc, Operand(4)); // compute return address: (pc + 8) + 4
1670 __ push(lr);
1671 #if !defined(__arm__)
1672 // Notify the simulator of the transition to C code.
1673 __ swi(assembler::arm::call_rt_r5);
1674 #else /* !defined(__arm__) */
1675 __ mov(pc, Operand(r5));
1676 #endif /* !defined(__arm__) */
1677 // result is in r0 or r0:r1 - do not destroy these registers!
1678
1679 // check for failure result
1680 Label failure_returned;
1681 ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
1682 // Lower 2 bits of r2 are 0 iff r0 has failure tag.
1683 __ add(r2, r0, Operand(1));
1684 __ tst(r2, Operand(kFailureTagMask));
1685 __ b(eq, &failure_returned);
1686
1687 // Exit C frame and return.
1688 // r0:r1: result
1689 // sp: stack pointer
1690 // fp: frame pointer
1691 // pp: caller's parameter pointer pp (restored as C callee-saved)
1692 __ LeaveExitFrame(frame_type);
1693
1694 // check if we should retry or throw exception
1695 Label retry;
1696 __ bind(&failure_returned);
1697 ASSERT(Failure::RETRY_AFTER_GC == 0);
1698 __ tst(r0, Operand(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
1699 __ b(eq, &retry);
1700
1701 Label continue_exception;
1702 // If the returned failure is EXCEPTION then promote Top::pending_exception().
1703 __ cmp(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
1704 __ b(ne, &continue_exception);
1705
1706 // Retrieve the pending exception and clear the variable.
1707 __ mov(ip, Operand(Factory::the_hole_value().location()));
1708 __ ldr(r3, MemOperand(ip));
1709 __ mov(ip, Operand(Top::pending_exception_address()));
1710 __ ldr(r0, MemOperand(ip));
1711 __ str(r3, MemOperand(ip));
1712
1713 __ bind(&continue_exception);
1714 // Special handling of out of memory exception.
1715 Failure* out_of_memory = Failure::OutOfMemoryException();
1716 __ cmp(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
1717 __ b(eq, throw_out_of_memory_exception);
1718
1719 // Handle normal exception.
1720 __ jmp(throw_normal_exception);
1721
1722 __ bind(&retry); // pass last failure (r0) as parameter (r0) when retrying
1723 }
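
The post-call failure test relies on the tagging scheme spelled out by the ASSERT above: a failure pointer carries a tag of all ones in its low bits, so adding 1 clears exactly those bits. Sketch, assuming the two-bit failure tag implied by that ASSERT:

#include <cstdint>

const uint32_t kFailureTagMask = 3;  // assumption: two tag bits, tag value 3

// (value + 1) has its tag bits clear iff value carries the failure tag.
bool IsFailureResult(uint32_t value) {
  return ((value + 1) & kFailureTagMask) == 0;
}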
1724
1725
1726 void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) {
1727 // Called from JavaScript; parameters are on stack as if calling JS function
1728 // r0: number of arguments including receiver
1729 // r1: pointer to builtin function
1730 // fp: frame pointer (restored after C call)
1731 // sp: stack pointer (restored as callee's pp after C call)
1732 // cp: current context (C callee-saved)
1733 // pp: caller's parameter pointer pp (C callee-saved)
1734
1735 // NOTE: Invocations of builtins may return failure objects
1736 // instead of a proper result. The builtin entry handles
1737 // this by performing a garbage collection and retrying the
1738 // builtin once.
1739
1740 StackFrame::Type frame_type = is_debug_break
1741 ? StackFrame::EXIT_DEBUG
1742 : StackFrame::EXIT;
1743
1744 // Enter the exit frame that transitions from JavaScript to C++.
1745 __ EnterExitFrame(frame_type);
1746
1747 // r4: number of arguments (C callee-saved)
1748 // r5: pointer to builtin function (C callee-saved)
1749 // r6: pointer to first argument (C callee-saved)
1750
1751 Label throw_out_of_memory_exception;
1752 Label throw_normal_exception;
1753
1754 #ifdef DEBUG
1755 if (FLAG_gc_greedy) {
1756 Failure* failure = Failure::RetryAfterGC(0, NEW_SPACE);
1757 __ mov(r0, Operand(reinterpret_cast<intptr_t>(failure)));
1758 }
1759 GenerateCore(masm,
1760 &throw_normal_exception,
1761 &throw_out_of_memory_exception,
1762 frame_type,
1763 FLAG_gc_greedy);
1764 #else
1765 GenerateCore(masm,
1766 &throw_normal_exception,
1767 &throw_out_of_memory_exception,
1768 frame_type,
1769 false);
1770 #endif
1771 GenerateCore(masm,
1772 &throw_normal_exception,
1773 &throw_out_of_memory_exception,
1774 frame_type,
1775 true);
1776
1777 __ bind(&throw_out_of_memory_exception);
1778 GenerateThrowOutOfMemory(masm);
1779 // control flow for the generated code will not return.
1780
1781 __ bind(&throw_normal_exception);
1782 GenerateThrowTOS(masm);
1783 }
1784
1785
1786 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
1787 // r0: code entry
1788 // r1: function
1789 // r2: receiver
1790 // r3: argc
1791 // [sp+0]: argv
1792
1793 Label invoke, exit;
1794
1795 // Called from C, so do not pop argc and args on exit (preserve sp)
1796 // No need to save register-passed args
1797 // Save callee-saved registers (incl. cp, pp, and fp), sp, and lr
1798 __ stm(db_w, sp, kCalleeSaved | lr.bit());
1799
1800 // Get address of argv, see stm above.
1801 // r0: code entry
1802 // r1: function
1803 // r2: receiver
1804 // r3: argc
1805 __ add(r4, sp, Operand((kNumCalleeSaved + 1)*kPointerSize));
1806 __ ldr(r4, MemOperand(r4)); // argv
1807
1808 // Push a frame with special values setup to mark it as an entry frame.
1809 // r0: code entry
1810 // r1: function
1811 // r2: receiver
1812 // r3: argc
1813 // r4: argv
1814 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
1815 __ mov(r8, Operand(-1)); // Push a bad frame pointer to fail if it is used.
1816 __ mov(r7, Operand(~ArgumentsAdaptorFrame::SENTINEL));
1817 __ mov(r6, Operand(Smi::FromInt(marker)));
1818 __ mov(r5, Operand(ExternalReference(Top::k_c_entry_fp_address)));
1819 __ ldr(r5, MemOperand(r5));
1820 __ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | r8.bit());
1821
1822 // Setup frame pointer for the frame to be pushed.
1823 __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
1824
1825 // Call a faked try-block that does the invoke.
1826 __ bl(&invoke);
1827
1828 // Caught exception: Store result (exception) in the pending
1829 // exception field in the JSEnv and return a failure sentinel.
1830 // Coming in here the fp will be invalid because the PushTryHandler below
1831 // sets it to 0 to signal the existence of the JSEntry frame.
1832 __ mov(ip, Operand(Top::pending_exception_address()));
1833 __ str(r0, MemOperand(ip));
1834 __ mov(r0, Operand(Handle<Failure>(Failure::Exception())));
1835 __ b(&exit);
1836
1837 // Invoke: Link this frame into the handler chain.
1838 __ bind(&invoke);
1839 // Must preserve r0-r4, r5-r7 are available.
1840 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
1841 // If an exception not caught by another handler occurs, this handler returns
1842 // control to the code after the bl(&invoke) above, which restores all
1843 // kCalleeSaved registers (including cp, pp and fp) to their saved values
1844 // before returning a failure to C.
1845
1846 // Clear any pending exceptions.
1847 __ mov(ip, Operand(ExternalReference::the_hole_value_location()));
1848 __ ldr(r5, MemOperand(ip));
1849 __ mov(ip, Operand(Top::pending_exception_address()));
1850 __ str(r5, MemOperand(ip));
1851
1852 // Invoke the function by calling through JS entry trampoline builtin.
1853 // Notice that we cannot store a reference to the trampoline code directly in
1854 // this stub, because runtime stubs are not traversed when doing GC.
1855
1856 // Expected registers by Builtins::JSEntryTrampoline
1857 // r0: code entry
1858 // r1: function
1859 // r2: receiver
1860 // r3: argc
1861 // r4: argv
1862 if (is_construct) {
1863 ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
1864 __ mov(ip, Operand(construct_entry));
1865 } else {
1866 ExternalReference entry(Builtins::JSEntryTrampoline);
1867 __ mov(ip, Operand(entry));
1868 }
1869 __ ldr(ip, MemOperand(ip)); // deref address
1870
1871 // Branch and link to JSEntryTrampoline
1872 __ mov(lr, Operand(pc));
1873 __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
1874
1875 // Unlink this frame from the handler chain. When reading the
1876 // address of the next handler, there is no need to use the address
1877 // displacement since the current stack pointer (sp) points directly
1878 // to the stack handler.
1879 __ ldr(r3, MemOperand(sp, StackHandlerConstants::kNextOffset));
1880 __ mov(ip, Operand(ExternalReference(Top::k_handler_address)));
1881 __ str(r3, MemOperand(ip));
1882 // No need to restore registers
1883 __ add(sp, sp, Operand(StackHandlerConstants::kSize));
1884
1885 __ bind(&exit); // r0 holds result
1886 // Restore the top frame descriptors from the stack.
1887 __ pop(r3);
1888 __ mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
1889 __ str(r3, MemOperand(ip));
1890
1891 // Reset the stack to the callee saved registers.
1892 __ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
1893
1894 // Restore callee-saved registers and return.
1895 #ifdef DEBUG
1896 if (FLAG_debug_code) __ mov(lr, Operand(pc));
1897 #endif
1898 __ ldm(ia_w, sp, kCalleeSaved | pc.bit());
1899 }
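
PushTryHandler and the manual unlink near the end maintain the per-thread handler chain: a singly linked list threaded through the machine stack, with its head stored at Top::k_handler_address. Stripped of the saved registers, the list discipline is just:

struct Handler { Handler* next; /* saved state lives on the stack */ };

Handler* top_handler = nullptr;  // stands in for Top::k_handler_address

void PushHandler(Handler* h) { h->next = top_handler; top_handler = h; }
void PopHandler() { top_handler = top_handler->next; }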
1900
1901
1902 class ArgumentsAccessStub: public CodeStub {
1903 public:
1904 explicit ArgumentsAccessStub(bool is_length) : is_length_(is_length) { }
1905
1906 private:
1907 bool is_length_;
1908
1909 Major MajorKey() { return ArgumentsAccess; }
1910 int MinorKey() { return is_length_ ? 1 : 0; }
1911 void Generate(MacroAssembler* masm);
1912
1913 const char* GetName() { return "ArgumentsAccessStub"; }
1914
1915 #ifdef DEBUG
1916 void Print() {
1917 PrintF("ArgumentsAccessStub (is_length %s)\n",
1918 is_length_ ? "true" : "false");
1919 }
1920 #endif
1921 };
1922
1923
1924 void ArgumentsAccessStub::Generate(MacroAssembler* masm) {
1925 // ----------- S t a t e -------------
1926 // -- r0: formal number of parameters for the calling function
1927 // -- r1: key (if value access)
1928 // -- lr: return address
1929 // -----------------------------------
1930
1931 // Check that the key is a smi for non-length accesses.
1932 Label slow;
1933 if (!is_length_) {
1934 __ tst(r1, Operand(kSmiTagMask));
1935 __ b(ne, &slow);
1936 }
1937
1938 // Check if the calling frame is an arguments adaptor frame.
1939 // r0: formal number of parameters
1940 // r1: key (if access)
1941 Label adaptor;
1942 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1943 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
1944 __ cmp(r3, Operand(ArgumentsAdaptorFrame::SENTINEL));
1945 __ b(eq, &adaptor);
1946
1947 static const int kParamDisplacement =
1948 StandardFrameConstants::kCallerSPOffset - kPointerSize;
1949
1950 if (is_length_) {
1951 // Nothing to do: the formal length of parameters has been passed in r0
1952 // by the calling function.
1953 } else {
1954 // Check index against formal parameter count. Use unsigned comparison to
1955 // get the negative check for free.
1956 // r0: formal number of parameters
1957 // r1: index
1958 __ cmp(r1, r0);
1959 __ b(cs, &slow);
1960
1961 // Read the argument from the current frame.
1962 __ sub(r3, r0, r1);
1963 __ add(r3, fp, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
1964 __ ldr(r0, MemOperand(r3, kParamDisplacement));
1965 }
1966
1967 // Return to the calling function.
1968 __ mov(pc, lr);
1969
1970 // An arguments adaptor frame is present. Find the length or the actual
1971 // argument in the calling frame.
1972 // r0: formal number of parameters
1973 // r1: key
1974 // r2: adaptor frame pointer
1975 __ bind(&adaptor);
1976 // Read the arguments length from the adaptor frame. This is the result if
1978 // only accessing the length, otherwise it is used in accessing the value.
1978 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
1979
1980 if (!is_length_) {
1981 // Check index against actual arguments count. Use unsigned comparison to
1982 // get the negative check for free.
1983 // r0: actual number of parameters
1984 // r1: index
1985 // r2: adaptor frame pointer
1986 __ cmp(r1, r0);
1987 __ b(cs, &slow);
1988
1989 // Read the argument from the adaptor frame.
1990 __ sub(r3, r0, r1);
1991 __ add(r3, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
1992 __ ldr(r0, MemOperand(r3, kParamDisplacement));
1993 }
1994
1995 // Return to the calling function.
1996 __ mov(pc, lr);
1997
1998 if (!is_length_) {
1999 __ bind(&slow);
2000 __ push(r1);
2001 __ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1);
2002 }
2003 }
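
Both bounds checks use the 'cmp; b(cs, &slow)' idiom: comparing the smi index as an unsigned number makes any negative value look huge, so one branch rejects negative and too-large indices together. In C++:

#include <cstdint>

// One unsigned comparison covers index < 0 and index >= count at once.
bool ArgIndexInRange(int32_t index, int32_t count) {
  return static_cast<uint32_t>(index) < static_cast<uint32_t>(count);
}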
2004
2005
2006 #undef __
2007 #define __ masm_->
2008
2009 void ArmCodeGenerator::GetReferenceProperty(Expression* key) {
2010 ASSERT(!ref()->is_illegal());
2011 Reference::Type type = ref()->type();
2012
2013 // TODO(1241834): Make sure that it is safe to ignore the distinction
2014 // between access types LOAD and LOAD_TYPEOF_EXPR. If there is a chance
2015 // that reference errors can be thrown below, we must distinguish between
2016 // the two kinds of loads (typeof expression loads must not throw a
2017 // reference error).
2018 if (type == Reference::NAMED) {
(...skipping 18 matching lines...)
2037 ASSERT(type == Reference::KEYED);
2038
2039 // TODO(1224671): Implement inline caching for keyed loads as on ia32.
2040 GetPropertyStub stub;
2041 __ CallStub(&stub);
2042 }
2043 __ push(r0);
2044 }
2045
2046
2047 #undef __
2048 #define __ masm->
2049
2050 void ArmCodeGenerator::SetReferenceProperty(CodeGenerator* cgen,
2051 Reference* ref,
2052 Expression* key) {
2053 ASSERT(!ref->is_illegal());
2054 MacroAssembler* masm = cgen->masm();
2055
2056 if (ref->type() == Reference::NAMED) {
2057 // Compute the name of the property.
2058 Literal* literal = key->AsLiteral();
2059 Handle<String> name(String::cast(*literal->handle()));
2060
2061 // Call the appropriate IC code.
2062 masm->pop(r0); // value
2063 // Setup the name register.
2064 masm->mov(r2, Operand(name));
2065 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
2066 masm->Call(ic, RelocInfo::CODE_TARGET);
2067
2068 } else {
2069 // Access keyed property.
2070 ASSERT(ref->type() == Reference::KEYED);
2071
2072 masm->pop(r0); // value
2073 SetPropertyStub stub;
2074 masm->CallStub(&stub);
2075 }
2076 masm->push(r0);
2077 }
2078
2079
2080 #undef __
2081 #define __ masm_->
2082
2083 void ArmCodeGenerator::GenericBinaryOperation(Token::Value op) {
2084 // sp[0] : y
2085 // sp[1] : x
2086 // result : r0
2087
2088 // Stub is entered with a call: 'return address' is in lr.
2089 switch (op) {
2090 case Token::ADD: // fall through.
2091 case Token::SUB: // fall through.
2092 case Token::MUL:
(...skipping 322 matching lines...)
2415
2416 #if defined(DEBUG)
2417 void Print() { PrintF("CallFunctionStub (argc %d)\n", argc_); }
2418 #endif // defined(DEBUG)
2419
2420 Major MajorKey() { return CallFunction; }
2421 int MinorKey() { return argc_; }
2422 };
2423
2424
2425 void CallFunctionStub::Generate(MacroAssembler* masm) {
2426 Label slow;
2427 // Get the function to call from the stack.
2428 // function, receiver [, arguments]
2429 masm->ldr(r1, MemOperand(sp, (argc_ + 1) * kPointerSize));
2430
2431 // Check that the function is really a JavaScript function.
2432 // r1: pushed function (to be verified)
2433 masm->tst(r1, Operand(kSmiTagMask));
2434 masm->b(eq, &slow);
2435 // Get the map of the function object.
2436 masm->ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
2437 masm->ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
2438 masm->cmp(r2, Operand(JS_FUNCTION_TYPE));
2439 masm->b(ne, &slow);
2440
2441 // Fast-case: Invoke the function now.
2442 // r1: pushed function
2443 ParameterCount actual(argc_);
2444 masm->InvokeFunction(r1, actual, JUMP_FUNCTION);
2445
2446 // Slow-case: Non-function called.
2447 masm->bind(&slow);
2448 masm->mov(r0, Operand(argc_)); // Setup the number of arguments.
2449 masm->InvokeBuiltin(Builtins::CALL_NON_FUNCTION, JUMP_JS);
2450 }
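
The guard above admits only genuine JSFunctions to the fast call: a smi or a heap object of any other instance type falls through to CALL_NON_FUNCTION. A hedged sketch of the tst/ldrb/cmp sequence (the types and the type id here are illustrative, not V8's):

#include <cstdint>

const uint8_t kJSFunctionType = 0xFF;  // illustrative, not the real enum value

struct Map { uint8_t instance_type; };
struct HeapObject { Map* map; };

bool IsJSFunction(uintptr_t tagged_value) {
  if ((tagged_value & 1) == 0) return false;  // smi: cannot be called
  HeapObject* obj = reinterpret_cast<HeapObject*>(tagged_value - 1);
  return obj->map->instance_type == kJSFunctionType;
}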
2451
2452
2453 // Call the function on the stack with the given arguments.
2454 void ArmCodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
2455 int position) {
2456 // Push the arguments ("left-to-right") on the stack.
2457 for (int i = 0; i < args->length(); i++) {
2458 Load(args->at(i));
2459 }
2460
2461 // Record the position for debugging purposes.
2462 __ RecordPosition(position);
(...skipping 2062 matching lines...)
4525
4526 void ArmCodeGenerator::ExitJSFrame() {
4527 // Drop the execution stack down to the frame pointer and restore the caller
4528 // frame pointer and return address.
4529 __ mov(sp, fp);
4530 __ ldm(ia_w, sp, fp.bit() | lr.bit());
4531 }
4532
4533
3445 #undef __
3446 #define __ masm->
3447
3448 MemOperand ArmCodeGenerator::SlotOperand(CodeGenerator* cgen,
3449 Slot* slot,
3450 Register tmp) {
3451 // Currently, this assertion will fail if we try to assign to
3452 // a constant variable that is constant because it is read-only
3453 // (such as the variable referring to a named function expression).
3454 // We need to implement assignments to read-only variables.
3455 // Ideally, we should do this during AST generation (by converting
3456 // such assignments into expression statements); however, in general
3457 // we may not be able to make the decision until past AST generation,
3458 // that is, when the entire program is known.
3459 ASSERT(slot != NULL);
3460 int index = slot->index();
3461 switch (slot->type()) {
3462 case Slot::PARAMETER:
3463 return ParameterOperand(cgen, index);
3464
3465 case Slot::LOCAL: {
3466 ASSERT(0 <= index &&
3467 index < cgen->scope()->num_stack_slots() &&
3468 index >= 0);
3469 int local_offset = JavaScriptFrameConstants::kLocal0Offset -
3470 index * kPointerSize;
3471 return MemOperand(fp, local_offset);
3472 }
3473
3474 case Slot::CONTEXT: {
3475 MacroAssembler* masm = cgen->masm();
3476 // Follow the context chain if necessary.
3477 ASSERT(!tmp.is(cp)); // do not overwrite context register
3478 Register context = cp;
3479 int chain_length =
3480 cgen->scope()->ContextChainLength(slot->var()->scope());
3481 for (int i = chain_length; i-- > 0;) {
3482 // Load the closure.
3483 // (All contexts, even 'with' contexts, have a closure,
3484 // and it is the same for all contexts inside a function.
3485 // There is no need to go to the function context first.)
3486 __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
3487 // Load the function context (which is the incoming, outer context).
3488 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
3489 context = tmp;
3490 }
3491 // We may have a 'with' context now. Get the function context.
3492 // (In fact this mov may never be needed, since the scope analysis
3493 // may not permit a direct context access in this case and thus we are
3494 // always at a function context. However it is safe to dereference be-
3495 // cause the function context of a function context is itself. Before
3496 // deleting this mov we should try to create a counter-example first,
3497 // though...)
3498 __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
3499 return ContextOperand(tmp, index);
3500 }
3501
3502 default:
3503 UNREACHABLE();
3504 return MemOperand(r0, 0);
3505 }
3506 }
3507
3508
3509 void Property::GenerateStoreCode(CodeGenerator* cgen,
3510 Reference* ref,
3511 InitState init_state) {
3512 MacroAssembler* masm = cgen->masm();
3513 Comment cmnt(masm, "[ Store to Property");
3514 __ RecordPosition(position());
3515 ArmCodeGenerator::SetReferenceProperty(cgen, ref, key());
3516 }
3517
3518
3519 void VariableProxy::GenerateStoreCode(CodeGenerator* cgen,
3520 Reference* ref,
3521 InitState init_state) {
3522 MacroAssembler* masm = cgen->masm();
3523 Comment cmnt(masm, "[ Store to VariableProxy");
3524 Variable* node = var();
3525
3526 Expression* expr = node->rewrite();
3527 if (expr != NULL) {
3528 expr->GenerateStoreCode(cgen, ref, init_state);
3529 } else {
3530 ASSERT(node->is_global());
3531 if (node->AsProperty() != NULL) {
3532 __ RecordPosition(node->AsProperty()->position());
3533 }
3534 Expression* key = new Literal(node->name());
3535 ArmCodeGenerator::SetReferenceProperty(cgen, ref, key);
3536 }
3537 }
3538
3539
3540 void Slot::GenerateStoreCode(CodeGenerator* cgen,
3541 Reference* ref,
3542 InitState init_state) {
3543 MacroAssembler* masm = cgen->masm();
3544 Comment cmnt(masm, "[ Store to Slot");
3545
3546 if (type() == Slot::LOOKUP) {
3547 ASSERT(var()->mode() == Variable::DYNAMIC);
3548
3549 // For now, just do a runtime call.
3550 __ push(cp);
3551 __ mov(r0, Operand(var()->name()));
3552 __ push(r0);
3553
3554 if (init_state == CONST_INIT) {
3555 // Same as the case for a normal store, but ignores attribute
3556 // (e.g. READ_ONLY) of context slot so that we can initialize const
3557 // properties (introduced via eval("const foo = (some expr);")). Also,
3558 // uses the current function context instead of the top context.
3559 //
3560 // Note that we must declare the foo upon entry of eval(), via a
3561 // context slot declaration, but we cannot initialize it at the same
3562 // time, because the const declaration may be at the end of the eval
3563 // code (sigh...) and the const variable may have been used before
3564 // (where its value is 'undefined'). Thus, we can only do the
3565 // initialization when we actually encounter the expression and when
3566 // the expression operands are defined and valid, and thus we need to
3567 // split this into two operations: declaration of the context slot
3568 // followed by initialization.
3569 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
3570 } else {
3571 __ CallRuntime(Runtime::kStoreContextSlot, 3);
3572 }
3573 // Storing a variable must keep the (new) value on the expression
3574 // stack. This is necessary for compiling assignment expressions.
3575 __ push(r0);
3576
3577 } else {
3578 ASSERT(var()->mode() != Variable::DYNAMIC);
3579
3580 Label exit;
3581 if (init_state == CONST_INIT) {
3582 ASSERT(var()->mode() == Variable::CONST);
3583 // Only the first const initialization must be executed (the slot
3584 // still contains 'the hole' value). When the assignment is executed,
3585 // the code is identical to a normal store (see below).
3586 Comment cmnt(masm, "[ Init const");
3587 __ ldr(r2, ArmCodeGenerator::SlotOperand(cgen, this, r2));
3588 __ cmp(r2, Operand(Factory::the_hole_value()));
3589 __ b(ne, &exit);
3590 }
3591
3592 // We must execute the store.
3593 // Storing a variable must keep the (new) value on the stack. This is
3594 // necessary for compiling assignment expressions.
3595 //
3596 // Note: We will reach here even with var()->mode() == Variable::CONST
3597 // because const declarations initialize consts to 'the hole' value
3598 // and, by doing so, end up calling this code.
3599 //
3600 // r2 may be loaded with context; it is used below in RecordWrite.
3601 __ pop(r0);
3602 __ str(r0, ArmCodeGenerator::SlotOperand(cgen, this, r2));
3603 __ push(r0);
3604
3605 if (type() == Slot::CONTEXT) {
3606 // Skip write barrier if the written value is a smi.
3607 __ tst(r0, Operand(kSmiTagMask));
3608 __ b(eq, &exit);
3609 // r2 is loaded with context when calling SlotOperand above.
3610 int offset = FixedArray::kHeaderSize + index() * kPointerSize;
3611 __ mov(r3, Operand(offset));
3612 __ RecordWrite(r2, r3, r1);
3613 }
3614 // If we definitely did not jump over the assignment, we do not need to
3615 // bind the exit label; unnecessary binding can defeat peephole optimization.
3616 if (init_state == CONST_INIT || type() == Slot::CONTEXT) {
3617 __ bind(&exit);
3618 }
3619 }
3620 }
3621
3622
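// Illustration (editorial sketch, not part of this patch): the CONST_INIT
// path above reduces to a guarded store. In C++, with slot and the_hole as
// hypothetical stand-ins for the stack/context slot and the hole sentinel:
//
//   #include <stdint.h>
//   void InitConst(intptr_t* slot, intptr_t value, intptr_t the_hole) {
//     if (*slot != the_hole) return;  // already initialized; skip the store
//     *slot = value;                  // the first initialization wins
//   }
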
3623 void GetPropertyStub::Generate(MacroAssembler* masm) {
3624 // sp[0]: key
3625 // sp[1]: receiver
3626 Label slow, fast;
3627 // Get the key and receiver object from the stack.
3628 __ ldm(ia, sp, r0.bit() | r1.bit());
3629 // Check that the key is a smi.
3630 __ tst(r0, Operand(kSmiTagMask));
3631 __ b(ne, &slow);
3632 __ mov(r0, Operand(r0, ASR, kSmiTagSize));
3633 // Check that the object isn't a smi.
3634 __ tst(r1, Operand(kSmiTagMask));
3635 __ b(eq, &slow);
3636
3637 // Check that the object is some kind of JS object EXCEPT JS Value type.
3638 // If the object is a value-wrapper object,
3639 // we enter the runtime system to make sure that indexing into string
3640 // objects works as intended.
3641 ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
3642 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
3643 __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
3644 __ cmp(r2, Operand(JS_OBJECT_TYPE));
3645 __ b(lt, &slow);
3646
3647 // Get the elements array of the object.
3648 __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
3649 // Check that the object is in fast mode (not dictionary).
3650 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
3651 __ cmp(r3, Operand(Factory::hash_table_map()));
3652 __ b(eq, &slow);
3653 // Check that the key (index) is within bounds.
3654 __ ldr(r3, FieldMemOperand(r1, Array::kLengthOffset));
3655 __ cmp(r0, Operand(r3));
3656 __ b(lo, &fast);
3657
3658 // Slow case: Push extra copies of the arguments (2).
3659 __ bind(&slow);
3660 __ ldm(ia, sp, r0.bit() | r1.bit());
3661 __ stm(db_w, sp, r0.bit() | r1.bit());
3662 // Do tail-call to runtime routine.
3663 __ TailCallRuntime(ExternalReference(Runtime::kGetProperty), 2);
3664
3665 // Fast case: Do the load.
3666 __ bind(&fast);
3667 __ add(r3, r1, Operand(Array::kHeaderSize - kHeapObjectTag));
3668 __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2));
3669 __ cmp(r0, Operand(Factory::the_hole_value()));
3670 // In case the loaded value is the_hole we have to consult GetProperty
3671 // to ensure the prototype chain is searched.
3672 __ b(eq, &slow);
3673
3674 __ StubReturn(1);
3675 }
3676
3677
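// Illustration (editorial sketch, not part of this patch): the stub above is
// the assembly form of a bounds-checked load from a fast-elements backing
// store. A C++ sketch, with Elements and the_hole as hypothetical stand-ins:
//
//   #include <stdint.h>
//   struct Elements { uint32_t length; intptr_t slots[1]; };
//
//   bool FastGet(const Elements* e, uint32_t index, intptr_t the_hole,
//                intptr_t* out) {
//     if (index >= e->length) return false;          // slow case: runtime
//     if (e->slots[index] == the_hole) return false; // search prototypes
//     *out = e->slots[index];
//     return true;
//   }
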
3678 void SetPropertyStub::Generate(MacroAssembler* masm) {
3679 // r0 : value
3680 // sp[0] : key
3681 // sp[1] : receiver
3682
3683 Label slow, fast, array, extra, exit;
3684 // Get the key and the object from the stack.
3685 __ ldm(ia, sp, r1.bit() | r3.bit()); // r1 = key, r3 = receiver
3686 // Check that the key is a smi.
3687 __ tst(r1, Operand(kSmiTagMask));
3688 __ b(ne, &slow);
3689 // Check that the object isn't a smi.
3690 __ tst(r3, Operand(kSmiTagMask));
3691 __ b(eq, &slow);
3692 // Get the type of the object from its map.
3693 __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
3694 __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
3695 // Check if the object is a JS array or not.
3696 __ cmp(r2, Operand(JS_ARRAY_TYPE));
3697 __ b(eq, &array);
3698 // Check that the object is some kind of JS object.
3699 __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE));
3700 __ b(lt, &slow);
3701
3702
3703 // Object case: Check key against length in the elements array.
3704 __ ldr(r3, FieldMemOperand(r3, JSObject::kElementsOffset));
3705 // Check that the object is in fast mode (not dictionary).
3706 __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
3707 __ cmp(r2, Operand(Factory::hash_table_map()));
3708 __ b(eq, &slow);
3709 // Untag the key (for checking against untagged length in the fixed array).
3710 __ mov(r1, Operand(r1, ASR, kSmiTagSize));
3711 // Compute address to store into and check array bounds.
3712 __ add(r2, r3, Operand(Array::kHeaderSize - kHeapObjectTag));
3713 __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
3714 __ ldr(ip, FieldMemOperand(r3, Array::kLengthOffset));
3715 __ cmp(r1, Operand(ip));
3716 __ b(lo, &fast);
3717
3718
3719 // Slow case: Push extra copies of the arguments (3).
3720 __ bind(&slow);
3721 __ ldm(ia, sp, r1.bit() | r3.bit()); // r0 == value, r1 == key, r3 == object
3722 __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit());
3723 // Do tail-call to runtime routine.
3724 __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3);
3725
3726
3727 // Extra capacity case: Check if there is extra capacity to
3728 // perform the store and update the length. Used for adding one
3729 // element to the array by writing to array[array.length].
3730 // r0 == value, r1 == key, r2 == elements, r3 == object
3731 __ bind(&extra);
3732 __ b(ne, &slow); // do not leave holes in the array
3733 __ mov(r1, Operand(r1, ASR, kSmiTagSize)); // untag
3734 __ ldr(ip, FieldMemOperand(r2, Array::kLengthOffset));
3735 __ cmp(r1, Operand(ip));
3736 __ b(hs, &slow);
3737 __ mov(r1, Operand(r1, LSL, kSmiTagSize)); // restore tag
3738 __ add(r1, r1, Operand(1 << kSmiTagSize)); // and increment
3739 __ str(r1, FieldMemOperand(r3, JSArray::kLengthOffset));
3740 __ mov(r3, Operand(r2));
3741 // NOTE: Computing the address to store into must take into account
3742 // the fact that the key has been incremented.
3743 int displacement = Array::kHeaderSize - kHeapObjectTag -
3744 ((1 << kSmiTagSize) * 2);
3745 __ add(r2, r2, Operand(displacement));
3746 __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
3747 __ b(&fast);
3748
3749
3750 // Array case: Get the length and the elements array from the JS
3751 // array. Check that the array is in fast mode; if it is, the
3752 // length is always a smi.
3753 // r0 == value, r3 == object
3754 __ bind(&array);
3755 __ ldr(r2, FieldMemOperand(r3, JSObject::kElementsOffset));
3756 __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
3757 __ cmp(r1, Operand(Factory::hash_table_map()));
3758 __ b(eq, &slow);
3759
3760 // Check the key against the length in the array, compute the
3761 // address to store into and fall through to fast case.
3762 __ ldr(r1, MemOperand(sp));
3763 // r0 == value, r1 == key, r2 == elements, r3 == object.
3764 __ ldr(ip, FieldMemOperand(r3, JSArray::kLengthOffset));
3765 __ cmp(r1, Operand(ip));
3766 __ b(hs, &extra);
3767 __ mov(r3, Operand(r2));
3768 __ add(r2, r2, Operand(Array::kHeaderSize - kHeapObjectTag));
3769 __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
3770
3771
3772 // Fast case: Do the store.
3773 // r0 == value, r2 == address to store into, r3 == elements
3774 __ bind(&fast);
3775 __ str(r0, MemOperand(r2));
3776 // Skip write barrier if the written value is a smi.
3777 __ tst(r0, Operand(kSmiTagMask));
3778 __ b(eq, &exit);
3779 // Update write barrier for the elements array address.
3780 __ sub(r1, r2, Operand(r3));
3781 __ RecordWrite(r3, r1, r2);
3782 __ bind(&exit);
3783 __ StubReturn(1);
3784 }
3785
3786
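// Illustration (editorial sketch, not part of this patch): the 'extra' case
// above appends at array[array.length] when the backing store has spare
// capacity. A C++ sketch, with FastArray as a hypothetical stand-in:
//
//   #include <stdint.h>
//   struct FastArray { uint32_t length, capacity; intptr_t* slots; };
//
//   bool Append(FastArray* a, uint32_t key, intptr_t value) {
//     if (key != a->length) return false;    // would leave a hole: go slow
//     if (key >= a->capacity) return false;  // no spare room: go slow
//     a->slots[key] = value;                 // store, then bump the length
//     a->length = key + 1;
//     return true;
//   }
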
3787 void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
3788 // r1 : x
3789 // r0 : y
3790 // result : r0
3791
3792 switch (op_) {
3793 case Token::ADD: {
3794 Label slow, exit;
3795 // fast path
3796 __ orr(r2, r1, Operand(r0)); // r2 = x | y;
3797 __ add(r0, r1, Operand(r0), SetCC); // add y optimistically
3798 // go slow-path in case of overflow
3799 __ b(vs, &slow);
3800 // go slow-path in case of non-smi operands
3801 ASSERT(kSmiTag == 0); // adjust code below
3802 __ tst(r2, Operand(kSmiTagMask));
3803 __ b(eq, &exit);
3804 // slow path
3805 __ bind(&slow);
3806 __ sub(r0, r0, Operand(r1)); // revert optimistic add
3807 __ push(r1);
3808 __ push(r0);
3809 __ mov(r0, Operand(1)); // set number of arguments
3810 __ InvokeBuiltin(Builtins::ADD, JUMP_JS);
3811 // done
3812 __ bind(&exit);
3813 break;
3814 }
3815
3816 case Token::SUB: {
3817 Label slow, exit;
3818 // fast path
3819 __ orr(r2, r1, Operand(r0)); // r2 = x | y;
3820 __ sub(r3, r1, Operand(r0), SetCC); // subtract y optimistically
3821 // go slow-path in case of overflow
3822 __ b(vs, &slow);
3823 // go slow-path in case of non-smi operands
3824 ASSERT(kSmiTag == 0); // adjust code below
3825 __ tst(r2, Operand(kSmiTagMask));
3826 __ mov(r0, Operand(r3), LeaveCC, eq); // conditionally set r0 to result
3827 __ b(eq, &exit);
3828 // slow path
3829 __ bind(&slow);
3830 __ push(r1);
3831 __ push(r0);
3832 __ mov(r0, Operand(1)); // set number of arguments
3833 __ InvokeBuiltin(Builtins::SUB, JUMP_JS);
3834 // done
3835 __ bind(&exit);
3836 break;
3837 }
3838
3839 case Token::MUL: {
3840 Label slow, exit;
3841 // tag check
3842 __ orr(r2, r1, Operand(r0)); // r2 = x | y;
3843 ASSERT(kSmiTag == 0); // adjust code below
3844 __ tst(r2, Operand(kSmiTagMask));
3845 __ b(ne, &slow);
3846 // remove tag from one operand (but keep sign), so that result is smi
3847 __ mov(ip, Operand(r0, ASR, kSmiTagSize));
3848 // do multiplication
3849 __ smull(r3, r2, r1, ip); // r3 = lower 32 bits of ip*r1
3850 // go slow on overflows (overflow bit is not set)
3851 __ mov(ip, Operand(r3, ASR, 31));
3852 __ cmp(ip, Operand(r2)); // no overflow if higher 33 bits are identical
3853 __ b(ne, &slow);
3854 // go slow on zero result to handle -0
3855 __ tst(r3, Operand(r3));
3856 __ mov(r0, Operand(r3), LeaveCC, ne);
3857 __ b(ne, &exit);
3858 // slow case
3859 __ bind(&slow);
3860 __ push(r1);
3861 __ push(r0);
3862 __ mov(r0, Operand(1)); // set number of arguments
3863 __ InvokeBuiltin(Builtins::MUL, JUMP_JS);
3864 // done
3865 __ bind(&exit);
3866 break;
3867 }
3868
3869 case Token::BIT_OR:
3870 case Token::BIT_AND:
3871 case Token::BIT_XOR: {
3872 Label slow, exit;
3873 // tag check
3874 __ orr(r2, r1, Operand(r0)); // r2 = x | y;
3875 ASSERT(kSmiTag == 0); // adjust code below
3876 __ tst(r2, Operand(kSmiTagMask));
3877 __ b(ne, &slow);
3878 switch (op_) {
3879 case Token::BIT_OR: __ orr(r0, r0, Operand(r1)); break;
3880 case Token::BIT_AND: __ and_(r0, r0, Operand(r1)); break;
3881 case Token::BIT_XOR: __ eor(r0, r0, Operand(r1)); break;
3882 default: UNREACHABLE();
3883 }
3884 __ b(&exit);
3885 __ bind(&slow);
3886 __ push(r1); // restore stack
3887 __ push(r0);
3888 __ mov(r0, Operand(1)); // 1 argument (not counting receiver).
3889 switch (op_) {
3890 case Token::BIT_OR:
3891 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_JS);
3892 break;
3893 case Token::BIT_AND:
3894 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_JS);
3895 break;
3896 case Token::BIT_XOR:
3897 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_JS);
3898 break;
3899 default:
3900 UNREACHABLE();
3901 }
3902 __ bind(&exit);
3903 break;
3904 }
3905
3906 case Token::SHL:
3907 case Token::SHR:
3908 case Token::SAR: {
3909 Label slow, exit;
3910 // tag check
3911 __ orr(r2, r1, Operand(r0)); // r2 = x | y;
3912 ASSERT(kSmiTag == 0); // adjust code below
3913 __ tst(r2, Operand(kSmiTagMask));
3914 __ b(ne, &slow);
3915 // remove tags from operands (but keep sign)
3916 __ mov(r3, Operand(r1, ASR, kSmiTagSize)); // x
3917 __ mov(r2, Operand(r0, ASR, kSmiTagSize)); // y
3918 // use only the 5 least significant bits of the shift count
3919 __ and_(r2, r2, Operand(0x1f));
3920 // perform operation
3921 switch (op_) {
3922 case Token::SAR:
3923 __ mov(r3, Operand(r3, ASR, r2));
3924 // no checks of result necessary
3925 break;
3926
3927 case Token::SHR:
3928 __ mov(r3, Operand(r3, LSR, r2));
3929 // check that the *unsigned* result fits in a smi
3930 // neither of the two high-order bits can be set:
3931 // - 0x80000000: high bit would be lost when smi tagging
3932 // - 0x40000000: this number would convert to negative when
3933 // smi tagging. These two cases can only happen with shifts
3934 // by 0 or 1 when handed a valid smi.
3935 __ and_(r2, r3, Operand(0xc0000000), SetCC);
3936 __ b(ne, &slow);
3937 break;
3938
3939 case Token::SHL:
3940 __ mov(r3, Operand(r3, LSL, r2));
3941 // check that the *signed* result fits in a smi
3942 __ add(r2, r3, Operand(0x40000000), SetCC);
3943 __ b(mi, &slow);
3944 break;
3945
3946 default: UNREACHABLE();
3947 }
3948 // tag result and store it in r0
3949 ASSERT(kSmiTag == 0); // adjust code below
3950 __ mov(r0, Operand(r3, LSL, kSmiTagSize));
3951 __ b(&exit);
3952 // slow case
3953 __ bind(&slow);
3954 __ push(r1); // restore stack
3955 __ push(r0);
3956 __ mov(r0, Operand(1)); // 1 argument (not counting receiver).
3957 switch (op_) {
3958 case Token::SAR: __ InvokeBuiltin(Builtins::SAR, JUMP_JS); break;
3959 case Token::SHR: __ InvokeBuiltin(Builtins::SHR, JUMP_JS); break;
3960 case Token::SHL: __ InvokeBuiltin(Builtins::SHL, JUMP_JS); break;
3961 default: UNREACHABLE();
3962 }
3963 __ bind(&exit);
3964 break;
3965 }
3966
3967 default: UNREACHABLE();
3968 }
3969 __ Ret();
3970 }
3971
3972
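// Illustration (editorial sketch, not part of this patch): with the one-bit,
// zero-valued smi tag asserted above, a smi is value << 1, so adding two
// tagged smis yields the correctly tagged sum: (a << 1) + (b << 1) ==
// (a + b) << 1. The ADD fast path is, in C++ (the GCC/Clang overflow
// builtin mirrors the SetCC + b(vs, ...) pair):
//
//   #include <stdint.h>
//   bool SmiAdd(int32_t x, int32_t y, int32_t* sum) {
//     if (((x | y) & 1) != 0) return false;       // a non-smi operand
//     return !__builtin_add_overflow(x, y, sum);  // overflow: go slow
//   }
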
3973 void StackCheckStub::Generate(MacroAssembler* masm) {
3974 Label within_limit;
3975 __ mov(ip, Operand(ExternalReference::address_of_stack_guard_limit()));
3976 __ ldr(ip, MemOperand(ip));
3977 __ cmp(sp, Operand(ip));
3978 __ b(hs, &within_limit);
3979 // Do tail-call to runtime routine.
3980 __ push(r0);
3981 __ TailCallRuntime(ExternalReference(Runtime::kStackGuard), 1);
3982 __ bind(&within_limit);
3983
3984 __ StubReturn(1);
3985 }
3986
3987
3988 void UnarySubStub::Generate(MacroAssembler* masm) {
3989 Label undo;
3990 Label slow;
3991 Label done;
3992
3993 // Enter runtime system if the value is not a smi.
3994 __ tst(r0, Operand(kSmiTagMask));
3995 __ b(ne, &slow);
3996
3997 // Enter runtime system if the value of the expression is zero
3998 // to make sure that we switch between 0 and -0.
3999 __ cmp(r0, Operand(0));
4000 __ b(eq, &slow);
4001
4002 // The value of the expression is a smi that is not zero. Try
4003 // optimistic subtraction '0 - value'.
4004 __ rsb(r1, r0, Operand(0), SetCC);
4005 __ b(vs, &slow);
4006
4007 // If result is a smi we are done.
4008 __ tst(r1, Operand(kSmiTagMask));
4009 __ mov(r0, Operand(r1), LeaveCC, eq); // conditionally set r0 to result
4010 __ b(eq, &done);
4011
4012 // Enter runtime system.
4013 __ bind(&slow);
4014 __ push(r0);
4015 __ mov(r0, Operand(0)); // set number of arguments
4016 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_JS);
4017
4018 __ bind(&done);
4019 __ StubReturn(1);
4020 }
4021
4022
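// Illustration (editorial sketch, not part of this patch): the explicit zero
// check above exists because -0 is not representable as a smi; negating 0
// must produce the heap number -0, so the stub defers to the runtime. In
// C++ (the GCC/Clang builtin stands in for the rsb with SetCC):
//
//   #include <stdint.h>
//   bool SmiNegate(int32_t v, int32_t* out) {
//     if (v == 0) return false;                   // -0 needs a heap number
//     return !__builtin_sub_overflow(0, v, out);  // overflow: go slow
//   }
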
4023 void InvokeBuiltinStub::Generate(MacroAssembler* masm) {
4024 __ push(r0);
4025 __ mov(r0, Operand(0)); // set number of arguments
4026 switch (kind_) {
4027 case ToNumber: __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_JS); break;
4028 case Inc: __ InvokeBuiltin(Builtins::INC, JUMP_JS); break;
4029 case Dec: __ InvokeBuiltin(Builtins::DEC, JUMP_JS); break;
4030 default: UNREACHABLE();
4031 }
4032 __ StubReturn(argc_);
4033 }
4034
4035
4036 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
4037 // r0 holds exception
4038 ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize); // adjust this code
4039 __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
4040 __ ldr(sp, MemOperand(r3));
4041 __ pop(r2); // pop next in chain
4042 __ str(r2, MemOperand(r3));
4043 // restore parameter- and frame-pointer and pop state.
4044 __ ldm(ia_w, sp, r3.bit() | pp.bit() | fp.bit());
4045 // Before returning we restore the context from the frame pointer if not NULL.
4046 // The frame pointer is NULL in the exception handler of a JS entry frame.
4047 __ cmp(fp, Operand(0));
4048 // Set cp to NULL if fp is NULL.
4049 __ mov(cp, Operand(0), LeaveCC, eq);
4050 // Restore cp otherwise.
4051 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
4052 if (kDebug && FLAG_debug_code) __ mov(lr, Operand(pc));
4053 __ pop(pc);
4054 }
4055
4056
4057 void CEntryStub::GenerateThrowOutOfMemory(MacroAssembler* masm) {
4058 // Fetch top stack handler.
4059 __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
4060 __ ldr(r3, MemOperand(r3));
4061
4062 // Unwind the handlers until the ENTRY handler is found.
4063 Label loop, done;
4064 __ bind(&loop);
4065 // Load the type of the current stack handler.
4066 const int kStateOffset = StackHandlerConstants::kAddressDisplacement +
4067 StackHandlerConstants::kStateOffset;
4068 __ ldr(r2, MemOperand(r3, kStateOffset));
4069 __ cmp(r2, Operand(StackHandler::ENTRY));
4070 __ b(eq, &done);
4071 // Fetch the next handler in the list.
4072 const int kNextOffset = StackHandlerConstants::kAddressDisplacement +
4073 StackHandlerConstants::kNextOffset;
4074 __ ldr(r3, MemOperand(r3, kNextOffset));
4075 __ jmp(&loop);
4076 __ bind(&done);
4077
4078 // Set the top handler address to the next handler past the current ENTRY handler.
4079 __ ldr(r0, MemOperand(r3, kNextOffset));
4080 __ mov(r2, Operand(ExternalReference(Top::k_handler_address)));
4081 __ str(r0, MemOperand(r2));
4082
4083 // Set external caught exception to false.
4084 __ mov(r0, Operand(false));
4085 ExternalReference external_caught(Top::k_external_caught_exception_address);
4086 __ mov(r2, Operand(external_caught));
4087 __ str(r0, MemOperand(r2));
4088
4089 // Set pending exception and r0 to out of memory exception.
4090 Failure* out_of_memory = Failure::OutOfMemoryException();
4091 __ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
4092 __ mov(r2, Operand(ExternalReference(Top::k_pending_exception_address)));
4093 __ str(r0, MemOperand(r2));
4094
4095 // Restore the stack to the address of the ENTRY handler
4096 __ mov(sp, Operand(r3));
4097
4098 // restore parameter- and frame-pointer and pop state.
4099 __ ldm(ia_w, sp, r3.bit() | pp.bit() | fp.bit());
4100 // Before returning we restore the context from the frame pointer if not NULL.
4101 // The frame pointer is NULL in the exception handler of a JS entry frame.
4102 __ cmp(fp, Operand(0));
4103 // Set cp to NULL if fp is NULL.
4104 __ mov(cp, Operand(0), LeaveCC, eq);
4105 // Restore cp otherwise.
4106 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
4107 if (kDebug && FLAG_debug_code) __ mov(lr, Operand(pc));
4108 __ pop(pc);
4109 }
4110
4111
4112 void CEntryStub::GenerateCore(MacroAssembler* masm,
4113 Label* throw_normal_exception,
4114 Label* throw_out_of_memory_exception,
4115 StackFrame::Type frame_type,
4116 bool do_gc) {
4117 // r0: result parameter for PerformGC, if any
4118 // r4: number of arguments including receiver (C callee-saved)
4119 // r5: pointer to builtin function (C callee-saved)
4120 // r6: pointer to the first argument (C callee-saved)
4121
4122 if (do_gc) {
4123 // Passing r0.
4124 __ Call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
4125 }
4126
4127 // Call C built-in.
4128 // r0 = argc, r1 = argv
4129 __ mov(r0, Operand(r4));
4130 __ mov(r1, Operand(r6));
4131
4132 // TODO(1242173): To let the GC traverse the return address of the exit
4133 // frames, we need to know where the return address is. Right now,
4134 // we push it on the stack to be able to find it again, but we never
4135 // restore from it in case of changes, which makes it impossible to
4136 // support moving the C entry code stub. This should be fixed, but currently
4137 // this is OK because the CEntryStub gets generated so early in the V8 boot
4138 // sequence that it is never moved.
4139 __ add(lr, pc, Operand(4)); // compute return address: (pc + 8) + 4
4140 __ push(lr);
4141 #if !defined(__arm__)
4142 // Notify the simulator of the transition to C code.
4143 __ swi(assembler::arm::call_rt_r5);
4144 #else /* !defined(__arm__) */
4145 __ mov(pc, Operand(r5));
4146 #endif /* !defined(__arm__) */
4147 // result is in r0 or r0:r1 - do not destroy these registers!
4148
4149 // check for failure result
4150 Label failure_returned;
4151 ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
4152 // Lower 2 bits of r2 are 0 iff r0 has failure tag.
4153 __ add(r2, r0, Operand(1));
4154 __ tst(r2, Operand(kFailureTagMask));
4155 __ b(eq, &failure_returned);
4156
4157 // Exit C frame and return.
4158 // r0:r1: result
4159 // sp: stack pointer
4160 // fp: frame pointer
4161 // pp: caller's parameter pointer pp (restored as C callee-saved)
4162 __ LeaveExitFrame(frame_type);
4163
4164 // check if we should retry or throw exception
4165 Label retry;
4166 __ bind(&failure_returned);
4167 ASSERT(Failure::RETRY_AFTER_GC == 0);
4168 __ tst(r0, Operand(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
4169 __ b(eq, &retry);
4170
4171 Label continue_exception;
4172 // If the returned failure is EXCEPTION then promote Top::pending_exception().
4173 __ cmp(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
4174 __ b(ne, &continue_exception);
4175
4176 // Retrieve the pending exception and clear the variable.
4177 __ mov(ip, Operand(Factory::the_hole_value().location()));
4178 __ ldr(r3, MemOperand(ip));
4179 __ mov(ip, Operand(Top::pending_exception_address()));
4180 __ ldr(r0, MemOperand(ip));
4181 __ str(r3, MemOperand(ip));
4182
4183 __ bind(&continue_exception);
4184 // Special handling of out of memory exception.
4185 Failure* out_of_memory = Failure::OutOfMemoryException();
4186 __ cmp(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
4187 __ b(eq, throw_out_of_memory_exception);
4188
4189 // Handle normal exception.
4190 __ jmp(throw_normal_exception);
4191
4192 __ bind(&retry); // pass last failure (r0) as parameter (r0) when retrying
4193 }
4194
4195
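// Illustration (editorial sketch, not part of this patch): a failure object
// is a word whose low tag bits are all ones, so adding 1 clears exactly
// those bits. Assuming the two-bit failure tag implied by the ASSERT in
// GenerateCore above, in C++:
//
//   #include <stdint.h>
//   bool IsFailure(intptr_t value) {
//     const intptr_t kFailureTagMask = 0x3;         // assumed two-bit tag
//     return ((value + 1) & kFailureTagMask) == 0;  // add(r2, r0, 1); tst
//   }
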
4196 void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) {
4197 // Called from JavaScript; parameters are on stack as if calling JS function
4198 // r0: number of arguments including receiver
4199 // r1: pointer to builtin function
4200 // fp: frame pointer (restored after C call)
4201 // sp: stack pointer (restored as callee's pp after C call)
4202 // cp: current context (C callee-saved)
4203 // pp: caller's parameter pointer pp (C callee-saved)
4204
4205 // NOTE: Invocations of builtins may return failure objects
4206 // instead of a proper result. The builtin entry handles
4207 // this by performing a garbage collection and retrying the
4208 // builtin once.
4209
4210 StackFrame::Type frame_type = is_debug_break
4211 ? StackFrame::EXIT_DEBUG
4212 : StackFrame::EXIT;
4213
4214 // Enter the exit frame that transitions from JavaScript to C++.
4215 __ EnterExitFrame(frame_type);
4216
4217 // r4: number of arguments (C callee-saved)
4218 // r5: pointer to builtin function (C callee-saved)
4219 // r6: pointer to first argument (C callee-saved)
4220
4221 Label throw_out_of_memory_exception;
4222 Label throw_normal_exception;
4223
4224 #ifdef DEBUG
4225 if (FLAG_gc_greedy) {
4226 Failure* failure = Failure::RetryAfterGC(0, NEW_SPACE);
4227 __ mov(r0, Operand(reinterpret_cast<intptr_t>(failure)));
4228 }
4229 GenerateCore(masm,
4230 &throw_normal_exception,
4231 &throw_out_of_memory_exception,
4232 frame_type,
4233 FLAG_gc_greedy);
4234 #else
4235 GenerateCore(masm,
4236 &throw_normal_exception,
4237 &throw_out_of_memory_exception,
4238 frame_type,
4239 false);
4240 #endif
4241 GenerateCore(masm,
4242 &throw_normal_exception,
4243 &throw_out_of_memory_exception,
4244 frame_type,
4245 true);
4246
4247 __ bind(&throw_out_of_memory_exception);
4248 GenerateThrowOutOfMemory(masm);
4249 // Control flow from the generated throw code does not return.
4250
4251 __ bind(&throw_normal_exception);
4252 GenerateThrowTOS(masm);
4253 }
4254
4255
4256 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
4257 // r0: code entry
4258 // r1: function
4259 // r2: receiver
4260 // r3: argc
4261 // [sp+0]: argv
4262
4263 Label invoke, exit;
4264
4265 // Called from C, so do not pop argc and args on exit (preserve sp)
4266 // No need to save register-passed args
4267 // Save callee-saved registers (incl. cp, pp, and fp), sp, and lr
4268 __ stm(db_w, sp, kCalleeSaved | lr.bit());
4269
4270 // Get address of argv, see stm above.
4271 // r0: code entry
4272 // r1: function
4273 // r2: receiver
4274 // r3: argc
4275 __ add(r4, sp, Operand((kNumCalleeSaved + 1) * kPointerSize));
4276 __ ldr(r4, MemOperand(r4)); // argv
4277
4278 // Push a frame with special values setup to mark it as an entry frame.
4279 // r0: code entry
4280 // r1: function
4281 // r2: receiver
4282 // r3: argc
4283 // r4: argv
4284 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
4285 __ mov(r8, Operand(-1)); // Push a bad frame pointer to fail if it is used.
4286 __ mov(r7, Operand(~ArgumentsAdaptorFrame::SENTINEL));
4287 __ mov(r6, Operand(Smi::FromInt(marker)));
4288 __ mov(r5, Operand(ExternalReference(Top::k_c_entry_fp_address)));
4289 __ ldr(r5, MemOperand(r5));
4290 __ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | r8.bit());
4291
4292 // Setup frame pointer for the frame to be pushed.
4293 __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
4294
4295 // Call a faked try-block that does the invoke.
4296 __ bl(&invoke);
4297
4298 // Caught exception: Store result (exception) in the pending
4299 // exception field in the JSEnv and return a failure sentinel.
4300 // Coming in here the fp will be invalid because the PushTryHandler below
4301 // sets it to 0 to signal the existence of the JSEntry frame.
4302 __ mov(ip, Operand(Top::pending_exception_address()));
4303 __ str(r0, MemOperand(ip));
4304 __ mov(r0, Operand(Handle<Failure>(Failure::Exception())));
4305 __ b(&exit);
4306
4307 // Invoke: Link this frame into the handler chain.
4308 __ bind(&invoke);
4309 // Must preserve r0-r4, r5-r7 are available.
4310 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
4311 // If an exception not caught by another handler occurs, this handler returns
4312 // control to the code after the bl(&invoke) above, which restores all
4313 // kCalleeSaved registers (including cp, pp and fp) to their saved values
4314 // before returning a failure to C.
4315
4316 // Clear any pending exceptions.
4317 __ mov(ip, Operand(ExternalReference::the_hole_value_location()));
4318 __ ldr(r5, MemOperand(ip));
4319 __ mov(ip, Operand(Top::pending_exception_address()));
4320 __ str(r5, MemOperand(ip));
4321
4322 // Invoke the function by calling through JS entry trampoline builtin.
4323 // Notice that we cannot store a reference to the trampoline code directly in
4324 // this stub, because runtime stubs are not traversed when doing GC.
4325
4326 // Expected registers by Builtins::JSEntryTrampoline
4327 // r0: code entry
4328 // r1: function
4329 // r2: receiver
4330 // r3: argc
4331 // r4: argv
4332 if (is_construct) {
4333 ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
4334 __ mov(ip, Operand(construct_entry));
4335 } else {
4336 ExternalReference entry(Builtins::JSEntryTrampoline);
4337 __ mov(ip, Operand(entry));
4338 }
4339 __ ldr(ip, MemOperand(ip)); // deref address
4340
4341 // Branch and link to JSEntryTrampoline
4342 __ mov(lr, Operand(pc));
4343 __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
4344
4345 // Unlink this frame from the handler chain. When reading the
4346 // address of the next handler, there is no need to use the address
4347 // displacement since the current stack pointer (sp) points directly
4348 // to the stack handler.
4349 __ ldr(r3, MemOperand(sp, StackHandlerConstants::kNextOffset));
4350 __ mov(ip, Operand(ExternalReference(Top::k_handler_address)));
4351 __ str(r3, MemOperand(ip));
4352 // No need to restore registers
4353 __ add(sp, sp, Operand(StackHandlerConstants::kSize));
4354
4355 __ bind(&exit); // r0 holds result
4356 // Restore the top frame descriptors from the stack.
4357 __ pop(r3);
4358 __ mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
4359 __ str(r3, MemOperand(ip));
4360
4361 // Reset the stack to the callee saved registers.
4362 __ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
4363
4364 // Restore callee-saved registers and return.
4365 #ifdef DEBUG
4366 if (FLAG_debug_code) __ mov(lr, Operand(pc));
4367 #endif
4368 __ ldm(ia_w, sp, kCalleeSaved | pc.bit());
4369 }
4370
4371
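// A note on the return-address arithmetic used above (editorial, not part
// of this patch): on ARM, reading pc yields the address of the current
// instruction plus 8. In GenerateCore, 'add(lr, pc, Operand(4))' therefore
// sets lr to A + 12, where A is the address of the add itself: past the
// following push (at A + 4) and the jump to the builtin (at A + 8).
// Likewise, 'mov(lr, Operand(pc))' in JSEntryStub::GenerateBody yields the
// address of the instruction after the subsequent 'add(pc, ...)' branch,
// which is exactly where the callee should return.
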
4372 void ArgumentsAccessStub::Generate(MacroAssembler* masm) {
4373 // ----------- S t a t e -------------
4374 // -- r0: formal number of parameters for the calling function
4375 // -- r1: key (if value access)
4376 // -- lr: return address
4377 // -----------------------------------
4378
4379 // Check that the key is a smi for non-length accesses.
4380 Label slow;
4381 if (!is_length_) {
4382 __ tst(r1, Operand(kSmiTagMask));
4383 __ b(ne, &slow);
4384 }
4385
4386 // Check if the calling frame is an arguments adaptor frame.
4387 // r0: formal number of parameters
4388 // r1: key (if access)
4389 Label adaptor;
4390 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4391 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
4392 __ cmp(r3, Operand(ArgumentsAdaptorFrame::SENTINEL));
4393 __ b(eq, &adaptor);
4394
4395 static const int kParamDisplacement =
4396 StandardFrameConstants::kCallerSPOffset - kPointerSize;
4397
4398 if (is_length_) {
4399 // Nothing to do: the formal length of parameters has been passed in r0
4400 // by the calling function.
4401 } else {
4402 // Check index against formal parameter count. Use unsigned comparison to
4403 // get the negative check for free.
4404 // r0: formal number of parameters
4405 // r1: index
4406 __ cmp(r1, r0);
4407 __ b(cs, &slow);
4408
4409 // Read the argument from the current frame.
4410 __ sub(r3, r0, r1);
4411 __ add(r3, fp, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
4412 __ ldr(r0, MemOperand(r3, kParamDisplacement));
4413 }
4414
4415 // Return to the calling function.
4416 __ mov(pc, lr);
4417
4418 // An arguments adaptor frame is present. Find the length or the actual
4419 // argument in the calling frame.
4420 // r0: formal number of parameters
4421 // r1: key
4422 // r2: adaptor frame pointer
4423 __ bind(&adaptor);
4424 // Read the arguments length from the adaptor frame. This is the result if
4425 // only accessing the length; otherwise it is used in accessing the value.
4426 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4427
4428 if (!is_length_) {
4429 // Check index against actual arguments count. Use unsigned comparison to
4430 // get the negative check for free.
4431 // r0: actual number of parameters
4432 // r1: index
4433 // r2: adaptor frame pointer
4434 __ cmp(r1, r0);
4435 __ b(cs, &slow);
4436
4437 // Read the argument from the adaptor frame.
4438 __ sub(r3, r0, r1);
4439 __ add(r3, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
4440 __ ldr(r0, MemOperand(r3, kParamDisplacement));
4441 }
4442
4443 // Return to the calling function.
4444 __ mov(pc, lr);
4445
4446 if (!is_length_) {
4447 __ bind(&slow);
4448 __ push(r1);
4449 __ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1);
4450 }
4451 }
4452
4453
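// Illustration (editorial sketch, not part of this patch): the argument
// loads above turn a smi-tagged index into an fp-relative byte offset.
// Assuming 32-bit pointers and the one-bit smi tag (a tagged value is
// value << 1), the address arithmetic is, in C++:
//
//   #include <stdint.h>
//   intptr_t* ArgAddress(char* fp, intptr_t tagged_count,
//                        intptr_t tagged_index, int displacement) {
//     // (tagged_count - tagged_index) == 2 * (count - index); one more
//     // shift rescales smi units to 4-byte words, as the stub's
//     // LSL #(kPointerSizeLog2 - kSmiTagSize) does.
//     return (intptr_t*)(fp + ((tagged_count - tagged_index) << 1) +
//                        displacement);
//   }
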
4454 void ArmCodeGenerator::SetReferenceProperty(CodeGenerator* cgen,
4455 Reference* ref,
4456 Expression* key) {
4457 ASSERT(!ref->is_illegal());
4458 MacroAssembler* masm = cgen->masm();
4459
4460 if (ref->type() == Reference::NAMED) {
4461 // Compute the name of the property.
4462 Literal* literal = key->AsLiteral();
4463 Handle<String> name(String::cast(*literal->handle()));
4464
4465 // Call the appropriate IC code.
4466 __ pop(r0); // value
4467 // Setup the name register.
4468 __ mov(r2, Operand(name));
4469 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
4470 __ Call(ic, RelocInfo::CODE_TARGET);
4471
4472 } else {
4473 // Access keyed property.
4474 ASSERT(ref->type() == Reference::KEYED);
4475
4476 __ pop(r0); // value
4477 SetPropertyStub stub;
4478 __ CallStub(&stub);
4479 }
4480 __ push(r0);
4481 }
4482
4483
4484 void CallFunctionStub::Generate(MacroAssembler* masm) {
4485 Label slow;
4486 // Get the function to call from the stack.
4487 // function, receiver [, arguments]
4488 __ ldr(r1, MemOperand(sp, (argc_ + 1) * kPointerSize));
4489
4490 // Check that the function is really a JavaScript function.
4491 // r1: pushed function (to be verified)
4492 __ tst(r1, Operand(kSmiTagMask));
4493 __ b(eq, &slow);
4494 // Get the map of the function object.
4495 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
4496 __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
4497 __ cmp(r2, Operand(JS_FUNCTION_TYPE));
4498 __ b(ne, &slow);
4499
4500 // Fast-case: Invoke the function now.
4501 // r1: pushed function
4502 ParameterCount actual(argc_);
4503 __ InvokeFunction(r1, actual, JUMP_FUNCTION);
4504
4505 // Slow-case: Non-function called.
4506 __ bind(&slow);
4507 __ mov(r0, Operand(argc_)); // Setup the number of arguments.
4508 __ InvokeBuiltin(Builtins::CALL_NON_FUNCTION, JUMP_JS);
4509 }
4510
4511
4512 #undef __
4513 
4514 // -----------------------------------------------------------------------------
4515 // CodeGenerator interface
4516 
4517 // MakeCode() is just a wrapper for CodeGenerator::MakeCode()
4518 // so we don't have to expose the entire CodeGenerator class in
4519 // the .h file.
4520 Handle<Code> CodeGenerator::MakeCode(FunctionLiteral* fun,
4521 Handle<Script> script,
4522 bool is_eval) {
4523 Handle<Code> code = ArmCodeGenerator::MakeCode(fun, script, is_eval);
4524 if (!code.is_null()) {
4525 Counters::total_compiled_code_size.Increment(code->instruction_size());
4526 }
4527 return code;
4528 }
4529 
4530 
4531 } } // namespace v8::internal