OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 564 matching lines...)
575 __ tst(r3, Operand(kIsNotStringMask)); | 575 __ tst(r3, Operand(kIsNotStringMask)); |
576 __ b(ne, &convert_argument); | 576 __ b(ne, &convert_argument); |
577 __ mov(argument, r0); | 577 __ mov(argument, r0); |
578 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); | 578 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); |
579 __ b(&argument_is_string); | 579 __ b(&argument_is_string); |
580 | 580 |
581 // Invoke the conversion builtin and put the result into r2. | 581 // Invoke the conversion builtin and put the result into r2. |
582 __ bind(&convert_argument); | 582 __ bind(&convert_argument); |
583 __ push(function); // Preserve the function. | 583 __ push(function); // Preserve the function. |
584 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); | 584 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); |
585 __ EnterInternalFrame(); | 585 { |
586 __ push(r0); | 586 FrameScope scope(masm, StackFrame::INTERNAL); |
587 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); | 587 __ push(r0); |
588 __ LeaveInternalFrame(); | 588 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); |
| 589 } |
589 __ pop(function); | 590 __ pop(function); |
590 __ mov(argument, r0); | 591 __ mov(argument, r0); |
591 __ b(&argument_is_string); | 592 __ b(&argument_is_string); |
592 | 593 |
593 // Load the empty string into r2, remove the receiver from the | 594 // Load the empty string into r2, remove the receiver from the |
594 // stack, and jump back to the case where the argument is a string. | 595 // stack, and jump back to the case where the argument is a string. |
595 __ bind(&no_arguments); | 596 __ bind(&no_arguments); |
596 __ LoadRoot(argument, Heap::kEmptyStringRootIndex); | 597 __ LoadRoot(argument, Heap::kEmptyStringRootIndex); |
597 __ Drop(1); | 598 __ Drop(1); |
598 __ b(&argument_is_string); | 599 __ b(&argument_is_string); |
599 | 600 |
600 // At this point the argument is already a string. Call runtime to | 601 // At this point the argument is already a string. Call runtime to |
601 // create a string wrapper. | 602 // create a string wrapper. |
602 __ bind(&gc_required); | 603 __ bind(&gc_required); |
603 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4); | 604 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4); |
604 __ EnterInternalFrame(); | 605 { |
605 __ push(argument); | 606 FrameScope scope(masm, StackFrame::INTERNAL); |
606 __ CallRuntime(Runtime::kNewStringWrapper, 1); | 607 __ push(argument); |
607 __ LeaveInternalFrame(); | 608 __ CallRuntime(Runtime::kNewStringWrapper, 1); |
| 609 } |
608 __ Ret(); | 610 __ Ret(); |
609 } | 611 } |
610 | 612 |
611 | 613 |
612 void Builtins::Generate_JSConstructCall(MacroAssembler* masm) { | 614 void Builtins::Generate_JSConstructCall(MacroAssembler* masm) { |
613 // ----------- S t a t e ------------- | 615 // ----------- S t a t e ------------- |
614 // -- r0 : number of arguments | 616 // -- r0 : number of arguments |
615 // -- r1 : constructor function | 617 // -- r1 : constructor function |
616 // -- lr : return address | 618 // -- lr : return address |
617 // -- sp[...]: constructor arguments | 619 // -- sp[...]: constructor arguments |
(...skipping 26 matching lines...)
644 | 646 |
645 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 647 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
646 bool is_api_function, | 648 bool is_api_function, |
647 bool count_constructions) { | 649 bool count_constructions) { |
648 // Should never count constructions for api objects. | 650 // Should never count constructions for api objects. |
649 ASSERT(!is_api_function || !count_constructions); | 651 ASSERT(!is_api_function || !count_constructions); |
650 | 652 |
651 Isolate* isolate = masm->isolate(); | 653 Isolate* isolate = masm->isolate(); |
652 | 654 |
653 // Enter a construct frame. | 655 // Enter a construct frame. |
654 __ EnterConstructFrame(); | 656 { |
| 657 FrameScope scope(masm, StackFrame::CONSTRUCT); |
655 | 658 |
656 // Preserve the two incoming parameters on the stack. | 659 // Preserve the two incoming parameters on the stack. |
657 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); | 660 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); |
658 __ push(r0); // Smi-tagged arguments count. | 661 __ push(r0); // Smi-tagged arguments count. |
659 __ push(r1); // Constructor function. | 662 __ push(r1); // Constructor function. |
660 | 663 |
661 // Try to allocate the object without transitioning into C code. If any of the | 664 // Try to allocate the object without transitioning into C code. If any of the |
662 // preconditions is not met, the code bails out to the runtime call. | 665 // preconditions is not met, the code bails out to the runtime call. |
663 Label rt_call, allocated; | 666 Label rt_call, allocated; |
664 if (FLAG_inline_new) { | 667 if (FLAG_inline_new) { |
(...skipping 295 matching lines...)
960 __ ldr(r0, MemOperand(sp)); | 963 __ ldr(r0, MemOperand(sp)); |
961 | 964 |
962 // Remove receiver from the stack, remove caller arguments, and | 965 // Remove receiver from the stack, remove caller arguments, and |
963 // return. | 966 // return. |
964 __ bind(&exit); | 967 __ bind(&exit); |
965 // r0: result | 968 // r0: result |
966 // sp[0]: receiver (newly allocated object) | 969 // sp[0]: receiver (newly allocated object) |
967 // sp[1]: constructor function | 970 // sp[1]: constructor function |
968 // sp[2]: number of arguments (smi-tagged) | 971 // sp[2]: number of arguments (smi-tagged) |
969 __ ldr(r1, MemOperand(sp, 2 * kPointerSize)); | 972 __ ldr(r1, MemOperand(sp, 2 * kPointerSize)); |
970 __ LeaveConstructFrame(); | 973 |
| 974 // Leave construct frame. |
| 975 } |
| 976 |
971 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1)); | 977 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1)); |
972 __ add(sp, sp, Operand(kPointerSize)); | 978 __ add(sp, sp, Operand(kPointerSize)); |
973 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2); | 979 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2); |
974 __ Jump(lr); | 980 __ Jump(lr); |
975 } | 981 } |
976 | 982 |
977 | 983 |
978 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { | 984 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { |
979 Generate_JSConstructStubHelper(masm, false, true); | 985 Generate_JSConstructStubHelper(masm, false, true); |
980 } | 986 } |
(...skipping 12 matching lines...)
993 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, | 999 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, |
994 bool is_construct) { | 1000 bool is_construct) { |
995 // Called from Generate_JS_Entry | 1001 // Called from Generate_JS_Entry |
996 // r0: code entry | 1002 // r0: code entry |
997 // r1: function | 1003 // r1: function |
998 // r2: receiver | 1004 // r2: receiver |
999 // r3: argc | 1005 // r3: argc |
1000 // r4: argv | 1006 // r4: argv |
1001 // r5-r7, cp may be clobbered | 1007 // r5-r7, cp may be clobbered |
1002 | 1008 |
1003 // Clear the context before we push it when entering the JS frame. | 1009 // Clear the context before we push it when entering the internal frame. |
1004 __ mov(cp, Operand(0, RelocInfo::NONE)); | 1010 __ mov(cp, Operand(0, RelocInfo::NONE)); |
1005 | 1011 |
1006 // Enter an internal frame. | 1012 // Enter an internal frame. |
1007 __ EnterInternalFrame(); | 1013 { |
| 1014 FrameScope scope(masm, StackFrame::INTERNAL); |
1008 | 1015 |
1009 // Set up the context from the function argument. | 1016 // Set up the context from the function argument. |
1010 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); | 1017 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); |
1011 | 1018 |
1012 // Set up the roots register. | 1019 // Set up the roots register. |
1013 ExternalReference roots_address = | 1020 ExternalReference roots_address = |
1014 ExternalReference::roots_address(masm->isolate()); | 1021 ExternalReference::roots_address(masm->isolate()); |
1015 __ mov(r10, Operand(roots_address)); | 1022 __ mov(r10, Operand(roots_address)); |
1016 | 1023 |
1017 // Push the function and the receiver onto the stack. | 1024 // Push the function and the receiver onto the stack. |
(...skipping 30 matching lines...)
1048 __ mov(r0, Operand(r3)); | 1055 __ mov(r0, Operand(r3)); |
1049 if (is_construct) { | 1056 if (is_construct) { |
1050 __ Call(masm->isolate()->builtins()->JSConstructCall(), | 1057 __ Call(masm->isolate()->builtins()->JSConstructCall(), |
1051 RelocInfo::CODE_TARGET); | 1058 RelocInfo::CODE_TARGET); |
1052 } else { | 1059 } else { |
1053 ParameterCount actual(r0); | 1060 ParameterCount actual(r0); |
1054 __ InvokeFunction(r1, actual, CALL_FUNCTION, | 1061 __ InvokeFunction(r1, actual, CALL_FUNCTION, |
1055 NullCallWrapper(), CALL_AS_METHOD); | 1062 NullCallWrapper(), CALL_AS_METHOD); |
1056 } | 1063 } |
1057 | 1064 |
1058 // Exit the JS frame and remove the parameters (except function), and return. | 1065 // Exit the JS frame and remove the parameters (except function), and |
| 1066 // return. |
1059 // Respect ABI stack constraint. | 1067 // Respect ABI stack constraint. |
1060 __ LeaveInternalFrame(); | 1068 } |
1061 __ Jump(lr); | 1069 __ Jump(lr); |
1062 | 1070 |
1063 // r0: result | 1071 // r0: result |
1064 } | 1072 } |
1065 | 1073 |
1066 | 1074 |
1067 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { | 1075 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { |
1068 Generate_JSEntryTrampolineHelper(masm, false); | 1076 Generate_JSEntryTrampolineHelper(masm, false); |
1069 } | 1077 } |
1070 | 1078 |
1071 | 1079 |
1072 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 1080 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
1073 Generate_JSEntryTrampolineHelper(masm, true); | 1081 Generate_JSEntryTrampolineHelper(masm, true); |
1074 } | 1082 } |
1075 | 1083 |
1076 | 1084 |
1077 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { | 1085 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { |
1078 // Enter an internal frame. | 1086 // Enter an internal frame. |
1079 __ EnterInternalFrame(); | 1087 { |
| 1088 FrameScope scope(masm, StackFrame::INTERNAL); |
1080 | 1089 |
1081 // Preserve the function. | 1090 // Preserve the function. |
1082 __ push(r1); | 1091 __ push(r1); |
1083 // Push call kind information. | 1092 // Push call kind information. |
1084 __ push(r5); | 1093 __ push(r5); |
1085 | 1094 |
1086 // Push the function on the stack as the argument to the runtime function. | 1095 // Push the function on the stack as the argument to the runtime function. |
1087 __ push(r1); | 1096 __ push(r1); |
1088 __ CallRuntime(Runtime::kLazyCompile, 1); | 1097 __ CallRuntime(Runtime::kLazyCompile, 1); |
1089 // Calculate the entry point. | 1098 // Calculate the entry point. |
1090 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); | 1099 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); |
1091 | 1100 |
1092 // Restore call kind information. | 1101 // Restore call kind information. |
1093 __ pop(r5); | 1102 __ pop(r5); |
1094 // Restore saved function. | 1103 // Restore saved function. |
1095 __ pop(r1); | 1104 __ pop(r1); |
1096 | 1105 |
1097 // Tear down temporary frame. | 1106 // Tear down internal frame. |
1098 __ LeaveInternalFrame(); | 1107 } |
1099 | 1108 |
1100 // Do a tail-call of the compiled function. | 1109 // Do a tail-call of the compiled function. |
1101 __ Jump(r2); | 1110 __ Jump(r2); |
1102 } | 1111 } |
1103 | 1112 |
1104 | 1113 |
1105 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { | 1114 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { |
1106 // Enter an internal frame. | 1115 // Enter an internal frame. |
1107 __ EnterInternalFrame(); | 1116 { |
| 1117 FrameScope scope(masm, StackFrame::INTERNAL); |
1108 | 1118 |
1109 // Preserve the function. | 1119 // Preserve the function. |
1110 __ push(r1); | 1120 __ push(r1); |
1111 // Push call kind information. | 1121 // Push call kind information. |
1112 __ push(r5); | 1122 __ push(r5); |
1113 | 1123 |
1114 // Push the function on the stack as the argument to the runtime function. | 1124 // Push the function on the stack as the argument to the runtime function. |
1115 __ push(r1); | 1125 __ push(r1); |
1116 __ CallRuntime(Runtime::kLazyRecompile, 1); | 1126 __ CallRuntime(Runtime::kLazyRecompile, 1); |
1117 // Calculate the entry point. | 1127 // Calculate the entry point. |
1118 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); | 1128 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); |
1119 | 1129 |
1120 // Restore call kind information. | 1130 // Restore call kind information. |
1121 __ pop(r5); | 1131 __ pop(r5); |
1122 // Restore saved function. | 1132 // Restore saved function. |
1123 __ pop(r1); | 1133 __ pop(r1); |
1124 | 1134 |
1125 // Tear down temporary frame. | 1135 // Tear down internal frame. |
1126 __ LeaveInternalFrame(); | 1136 } |
1127 | 1137 |
1128 // Do a tail-call of the compiled function. | 1138 // Do a tail-call of the compiled function. |
1129 __ Jump(r2); | 1139 __ Jump(r2); |
1130 } | 1140 } |
1131 | 1141 |
1132 | 1142 |
1133 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | 1143 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, |
1134 Deoptimizer::BailoutType type) { | 1144 Deoptimizer::BailoutType type) { |
1135 __ EnterInternalFrame(); | 1145 { |
1136 // Pass the function and deoptimization type to the runtime system. | 1146 FrameScope scope(masm, StackFrame::INTERNAL); |
1137 __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type)))); | 1147 // Pass the function and deoptimization type to the runtime system. |
1138 __ push(r0); | 1148 __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type)))); |
1139 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); | 1149 __ push(r0); |
1140 __ LeaveInternalFrame(); | 1150 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); |
| 1151 } |
1141 | 1152 |
1142 // Get the full codegen state from the stack and untag it -> r6. | 1153 // Get the full codegen state from the stack and untag it -> r6. |
1143 __ ldr(r6, MemOperand(sp, 0 * kPointerSize)); | 1154 __ ldr(r6, MemOperand(sp, 0 * kPointerSize)); |
1144 __ SmiUntag(r6); | 1155 __ SmiUntag(r6); |
1145 // Switch on the state. | 1156 // Switch on the state. |
1146 Label with_tos_register, unknown_state; | 1157 Label with_tos_register, unknown_state; |
1147 __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS)); | 1158 __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS)); |
1148 __ b(ne, &with_tos_register); | 1159 __ b(ne, &with_tos_register); |
1149 __ add(sp, sp, Operand(1 * kPointerSize)); // Remove state. | 1160 __ add(sp, sp, Operand(1 * kPointerSize)); // Remove state. |
1150 __ Ret(); | 1161 __ Ret(); |
(...skipping 19 matching lines...)
1170 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); | 1181 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); |
1171 } | 1182 } |
1172 | 1183 |
1173 | 1184 |
1174 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) { | 1185 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) { |
1175 // For now, we are relying on the fact that Runtime::NotifyOSR | 1186 // For now, we are relying on the fact that Runtime::NotifyOSR |
1176 // doesn't do any garbage collection which allows us to save/restore | 1187 // doesn't do any garbage collection which allows us to save/restore |
1177 // the registers without worrying about which of them contain | 1188 // the registers without worrying about which of them contain |
1178 // pointers. This seems a bit fragile. | 1189 // pointers. This seems a bit fragile. |
1179 __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit()); | 1190 __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit()); |
1180 __ EnterInternalFrame(); | 1191 { |
1181 __ CallRuntime(Runtime::kNotifyOSR, 0); | 1192 FrameScope scope(masm, StackFrame::INTERNAL); |
1182 __ LeaveInternalFrame(); | 1193 __ CallRuntime(Runtime::kNotifyOSR, 0); |
| 1194 } |
1183 __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit()); | 1195 __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit()); |
1184 __ Ret(); | 1196 __ Ret(); |
1185 } | 1197 } |
1186 | 1198 |
1187 | 1199 |
1188 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { | 1200 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { |
1189 CpuFeatures::TryForceFeatureScope scope(VFP3); | 1201 CpuFeatures::TryForceFeatureScope scope(VFP3); |
1190 if (!CpuFeatures::IsSupported(VFP3)) { | 1202 if (!CpuFeatures::IsSupported(VFP3)) { |
1191 __ Abort("Unreachable code: Cannot optimize without VFP3 support."); | 1203 __ Abort("Unreachable code: Cannot optimize without VFP3 support."); |
1192 return; | 1204 return; |
1193 } | 1205 } |
1194 | 1206 |
1195 // Lookup the function in the JavaScript frame and push it as an | 1207 // Lookup the function in the JavaScript frame and push it as an |
1196 // argument to the on-stack replacement function. | 1208 // argument to the on-stack replacement function. |
1197 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1209 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
1198 __ EnterInternalFrame(); | 1210 { |
1199 __ push(r0); | 1211 FrameScope scope(masm, StackFrame::INTERNAL); |
1200 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); | 1212 __ push(r0); |
1201 __ LeaveInternalFrame(); | 1213 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); |
| 1214 } |
1202 | 1215 |
1203 // If the result was -1 it means that we couldn't optimize the | 1216 // If the result was -1 it means that we couldn't optimize the |
1204 // function. Just return and continue in the unoptimized version. | 1217 // function. Just return and continue in the unoptimized version. |
1205 Label skip; | 1218 Label skip; |
1206 __ cmp(r0, Operand(Smi::FromInt(-1))); | 1219 __ cmp(r0, Operand(Smi::FromInt(-1))); |
1207 __ b(ne, &skip); | 1220 __ b(ne, &skip); |
1208 __ Ret(); | 1221 __ Ret(); |
1209 | 1222 |
1210 __ bind(&skip); | 1223 __ bind(&skip); |
1211 // Untag the AST id and push it on the stack. | 1224 // Untag the AST id and push it on the stack. |
(...skipping 63 matching lines...)
1275 __ LoadRoot(r3, Heap::kNullValueRootIndex); | 1288 __ LoadRoot(r3, Heap::kNullValueRootIndex); |
1276 __ cmp(r2, r3); | 1289 __ cmp(r2, r3); |
1277 __ b(eq, &use_global_receiver); | 1290 __ b(eq, &use_global_receiver); |
1278 | 1291 |
1279 STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE); | 1292 STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE); |
1280 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); | 1293 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); |
1281 __ CompareObjectType(r2, r3, r3, FIRST_JS_OBJECT_TYPE); | 1294 __ CompareObjectType(r2, r3, r3, FIRST_JS_OBJECT_TYPE); |
1282 __ b(ge, &shift_arguments); | 1295 __ b(ge, &shift_arguments); |
1283 | 1296 |
1284 __ bind(&convert_to_object); | 1297 __ bind(&convert_to_object); |
1285 __ EnterInternalFrame(); // In order to preserve argument count. | |
1286 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); // Smi-tagged. | |
1287 __ push(r0); | |
1288 | 1298 |
1289 __ push(r2); | 1299 { |
1290 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 1300 // Enter an internal frame in order to preserve argument count. |
1291 __ mov(r2, r0); | 1301 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1302 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); // Smi-tagged. |
| 1303 __ push(r0); |
1292 | 1304 |
1293 __ pop(r0); | 1305 __ push(r2); |
1294 __ mov(r0, Operand(r0, ASR, kSmiTagSize)); | 1306 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
1295 __ LeaveInternalFrame(); | 1307 __ mov(r2, r0); |
| 1308 |
| 1309 __ pop(r0); |
| 1310 __ mov(r0, Operand(r0, ASR, kSmiTagSize)); |
| 1311 |
| 1312 // Exit the internal frame. |
| 1313 } |
| 1314 |
1296 // Restore the function to r1. | 1315 // Restore the function to r1. |
1297 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); | 1316 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); |
1298 __ jmp(&patch_receiver); | 1317 __ jmp(&patch_receiver); |
1299 | 1318 |
1300 // Use the global receiver object from the called function as the | 1319 // Use the global receiver object from the called function as the |
1301 // receiver. | 1320 // receiver. |
1302 __ bind(&use_global_receiver); | 1321 __ bind(&use_global_receiver); |
1303 const int kGlobalIndex = | 1322 const int kGlobalIndex = |
1304 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; | 1323 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; |
1305 __ ldr(r2, FieldMemOperand(cp, kGlobalIndex)); | 1324 __ ldr(r2, FieldMemOperand(cp, kGlobalIndex)); |
(...skipping 79 matching lines...)
1385 } | 1404 } |
1386 | 1405 |
1387 | 1406 |
1388 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { | 1407 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { |
1389 const int kIndexOffset = -5 * kPointerSize; | 1408 const int kIndexOffset = -5 * kPointerSize; |
1390 const int kLimitOffset = -4 * kPointerSize; | 1409 const int kLimitOffset = -4 * kPointerSize; |
1391 const int kArgsOffset = 2 * kPointerSize; | 1410 const int kArgsOffset = 2 * kPointerSize; |
1392 const int kRecvOffset = 3 * kPointerSize; | 1411 const int kRecvOffset = 3 * kPointerSize; |
1393 const int kFunctionOffset = 4 * kPointerSize; | 1412 const int kFunctionOffset = 4 * kPointerSize; |
1394 | 1413 |
1395 __ EnterInternalFrame(); | 1414 { |
| 1415 FrameScope scope(masm, StackFrame::INTERNAL); |
1396 | 1416 |
1397 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function | 1417 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function |
1398 __ push(r0); | 1418 __ push(r0); |
1399 __ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array | 1419 __ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array |
1400 __ push(r0); | 1420 __ push(r0); |
1401 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); | 1421 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); |
1402 | 1422 |
1403 // Check the stack for overflow. We are not trying to catch | 1423 // Check the stack for overflow. We are not trying to catch |
1404 // interruptions (e.g. debug break and preemption) here, so the "real stack | 1424 // interruptions (e.g. debug break and preemption) here, so the "real stack |
1405 // limit" is checked. | 1425 // limit" is checked. |
(...skipping 107 matching lines...)
1513 __ b(ne, &loop); | 1533 __ b(ne, &loop); |
1514 | 1534 |
1515 // Invoke the function. | 1535 // Invoke the function. |
1516 ParameterCount actual(r0); | 1536 ParameterCount actual(r0); |
1517 __ mov(r0, Operand(r0, ASR, kSmiTagSize)); | 1537 __ mov(r0, Operand(r0, ASR, kSmiTagSize)); |
1518 __ ldr(r1, MemOperand(fp, kFunctionOffset)); | 1538 __ ldr(r1, MemOperand(fp, kFunctionOffset)); |
1519 __ InvokeFunction(r1, actual, CALL_FUNCTION, | 1539 __ InvokeFunction(r1, actual, CALL_FUNCTION, |
1520 NullCallWrapper(), CALL_AS_METHOD); | 1540 NullCallWrapper(), CALL_AS_METHOD); |
1521 | 1541 |
1522 // Tear down the internal frame and remove function, receiver and args. | 1542 // Tear down the internal frame and remove function, receiver and args. |
1523 __ LeaveInternalFrame(); | 1543 } |
| 1544 |
1524 __ add(sp, sp, Operand(3 * kPointerSize)); | 1545 __ add(sp, sp, Operand(3 * kPointerSize)); |
1525 __ Jump(lr); | 1546 __ Jump(lr); |
1526 } | 1547 } |
1527 | 1548 |
1528 | 1549 |
1529 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { | 1550 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { |
1530 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); | 1551 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); |
1531 __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 1552 __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
1532 __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit()); | 1553 __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit()); |
1533 __ add(fp, sp, Operand(3 * kPointerSize)); | 1554 __ add(fp, sp, Operand(3 * kPointerSize)); |
(...skipping 117 matching lines...)
1651 __ bind(&dont_adapt_arguments); | 1672 __ bind(&dont_adapt_arguments); |
1652 __ Jump(r3); | 1673 __ Jump(r3); |
1653 } | 1674 } |
1654 | 1675 |
1655 | 1676 |
1656 #undef __ | 1677 #undef __ |
1657 | 1678 |
1658 } } // namespace v8::internal | 1679 } } // namespace v8::internal |
1659 | 1680 |
1660 #endif // V8_TARGET_ARCH_ARM | 1681 #endif // V8_TARGET_ARCH_ARM |
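
The pattern applied throughout this change: each explicit __ EnterInternalFrame() / __ LeaveInternalFrame() (or EnterConstructFrame / LeaveConstructFrame) pair is replaced by a block-scoped FrameScope object, so the frame teardown is emitted when the scope closes rather than by a separate call that can be forgotten or skipped. The following is a minimal, self-contained C++ sketch of that RAII idea only; the class and method names below are illustrative stand-ins and not V8's real FrameScope or MacroAssembler API.

// Hypothetical sketch of the RAII pattern this patch adopts.
#include <cstdio>

struct Assembler {                        // stand-in for MacroAssembler
  void EmitEnterInternalFrame() { std::puts("enter internal frame"); }
  void EmitLeaveInternalFrame() { std::puts("leave internal frame"); }
};

class InternalFrameScope {                // stand-in for FrameScope
 public:
  explicit InternalFrameScope(Assembler* masm) : masm_(masm) {
    masm_->EmitEnterInternalFrame();      // emitted when the scope opens
  }
  ~InternalFrameScope() {
    masm_->EmitLeaveInternalFrame();      // emitted when the scope closes
  }
 private:
  Assembler* masm_;
};

int main() {
  Assembler masm;
  {
    InternalFrameScope scope(&masm);      // replaces EnterInternalFrame()
    std::puts("... generate code that needs the frame ...");
  }                                       // replaces LeaveInternalFrame()
  std::puts("... code after the frame has been torn down ...");
  return 0;
}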