OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 244 matching lines...)
255 __ b(ne, &convert_argument); | 255 __ b(ne, &convert_argument); |
256 __ mov(argument, r0); | 256 __ mov(argument, r0); |
257 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); | 257 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); |
258 __ b(&argument_is_string); | 258 __ b(&argument_is_string); |
259 | 259 |
260 // Invoke the conversion builtin and put the result into r2. | 260 // Invoke the conversion builtin and put the result into r2. |
261 __ bind(&convert_argument); | 261 __ bind(&convert_argument); |
262 __ push(function); // Preserve the function. | 262 __ push(function); // Preserve the function. |
263 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); | 263 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); |
264 { | 264 { |
265 FrameScope scope(masm, StackFrame::INTERNAL); | 265 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
266 __ push(r0); | 266 __ push(r0); |
267 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); | 267 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); |
268 } | 268 } |
269 __ pop(function); | 269 __ pop(function); |
270 __ mov(argument, r0); | 270 __ mov(argument, r0); |
271 __ b(&argument_is_string); | 271 __ b(&argument_is_string); |
272 | 272 |
273 // Load the empty string into r2, remove the receiver from the | 273 // Load the empty string into r2, remove the receiver from the |
274 // stack, and jump back to the case where the argument is a string. | 274 // stack, and jump back to the case where the argument is a string. |
275 __ bind(&no_arguments); | 275 __ bind(&no_arguments); |
276 __ LoadRoot(argument, Heap::kempty_stringRootIndex); | 276 __ LoadRoot(argument, Heap::kempty_stringRootIndex); |
277 __ Drop(1); | 277 __ Drop(1); |
278 __ b(&argument_is_string); | 278 __ b(&argument_is_string); |
279 | 279 |
280 // At this point the argument is already a string. Call runtime to | 280 // At this point the argument is already a string. Call runtime to |
281 // create a string wrapper. | 281 // create a string wrapper. |
282 __ bind(&gc_required); | 282 __ bind(&gc_required); |
283 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4); | 283 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4); |
284 { | 284 { |
285 FrameScope scope(masm, StackFrame::INTERNAL); | 285 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
286 __ push(argument); | 286 __ push(argument); |
287 __ CallRuntime(Runtime::kNewStringWrapper, 1); | 287 __ CallRuntime(Runtime::kNewStringWrapper, 1); |
288 } | 288 } |
289 __ Ret(); | 289 __ Ret(); |
290 } | 290 } |
291 | 291 |
292 | 292 |
293 static void CallRuntimePassFunction( | 293 static void CallRuntimePassFunction( |
294 MacroAssembler* masm, Runtime::FunctionId function_id) { | 294 MacroAssembler* masm, Runtime::FunctionId function_id) { |
295 FrameScope scope(masm, StackFrame::INTERNAL); | 295 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
296 // Push a copy of the function onto the stack. | 296 // Push a copy of the function onto the stack. |
297 __ push(r1); | 297 __ push(r1); |
298 // Push function as parameter to the runtime call. | 298 // Push function as parameter to the runtime call. |
299 __ Push(r1); | 299 __ Push(r1); |
300 | 300 |
301 __ CallRuntime(function_id, 1); | 301 __ CallRuntime(function_id, 1); |
302 // Restore receiver. | 302 // Restore receiver. |
303 __ pop(r1); | 303 __ pop(r1); |
304 } | 304 } |
305 | 305 |
(...skipping 41 matching lines...)
347 // -- sp[...]: constructor arguments | 347 // -- sp[...]: constructor arguments |
348 // ----------------------------------- | 348 // ----------------------------------- |
349 | 349 |
350 // Should never count constructions for api objects. | 350 // Should never count constructions for api objects. |
351 ASSERT(!is_api_function || !count_constructions); | 351 ASSERT(!is_api_function || !count_constructions); |
352 | 352 |
353 Isolate* isolate = masm->isolate(); | 353 Isolate* isolate = masm->isolate(); |
354 | 354 |
355 // Enter a construct frame. | 355 // Enter a construct frame. |
356 { | 356 { |
357 FrameScope scope(masm, StackFrame::CONSTRUCT); | 357 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT); |
358 | 358 |
359 // Preserve the two incoming parameters on the stack. | 359 // Preserve the two incoming parameters on the stack. |
360 __ SmiTag(r0); | 360 __ SmiTag(r0); |
361 __ push(r0); // Smi-tagged arguments count. | 361 __ push(r0); // Smi-tagged arguments count. |
362 __ push(r1); // Constructor function. | 362 __ push(r1); // Constructor function. |
363 | 363 |
364 // Try to allocate the object without transitioning into C code. If any of | 364 // Try to allocate the object without transitioning into C code. If any of |
365 // the preconditions is not met, the code bails out to the runtime call. | 365 // the preconditions is not met, the code bails out to the runtime call. |
366 Label rt_call, allocated; | 366 Label rt_call, allocated; |
367 if (FLAG_inline_new) { | 367 if (FLAG_inline_new) { |
(...skipping 399 matching lines...)
767 } | 767 } |
768 | 768 |
769 | 769 |
770 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { | 770 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { |
771 CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized); | 771 CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized); |
772 GenerateTailCallToReturnedCode(masm); | 772 GenerateTailCallToReturnedCode(masm); |
773 } | 773 } |
774 | 774 |
775 | 775 |
776 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { | 776 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { |
777 FrameScope scope(masm, StackFrame::INTERNAL); | 777 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
778 // Push a copy of the function onto the stack. | 778 // Push a copy of the function onto the stack. |
779 __ push(r1); | 779 __ push(r1); |
780 // Push function as parameter to the runtime call. | 780 // Push function as parameter to the runtime call. |
781 __ Push(r1); | 781 __ Push(r1); |
782 // Whether to compile in a background thread. | 782 // Whether to compile in a background thread. |
783 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); | 783 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); |
784 | 784 |
785 __ CallRuntime(Runtime::kCompileOptimized, 2); | 785 __ CallRuntime(Runtime::kCompileOptimized, 2); |
786 // Restore receiver. | 786 // Restore receiver. |
787 __ pop(r1); | 787 __ pop(r1); |
(...skipping 75 matching lines...)
863 | 863 |
864 | 864 |
865 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { | 865 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { |
866 GenerateMakeCodeYoungAgainCommon(masm); | 866 GenerateMakeCodeYoungAgainCommon(masm); |
867 } | 867 } |
868 | 868 |
869 | 869 |
870 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, | 870 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, |
871 SaveFPRegsMode save_doubles) { | 871 SaveFPRegsMode save_doubles) { |
872 { | 872 { |
873 FrameScope scope(masm, StackFrame::INTERNAL); | 873 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
874 | 874 |
875 // Preserve registers across notification, this is important for compiled | 875 // Preserve registers across notification, this is important for compiled |
876 // stubs that tail call the runtime on deopts passing their parameters in | 876 // stubs that tail call the runtime on deopts passing their parameters in |
877 // registers. | 877 // registers. |
878 __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved); | 878 __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved); |
879 // Pass the function and deoptimization type to the runtime system. | 879 // Pass the function and deoptimization type to the runtime system. |
880 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); | 880 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); |
881 __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved); | 881 __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved); |
882 } | 882 } |
883 | 883 |
884 __ add(sp, sp, Operand(kPointerSize)); // Ignore state | 884 __ add(sp, sp, Operand(kPointerSize)); // Ignore state |
885 __ mov(pc, lr); // Jump to miss handler | 885 __ mov(pc, lr); // Jump to miss handler |
886 } | 886 } |
887 | 887 |
888 | 888 |
889 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { | 889 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { |
890 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); | 890 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); |
891 } | 891 } |
892 | 892 |
893 | 893 |
894 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { | 894 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { |
895 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); | 895 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); |
896 } | 896 } |
897 | 897 |
898 | 898 |
899 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | 899 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, |
900 Deoptimizer::BailoutType type) { | 900 Deoptimizer::BailoutType type) { |
901 { | 901 { |
902 FrameScope scope(masm, StackFrame::INTERNAL); | 902 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
903 // Pass the function and deoptimization type to the runtime system. | 903 // Pass the function and deoptimization type to the runtime system. |
904 __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type)))); | 904 __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type)))); |
905 __ push(r0); | 905 __ push(r0); |
906 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); | 906 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); |
907 } | 907 } |
908 | 908 |
909 // Get the full codegen state from the stack and untag it -> r6. | 909 // Get the full codegen state from the stack and untag it -> r6. |
910 __ ldr(r6, MemOperand(sp, 0 * kPointerSize)); | 910 __ ldr(r6, MemOperand(sp, 0 * kPointerSize)); |
911 __ SmiUntag(r6); | 911 __ SmiUntag(r6); |
912 // Switch on the state. | 912 // Switch on the state. |
(...skipping 27 matching lines...)
940 | 940 |
941 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { | 941 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { |
942 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); | 942 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); |
943 } | 943 } |
944 | 944 |
945 | 945 |
946 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { | 946 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { |
947 // Lookup the function in the JavaScript frame. | 947 // Lookup the function in the JavaScript frame. |
948 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 948 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
949 { | 949 { |
950 FrameScope scope(masm, StackFrame::INTERNAL); | 950 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
951 // Pass function as argument. | 951 // Pass function as argument. |
952 __ push(r0); | 952 __ push(r0); |
953 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); | 953 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); |
954 } | 954 } |
955 | 955 |
956 // If the code object is null, just return to the unoptimized code. | 956 // If the code object is null, just return to the unoptimized code. |
957 Label skip; | 957 Label skip; |
958 __ cmp(r0, Operand(Smi::FromInt(0))); | 958 __ cmp(r0, Operand(Smi::FromInt(0))); |
959 __ b(ne, &skip); | 959 __ b(ne, &skip); |
960 __ Ret(); | 960 __ Ret(); |
(...skipping 19 matching lines...)
980 } | 980 } |
981 | 981 |
982 | 982 |
983 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { | 983 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { |
984 // We check the stack limit as indicator that recompilation might be done. | 984 // We check the stack limit as indicator that recompilation might be done. |
985 Label ok; | 985 Label ok; |
986 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 986 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
987 __ cmp(sp, Operand(ip)); | 987 __ cmp(sp, Operand(ip)); |
988 __ b(hs, &ok); | 988 __ b(hs, &ok); |
989 { | 989 { |
990 FrameScope scope(masm, StackFrame::INTERNAL); | 990 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
991 __ CallRuntime(Runtime::kStackGuard, 0); | 991 __ CallRuntime(Runtime::kStackGuard, 0); |
992 } | 992 } |
993 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), | 993 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), |
994 RelocInfo::CODE_TARGET); | 994 RelocInfo::CODE_TARGET); |
995 | 995 |
996 __ bind(&ok); | 996 __ bind(&ok); |
997 __ Ret(); | 997 __ Ret(); |
998 } | 998 } |
999 | 999 |
1000 | 1000 |
(...skipping 54 matching lines...)
1055 __ b(eq, &use_global_receiver); | 1055 __ b(eq, &use_global_receiver); |
1056 | 1056 |
1057 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); | 1057 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
1058 __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE); | 1058 __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE); |
1059 __ b(ge, &shift_arguments); | 1059 __ b(ge, &shift_arguments); |
1060 | 1060 |
1061 __ bind(&convert_to_object); | 1061 __ bind(&convert_to_object); |
1062 | 1062 |
1063 { | 1063 { |
1064 // Enter an internal frame in order to preserve argument count. | 1064 // Enter an internal frame in order to preserve argument count. |
1065 FrameScope scope(masm, StackFrame::INTERNAL); | 1065 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
1066 __ SmiTag(r0); | 1066 __ SmiTag(r0); |
1067 __ push(r0); | 1067 __ push(r0); |
1068 | 1068 |
1069 __ push(r2); | 1069 __ push(r2); |
1070 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 1070 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
1071 __ mov(r2, r0); | 1071 __ mov(r2, r0); |
1072 | 1072 |
1073 __ pop(r0); | 1073 __ pop(r0); |
1074 __ SmiUntag(r0); | 1074 __ SmiUntag(r0); |
1075 | 1075 |
(...skipping 106 matching lines...)
1182 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { | 1182 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { |
1183 const int kIndexOffset = | 1183 const int kIndexOffset = |
1184 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); | 1184 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); |
1185 const int kLimitOffset = | 1185 const int kLimitOffset = |
1186 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); | 1186 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); |
1187 const int kArgsOffset = 2 * kPointerSize; | 1187 const int kArgsOffset = 2 * kPointerSize; |
1188 const int kRecvOffset = 3 * kPointerSize; | 1188 const int kRecvOffset = 3 * kPointerSize; |
1189 const int kFunctionOffset = 4 * kPointerSize; | 1189 const int kFunctionOffset = 4 * kPointerSize; |
1190 | 1190 |
1191 { | 1191 { |
1192 FrameScope frame_scope(masm, StackFrame::INTERNAL); | 1192 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); |
1193 | 1193 |
1194 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function | 1194 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function |
1195 __ push(r0); | 1195 __ push(r0); |
1196 __ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array | 1196 __ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array |
1197 __ push(r0); | 1197 __ push(r0); |
1198 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); | 1198 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); |
1199 | 1199 |
1200 // Check the stack for overflow. We are not trying to catch | 1200 // Check the stack for overflow. We are not trying to catch |
1201 // interruptions (e.g. debug break and preemption) here, so the "real stack | 1201 // interruptions (e.g. debug break and preemption) here, so the "real stack |
1202 // limit" is checked. | 1202 // limit" is checked. |
(...skipping 265 matching lines...)
1468 __ bind(&dont_adapt_arguments); | 1468 __ bind(&dont_adapt_arguments); |
1469 __ Jump(r3); | 1469 __ Jump(r3); |
1470 } | 1470 } |
1471 | 1471 |
1472 | 1472 |
1473 #undef __ | 1473 #undef __ |
1474 | 1474 |
1475 } } // namespace v8::internal | 1475 } } // namespace v8::internal |
1476 | 1476 |
1477 #endif // V8_TARGET_ARCH_ARM | 1477 #endif // V8_TARGET_ARCH_ARM |
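Every hunk in this change makes the same substitution: builtins that open an INTERNAL or CONSTRUCT frame switch from FrameScope to FrameAndConstantPoolScope, so that (as the class name suggests) constant-pool state is handled together with the rest of the frame. A minimal sketch of the resulting pattern, assembled only from constructs that already appear above; the builtin name is hypothetical, and the register and runtime call are illustrative:

static void Generate_ExampleBuiltin(MacroAssembler* masm) {  // hypothetical name
  {
    // Block-scoped like the old FrameScope: the frame is set up here and
    // torn down at the closing brace; the new scope's name indicates it
    // also tracks the constant pool.
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ push(r0);                              // preserve a live register across the call
    __ CallRuntime(Runtime::kStackGuard, 0);  // any runtime call made inside the frame
    __ pop(r0);                               // restore it before leaving the frame
  }
  __ Ret();
}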