| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 137 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 148 // Initial map for the builtin Array functions should be maps. | 148 // Initial map for the builtin Array functions should be maps. |
| 149 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); | 149 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); |
| 150 __ SmiTst(r2); | 150 __ SmiTst(r2); |
| 151 __ Assert(ne, kUnexpectedInitialMapForArrayFunction); | 151 __ Assert(ne, kUnexpectedInitialMapForArrayFunction); |
| 152 __ CompareObjectType(r2, r3, r4, MAP_TYPE); | 152 __ CompareObjectType(r2, r3, r4, MAP_TYPE); |
| 153 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); | 153 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); |
| 154 } | 154 } |
| 155 | 155 |
| 156 // Run the native code for the Array function called as a normal function. | 156 // Run the native code for the Array function called as a normal function. |
| 157 // tail call a stub | 157 // tail call a stub |
| 158 Handle<Object> undefined_sentinel( | 158 Handle<Object> megamorphic_sentinel = |
| 159 masm->isolate()->heap()->undefined_value(), | 159 TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()); |
| 160 masm->isolate()); | 160 __ mov(r2, Operand(megamorphic_sentinel)); |
| 161 __ mov(r2, Operand(undefined_sentinel)); | |
| 162 ArrayConstructorStub stub(masm->isolate()); | 161 ArrayConstructorStub stub(masm->isolate()); |
| 163 __ TailCallStub(&stub); | 162 __ TailCallStub(&stub); |
| 164 } | 163 } |
| 165 | 164 |
| 166 | 165 |
| 167 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { | 166 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { |
| 168 // ----------- S t a t e ------------- | 167 // ----------- S t a t e ------------- |
| 169 // -- r0 : number of arguments | 168 // -- r0 : number of arguments |
| 170 // -- r1 : constructor function | 169 // -- r1 : constructor function |
| 171 // -- lr : return address | 170 // -- lr : return address |
| (...skipping 83 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 255 __ b(ne, &convert_argument); | 254 __ b(ne, &convert_argument); |
| 256 __ mov(argument, r0); | 255 __ mov(argument, r0); |
| 257 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); | 256 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); |
| 258 __ b(&argument_is_string); | 257 __ b(&argument_is_string); |
| 259 | 258 |
| 260 // Invoke the conversion builtin and put the result into r2. | 259 // Invoke the conversion builtin and put the result into r2. |
| 261 __ bind(&convert_argument); | 260 __ bind(&convert_argument); |
| 262 __ push(function); // Preserve the function. | 261 __ push(function); // Preserve the function. |
| 263 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); | 262 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4); |
| 264 { | 263 { |
| 265 FrameScope scope(masm, StackFrame::INTERNAL); | 264 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 266 __ push(r0); | 265 __ push(r0); |
| 267 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); | 266 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); |
| 268 } | 267 } |
| 269 __ pop(function); | 268 __ pop(function); |
| 270 __ mov(argument, r0); | 269 __ mov(argument, r0); |
| 271 __ b(&argument_is_string); | 270 __ b(&argument_is_string); |
| 272 | 271 |
| 273 // Load the empty string into r2, remove the receiver from the | 272 // Load the empty string into r2, remove the receiver from the |
| 274 // stack, and jump back to the case where the argument is a string. | 273 // stack, and jump back to the case where the argument is a string. |
| 275 __ bind(&no_arguments); | 274 __ bind(&no_arguments); |
| 276 __ LoadRoot(argument, Heap::kempty_stringRootIndex); | 275 __ LoadRoot(argument, Heap::kempty_stringRootIndex); |
| 277 __ Drop(1); | 276 __ Drop(1); |
| 278 __ b(&argument_is_string); | 277 __ b(&argument_is_string); |
| 279 | 278 |
| 280 // At this point the argument is already a string. Call runtime to | 279 // At this point the argument is already a string. Call runtime to |
| 281 // create a string wrapper. | 280 // create a string wrapper. |
| 282 __ bind(&gc_required); | 281 __ bind(&gc_required); |
| 283 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4); | 282 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4); |
| 284 { | 283 { |
| 285 FrameScope scope(masm, StackFrame::INTERNAL); | 284 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 286 __ push(argument); | 285 __ push(argument); |
| 287 __ CallRuntime(Runtime::kNewStringWrapper, 1); | 286 __ CallRuntime(Runtime::kNewStringWrapper, 1); |
| 288 } | 287 } |
| 289 __ Ret(); | 288 __ Ret(); |
| 290 } | 289 } |
| 291 | 290 |
| 292 | 291 |
| 293 static void CallRuntimePassFunction( | 292 static void CallRuntimePassFunction( |
| 294 MacroAssembler* masm, Runtime::FunctionId function_id) { | 293 MacroAssembler* masm, Runtime::FunctionId function_id) { |
| 295 FrameScope scope(masm, StackFrame::INTERNAL); | 294 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 296 // Push a copy of the function onto the stack. | 295 // Push a copy of the function onto the stack. |
| 297 __ push(r1); | 296 __ push(r1); |
| 298 // Push function as parameter to the runtime call. | 297 // Push function as parameter to the runtime call. |
| 299 __ Push(r1); | 298 __ Push(r1); |
| 300 | 299 |
| 301 __ CallRuntime(function_id, 1); | 300 __ CallRuntime(function_id, 1); |
| 302 // Restore receiver. | 301 // Restore receiver. |
| 303 __ pop(r1); | 302 __ pop(r1); |
| 304 } | 303 } |
| 305 | 304 |
| (...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 347 // -- sp[...]: constructor arguments | 346 // -- sp[...]: constructor arguments |
| 348 // ----------------------------------- | 347 // ----------------------------------- |
| 349 | 348 |
| 350 // Should never count constructions for api objects. | 349 // Should never count constructions for api objects. |
| 351 ASSERT(!is_api_function || !count_constructions); | 350 ASSERT(!is_api_function || !count_constructions); |
| 352 | 351 |
| 353 Isolate* isolate = masm->isolate(); | 352 Isolate* isolate = masm->isolate(); |
| 354 | 353 |
| 355 // Enter a construct frame. | 354 // Enter a construct frame. |
| 356 { | 355 { |
| 357 FrameScope scope(masm, StackFrame::CONSTRUCT); | 356 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT); |
| 358 | 357 |
| 359 // Preserve the two incoming parameters on the stack. | 358 // Preserve the two incoming parameters on the stack. |
| 360 __ SmiTag(r0); | 359 __ SmiTag(r0); |
| 361 __ push(r0); // Smi-tagged arguments count. | 360 __ push(r0); // Smi-tagged arguments count. |
| 362 __ push(r1); // Constructor function. | 361 __ push(r1); // Constructor function. |
| 363 | 362 |
| 364 // Try to allocate the object without transitioning into C code. If any of | 363 // Try to allocate the object without transitioning into C code. If any of |
| 365 // the preconditions is not met, the code bails out to the runtime call. | 364 // the preconditions is not met, the code bails out to the runtime call. |
| 366 Label rt_call, allocated; | 365 Label rt_call, allocated; |
| 367 if (FLAG_inline_new) { | 366 if (FLAG_inline_new) { |
| (...skipping 363 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 731 __ mov(r8, Operand(r4)); | 730 __ mov(r8, Operand(r4)); |
| 732 } | 731 } |
| 733 if (kR9Available == 1) { | 732 if (kR9Available == 1) { |
| 734 __ mov(r9, Operand(r4)); | 733 __ mov(r9, Operand(r4)); |
| 735 } | 734 } |
| 736 | 735 |
| 737 // Invoke the code and pass argc as r0. | 736 // Invoke the code and pass argc as r0. |
| 738 __ mov(r0, Operand(r3)); | 737 __ mov(r0, Operand(r3)); |
| 739 if (is_construct) { | 738 if (is_construct) { |
| 740 // No type feedback cell is available | 739 // No type feedback cell is available |
| 741 Handle<Object> undefined_sentinel( | 740 Handle<Object> megamorphic_sentinel = |
| 742 masm->isolate()->heap()->undefined_value(), masm->isolate()); | 741 TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()); |
| 743 __ mov(r2, Operand(undefined_sentinel)); | 742 __ mov(r2, Operand(megamorphic_sentinel)); |
| 744 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); | 743 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); |
| 745 __ CallStub(&stub); | 744 __ CallStub(&stub); |
| 746 } else { | 745 } else { |
| 747 ParameterCount actual(r0); | 746 ParameterCount actual(r0); |
| 748 __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper()); | 747 __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper()); |
| 749 } | 748 } |
| 750 // Exit the JS frame and remove the parameters (except function), and | 749 // Exit the JS frame and remove the parameters (except function), and |
| 751 // return. | 750 // return. |
| 752 // Respect ABI stack constraint. | 751 // Respect ABI stack constraint. |
| 753 } | 752 } |
| (...skipping 13 matching lines...) Expand all Loading... |
| 767 } | 766 } |
| 768 | 767 |
| 769 | 768 |
| 770 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { | 769 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { |
| 771 CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized); | 770 CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized); |
| 772 GenerateTailCallToReturnedCode(masm); | 771 GenerateTailCallToReturnedCode(masm); |
| 773 } | 772 } |
| 774 | 773 |
| 775 | 774 |
| 776 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { | 775 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { |
| 777 FrameScope scope(masm, StackFrame::INTERNAL); | 776 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 778 // Push a copy of the function onto the stack. | 777 // Push a copy of the function onto the stack. |
| 779 __ push(r1); | 778 __ push(r1); |
| 780 // Push function as parameter to the runtime call. | 779 // Push function as parameter to the runtime call. |
| 781 __ Push(r1); | 780 __ Push(r1); |
| 782 // Whether to compile in a background thread. | 781 // Whether to compile in a background thread. |
| 783 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); | 782 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); |
| 784 | 783 |
| 785 __ CallRuntime(Runtime::kCompileOptimized, 2); | 784 __ CallRuntime(Runtime::kCompileOptimized, 2); |
| 786 // Restore receiver. | 785 // Restore receiver. |
| 787 __ pop(r1); | 786 __ pop(r1); |
| (...skipping 75 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 863 | 862 |
| 864 | 863 |
| 865 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { | 864 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { |
| 866 GenerateMakeCodeYoungAgainCommon(masm); | 865 GenerateMakeCodeYoungAgainCommon(masm); |
| 867 } | 866 } |
| 868 | 867 |
| 869 | 868 |
| 870 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, | 869 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, |
| 871 SaveFPRegsMode save_doubles) { | 870 SaveFPRegsMode save_doubles) { |
| 872 { | 871 { |
| 873 FrameScope scope(masm, StackFrame::INTERNAL); | 872 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 874 | 873 |
| 875 // Preserve registers across notification, this is important for compiled | 874 // Preserve registers across notification, this is important for compiled |
| 876 // stubs that tail call the runtime on deopts passing their parameters in | 875 // stubs that tail call the runtime on deopts passing their parameters in |
| 877 // registers. | 876 // registers. |
| 878 __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved); | 877 __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved); |
| 879 // Pass the function and deoptimization type to the runtime system. | 878 // Pass the function and deoptimization type to the runtime system. |
| 880 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); | 879 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); |
| 881 __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved); | 880 __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved); |
| 882 } | 881 } |
| 883 | 882 |
| 884 __ add(sp, sp, Operand(kPointerSize)); // Ignore state | 883 __ add(sp, sp, Operand(kPointerSize)); // Ignore state |
| 885 __ mov(pc, lr); // Jump to miss handler | 884 __ mov(pc, lr); // Jump to miss handler |
| 886 } | 885 } |
| 887 | 886 |
| 888 | 887 |
| 889 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { | 888 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { |
| 890 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); | 889 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); |
| 891 } | 890 } |
| 892 | 891 |
| 893 | 892 |
| 894 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { | 893 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { |
| 895 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); | 894 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); |
| 896 } | 895 } |
| 897 | 896 |
| 898 | 897 |
| 899 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | 898 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, |
| 900 Deoptimizer::BailoutType type) { | 899 Deoptimizer::BailoutType type) { |
| 901 { | 900 { |
| 902 FrameScope scope(masm, StackFrame::INTERNAL); | 901 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 903 // Pass the function and deoptimization type to the runtime system. | 902 // Pass the function and deoptimization type to the runtime system. |
| 904 __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type)))); | 903 __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type)))); |
| 905 __ push(r0); | 904 __ push(r0); |
| 906 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); | 905 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); |
| 907 } | 906 } |
| 908 | 907 |
| 909 // Get the full codegen state from the stack and untag it -> r6. | 908 // Get the full codegen state from the stack and untag it -> r6. |
| 910 __ ldr(r6, MemOperand(sp, 0 * kPointerSize)); | 909 __ ldr(r6, MemOperand(sp, 0 * kPointerSize)); |
| 911 __ SmiUntag(r6); | 910 __ SmiUntag(r6); |
| 912 // Switch on the state. | 911 // Switch on the state. |
| (...skipping 27 matching lines...) Expand all Loading... |
| 940 | 939 |
| 941 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { | 940 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { |
| 942 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); | 941 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); |
| 943 } | 942 } |
| 944 | 943 |
| 945 | 944 |
| 946 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { | 945 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { |
| 947 // Lookup the function in the JavaScript frame. | 946 // Lookup the function in the JavaScript frame. |
| 948 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 947 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 949 { | 948 { |
| 950 FrameScope scope(masm, StackFrame::INTERNAL); | 949 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 951 // Pass function as argument. | 950 // Pass function as argument. |
| 952 __ push(r0); | 951 __ push(r0); |
| 953 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); | 952 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); |
| 954 } | 953 } |
| 955 | 954 |
| 956 // If the code object is null, just return to the unoptimized code. | 955 // If the code object is null, just return to the unoptimized code. |
| 957 Label skip; | 956 Label skip; |
| 958 __ cmp(r0, Operand(Smi::FromInt(0))); | 957 __ cmp(r0, Operand(Smi::FromInt(0))); |
| 959 __ b(ne, &skip); | 958 __ b(ne, &skip); |
| 960 __ Ret(); | 959 __ Ret(); |
| (...skipping 19 matching lines...) Expand all Loading... |
| 980 } | 979 } |
| 981 | 980 |
| 982 | 981 |
| 983 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { | 982 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { |
| 984 // We check the stack limit as indicator that recompilation might be done. | 983 // We check the stack limit as indicator that recompilation might be done. |
| 985 Label ok; | 984 Label ok; |
| 986 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 985 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
| 987 __ cmp(sp, Operand(ip)); | 986 __ cmp(sp, Operand(ip)); |
| 988 __ b(hs, &ok); | 987 __ b(hs, &ok); |
| 989 { | 988 { |
| 990 FrameScope scope(masm, StackFrame::INTERNAL); | 989 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 991 __ CallRuntime(Runtime::kStackGuard, 0); | 990 __ CallRuntime(Runtime::kStackGuard, 0); |
| 992 } | 991 } |
| 993 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), | 992 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), |
| 994 RelocInfo::CODE_TARGET); | 993 RelocInfo::CODE_TARGET); |
| 995 | 994 |
| 996 __ bind(&ok); | 995 __ bind(&ok); |
| 997 __ Ret(); | 996 __ Ret(); |
| 998 } | 997 } |
| 999 | 998 |
| 1000 | 999 |
| (...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1032 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | 1031 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); |
| 1033 __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset)); | 1032 __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset)); |
| 1034 __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + | 1033 __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + |
| 1035 kSmiTagSize))); | 1034 kSmiTagSize))); |
| 1036 __ b(ne, &shift_arguments); | 1035 __ b(ne, &shift_arguments); |
| 1037 | 1036 |
| 1038 // Do not transform the receiver for native (Compilerhints already in r3). | 1037 // Do not transform the receiver for native (Compilerhints already in r3). |
| 1039 __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); | 1038 __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); |
| 1040 __ b(ne, &shift_arguments); | 1039 __ b(ne, &shift_arguments); |
| 1041 | 1040 |
| 1042 // Compute the receiver in non-strict mode. | 1041 // Compute the receiver in sloppy mode. |
| 1043 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); | 1042 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); |
| 1044 __ ldr(r2, MemOperand(r2, -kPointerSize)); | 1043 __ ldr(r2, MemOperand(r2, -kPointerSize)); |
| 1045 // r0: actual number of arguments | 1044 // r0: actual number of arguments |
| 1046 // r1: function | 1045 // r1: function |
| 1047 // r2: first argument | 1046 // r2: first argument |
| 1048 __ JumpIfSmi(r2, &convert_to_object); | 1047 __ JumpIfSmi(r2, &convert_to_object); |
| 1049 | 1048 |
| 1050 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); | 1049 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); |
| 1051 __ cmp(r2, r3); | 1050 __ cmp(r2, r3); |
| 1052 __ b(eq, &use_global_receiver); | 1051 __ b(eq, &use_global_receiver); |
| 1053 __ LoadRoot(r3, Heap::kNullValueRootIndex); | 1052 __ LoadRoot(r3, Heap::kNullValueRootIndex); |
| 1054 __ cmp(r2, r3); | 1053 __ cmp(r2, r3); |
| 1055 __ b(eq, &use_global_receiver); | 1054 __ b(eq, &use_global_receiver); |
| 1056 | 1055 |
| 1057 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); | 1056 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
| 1058 __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE); | 1057 __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE); |
| 1059 __ b(ge, &shift_arguments); | 1058 __ b(ge, &shift_arguments); |
| 1060 | 1059 |
| 1061 __ bind(&convert_to_object); | 1060 __ bind(&convert_to_object); |
| 1062 | 1061 |
| 1063 { | 1062 { |
| 1064 // Enter an internal frame in order to preserve argument count. | 1063 // Enter an internal frame in order to preserve argument count. |
| 1065 FrameScope scope(masm, StackFrame::INTERNAL); | 1064 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 1066 __ SmiTag(r0); | 1065 __ SmiTag(r0); |
| 1067 __ push(r0); | 1066 __ push(r0); |
| 1068 | 1067 |
| 1069 __ push(r2); | 1068 __ push(r2); |
| 1070 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 1069 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
| 1071 __ mov(r2, r0); | 1070 __ mov(r2, r0); |
| 1072 | 1071 |
| 1073 __ pop(r0); | 1072 __ pop(r0); |
| 1074 __ SmiUntag(r0); | 1073 __ SmiUntag(r0); |
| 1075 | 1074 |
| (...skipping 106 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1182 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { | 1181 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { |
| 1183 const int kIndexOffset = | 1182 const int kIndexOffset = |
| 1184 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); | 1183 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); |
| 1185 const int kLimitOffset = | 1184 const int kLimitOffset = |
| 1186 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); | 1185 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); |
| 1187 const int kArgsOffset = 2 * kPointerSize; | 1186 const int kArgsOffset = 2 * kPointerSize; |
| 1188 const int kRecvOffset = 3 * kPointerSize; | 1187 const int kRecvOffset = 3 * kPointerSize; |
| 1189 const int kFunctionOffset = 4 * kPointerSize; | 1188 const int kFunctionOffset = 4 * kPointerSize; |
| 1190 | 1189 |
| 1191 { | 1190 { |
| 1192 FrameScope frame_scope(masm, StackFrame::INTERNAL); | 1191 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); |
| 1193 | 1192 |
| 1194 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function | 1193 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function |
| 1195 __ push(r0); | 1194 __ push(r0); |
| 1196 __ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array | 1195 __ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array |
| 1197 __ push(r0); | 1196 __ push(r0); |
| 1198 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); | 1197 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); |
| 1199 | 1198 |
| 1200 // Check the stack for overflow. We are not trying to catch | 1199 // Check the stack for overflow. We are not trying to catch |
| 1201 // interruptions (e.g. debug break and preemption) here, so the "real stack | 1200 // interruptions (e.g. debug break and preemption) here, so the "real stack |
| 1202 // limit" is checked. | 1201 // limit" is checked. |
| (...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1240 Label call_to_object, use_global_receiver; | 1239 Label call_to_object, use_global_receiver; |
| 1241 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset)); | 1240 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset)); |
| 1242 __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + | 1241 __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + |
| 1243 kSmiTagSize))); | 1242 kSmiTagSize))); |
| 1244 __ b(ne, &push_receiver); | 1243 __ b(ne, &push_receiver); |
| 1245 | 1244 |
| 1246 // Do not transform the receiver for native (Compilerhints already in r2). | 1245 // Do not transform the receiver for native (Compilerhints already in r2). |
| 1247 __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); | 1246 __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); |
| 1248 __ b(ne, &push_receiver); | 1247 __ b(ne, &push_receiver); |
| 1249 | 1248 |
| 1250 // Compute the receiver in non-strict mode. | 1249 // Compute the receiver in sloppy mode. |
| 1251 __ JumpIfSmi(r0, &call_to_object); | 1250 __ JumpIfSmi(r0, &call_to_object); |
| 1252 __ LoadRoot(r1, Heap::kNullValueRootIndex); | 1251 __ LoadRoot(r1, Heap::kNullValueRootIndex); |
| 1253 __ cmp(r0, r1); | 1252 __ cmp(r0, r1); |
| 1254 __ b(eq, &use_global_receiver); | 1253 __ b(eq, &use_global_receiver); |
| 1255 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); | 1254 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); |
| 1256 __ cmp(r0, r1); | 1255 __ cmp(r0, r1); |
| 1257 __ b(eq, &use_global_receiver); | 1256 __ b(eq, &use_global_receiver); |
| 1258 | 1257 |
| 1259 // Check if the receiver is already a JavaScript object. | 1258 // Check if the receiver is already a JavaScript object. |
| 1260 // r0: receiver | 1259 // r0: receiver |
| (...skipping 86 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1347 | 1346 |
| 1348 | 1347 |
| 1349 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { | 1348 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { |
| 1350 // ----------- S t a t e ------------- | 1349 // ----------- S t a t e ------------- |
| 1351 // -- r0 : result being passed through | 1350 // -- r0 : result being passed through |
| 1352 // ----------------------------------- | 1351 // ----------------------------------- |
| 1353 // Get the number of arguments passed (as a smi), tear down the frame and | 1352 // Get the number of arguments passed (as a smi), tear down the frame and |
| 1354 // then tear down the parameters. | 1353 // then tear down the parameters. |
| 1355 __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + | 1354 __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + |
| 1356 kPointerSize))); | 1355 kPointerSize))); |
| 1357 __ mov(sp, fp); | 1356 |
| 1358 __ ldm(ia_w, sp, fp.bit() | lr.bit()); | 1357 if (FLAG_enable_ool_constant_pool) { |
| 1358 __ add(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset)); |
| 1359 __ ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit()); |
| 1360 } else { |
| 1361 __ mov(sp, fp); |
| 1362 __ ldm(ia_w, sp, fp.bit() | lr.bit()); |
| 1363 } |
| 1359 __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1)); | 1364 __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1)); |
| 1360 __ add(sp, sp, Operand(kPointerSize)); // adjust for receiver | 1365 __ add(sp, sp, Operand(kPointerSize)); // adjust for receiver |
| 1361 } | 1366 } |
| 1362 | 1367 |
| 1363 | 1368 |
| 1364 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { | 1369 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { |
| 1365 // ----------- S t a t e ------------- | 1370 // ----------- S t a t e ------------- |
| 1366 // -- r0 : actual number of arguments | 1371 // -- r0 : actual number of arguments |
| 1367 // -- r1 : function (passed through to callee) | 1372 // -- r1 : function (passed through to callee) |
| 1368 // -- r2 : expected number of arguments | 1373 // -- r2 : expected number of arguments |
| (...skipping 99 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1468 __ bind(&dont_adapt_arguments); | 1473 __ bind(&dont_adapt_arguments); |
| 1469 __ Jump(r3); | 1474 __ Jump(r3); |
| 1470 } | 1475 } |
| 1471 | 1476 |
| 1472 | 1477 |
| 1473 #undef __ | 1478 #undef __ |
| 1474 | 1479 |
| 1475 } } // namespace v8::internal | 1480 } } // namespace v8::internal |
| 1476 | 1481 |
| 1477 #endif // V8_TARGET_ARCH_ARM | 1482 #endif // V8_TARGET_ARCH_ARM |
| OLD | NEW |