| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 279 matching lines...) Expand 10 before | Expand all | Expand 10 after |
| 290 __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0); | 290 __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0); |
| 291 { | 291 { |
| 292 FrameScope scope(masm, StackFrame::INTERNAL); | 292 FrameScope scope(masm, StackFrame::INTERNAL); |
| 293 __ push(argument); | 293 __ push(argument); |
| 294 __ CallRuntime(Runtime::kNewStringWrapper, 1); | 294 __ CallRuntime(Runtime::kNewStringWrapper, 1); |
| 295 } | 295 } |
| 296 __ Ret(); | 296 __ Ret(); |
| 297 } | 297 } |
| 298 | 298 |
| 299 | 299 |
| 300 static void CallRuntimePassFunction(MacroAssembler* masm, | 300 static void CallRuntimePassFunction( |
| 301 Runtime::FunctionId function_id) { | 301 MacroAssembler* masm, Runtime::FunctionId function_id) { |
| 302 FrameScope scope(masm, StackFrame::INTERNAL); | 302 FrameScope scope(masm, StackFrame::INTERNAL); |
| 303 // Push a copy of the function onto the stack. | 303 // Push a copy of the function onto the stack. |
| 304 // Push call kind information and function as parameter to the runtime call. | 304 // Push call kind information and function as parameter to the runtime call. |
| 305 __ Push(a1, t1, a1); | 305 __ Push(a1, t1, a1); |
| 306 | 306 |
| 307 __ CallRuntime(function_id, 1); | 307 __ CallRuntime(function_id, 1); |
| 308 // Restore call kind information and receiver. | 308 // Restore call kind information and receiver. |
| 309 __ Pop(a1, t1); | 309 __ Pop(a1, t1); |
| 310 } | 310 } |
| 311 | 311 |
| 312 | 312 |
| 313 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { | 313 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { |
| 314 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 314 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 315 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset)); | 315 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset)); |
| 316 __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag)); | 316 __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 317 __ Jump(at); | 317 __ Jump(at); |
| 318 } | 318 } |
| 319 | 319 |
| 320 | 320 |
| 321 void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) { | 321 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) { |
| 322 __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 323 __ Jump(at); |
| 324 } |
| 325 |
| 326 |
| 327 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { |
| 322 // Checking whether the queued function is ready for install is optional, | 328 // Checking whether the queued function is ready for install is optional, |
| 323 // since we come across interrupts and stack checks elsewhere. However, | 329 // since we come across interrupts and stack checks elsewhere. However, |
| 324 // not checking may delay installing ready functions, and always checking | 330 // not checking may delay installing ready functions, and always checking |
| 325 // would be quite expensive. A good compromise is to first check against | 331 // would be quite expensive. A good compromise is to first check against |
| 326 // stack limit as a cue for an interrupt signal. | 332 // stack limit as a cue for an interrupt signal. |
| 327 Label ok; | 333 Label ok; |
| 328 __ LoadRoot(t0, Heap::kStackLimitRootIndex); | 334 __ LoadRoot(t0, Heap::kStackLimitRootIndex); |
| 329 __ Branch(&ok, hs, sp, Operand(t0)); | 335 __ Branch(&ok, hs, sp, Operand(t0)); |
| 330 | 336 |
| 331 CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode); | 337 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode); |
| 332 // Tail call to returned code. | 338 GenerateTailCallToReturnedCode(masm); |
| 333 __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
| 334 __ Jump(at); | |
| 335 | 339 |
| 336 __ bind(&ok); | 340 __ bind(&ok); |
| 337 GenerateTailCallToSharedCode(masm); | 341 GenerateTailCallToSharedCode(masm); |
| 338 } | 342 } |
| 339 | 343 |
| 340 | 344 |
| 341 void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) { | |
| 342 CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile); | |
| 343 GenerateTailCallToSharedCode(masm); | |
| 344 } | |
| 345 | |
| 346 | |
| 347 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 345 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
| 348 bool is_api_function, | 346 bool is_api_function, |
| 349 bool count_constructions) { | 347 bool count_constructions) { |
| 350 // ----------- S t a t e ------------- | 348 // ----------- S t a t e ------------- |
| 351 // -- a0 : number of arguments | 349 // -- a0 : number of arguments |
| 352 // -- a1 : constructor function | 350 // -- a1 : constructor function |
| 353 // -- ra : return address | 351 // -- ra : return address |
| 354 // -- sp[...]: constructor arguments | 352 // -- sp[...]: constructor arguments |
| 355 // ----------------------------------- | 353 // ----------------------------------- |
| 356 | 354 |
| (...skipping 426 matching lines...) Expand 10 before | Expand all | Expand 10 after |
| 783 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { | 781 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { |
| 784 Generate_JSEntryTrampolineHelper(masm, false); | 782 Generate_JSEntryTrampolineHelper(masm, false); |
| 785 } | 783 } |
| 786 | 784 |
| 787 | 785 |
| 788 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 786 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
| 789 Generate_JSEntryTrampolineHelper(masm, true); | 787 Generate_JSEntryTrampolineHelper(masm, true); |
| 790 } | 788 } |
| 791 | 789 |
| 792 | 790 |
| 793 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { | 791 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { |
| 794 CallRuntimePassFunction(masm, Runtime::kLazyCompile); | 792 CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized); |
| 795 // Do a tail-call of the compiled function. | 793 GenerateTailCallToReturnedCode(masm); |
| 796 __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
| 797 __ Jump(t9); | |
| 798 } | 794 } |
| 799 | 795 |
| 800 | 796 |
| 801 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { | 797 static void CallCompileOptimized(MacroAssembler* masm, |
| 802 CallRuntimePassFunction(masm, Runtime::kLazyRecompile); | 798 bool concurrent) { |
| 803 // Do a tail-call of the compiled function. | 799 FrameScope scope(masm, StackFrame::INTERNAL); |
| 804 __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); | 800 // Push a copy of the function onto the stack. |
| 805 __ Jump(t9); | 801 // Push call kind information and function as parameter to the runtime call. |
| 802 __ Push(a1, t1, a1); |
| 803 // Whether to compile in a background thread. |
| 804 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); |
| 805 |
| 806 __ CallRuntime(Runtime::kCompileOptimized, 2); |
| 807 // Restore call kind information and receiver. |
| 808 __ Pop(a1, t1); |
| 806 } | 809 } |
| 807 | 810 |
| 808 | 811 |
| 812 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { |
| 813 CallCompileOptimized(masm, false); |
| 814 GenerateTailCallToReturnedCode(masm); |
| 815 } |
| 816 |
| 817 |
| 818 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { |
| 819 CallCompileOptimized(masm, true); |
| 820 GenerateTailCallToReturnedCode(masm); |
| 821 } |
| 822 |
| 823 |
| 824 |
| 809 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { | 825 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { |
| 810 // For now, we are relying on the fact that make_code_young doesn't do any | 826 // For now, we are relying on the fact that make_code_young doesn't do any |
| 811 // garbage collection which allows us to save/restore the registers without | 827 // garbage collection which allows us to save/restore the registers without |
| 812 // worrying about which of them contain pointers. We also don't build an | 828 // worrying about which of them contain pointers. We also don't build an |
| 813 // internal frame to make the code faster, since we shouldn't have to do stack | 829 // internal frame to make the code faster, since we shouldn't have to do stack |
| 814 // crawls in MakeCodeYoung. This seems a bit fragile. | 830 // crawls in MakeCodeYoung. This seems a bit fragile. |
| 815 | 831 |
| 816 // Set a0 to point to the head of the PlatformCodeAge sequence. | 832 // Set a0 to point to the head of the PlatformCodeAge sequence. |
| 817 __ Subu(a0, a0, | 833 __ Subu(a0, a0, |
| 818 Operand((kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize)); | 834 Operand((kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize)); |
| 819 | 835 |
| 820 // The following registers must be saved and restored when calling through to | 836 // The following registers must be saved and restored when calling through to |
| 821 // the runtime: | 837 // the runtime: |
| 822 // a0 - contains return address (beginning of patch sequence) | 838 // a0 - contains return address (beginning of patch sequence) |
| 823 // a1 - isolate | 839 // a1 - isolate |
| 824 RegList saved_regs = | 840 RegList saved_regs = |
| 825 (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit(); | 841 (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit(); |
| 826 FrameScope scope(masm, StackFrame::MANUAL); | 842 FrameScope scope(masm, StackFrame::MANUAL); |
| 827 __ MultiPush(saved_regs); | 843 __ MultiPush(saved_regs); |
| 828 __ PrepareCallCFunction(1, 0, a2); | 844 __ PrepareCallCFunction(2, 0, a2); |
| 829 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate()))); | 845 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate()))); |
| 830 __ CallCFunction( | 846 __ CallCFunction( |
| 831 ExternalReference::get_make_code_young_function(masm->isolate()), 2); | 847 ExternalReference::get_make_code_young_function(masm->isolate()), 2); |
| 832 __ MultiPop(saved_regs); | 848 __ MultiPop(saved_regs); |
| 833 __ Jump(a0); | 849 __ Jump(a0); |
| 834 } | 850 } |
| 835 | 851 |
| 836 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \ | 852 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \ |
| 837 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \ | 853 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \ |
| 838 MacroAssembler* masm) { \ | 854 MacroAssembler* masm) { \ |
| (...skipping 18 matching lines...) Expand all |
| 857 Operand((kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize)); | 873 Operand((kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize)); |
| 858 | 874 |
| 859 // The following registers must be saved and restored when calling through to | 875 // The following registers must be saved and restored when calling through to |
| 860 // the runtime: | 876 // the runtime: |
| 861 // a0 - contains return address (beginning of patch sequence) | 877 // a0 - contains return address (beginning of patch sequence) |
| 862 // a1 - isolate | 878 // a1 - isolate |
| 863 RegList saved_regs = | 879 RegList saved_regs = |
| 864 (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit(); | 880 (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit(); |
| 865 FrameScope scope(masm, StackFrame::MANUAL); | 881 FrameScope scope(masm, StackFrame::MANUAL); |
| 866 __ MultiPush(saved_regs); | 882 __ MultiPush(saved_regs); |
| 867 __ PrepareCallCFunction(1, 0, a2); | 883 __ PrepareCallCFunction(2, 0, a2); |
| 868 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate()))); | 884 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate()))); |
| 869 __ CallCFunction( | 885 __ CallCFunction( |
| 870 ExternalReference::get_mark_code_as_executed_function(masm->isolate()), | 886 ExternalReference::get_mark_code_as_executed_function(masm->isolate()), |
| 871 2); | 887 2); |
| 872 __ MultiPop(saved_regs); | 888 __ MultiPop(saved_regs); |
| 873 | 889 |
| 874 // Perform prologue operations usually performed by the young code stub. | 890 // Perform prologue operations usually performed by the young code stub. |
| 875 __ Push(ra, fp, cp, a1); | 891 __ Push(ra, fp, cp, a1); |
| 876 __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); | 892 __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); |
| 877 | 893 |
| (...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after |
| 962 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { | 978 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { |
| 963 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); | 979 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); |
| 964 } | 980 } |
| 965 | 981 |
| 966 | 982 |
| 967 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { | 983 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { |
| 968 // Lookup the function in the JavaScript frame. | 984 // Lookup the function in the JavaScript frame. |
| 969 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 985 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 970 { | 986 { |
| 971 FrameScope scope(masm, StackFrame::INTERNAL); | 987 FrameScope scope(masm, StackFrame::INTERNAL); |
| 972 // Lookup and calculate pc offset. | 988 // Pass function as argument. |
| 973 __ lw(a1, MemOperand(fp, StandardFrameConstants::kCallerPCOffset)); | |
| 974 __ lw(a2, FieldMemOperand(a0, JSFunction::kSharedFunctionInfoOffset)); | |
| 975 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset)); | |
| 976 __ Subu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
| 977 __ Subu(a1, a1, a2); | |
| 978 __ SmiTag(a1); | |
| 979 | |
| 980 // Pass both function and pc offset as arguments. | |
| 981 __ push(a0); | 989 __ push(a0); |
| 982 __ push(a1); | 990 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); |
| 983 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 2); | |
| 984 } | 991 } |
| 985 | 992 |
| 986 // If the code object is null, just return to the unoptimized code. | 993 // If the code object is null, just return to the unoptimized code. |
| 987 __ Ret(eq, v0, Operand(Smi::FromInt(0))); | 994 __ Ret(eq, v0, Operand(Smi::FromInt(0))); |
| 988 | 995 |
| 989 // Load deoptimization data from the code object. | 996 // Load deoptimization data from the code object. |
| 990 // <deopt_data> = <code>[#deoptimization_data_offset] | 997 // <deopt_data> = <code>[#deoptimization_data_offset] |
| 991 __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag)); | 998 __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag)); |
| 992 | 999 |
| 993 // Load the OSR entrypoint offset from the deoptimization data. | 1000 // Load the OSR entrypoint offset from the deoptimization data. |
| (...skipping 524 matching lines...) Expand 10 before | Expand all | Expand 10 after |
| 1518 __ bind(&dont_adapt_arguments); | 1525 __ bind(&dont_adapt_arguments); |
| 1519 __ Jump(a3); | 1526 __ Jump(a3); |
| 1520 } | 1527 } |
| 1521 | 1528 |
| 1522 | 1529 |
| 1523 #undef __ | 1530 #undef __ |
| 1524 | 1531 |
| 1525 } } // namespace v8::internal | 1532 } } // namespace v8::internal |
| 1526 | 1533 |
| 1527 #endif // V8_TARGET_ARCH_MIPS | 1534 #endif // V8_TARGET_ARCH_MIPS |
| OLD | NEW |