| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 | 5 |
| 6 | 6 |
| 7 #include "src/v8.h" | 7 #include "src/v8.h" |
| 8 | 8 |
| 9 #if V8_TARGET_ARCH_MIPS | 9 #if V8_TARGET_ARCH_MIPS |
| 10 | 10 |
| (...skipping 291 matching lines...) |
| 302 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { | 302 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { |
| 303 // Checking whether the queued function is ready for install is optional, | 303 // Checking whether the queued function is ready for install is optional, |
| 304 // since we come across interrupts and stack checks elsewhere. However, | 304 // since we come across interrupts and stack checks elsewhere. However, |
| 305 // not checking may delay installing ready functions, and always checking | 305 // not checking may delay installing ready functions, and always checking |
| 306 // would be quite expensive. A good compromise is to first check against | 306 // would be quite expensive. A good compromise is to first check against |
| 307 // stack limit as a cue for an interrupt signal. | 307 // stack limit as a cue for an interrupt signal. |
| 308 Label ok; | 308 Label ok; |
| 309 __ LoadRoot(t0, Heap::kStackLimitRootIndex); | 309 __ LoadRoot(t0, Heap::kStackLimitRootIndex); |
| 310 __ Branch(&ok, hs, sp, Operand(t0)); | 310 __ Branch(&ok, hs, sp, Operand(t0)); |
| 311 | 311 |
| 312 CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode); | 312 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode); |
| 313 GenerateTailCallToReturnedCode(masm); | 313 GenerateTailCallToReturnedCode(masm); |
| 314 | 314 |
| 315 __ bind(&ok); | 315 __ bind(&ok); |
| 316 GenerateTailCallToSharedCode(masm); | 316 GenerateTailCallToSharedCode(masm); |
| 317 } | 317 } |
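
The branch above implements the compromise described in the comment: a stack-limit hit is treated as the cue that an interrupt (and possibly a ready optimized function) is pending. A minimal C++ sketch of that control flow, using hypothetical `current_sp()`/`stack_limit()` stand-ins for the sp register and the `Heap::kStackLimitRootIndex` root:

```cpp
#include <cstdint>

// Hypothetical stand-ins; the real builtin reads the sp register and the
// stack-limit root directly.
uintptr_t current_sp();
uintptr_t stack_limit();
void TryInstallOptimizedCode();  // models Runtime::kTryInstallOptimizedCode
void TailCallToSharedCode();     // models GenerateTailCallToSharedCode

void InOptimizationQueue() {
  // Branch(&ok, hs, sp, Operand(t0)) is taken while sp >= stack limit,
  // i.e. no interrupt is pending.
  if (current_sp() >= stack_limit()) {
    TailCallToSharedCode();      // common case: skip the install check
    return;
  }
  // Stack limit reached: an interrupt is pending anyway, so it is cheap
  // to also check whether the queued optimized code is ready to install.
  TryInstallOptimizedCode();
}
```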
| 318 | 318 |
| 319 | 319 |
| 320 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 320 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
| 321 bool is_api_function, | 321 bool is_api_function, |
| 322 bool create_memento) { | 322 bool create_memento) { |
| (...skipping 63 matching lines...) |
| 386 __ lw(t0, bit_field3); | 386 __ lw(t0, bit_field3); |
| 387 __ DecodeField<Map::ConstructionCount>(t2, t0); | 387 __ DecodeField<Map::ConstructionCount>(t2, t0); |
| 388 __ Branch(&allocate, eq, t2, Operand(JSFunction::kNoSlackTracking)); | 388 __ Branch(&allocate, eq, t2, Operand(JSFunction::kNoSlackTracking)); |
| 389 // Decrease generous allocation count. | 389 // Decrease generous allocation count. |
| 390 __ Subu(t0, t0, Operand(1 << Map::ConstructionCount::kShift)); | 390 __ Subu(t0, t0, Operand(1 << Map::ConstructionCount::kShift)); |
| 391 __ Branch(USE_DELAY_SLOT, | 391 __ Branch(USE_DELAY_SLOT, |
| 392 &allocate, ne, t2, Operand(JSFunction::kFinishSlackTracking)); | 392 &allocate, ne, t2, Operand(JSFunction::kFinishSlackTracking)); |
| 393 __ sw(t0, bit_field3); // In delay slot. | 393 __ sw(t0, bit_field3); // In delay slot. |
| 394 | 394 |
| 395 __ Push(a1, a2, a1); // a1 = Constructor. | 395 __ Push(a1, a2, a1); // a1 = Constructor. |
| 396 __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1); | 396 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); |
| 397 | 397 |
| 398 __ Pop(a1, a2); | 398 __ Pop(a1, a2); |
| 399 // Slack tracking counter is kNoSlackTracking after runtime call. | 399 // Slack tracking counter is kNoSlackTracking after runtime call. |
| 400 ASSERT(JSFunction::kNoSlackTracking == 0); | 400 ASSERT(JSFunction::kNoSlackTracking == 0); |
| 401 __ mov(t2, zero_reg); | 401 __ mov(t2, zero_reg); |
| 402 | 402 |
| 403 __ bind(&allocate); | 403 __ bind(&allocate); |
| 404 } | 404 } |
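
For readers unfamiliar with slack tracking, a hedged C++ model of the counter logic above follows. The shift/mask constants and state values are illustrative assumptions (the real encoding lives in `Map::ConstructionCount`); note that on MIPS the store of the decremented `bit_field3` sits in the branch delay slot, so it executes on both paths:

```cpp
#include <cstdint>

// Illustrative constants; the real values come from Map::ConstructionCount
// and JSFunction.
constexpr uint32_t kShift = 0;                // assumed field position
constexpr uint32_t kMask = 0x7;               // assumed field width
constexpr uint32_t kNoSlackTracking = 0;      // asserted == 0 in the code above
constexpr uint32_t kFinishSlackTracking = 1;  // assumed threshold value

void FinalizeInstanceSize();  // models Runtime::kFinalizeInstanceSize

void UpdateSlackTracking(uint32_t& bit_field3) {
  uint32_t count = (bit_field3 >> kShift) & kMask;
  if (count == kNoSlackTracking) return;  // tracking already finished
  // The decremented value is stored unconditionally (delay slot).
  bit_field3 -= 1u << kShift;
  if (count == kFinishSlackTracking) {
    // Counter ran out: shrink the map's instance size to what the
    // constructor actually used.
    FinalizeInstanceSize();
    // After the runtime call the counter reads kNoSlackTracking (0),
    // mirroring `__ mov(t2, zero_reg)` above.
  }
}
```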
| 405 | 405 |
| 406 // Now allocate the JSObject on the heap. | 406 // Now allocate the JSObject on the heap. |
| (...skipping 186 matching lines...) |
| 593 // a1: constructor function | 593 // a1: constructor function |
| 594 __ bind(&rt_call); | 594 __ bind(&rt_call); |
| 595 if (create_memento) { | 595 if (create_memento) { |
| 596 // Get the cell or allocation site. | 596 // Get the cell or allocation site. |
| 597 __ lw(a2, MemOperand(sp, 2 * kPointerSize)); | 597 __ lw(a2, MemOperand(sp, 2 * kPointerSize)); |
| 598 __ push(a2); | 598 __ push(a2); |
| 599 } | 599 } |
| 600 | 600 |
| 601 __ push(a1); // Argument for Runtime_NewObject. | 601 __ push(a1); // Argument for Runtime_NewObject. |
| 602 if (create_memento) { | 602 if (create_memento) { |
| 603 __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2); | 603 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2); |
| 604 } else { | 604 } else { |
| 605 __ CallRuntime(Runtime::kHiddenNewObject, 1); | 605 __ CallRuntime(Runtime::kNewObject, 1); |
| 606 } | 606 } |
| 607 __ mov(t4, v0); | 607 __ mov(t4, v0); |
| 608 | 608 |
| 609 // If we ended up using the runtime, and we want a memento, then the | 609 // If we ended up using the runtime, and we want a memento, then the |
| 610 // runtime call made it for us, and we shouldn't do create count | 610 // runtime call made it for us, and we shouldn't do create count |
| 611 // increment. | 611 // increment. |
| 612 Label count_incremented; | 612 Label count_incremented; |
| 613 if (create_memento) { | 613 if (create_memento) { |
| 614 __ jmp(&count_incremented); | 614 __ jmp(&count_incremented); |
| 615 } | 615 } |
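
A hedged sketch of the rt_call path above: which runtime entry gets called, the argument order implied by the pushes (allocation site first, then constructor), and why the memento create-count increment is skipped afterwards. All names below are stand-ins:

```cpp
struct Object;
struct JSFunction;
struct AllocationSite;

// Stand-ins for the two runtime entries used above.
Object* Runtime_NewObjectWithAllocationSite(AllocationSite* site,
                                            JSFunction* constructor);
Object* Runtime_NewObject(JSFunction* constructor);

Object* NewObjectSlowPath(JSFunction* ctor, AllocationSite* site,
                          bool create_memento) {
  if (create_memento) {
    // The runtime allocates the memento and bumps its create count itself,
    // hence the jump past the local increment (&count_incremented).
    return Runtime_NewObjectWithAllocationSite(site, ctor);
  }
  return Runtime_NewObject(ctor);  // no memento: one argument suffices
}
```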
| (...skipping 202 matching lines...) |
| 818 Generate_JSEntryTrampolineHelper(masm, false); | 818 Generate_JSEntryTrampolineHelper(masm, false); |
| 819 } | 819 } |
| 820 | 820 |
| 821 | 821 |
| 822 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 822 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
| 823 Generate_JSEntryTrampolineHelper(masm, true); | 823 Generate_JSEntryTrampolineHelper(masm, true); |
| 824 } | 824 } |
| 825 | 825 |
| 826 | 826 |
| 827 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { | 827 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { |
| 828 CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized); | 828 CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized); |
| 829 GenerateTailCallToReturnedCode(masm); | 829 GenerateTailCallToReturnedCode(masm); |
| 830 } | 830 } |
| 831 | 831 |
| 832 | 832 |
| 833 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { | 833 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { |
| 834 FrameScope scope(masm, StackFrame::INTERNAL); | 834 FrameScope scope(masm, StackFrame::INTERNAL); |
| 835 // Push a copy of the function onto the stack. | 835 // Push a copy of the function onto the stack. |
| 836 // Push function as parameter to the runtime call. | 836 // Push function as parameter to the runtime call. |
| 837 __ Push(a1, a1); | 837 __ Push(a1, a1); |
| 838 // Whether to compile in a background thread. | 838 // Whether to compile in a background thread. |
| 839 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); | 839 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); |
| 840 | 840 |
| 841 __ CallRuntime(Runtime::kHiddenCompileOptimized, 2); | 841 __ CallRuntime(Runtime::kCompileOptimized, 2); |
| 842 // Restore receiver. | 842 // Restore receiver. |
| 843 __ Pop(a1); | 843 __ Pop(a1); |
| 844 } | 844 } |
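
A brief model of the helper's calling convention: `__ Push(a1, a1)` pushes the function twice because `Runtime::kCompileOptimized` consumes one copy as its first argument while the other survives the call and is popped back into a1. Sketch with hypothetical stand-in types:

```cpp
struct JSFunction;
struct Code;

// Models Runtime::kCompileOptimized(function, concurrent).
Code* Runtime_CompileOptimized(JSFunction* function, bool concurrent);

Code* CallCompileOptimized(JSFunction*& a1, bool concurrent) {
  JSFunction* preserved = a1;  // first pushed copy: survives the call
  Code* code = Runtime_CompileOptimized(a1, concurrent);  // second copy + flag
  a1 = preserved;              // `__ Pop(a1)`: restore the function
  return code;                 // GenerateTailCallToReturnedCode jumps here
}
```

Generate_CompileOptimized above passes `concurrent = false`; the concurrent variant (in the skipped lines) presumably passes `true`.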
| 845 | 845 |
| 846 | 846 |
| 847 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | 847 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { |
| 848 CallCompileOptimized(masm, false); | 848 CallCompileOptimized(masm, false); |
| 849 GenerateTailCallToReturnedCode(masm); | 849 GenerateTailCallToReturnedCode(masm); |
| 850 } | 850 } |
| 851 | 851 |
| (...skipping 88 matching lines...) |
| 940 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, | 940 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, |
| 941 SaveFPRegsMode save_doubles) { | 941 SaveFPRegsMode save_doubles) { |
| 942 { | 942 { |
| 943 FrameScope scope(masm, StackFrame::INTERNAL); | 943 FrameScope scope(masm, StackFrame::INTERNAL); |
| 944 | 944 |
| 945 // Preserve registers across notification, this is important for compiled | 945 // Preserve registers across notification, this is important for compiled |
| 946 // stubs that tail call the runtime on deopts passing their parameters in | 946 // stubs that tail call the runtime on deopts passing their parameters in |
| 947 // registers. | 947 // registers. |
| 948 __ MultiPush(kJSCallerSaved | kCalleeSaved); | 948 __ MultiPush(kJSCallerSaved | kCalleeSaved); |
| 949 // Pass the function and deoptimization type to the runtime system. | 949 // Pass the function and deoptimization type to the runtime system. |
| 950 __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles); | 950 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); |
| 951 __ MultiPop(kJSCallerSaved | kCalleeSaved); | 951 __ MultiPop(kJSCallerSaved | kCalleeSaved); |
| 952 } | 952 } |
| 953 | 953 |
| 954 __ Addu(sp, sp, Operand(kPointerSize)); // Ignore state | 954 __ Addu(sp, sp, Operand(kPointerSize)); // Ignore state |
| 955 __ Jump(ra); // Jump to miss handler | 955 __ Jump(ra); // Jump to miss handler |
| 956 } | 956 } |
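
The MultiPush/MultiPop pair preserves both caller- and callee-saved registers because a deoptimizing stub may have live values anywhere in the register file. A hedged C++ model of the save/call/restore shape:

```cpp
#include <array>
#include <cstdint>

constexpr int kNumRegs = 32;  // MIPS general-purpose register count

void NotifyStubFailure();     // models Runtime::kNotifyStubFailure

// regs is a hypothetical snapshot of the registers named by
// kJSCallerSaved | kCalleeSaved.
void NotifyStubFailureHelper(std::array<uintptr_t, kNumRegs>& regs) {
  std::array<uintptr_t, kNumRegs> saved = regs;  // MultiPush
  NotifyStubFailure();                           // may clobber anything
  regs = saved;                                  // MultiPop
  // The caller then drops the state word from the stack and jumps to the
  // miss handler, mirroring `Addu(sp, ...)` and `Jump(ra)` above.
}
```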
| 957 | 957 |
| 958 | 958 |
| 959 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { | 959 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { |
| 960 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); | 960 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); |
| 961 } | 961 } |
| 962 | 962 |
| 963 | 963 |
| 964 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { | 964 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { |
| 965 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); | 965 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); |
| 966 } | 966 } |
| 967 | 967 |
| 968 | 968 |
| 969 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | 969 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, |
| 970 Deoptimizer::BailoutType type) { | 970 Deoptimizer::BailoutType type) { |
| 971 { | 971 { |
| 972 FrameScope scope(masm, StackFrame::INTERNAL); | 972 FrameScope scope(masm, StackFrame::INTERNAL); |
| 973 // Pass the function and deoptimization type to the runtime system. | 973 // Pass the function and deoptimization type to the runtime system. |
| 974 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type)))); | 974 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type)))); |
| 975 __ push(a0); | 975 __ push(a0); |
| 976 __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1); | 976 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); |
| 977 } | 977 } |
| 978 | 978 |
| 979 // Get the full codegen state from the stack and untag it -> t2. | 979 // Get the full codegen state from the stack and untag it -> t2. |
| 980 __ lw(t2, MemOperand(sp, 0 * kPointerSize)); | 980 __ lw(t2, MemOperand(sp, 0 * kPointerSize)); |
| 981 __ SmiUntag(t2); | 981 __ SmiUntag(t2); |
| 982 // Switch on the state. | 982 // Switch on the state. |
| 983 Label with_tos_register, unknown_state; | 983 Label with_tos_register, unknown_state; |
| 984 __ Branch(&with_tos_register, | 984 __ Branch(&with_tos_register, |
| 985 ne, t2, Operand(FullCodeGenerator::NO_REGISTERS)); | 985 ne, t2, Operand(FullCodeGenerator::NO_REGISTERS)); |
| 986 __ Ret(USE_DELAY_SLOT); | 986 __ Ret(USE_DELAY_SLOT); |
| (...skipping 61 matching lines...) |
| 1048 } | 1048 } |
| 1049 | 1049 |
| 1050 | 1050 |
| 1051 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { | 1051 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { |
| 1052 // We check the stack limit as indicator that recompilation might be done. | 1052 // We check the stack limit as indicator that recompilation might be done. |
| 1053 Label ok; | 1053 Label ok; |
| 1054 __ LoadRoot(at, Heap::kStackLimitRootIndex); | 1054 __ LoadRoot(at, Heap::kStackLimitRootIndex); |
| 1055 __ Branch(&ok, hs, sp, Operand(at)); | 1055 __ Branch(&ok, hs, sp, Operand(at)); |
| 1056 { | 1056 { |
| 1057 FrameScope scope(masm, StackFrame::INTERNAL); | 1057 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1058 __ CallRuntime(Runtime::kHiddenStackGuard, 0); | 1058 __ CallRuntime(Runtime::kStackGuard, 0); |
| 1059 } | 1059 } |
| 1060 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), | 1060 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), |
| 1061 RelocInfo::CODE_TARGET); | 1061 RelocInfo::CODE_TARGET); |
| 1062 | 1062 |
| 1063 __ bind(&ok); | 1063 __ bind(&ok); |
| 1064 __ Ret(); | 1064 __ Ret(); |
| 1065 } | 1065 } |
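
This is the same stack-limit idiom as Generate_InOptimizationQueue, reused to decide whether on-stack-replacement work might be pending. A minimal sketch under the same hypothetical helpers as the earlier example:

```cpp
#include <cstdint>

uintptr_t current_sp();
uintptr_t stack_limit();
void StackGuard();           // models Runtime::kStackGuard
void OnStackReplacement();   // models the OnStackReplacement builtin

void OsrAfterStackCheck() {
  if (current_sp() >= stack_limit()) return;  // no interrupt pending
  StackGuard();              // service the interrupt first
  OnStackReplacement();      // then tail-jump into the OSR builtin
}
```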
| 1066 | 1066 |
| 1067 | 1067 |
| 1068 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { | 1068 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { |
| (...skipping 502 matching lines...) |
| 1571 __ break_(0xCC); | 1571 __ break_(0xCC); |
| 1572 } | 1572 } |
| 1573 } | 1573 } |
| 1574 | 1574 |
| 1575 | 1575 |
| 1576 #undef __ | 1576 #undef __ |
| 1577 | 1577 |
| 1578 } } // namespace v8::internal | 1578 } } // namespace v8::internal |
| 1579 | 1579 |
| 1580 #endif // V8_TARGET_ARCH_MIPS | 1580 #endif // V8_TARGET_ARCH_MIPS |