| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 314 matching lines...) |
| 325 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { | 325 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { |
| 326 // Checking whether the queued function is ready for install is optional, | 326 // Checking whether the queued function is ready for install is optional, |
| 327 // since we come across interrupts and stack checks elsewhere. However, | 327 // since we come across interrupts and stack checks elsewhere. However, |
| 328 // not checking may delay installing ready functions, and always checking | 328 // not checking may delay installing ready functions, and always checking |
| 329 // would be quite expensive. A good compromise is to first check against | 329 // would be quite expensive. A good compromise is to first check against |
| 330 // stack limit as a cue for an interrupt signal. | 330 // stack limit as a cue for an interrupt signal. |
| 331 Label ok; | 331 Label ok; |
| 332 __ LoadRoot(t0, Heap::kStackLimitRootIndex); | 332 __ LoadRoot(t0, Heap::kStackLimitRootIndex); |
| 333 __ Branch(&ok, hs, sp, Operand(t0)); | 333 __ Branch(&ok, hs, sp, Operand(t0)); |
| 334 | 334 |
| 335 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode); | 335 CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode); |
| 336 GenerateTailCallToReturnedCode(masm); | 336 GenerateTailCallToReturnedCode(masm); |
| 337 | 337 |
| 338 __ bind(&ok); | 338 __ bind(&ok); |
| 339 GenerateTailCallToSharedCode(masm); | 339 GenerateTailCallToSharedCode(masm); |
| 340 } | 340 } |
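The comment above captures the trade-off: the stack limit doubles as an interrupt cue, so the expensive install check happens only when an interrupt is already pending. A minimal C++ model of that branch, with a hypothetical `Isolate` struct and `Action` enum (not V8's API):

```cpp
#include <cstdint>

struct Isolate {
  uintptr_t stack_limit;  // lowered asynchronously to signal a pending interrupt
};

enum class Action { TailCallSharedCode, TryInstallOptimizedCode };

// Mirrors the `Branch(&ok, hs, sp, Operand(t0))` fast path: `hs` is an
// unsigned >= compare against the stack-limit root.
Action InOptimizationQueueDispatch(const Isolate& isolate, uintptr_t sp) {
  if (sp >= isolate.stack_limit) {
    return Action::TailCallSharedCode;     // fast path: no interrupt pending
  }
  return Action::TryInstallOptimizedCode;  // slow path: piggyback on the cue
}
```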
| 341 | 341 |
| 342 | 342 |
| 343 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 343 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
| 344 bool is_api_function, | 344 bool is_api_function, |
| 345 bool count_constructions, | 345 bool count_constructions, |
| (...skipping 74 matching lines...) |
| 420 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 420 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 421 MemOperand constructor_count = | 421 MemOperand constructor_count = |
| 422 FieldMemOperand(a3, SharedFunctionInfo::kConstructionCountOffset); | 422 FieldMemOperand(a3, SharedFunctionInfo::kConstructionCountOffset); |
| 423 __ lbu(t0, constructor_count); | 423 __ lbu(t0, constructor_count); |
| 424 __ Subu(t0, t0, Operand(1)); | 424 __ Subu(t0, t0, Operand(1)); |
| 425 __ sb(t0, constructor_count); | 425 __ sb(t0, constructor_count); |
| 426 __ Branch(&allocate, ne, t0, Operand(zero_reg)); | 426 __ Branch(&allocate, ne, t0, Operand(zero_reg)); |
| 427 | 427 |
| 428 __ Push(a1, a2, a1); // a1 = Constructor. | 428 __ Push(a1, a2, a1); // a1 = Constructor. |
| 429 // The call will replace the stub, so the countdown is only done once. | 429 // The call will replace the stub, so the countdown is only done once. |
| 430 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); | 430 __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1); |
| 431 | 431 |
| 432 __ Pop(a1, a2); | 432 __ Pop(a1, a2); |
| 433 | 433 |
| 434 __ bind(&allocate); | 434 __ bind(&allocate); |
| 435 } | 435 } |
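The countdown block above decrements a per-function byte counter and only falls through into the `kHiddenFinalizeInstanceSize` call when it reaches zero; since that runtime call replaces the stub, the countdown cannot run twice. A hedged C++ sketch of the same logic (hypothetical `SharedFunctionInfo` layout, not V8's):

```cpp
#include <cstdint>

struct SharedFunctionInfo {
  uint8_t construction_count;  // stands in for kConstructionCountOffset
};

// Returns true when the instance size should be finalized, matching the
// fall-through past `Branch(&allocate, ne, t0, Operand(zero_reg))`.
bool DecrementConstructionCount(SharedFunctionInfo* shared) {
  shared->construction_count -= 1;
  return shared->construction_count == 0;
}
```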
| 436 | 436 |
| 437 // Now allocate the JSObject on the heap. | 437 // Now allocate the JSObject on the heap. |
| 438 // a1: constructor function | 438 // a1: constructor function |
| 439 // a2: initial map | 439 // a2: initial map |
| 440 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset)); | 440 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset)); |
| (...skipping 176 matching lines...) |
| 617 // a1: constructor function | 617 // a1: constructor function |
| 618 __ bind(&rt_call); | 618 __ bind(&rt_call); |
| 619 if (create_memento) { | 619 if (create_memento) { |
| 620 // Get the cell or allocation site. | 620 // Get the cell or allocation site. |
| 621 __ lw(a2, MemOperand(sp, 2 * kPointerSize)); | 621 __ lw(a2, MemOperand(sp, 2 * kPointerSize)); |
| 622 __ push(a2); | 622 __ push(a2); |
| 623 } | 623 } |
| 624 | 624 |
| 625 __ push(a1); // Argument for Runtime_NewObject. | 625 __ push(a1); // Argument for Runtime_NewObject. |
| 626 if (create_memento) { | 626 if (create_memento) { |
| 627 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2); | 627 __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2); |
| 628 } else { | 628 } else { |
| 629 __ CallRuntime(Runtime::kNewObject, 1); | 629 __ CallRuntime(Runtime::kHiddenNewObject, 1); |
| 630 } | 630 } |
| 631 __ mov(t4, v0); | 631 __ mov(t4, v0); |
| 632 | 632 |
| 633 // If we ended up using the runtime, and we want a memento, then the | 633 // If we ended up using the runtime, and we want a memento, then the |
| 634 // runtime call made it for us, and we shouldn't do create count | 634 // runtime call made it for us, and we shouldn't do create count |
| 635 // increment. | 635 // increment. |
| 636 Label count_incremented; | 636 Label count_incremented; |
| 637 if (create_memento) { | 637 if (create_memento) { |
| 638 __ jmp(&count_incremented); | 638 __ jmp(&count_incremented); |
| 639 } | 639 } |
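The slow path above picks between two runtime entries: with a memento, the allocation site rides along and the runtime records the memento itself, which is why the stub jumps straight to `count_incremented`. A rough C++ analogue, with hypothetical stand-in functions rather than the real runtime entries:

```cpp
// Stand-ins for Runtime::kHiddenNewObject{,WithAllocationSite} (hypothetical).
int* NewObject(int* constructor) { return constructor; }
int* NewObjectWithAllocationSite(int* constructor, int* site) {
  (void)site;  // the runtime would also record the memento here
  return constructor;
}

int* SlowPathNewObject(int* constructor, int* site, bool create_memento,
                       bool* count_already_incremented) {
  if (create_memento) {
    *count_already_incremented = true;  // skip the create-count bump
    return NewObjectWithAllocationSite(constructor, site);
  }
  *count_already_incremented = false;
  return NewObject(constructor);
}
```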
| (...skipping 207 matching lines...) |
| 847 Generate_JSEntryTrampolineHelper(masm, false); | 847 Generate_JSEntryTrampolineHelper(masm, false); |
| 848 } | 848 } |
| 849 | 849 |
| 850 | 850 |
| 851 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 851 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
| 852 Generate_JSEntryTrampolineHelper(masm, true); | 852 Generate_JSEntryTrampolineHelper(masm, true); |
| 853 } | 853 } |
| 854 | 854 |
| 855 | 855 |
| 856 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { | 856 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { |
| 857 CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized); | 857 CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized); |
| 858 GenerateTailCallToReturnedCode(masm); | 858 GenerateTailCallToReturnedCode(masm); |
| 859 } | 859 } |
| 860 | 860 |
| 861 | 861 |
| 862 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { | 862 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { |
| 863 FrameScope scope(masm, StackFrame::INTERNAL); | 863 FrameScope scope(masm, StackFrame::INTERNAL); |
| 864 // Push a copy of the function onto the stack. | 864 // Push a copy of the function onto the stack. |
| 865 // Push function as parameter to the runtime call. | 865 // Push function as parameter to the runtime call. |
| 866 __ Push(a1, a1); | 866 __ Push(a1, a1); |
| 867 // Whether to compile in a background thread. | 867 // Whether to compile in a background thread. |
| 868 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); | 868 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); |
| 869 | 869 |
| 870 __ CallRuntime(Runtime::kCompileOptimized, 2); | 870 __ CallRuntime(Runtime::kHiddenCompileOptimized, 2); |
| 871 // Restore receiver. | 871 // Restore receiver. |
| 872 __ Pop(a1); | 872 __ Pop(a1); |
| 873 } | 873 } |
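`CallCompileOptimized` pushes the function twice on purpose: the runtime call consumes one copy (plus the concurrency flag), and the surviving copy is popped back afterwards. A stack-level C++ sketch under those assumptions (hypothetical `Slot` type):

```cpp
#include <cstdint>
#include <vector>

struct Slot { uintptr_t bits; };  // one machine stack slot

void CallCompileOptimizedModel(std::vector<Slot>* stack, uintptr_t function,
                               bool concurrent) {
  stack->push_back({function});              // protected copy (survives the call)
  stack->push_back({function});              // runtime argument 1: the function
  stack->push_back({concurrent ? 1u : 0u});  // runtime argument 2: concurrency flag
  stack->pop_back();                         // Runtime::kHiddenCompileOptimized
  stack->pop_back();                         //   consumes its two arguments...
  stack->pop_back();                         // ...and `Pop(a1)` restores the copy
}
```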
| 874 | 874 |
| 875 | 875 |
| 876 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | 876 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { |
| 877 CallCompileOptimized(masm, false); | 877 CallCompileOptimized(masm, false); |
| 878 GenerateTailCallToReturnedCode(masm); | 878 GenerateTailCallToReturnedCode(masm); |
| 879 } | 879 } |
| 880 | 880 |
| (...skipping 88 matching lines...) |
| 969 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, | 969 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, |
| 970 SaveFPRegsMode save_doubles) { | 970 SaveFPRegsMode save_doubles) { |
| 971 { | 971 { |
| 972 FrameScope scope(masm, StackFrame::INTERNAL); | 972 FrameScope scope(masm, StackFrame::INTERNAL); |
| 973 | 973 |
| 974 // Preserve registers across notification, this is important for compiled | 974 // Preserve registers across notification, this is important for compiled |
| 975 // stubs that tail call the runtime on deopts passing their parameters in | 975 // stubs that tail call the runtime on deopts passing their parameters in |
| 976 // registers. | 976 // registers. |
| 977 __ MultiPush(kJSCallerSaved | kCalleeSaved); | 977 __ MultiPush(kJSCallerSaved | kCalleeSaved); |
| 978 // Pass the function and deoptimization type to the runtime system. | 978 // Pass the function and deoptimization type to the runtime system. |
| 979 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); | 979 __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles); |
| 980 __ MultiPop(kJSCallerSaved | kCalleeSaved); | 980 __ MultiPop(kJSCallerSaved | kCalleeSaved); |
| 981 } | 981 } |
| 982 | 982 |
| 983 __ Addu(sp, sp, Operand(kPointerSize)); // Ignore state | 983 __ Addu(sp, sp, Operand(kPointerSize)); // Ignore state |
| 984 __ Jump(ra); // Jump to miss handler | 984 __ Jump(ra); // Jump to miss handler |
| 985 } | 985 } |
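The `MultiPush`/`MultiPop` pair above brackets the notification so that stubs which pass deopt parameters in registers see them unchanged afterwards. An RAII rendering of that pattern in plain C++ (hypothetical `RegisterFile`, not V8's API):

```cpp
#include <array>
#include <cstdint>

struct RegisterFile {
  std::array<uintptr_t, 32> regs;  // stands in for kJSCallerSaved | kCalleeSaved
};

class PreserveRegisters {
 public:
  explicit PreserveRegisters(RegisterFile* file)
      : file_(file), saved_(file->regs) {}        // MultiPush
  ~PreserveRegisters() { file_->regs = saved_; }  // MultiPop
 private:
  RegisterFile* file_;
  std::array<uintptr_t, 32> saved_;
};

void NotifyStubFailureModel(RegisterFile* file) {
  PreserveRegisters guard(file);
  // ... Runtime::kHiddenNotifyStubFailure may clobber registers here ...
}
```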
| 986 | 986 |
| 987 | 987 |
| 988 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { | 988 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { |
| 989 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); | 989 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); |
| 990 } | 990 } |
| 991 | 991 |
| 992 | 992 |
| 993 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { | 993 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { |
| 994 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); | 994 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); |
| 995 } | 995 } |
| 996 | 996 |
| 997 | 997 |
| 998 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | 998 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, |
| 999 Deoptimizer::BailoutType type) { | 999 Deoptimizer::BailoutType type) { |
| 1000 { | 1000 { |
| 1001 FrameScope scope(masm, StackFrame::INTERNAL); | 1001 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1002 // Pass the function and deoptimization type to the runtime system. | 1002 // Pass the function and deoptimization type to the runtime system. |
| 1003 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type)))); | 1003 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type)))); |
| 1004 __ push(a0); | 1004 __ push(a0); |
| 1005 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); | 1005 __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1); |
| 1006 } | 1006 } |
| 1007 | 1007 |
| 1008 // Get the full codegen state from the stack and untag it -> t2. | 1008 // Get the full codegen state from the stack and untag it -> t2. |
| 1009 __ lw(t2, MemOperand(sp, 0 * kPointerSize)); | 1009 __ lw(t2, MemOperand(sp, 0 * kPointerSize)); |
| 1010 __ SmiUntag(t2); | 1010 __ SmiUntag(t2); |
| 1011 // Switch on the state. | 1011 // Switch on the state. |
| 1012 Label with_tos_register, unknown_state; | 1012 Label with_tos_register, unknown_state; |
| 1013 __ Branch(&with_tos_register, | 1013 __ Branch(&with_tos_register, |
| 1014 ne, t2, Operand(FullCodeGenerator::NO_REGISTERS)); | 1014 ne, t2, Operand(FullCodeGenerator::NO_REGISTERS)); |
| 1015 __ Ret(USE_DELAY_SLOT); | 1015 __ Ret(USE_DELAY_SLOT); |
| (...skipping 61 matching lines...) |
| 1077 } | 1077 } |
| 1078 | 1078 |
| 1079 | 1079 |
| 1080 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { | 1080 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { |
| 1081 // We check the stack limit as indicator that recompilation might be done. | 1081 // We check the stack limit as indicator that recompilation might be done. |
| 1082 Label ok; | 1082 Label ok; |
| 1083 __ LoadRoot(at, Heap::kStackLimitRootIndex); | 1083 __ LoadRoot(at, Heap::kStackLimitRootIndex); |
| 1084 __ Branch(&ok, hs, sp, Operand(at)); | 1084 __ Branch(&ok, hs, sp, Operand(at)); |
| 1085 { | 1085 { |
| 1086 FrameScope scope(masm, StackFrame::INTERNAL); | 1086 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1087 __ CallRuntime(Runtime::kStackGuard, 0); | 1087 __ CallRuntime(Runtime::kHiddenStackGuard, 0); |
| 1088 } | 1088 } |
| 1089 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), | 1089 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), |
| 1090 RelocInfo::CODE_TARGET); | 1090 RelocInfo::CODE_TARGET); |
| 1091 | 1091 |
| 1092 __ bind(&ok); | 1092 __ bind(&ok); |
| 1093 __ Ret(); | 1093 __ Ret(); |
| 1094 } | 1094 } |
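`Generate_OsrAfterStackCheck` uses the same stack-limit cue as the optimization-queue check: only a lowered limit triggers the stack guard and the jump to the OSR builtin; otherwise the function simply returns. A compact C++ model (hypothetical `Next` enum):

```cpp
#include <cstdint>

enum class Next { ReturnToCaller, OnStackReplacement };

Next OsrAfterStackCheckModel(uintptr_t sp, uintptr_t stack_limit) {
  if (sp >= stack_limit) {
    return Next::ReturnToCaller;    // &ok: nothing pending
  }
  // Runtime::kHiddenStackGuard would run here inside an internal frame.
  return Next::OnStackReplacement;  // tail-jump to the OSR builtin
}
```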
| 1095 | 1095 |
| 1096 | 1096 |
| 1097 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { | 1097 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { |
| (...skipping 471 matching lines...) |
| 1569 __ bind(&dont_adapt_arguments); | 1569 __ bind(&dont_adapt_arguments); |
| 1570 __ Jump(a3); | 1570 __ Jump(a3); |
| 1571 } | 1571 } |
| 1572 | 1572 |
| 1573 | 1573 |
| 1574 #undef __ | 1574 #undef __ |
| 1575 | 1575 |
| 1576 } } // namespace v8::internal | 1576 } } // namespace v8::internal |
| 1577 | 1577 |
| 1578 #endif // V8_TARGET_ARCH_MIPS | 1578 #endif // V8_TARGET_ARCH_MIPS |