OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 279 matching lines...)
290 __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0); | 290 __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0); |
291 { | 291 { |
292 FrameScope scope(masm, StackFrame::INTERNAL); | 292 FrameScope scope(masm, StackFrame::INTERNAL); |
293 __ push(argument); | 293 __ push(argument); |
294 __ CallRuntime(Runtime::kNewStringWrapper, 1); | 294 __ CallRuntime(Runtime::kNewStringWrapper, 1); |
295 } | 295 } |
296 __ Ret(); | 296 __ Ret(); |
297 } | 297 } |
298 | 298 |
299 | 299 |
300 static void CallRuntimePassFunction(MacroAssembler* masm, | 300 static void CallRuntimePassFunctionAndTailCall( |
301 Runtime::FunctionId function_id) { | 301 MacroAssembler* masm, Runtime::FunctionId function_id) { |
302 FrameScope scope(masm, StackFrame::INTERNAL); | 302 { FrameScope scope(masm, StackFrame::INTERNAL); |
303 // Push a copy of the function onto the stack. | 303 // Push a copy of the function onto the stack. |
304 // Push call kind information and function as a parameter to the runtime call. | 304 // Push call kind information and function as a parameter to the runtime call. |
305 __ Push(a1, t1, a1); | 305 __ Push(a1, t1, a1); |
306 | 306 |
307 __ CallRuntime(function_id, 1); | 307 __ CallRuntime(function_id, 1); |
308 // Restore call kind information and function. | 308 // Restore call kind information and function. |
309 __ Pop(a1, t1); | 309 __ Pop(a1, t1); |
| 310 } |
| 311 // Tail call to returned code. |
| 312 __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 313 __ Jump(at); |
310 } | 314 } |
311 | 315 |
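
Note: the two instructions appended after the scope's closing brace are what make this a tail call. The Code* returned in v0 is a tagged heap pointer, and its instructions begin after the object header, so the entry point is v0 + Code::kHeaderSize - kHeapObjectTag. A minimal sketch of the pattern, reusing only calls that appear in this diff:

    // Compute the untagged entry point of the returned code object, then
    // jump (not call): the callee returns straight to our caller via ra.
    __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ Jump(at);
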
312 | 316 |
313 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { | 317 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { |
314 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 318 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
315 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset)); | 319 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset)); |
316 __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag)); | 320 __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag)); |
317 __ Jump(at); | 321 __ Jump(at); |
318 } | 322 } |
319 | 323 |
320 | 324 |
321 void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) { | 325 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { |
322 // Checking whether the queued function is ready for install is optional, | 326 // Checking whether the queued function is ready for install is optional, |
323 // since we handle interrupts and stack checks elsewhere. However, | 327 // since we handle interrupts and stack checks elsewhere. However, |
324 // not checking may delay installing ready functions, and always checking | 328 // not checking may delay installing ready functions, and always checking |
325 // would be quite expensive. A good compromise is to first check against | 329 // would be quite expensive. A good compromise is to first check against |
326 // the stack limit as a cue for an interrupt signal. | 330 // the stack limit as a cue for an interrupt signal. |
327 Label ok; | 331 Label ok; |
328 __ LoadRoot(t0, Heap::kStackLimitRootIndex); | 332 __ LoadRoot(t0, Heap::kStackLimitRootIndex); |
329 __ Branch(&ok, hs, sp, Operand(t0)); | 333 __ Branch(&ok, hs, sp, Operand(t0)); |
330 | 334 |
331 CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode); | 335 CallRuntimePassFunctionAndTailCall(masm, Runtime::kTryInstallOptimizedCode); |
332 // Tail call to returned code. | |
333 __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
334 __ Jump(at); | |
335 | 336 |
336 __ bind(&ok); | 337 __ bind(&ok); |
337 GenerateTailCallToSharedCode(masm); | 338 GenerateTailCallToSharedCode(masm); |
338 } | 339 } |
339 | 340 |
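
Note: the stack-limit check above reads as the following C++-style pseudocode (the helper names here are hypothetical; the real logic is emitted as MIPS assembly):

    // Sketch only: a lowered stack limit doubles as an interrupt cue.
    if (sp < stack_limit) {
      // An interrupt may be pending, so an optimized version of the
      // function may be ready; try to install and tail-call it.
      TryInstallOptimizedCode(function);   // hypothetical helper
    } else {
      // No cue: skip the (expensive) check and run the shared code.
      TailCallToSharedCode(function);      // hypothetical helper
    }
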
340 | 341 |
341 void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) { | |
342 CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile); | |
343 GenerateTailCallToSharedCode(masm); | |
344 } | |
345 | |
346 | |
347 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 342 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
348 bool is_api_function, | 343 bool is_api_function, |
349 bool count_constructions) { | 344 bool count_constructions) { |
350 // ----------- S t a t e ------------- | 345 // ----------- S t a t e ------------- |
351 // -- a0 : number of arguments | 346 // -- a0 : number of arguments |
352 // -- a1 : constructor function | 347 // -- a1 : constructor function |
353 // -- ra : return address | 348 // -- ra : return address |
354 // -- sp[...]: constructor arguments | 349 // -- sp[...]: constructor arguments |
355 // ----------------------------------- | 350 // ----------------------------------- |
356 | 351 |
(...skipping 427 matching lines...)
784 Generate_JSEntryTrampolineHelper(masm, false); | 779 Generate_JSEntryTrampolineHelper(masm, false); |
785 } | 780 } |
786 | 781 |
787 | 782 |
788 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 783 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
789 Generate_JSEntryTrampolineHelper(masm, true); | 784 Generate_JSEntryTrampolineHelper(masm, true); |
790 } | 785 } |
791 | 786 |
792 | 787 |
793 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { | 788 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { |
794 CallRuntimePassFunction(masm, Runtime::kLazyCompile); | 789 CallRuntimePassFunctionAndTailCall(masm, Runtime::kCompileUnoptimized); |
795 // Do a tail-call of the compiled function. | |
796 __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
797 __ Jump(t9); | |
798 } | 790 } |
799 | 791 |
800 | 792 |
801 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { | 793 static void CallCompileOptimizedAndTailCall(MacroAssembler* masm, |
802 CallRuntimePassFunction(masm, Runtime::kLazyRecompile); | 794 bool concurrent) { |
803 // Do a tail-call of the compiled function. | 795 { FrameScope scope(masm, StackFrame::INTERNAL); |
804 __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); | 796 // Push a copy of the function onto the stack. |
805 __ Jump(t9); | 797 // Push call kind information and function as a parameter to the runtime call. |
| 798 __ Push(a1, t1, a1); |
| 799 // Whether to compile in a background thread. |
| 800 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); |
| 801 |
| 802 __ CallRuntime(Runtime::kCompileOptimized, 2); |
| 803 // Restore call kind information and function. |
| 804 __ Pop(a1, t1); |
| 805 } |
| 806 // Tail call to returned code. |
| 807 __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 808 __ Jump(at); |
806 } | 809 } |
807 | 810 |
808 | 811 |
| 812 |
| 813 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { |
| 814 CallCompileOptimizedAndTailCall(masm, false); |
| 815 } |
| 816 |
| 817 |
| 818 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { |
| 819 CallCompileOptimizedAndTailCall(masm, true); |
| 820 } |
| 821 |
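
Note: the two builtins above differ only in the heap boolean pushed for the runtime call, which tells Runtime::kCompileOptimized whether to compile on a background thread. A condensed sketch of the argument setup, using only calls from this diff:

    // Runtime::kCompileOptimized takes two arguments: the function and
    // the concurrency flag (a heap true/false object).
    __ Push(a1, t1, a1);  // function copy, call kind info, argument 1
    __ Push(masm->isolate()->factory()->ToBoolean(concurrent));  // argument 2
    __ CallRuntime(Runtime::kCompileOptimized, 2);
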
| 822 |
| 823 |
809 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { | 824 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { |
810 // For now, we are relying on the fact that make_code_young doesn't do any | 825 // For now, we are relying on the fact that make_code_young doesn't do any |
811 // garbage collection which allows us to save/restore the registers without | 826 // garbage collection which allows us to save/restore the registers without |
812 // worrying about which of them contain pointers. We also don't build an | 827 // worrying about which of them contain pointers. We also don't build an |
813 // internal frame to make the code faster, since we shouldn't have to do stack | 828 // internal frame to make the code faster, since we shouldn't have to do stack |
814 // crawls in MakeCodeYoung. This seems a bit fragile. | 829 // crawls in MakeCodeYoung. This seems a bit fragile. |
815 | 830 |
816 // Set a0 to point to the head of the PlatformCodeAge sequence. | 831 // Set a0 to point to the head of the PlatformCodeAge sequence. |
817 __ Subu(a0, a0, | 832 __ Subu(a0, a0, |
818 Operand((kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize)); | 833 Operand((kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize)); |
(...skipping 690 matching lines...)
1509 __ bind(&dont_adapt_arguments); | 1524 __ bind(&dont_adapt_arguments); |
1510 __ Jump(a3); | 1525 __ Jump(a3); |
1511 } | 1526 } |
1512 | 1527 |
1513 | 1528 |
1514 #undef __ | 1529 #undef __ |
1515 | 1530 |
1516 } } // namespace v8::internal | 1531 } } // namespace v8::internal |
1517 | 1532 |
1518 #endif // V8_TARGET_ARCH_MIPS | 1533 #endif // V8_TARGET_ARCH_MIPS |